From e7ee16e4683ab7b42326bba7d69006b6be982b6a Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Thu, 12 May 2022 17:20:08 +0900 Subject: [PATCH 01/17] rename get_account_id_from_lambda_arn to get_account_id_from_arn --- handlers/aws/cloudwatch_logs_trigger.py | 4 ++-- handlers/aws/kinesis_trigger.py | 4 ++-- handlers/aws/s3_sqs_trigger.py | 4 ++-- handlers/aws/sqs_trigger.py | 4 ++-- handlers/aws/utils.py | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/handlers/aws/cloudwatch_logs_trigger.py b/handlers/aws/cloudwatch_logs_trigger.py index 8be2c2e5..00c68596 100644 --- a/handlers/aws/cloudwatch_logs_trigger.py +++ b/handlers/aws/cloudwatch_logs_trigger.py @@ -13,7 +13,7 @@ from storage import CommonStorage, StorageFactory from .event import _default_event -from .utils import extractor_events_from_field, get_account_id_from_lambda_arn +from .utils import extractor_events_from_field, get_account_id_from_arn def _from_awslogs_data_to_event(awslogs_data: str) -> Any: @@ -82,7 +82,7 @@ def _handle_cloudwatch_logs_event( timeout of the lambda it will call the sqs continuing handler """ - account_id = get_account_id_from_lambda_arn(input_id) + account_id = get_account_id_from_arn(input_id) log_group_name = event["logGroup"] log_stream_name = event["logStream"] diff --git a/handlers/aws/kinesis_trigger.py b/handlers/aws/kinesis_trigger.py index eff8da6e..15faf237 100644 --- a/handlers/aws/kinesis_trigger.py +++ b/handlers/aws/kinesis_trigger.py @@ -12,7 +12,7 @@ from .event import _default_event from .utils import ( extractor_events_from_field, - get_account_id_from_lambda_arn, + get_account_id_from_arn, get_kinesis_stream_name_type_and_region_from_arn, ) @@ -25,7 +25,7 @@ def _handle_kinesis_record( It iterates through kinesis records in the kinesis trigger and process the content of kinesis.data payload """ - account_id = get_account_id_from_lambda_arn(input_id) + account_id = get_account_id_from_arn(input_id) storage: CommonStorage = StorageFactory.create(storage_type="payload", payload=kinesis_record["kinesis"]["data"]) diff --git a/handlers/aws/s3_sqs_trigger.py b/handlers/aws/s3_sqs_trigger.py index 2527b65a..230ba339 100644 --- a/handlers/aws/s3_sqs_trigger.py +++ b/handlers/aws/s3_sqs_trigger.py @@ -15,7 +15,7 @@ from storage import CommonStorage, StorageFactory from .event import _default_event -from .utils import extractor_events_from_field, get_account_id_from_lambda_arn, get_bucket_name_from_arn +from .utils import extractor_events_from_field, get_account_id_from_arn, get_bucket_name_from_arn def _handle_s3_sqs_continuation( @@ -63,7 +63,7 @@ def _handle_s3_sqs_event( corresponding object in S3 buckets sending to the defined outputs. """ - account_id = get_account_id_from_lambda_arn(input_id) + account_id = get_account_id_from_arn(input_id) body = json.loads(sqs_record["body"]) for s3_record_n, s3_record in enumerate(body["Records"]): diff --git a/handlers/aws/sqs_trigger.py b/handlers/aws/sqs_trigger.py index 3b2d5a90..7cbfdaf0 100644 --- a/handlers/aws/sqs_trigger.py +++ b/handlers/aws/sqs_trigger.py @@ -14,7 +14,7 @@ from .event import _default_event from .utils import ( extractor_events_from_field, - get_account_id_from_lambda_arn, + get_account_id_from_arn, get_queue_url_from_sqs_arn, get_sqs_queue_name_and_region_from_arn, ) @@ -81,7 +81,7 @@ def _handle_sqs_event( content of body payload in the record. 
""" - account_id = get_account_id_from_lambda_arn(input_id) + account_id = get_account_id_from_arn(input_id) queue_name, aws_region = get_sqs_queue_name_and_region_from_arn(input_id) storage: CommonStorage = StorageFactory.create(storage_type="payload", payload=sqs_record["body"]) diff --git a/handlers/aws/utils.py b/handlers/aws/utils.py index 59825187..0c7107e5 100644 --- a/handlers/aws/utils.py +++ b/handlers/aws/utils.py @@ -380,7 +380,7 @@ def get_queue_url_from_sqs_arn(sqs_arn: str) -> str: return f"https://sqs.{region}.amazonaws.com/{account_id}/{queue_name}" -def get_account_id_from_lambda_arn(lambda_arn: str) -> str: +def get_account_id_from_arn(lambda_arn: str) -> str: arn_components = lambda_arn.split(":") return arn_components[4] From ae73c5473daffc42802ad4c4e9184bc48d958cdd Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Thu, 12 May 2022 17:20:45 +0900 Subject: [PATCH 02/17] type in changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b71c2f25..0830136b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -36,7 +36,7 @@ ### v0.25.1 - 2022/03/21 ##### Bug fixes -* Extract `fields` subfields at event root and make metadata for CloudWatch Logs in event in sync with Elatic Agent: [#98](https://github.com/elastic/elastic-serverless-forwarder/pull/98) +* Extract `fields` subfields at event root and make metadata for CloudWatch Logs in event in sync with Elastic Agent: [#98](https://github.com/elastic/elastic-serverless-forwarder/pull/98) ### v0.25.0 - 2022/03/15 ##### Features From e56f8290a5fd617ff6d365de29cc71e8dfb0c4cf Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Thu, 12 May 2022 17:21:29 +0900 Subject: [PATCH 03/17] bundle macro and add extra CF params --- .internal/aws/cloudformation/application.yaml | 85 +++++++ .internal/aws/cloudformation/macro.yaml | 218 ++++++++++++++++++ .internal/aws/cloudformation/template.yaml | 139 +++++++---- .internal/aws/scripts/dist.sh | 22 +- 4 files changed, 408 insertions(+), 56 deletions(-) create mode 100644 .internal/aws/cloudformation/application.yaml create mode 100644 .internal/aws/cloudformation/macro.yaml diff --git a/.internal/aws/cloudformation/application.yaml b/.internal/aws/cloudformation/application.yaml new file mode 100644 index 00000000..bff8dab6 --- /dev/null +++ b/.internal/aws/cloudformation/application.yaml @@ -0,0 +1,85 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + Elastic Serverless Forwarder + + SAM Template for the application, not intended to be deployed on its own + +Parameters: + ElasticServerlessForwarderS3ConfigFile: + Type: String + ElasticServerlessForwarderSSMSecrets: + Type: CommaDelimitedList + ElasticServerlessForwarderKMSKeys: + Type: CommaDelimitedList + ElasticServerlessForwarderSQSEvents: + Type: CommaDelimitedList + ElasticServerlessForwarderS3SQSEvents: + Type: CommaDelimitedList + ElasticServerlessForwarderKinesisEvents: + Type: CommaDelimitedList + ElasticServerlessForwarderCloudWatchLogsEvents: + Type: CommaDelimitedList + ElasticServerlessForwarderS3Buckets: + Type: CommaDelimitedList +Resources: + ElasticServerlessForwarderContinuingDLQ: + Type: AWS::SQS::Queue + Properties: + DelaySeconds: 0 + QueueName: !Join [ "-", ["elastic-serverless-forwarder-continuing-dlq", !Select [4, !Split ['-', !Select [2, !Split ['/', !Ref AWS::StackId]]]]]] + VisibilityTimeout: 910 + ElasticServerlessForwarderContinuingQueue: + Type: AWS::SQS::Queue + Properties: + DelaySeconds: 0 + 
QueueName: !Join [ "-", ["elastic-serverless-forwarder-continuing-queue", !Select [4, !Split ['-', !Select [2, !Split ['/', !Ref AWS::StackId]]]]]] + RedrivePolicy: { "deadLetterTargetArn" : !GetAtt ElasticServerlessForwarderContinuingDLQ.Arn, "maxReceiveCount" : 1 } + VisibilityTimeout: 910 + ElasticServerlessForwarderReplayDLQ: + Type: AWS::SQS::Queue + Properties: + DelaySeconds: 0 + QueueName: !Join [ "-", ["elastic-serverless-forwarder-replay-dlq", !Select [4, !Split ['-', !Select [2, !Split ['/', !Ref AWS::StackId]]]]]] + VisibilityTimeout: 910 + ElasticServerlessForwarderReplayQueue: + Type: AWS::SQS::Queue + Properties: + DelaySeconds: 0 + QueueName: !Join [ "-", ["elastic-serverless-forwarder-replay-queue", !Select [4, !Split ['-', !Select [2, !Split ['/', !Ref AWS::StackId]]]]]] + RedrivePolicy: { "deadLetterTargetArn" : !GetAtt ElasticServerlessForwarderReplayDLQ.Arn, "maxReceiveCount" : 3 } + VisibilityTimeout: 910 + ApplicationElasticServerlessForwarder: + Type: AWS::Serverless::Function + Properties: + Timeout: 900 + MemorySize: 512 + CodeUri: %codeUri% + Runtime: python3.9 + Architectures: + - x86_64 + Handler: main_aws.handler + Environment: + Variables: + S3_CONFIG_FILE: !Ref ElasticServerlessForwarderS3ConfigFile + SQS_CONTINUE_URL: !Ref ElasticServerlessForwarderContinuingQueue + SQS_REPLAY_URL: !Ref ElasticServerlessForwarderReplayQueue + Events: + SQSContinuingEvent: + Type: SQS + Properties: + Queue: !GetAtt ElasticServerlessForwarderContinuingQueue.Arn + BatchSize: 10 + Enabled: true + Fn::Transform: + Type: AWS::CloudFormation::Macro + Name: %sarAppName%-macro +Metadata: + AWS::ServerlessRepo::Application: + Name: %sarAppName%-application + Description: SAM Template for the application, not intended to be deployed on its own + Author: %sarAuthorName% + SemanticVersion: %semanticVersion% + LicenseUrl: %codeUri%/LICENSE.txt + HomePageUrl: https://github.com/elastic/elastic-serverless-forwarder + SourceCodeUrl: https://github.com/elastic/elastic-serverless-forwarder diff --git a/.internal/aws/cloudformation/macro.yaml b/.internal/aws/cloudformation/macro.yaml new file mode 100644 index 00000000..05a6de02 --- /dev/null +++ b/.internal/aws/cloudformation/macro.yaml @@ -0,0 +1,218 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + Elastic Serverless Forwarder + + SAM Template for the macro, not intended to be deployed on its own + +Resources: + MacroElasticServerlessForwarderFunction: + Type: AWS::Serverless::Function + Properties: + InlineCode: | + import boto3 + + cloudformation_client = boto3.client("cloudformation") + + def create_events(event): + events_fragment = {} + parameters = event["templateParameterValues"] + if "ElasticServerlessForwarderKinesisEvents" in parameters: + for kinesis_event_n, kinesis_event in enumerate(parameters["ElasticServerlessForwarderKinesisEvents"]): + if len(kinesis_event) == 0: + continue + + kinesis_event_name = f"KinesisEvent{int(kinesis_event_n)}" + events_fragment[kinesis_event_name] = { + "Type": "Kinesis", + "Properties": { + "Stream": kinesis_event, + "StartingPosition": "TRIM_HORIZON", + "BatchSize": 100, + "FunctionResponseTypes": ["ReportBatchItemFailures"], + "Enabled": True, + } + } + + if "ElasticServerlessForwarderSQSEvents" in parameters: + for sqs_event_n, sqs_event in enumerate(parameters["ElasticServerlessForwarderSQSEvents"]): + if len(sqs_event) == 0: + continue + + sqs_event_name = f"SQSEvent{int(sqs_event_n)}" + events_fragment[sqs_event_name] = { + "Type": "SQS", + 
"Properties": { + "Queue": sqs_event, + "BatchSize": 10, + "Enabled": True, + } + } + + if "ElasticServerlessForwarderS3SQSEvents" in parameters: + for s3_sqs_event_n, s3_sqs_event in enumerate(parameters["ElasticServerlessForwarderS3SQSEvents"]): + if len(s3_sqs_event) == 0: + continue + + s3_sqs_event_name = f"S3SQSEvent{int(s3_sqs_event_n)}" + events_fragment[s3_sqs_event_name] = { + "Type": "SQS", + "Properties": { + "Queue": s3_sqs_event, + "BatchSize": 10, + "Enabled": True, + } + } + + if "ElasticServerlessForwarderCloudWatchLogsEvents" in parameters: + for cloudwatch_logs_event_n, cloudwatch_logs_event in enumerate(parameters["ElasticServerlessForwarderCloudWatchLogsEvents"]): + arn_components = cloudwatch_logs_event.split(":") + cloudwatch_logs_group_name = arn_components[6] + if len(cloudwatch_logs_group_name) == 0: + continue + + cloudwatch_logs_event_name = f"CloudWatchLogsEvent{int(cloudwatch_logs_event_n)}" + events_fragment[cloudwatch_logs_event_name] = { + "Type": "CloudWatchLogs", + "Properties": { + "FilterPattern": "", + "LogGroupName": cloudwatch_logs_group_name, + } + } + + return events_fragment + + + def create_policy(event): + policy_fragment = { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyName": { + "Fn::Join": ["-", ["elastic-serverless-forwarder-policy", { + "Fn::Select": [4, { + "Fn::Split": ["-", { + "Fn::Select": [2, { + "Fn::Split": ["/", { + "Ref": "AWS::StackId" + }] + }] + }] + }] + }]] + }, + "PolicyDocument": { + "Version": "2012-10-17", + "Statement": [] + }, + "Roles": [{ + "Ref": "ApplicationElasticServerlessForwarderRole" + }] + } + } + + parameters = event["templateParameterValues"] + if "ElasticServerlessForwarderS3ConfigFile" in parameters: + bucket_name_and_object_key = parameters["ElasticServerlessForwarderS3ConfigFile"].split("/", 1) + resource = f"arn:aws:s3:::{bucket_name_and_object_key[0]}{'/'.join(bucket_name_and_object_key[1:])}" + if len(resource) > 0: + policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( + { + "Effect": "Allow", + "Action": "s3:GetObject", + "Resource": resource + } + ) + + if "ElasticServerlessForwarderSSMSecrets" in parameters: + if len(parameters["ElasticServerlessForwarderSSMSecrets"]) > 0: + policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( + { + "Effect": "Allow", + "Action": "secretsmanager:GetSecretValue", + "Resource": parameters["ElasticServerlessForwarderSSMSecrets"] + } + ) + + if "ElasticServerlessForwarderKMSKeys" in parameters: + if len(parameters["ElasticServerlessForwarderKMSKeys"]) > 0: + policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( + { + "Effect": "Allow", + "Action": "kms:Decrypt", + "Resource": parameters["ElasticServerlessForwarderKMSKeys"] + } + ) + + if "ElasticServerlessForwarderCloudWatchLogsEvents" in parameters: + if len(parameters["ElasticServerlessForwarderCloudWatchLogsEvents"]) > 0: + policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( + { + "Effect": "Allow", + "Action": "logs:DescribeLogGroups", + "Resource": parameters["ElasticServerlessForwarderCloudWatchLogsEvents"] + } + ) + + if "ElasticServerlessForwarderS3Buckets" in parameters: + if len(parameters["ElasticServerlessForwarderS3Buckets"]) > 0: + policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( + { + "Effect": "Allow", + "Action": "s3:ListBucket", + "Resource": parameters["ElasticServerlessForwarderS3Buckets"] + } + ) + + resources = [] + for s3_bucket_with_notification in parameters["ElasticServerlessForwarderS3Buckets"]: + 
resources.append(f"{s3_bucket_with_notification}/*") + + if len(resources) > 0: + policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( + { + "Effect": "Allow", + "Action": "s3:GetObject", + "Resource": resources + } + ) + + policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( + { + "Effect": "Allow", + "Action": "sqs:SendMessage", + "Resource": [ + { "Fn::GetAtt": ["ElasticServerlessForwarderReplayQueue", "Arn"] }, + { "Fn::GetAtt": ["ElasticServerlessForwarderContinuingQueue", "Arn"] }, + ] + } + ) + + return policy_fragment + + + def handler(event, context): + created_events = create_events(event) + for created_event in created_events: + event["fragment"]["ApplicationElasticServerlessForwarder"]["Properties"]["Events"][created_event] = created_events[created_event] + + created_policy = create_policy(event) + event["fragment"]["ElasticServerlessForwarderPolicy"] = created_policy + + return {"status": "SUCCESS", "requestId": event["requestId"], "fragment": event["fragment"]} + Handler: index.handler + Runtime: python3.9 + MacroElasticServerlessForwarder: + Type: AWS::CloudFormation::Macro + Properties: + Description: Expand parameters to Events and Policy for %sarAppName% + FunctionName: !GetAtt MacroElasticServerlessForwarderFunction.Arn + Name: %sarAppName%-macro +Metadata: + AWS::ServerlessRepo::Application: + Name: %sarAppName%-macro + Description: SAM Template for the macro, not intended to be deployed on its own + Author: %sarAuthorName% + SemanticVersion: %semanticVersion% + LicenseUrl: %codeUri%/LICENSE.txt + HomePageUrl: https://github.com/elastic/elastic-serverless-forwarder + SourceCodeUrl: https://github.com/elastic/elastic-serverless-forwarder diff --git a/.internal/aws/cloudformation/template.yaml b/.internal/aws/cloudformation/template.yaml index 70a76923..43f319ca 100644 --- a/.internal/aws/cloudformation/template.yaml +++ b/.internal/aws/cloudformation/template.yaml @@ -1,71 +1,110 @@ AWSTemplateFormatVersion: '2010-09-09' Transform: AWS::Serverless-2016-10-31 Description: > - elastic-serverless-forwarder + Elastic Serverless Forwarder - SAM Template for elastic-serverless-forwarder + Send observability data from your AWS environment to Elastic. 
Parameters: ElasticServerlessForwarderS3ConfigFile: Type: String Default: "s3://" + Description: S3 URL of the config yaml file (to be set as `S3_CONFIG_FILE` env variable) + ElasticServerlessForwarderSSMSecrets: + Type: CommaDelimitedList + Default: "" + Description: Comma delimited list of AWS SSM Secrets ARNs referenced in the config yaml file + ElasticServerlessForwarderKMSKeys: + Type: CommaDelimitedList + Default: "" + Description: Comma delimited list of AWS KMS Keys ARNs to be used for decrypting AWS SSM Secrets referenced in the config yaml file + ElasticServerlessForwarderSQSEvents: + Type: CommaDelimitedList + Default: "" + Description: Comma delimited list of Direct SQS queues ARNs to set as event triggers for the Lambda + ElasticServerlessForwarderS3SQSEvents: + Type: CommaDelimitedList + Default: "" + Description: Comma delimited list of S3 SQS Event Notifications ARNs to set as event triggers for the Lambda + ElasticServerlessForwarderKinesisEvents: + Type: CommaDelimitedList + Default: "" + Description: Comma delimited list of Kinesis Data Stream ARNs to set as event triggers for the Lambda + ElasticServerlessForwarderCloudWatchLogsEvents: + Type: CommaDelimitedList + Default: "" + Description: Comma delimited list of Cloudwatch Logs Log Groups ARNs to set subscription filters on the Lambda for + ElasticServerlessForwarderS3Buckets: + Type: CommaDelimitedList + Default: "" + Description: Comma delimited list of S3 buckets ARNs that are the sources of the S3 SQS Event Notifications Resources: - ElasticServerlessForwarderContinuingDLQ: - Type: AWS::SQS::Queue + ElasticServerlessForwarderEventMacro: + Type: AWS::Serverless::Application Properties: - DelaySeconds: 0 - QueueName: !Join [ "-", ["elastic-serverless-forwarder-continuing-dlq", !Select [4, !Split ['-', !Select [2, !Split ['/', !Ref AWS::StackId]]]]]] - VisibilityTimeout: 910 - ElasticServerlessForwarderContinuingQueue: - Type: AWS::SQS::Queue + Location: + ApplicationId: arn:aws:serverlessrepo:%awsRegion%:%accountID%:applications/elastic-serverless-forwarder-andrea-macro + SemanticVersion: %semanticVersion% + ElasticServerlessForwarderApplication: + Type: AWS::Serverless::Application Properties: - DelaySeconds: 0 - QueueName: !Join [ "-", ["elastic-serverless-forwarder-continuing-queue", !Select [4, !Split ['-', !Select [2, !Split ['/', !Ref AWS::StackId]]]]]] - RedrivePolicy: { "deadLetterTargetArn" : !GetAtt ElasticServerlessForwarderContinuingDLQ.Arn, "maxReceiveCount" : 1 } - VisibilityTimeout: 910 - ElasticServerlessForwarderReplayDLQ: - Type: AWS::SQS::Queue - Properties: - DelaySeconds: 0 - QueueName: !Join [ "-", ["elastic-serverless-forwarder-replay-dlq", !Select [4, !Split ['-', !Select [2, !Split ['/', !Ref AWS::StackId]]]]]] - VisibilityTimeout: 910 - ElasticServerlessForwarderReplayQueue: - Type: AWS::SQS::Queue - Properties: - DelaySeconds: 0 - QueueName: !Join [ "-", ["elastic-serverless-forwarder-replay-queue", !Select [4, !Split ['-', !Select [2, !Split ['/', !Ref AWS::StackId]]]]]] - RedrivePolicy: { "deadLetterTargetArn" : !GetAtt ElasticServerlessForwarderReplayDLQ.Arn, "maxReceiveCount" : 3 } - VisibilityTimeout: 910 - ElasticServerlessForwarderFunction: - Type: AWS::Serverless::Function - Properties: - Timeout: 900 - MemorySize: 512 - CodeUri: %codeUri% - Runtime: python3.9 - Architectures: - - x86_64 - Handler: main_aws.handler - Environment: - Variables: - S3_CONFIG_FILE: !Ref ElasticServerlessForwarderS3ConfigFile - SQS_CONTINUE_URL: !Ref ElasticServerlessForwarderContinuingQueue - 
SQS_REPLAY_URL: !Ref ElasticServerlessForwarderReplayQueue - Events: - SQSEvent: - Type: SQS - Properties: - Queue: !GetAtt ElasticServerlessForwarderContinuingQueue.Arn - BatchSize: 10 - Enabled: true + Location: + ApplicationId: arn:aws:serverlessrepo:%awsRegion%:%accountID%:applications/elastic-serverless-forwarder-andrea-application + SemanticVersion: %semanticVersion% + Parameters: + ElasticServerlessForwarderS3ConfigFile: !Ref ElasticServerlessForwarderS3ConfigFile + ElasticServerlessForwarderSSMSecrets: !Join [",", !Ref ElasticServerlessForwarderSSMSecrets] + ElasticServerlessForwarderKMSKeys: !Join [",", !Ref ElasticServerlessForwarderKMSKeys] + ElasticServerlessForwarderSQSEvents: !Join [",", !Ref ElasticServerlessForwarderSQSEvents] + ElasticServerlessForwarderS3SQSEvents: !Join [",", !Ref ElasticServerlessForwarderS3SQSEvents] + ElasticServerlessForwarderKinesisEvents: !Join [",", !Ref ElasticServerlessForwarderKinesisEvents] + ElasticServerlessForwarderCloudWatchLogsEvents: !Join [",", !Ref ElasticServerlessForwarderCloudWatchLogsEvents] + ElasticServerlessForwarderS3Buckets: !Join [",", !Ref ElasticServerlessForwarderS3Buckets] + DependsOn: ElasticServerlessForwarderEventMacro Metadata: + AWS::CloudFormation::Interface: + ParameterGroups: + - Label: + default: Elastic Serverless Forwarder Configuration options + Parameters: + - ElasticServerlessForwarderS3ConfigFile + - ElasticServerlessForwarderSSMSecrets + - ElasticServerlessForwarderKMSKeys + - Label: + default: Elastic Serverless Forwarder Events options + Parameters: + - ElasticServerlessForwarderSQSEvents + - ElasticServerlessForwarderS3SQSEvents + - ElasticServerlessForwarderKinesisEvents + - ElasticServerlessForwarderCloudWatchLogsEvents + - Label: + default: Elastic Serverless Forwarder S3 Buckets Permissions options + Parameters: + - ElasticServerlessForwarderS3Buckets + ParameterLabels: + ElasticServerlessForwarderS3ConfigFile: + default: Config File + ElasticServerlessForwarderSSMSecrets: + default: AWS SSM Secrets + ElasticServerlessForwarderKMSKeys: + default: AWS KMS Keys + ElasticServerlessForwarderCloudWatchLogsEvents: + default: Cloudwatch Logs subscription filters + ElasticServerlessForwarderKinesisEvents: + default: Kinesis Data Stream event triggers + ElasticServerlessForwarderS3SQSEvents: + default: S3 SQS Event Notifications event triggers + ElasticServerlessForwarderSQSEvents: + default: Direct SQS queues event triggers + ElasticServerlessForwarderS3Buckets: + default: S3 buckets of S3 SQS Event Notifications AWS::ServerlessRepo::Application: Name: %sarAppName% Description: Send observability data from your AWS environment to Elastic. 
Author: %sarAuthorName% Labels: ['s3', 'logs', 'analytics', 'observability', 'monitoring', 'Elastic'] SemanticVersion: %semanticVersion% - LicenseUrl: .aws-sam/build/ElasticServerlessForwarderFunction/LICENSE.txt - ReadmeUrl: .aws-sam/build/ElasticServerlessForwarderFunction/README.md + LicenseUrl: %codeUri%/LICENSE.txt + ReadmeUrl: %codeUri%/README.md HomePageUrl: https://github.com/elastic/elastic-serverless-forwarder SourceCodeUrl: https://github.com/elastic/elastic-serverless-forwarder diff --git a/.internal/aws/scripts/dist.sh b/.internal/aws/scripts/dist.sh index 7ceff729..cf7c1b2a 100755 --- a/.internal/aws/scripts/dist.sh +++ b/.internal/aws/scripts/dist.sh @@ -33,7 +33,7 @@ SAR_AUTHOR_NAME="${6:-Elastic}" TMPDIR=$(mktemp -d /tmp/dist.XXXXXXXXXX) CODE_URI="${TMPDIR}/sources" -trap "rm -rf ${TMPDIR}" EXIT +#trap "rm -rf ${TMPDIR}" EXIT aws s3api get-bucket-location --bucket "${BUCKET}" || aws s3api create-bucket --acl private --bucket "${BUCKET}" --region "${REGION}" --create-bucket-configuration LocationConstraint="${REGION}" @@ -59,7 +59,6 @@ cat < "${TMPDIR}/policy.json" EOF aws s3api put-bucket-policy --bucket "${BUCKET}" --policy "file://${TMPDIR}/policy.json" - mkdir -v -p "${CODE_URI}" cp -v requirements.txt "${CODE_URI}/" cp -v main_aws.py "${CODE_URI}/" @@ -68,8 +67,19 @@ find {handlers,share,shippers,storage} -not -name "*__pycache__*" -name "*.py" - cp -v LICENSE.txt "${CODE_URI}/LICENSE.txt" cp -v docs/README-AWS.md "${CODE_URI}/README.md" -sed -e "s|%codeUri%|${CODE_URI}|g" -e "s/%sarAppName%/${SAR_APP_NAME}/g" -e "s/%sarAuthorName%/${SAR_AUTHOR_NAME}/g" -e "s/%semanticVersion%/${SEMANTIC_VERSION}/g" -e "s/%codeURIBucket%/${BUCKET}/g" -e "s/%accountID%/${ACCOUNT_ID}/g" -e "s/%awsRegion%/${REGION}/g" .internal/aws/cloudformation/template.yaml > "${TMPDIR}/template.yaml" +sed -e "s|%codeUri%|${CODE_URI}|g" -e "s/%sarAppName%/${SAR_APP_NAME}/g" -e "s/%sarAuthorName%/${SAR_AUTHOR_NAME}/g" -e "s/%semanticVersion%/${SEMANTIC_VERSION}/g" -e "s/%awsRegion%/${REGION}/g" .internal/aws/cloudformation/macro.yaml > "${TMPDIR}/macro.yaml" +sed -e "s|%codeUri%|${CODE_URI}|g" -e "s/%sarAppName%/${SAR_APP_NAME}/g" -e "s/%sarAuthorName%/${SAR_AUTHOR_NAME}/g" -e "s/%semanticVersion%/${SEMANTIC_VERSION}/g" -e "s/%awsRegion%/${REGION}/g" -e "s/%accountID%/${ACCOUNT_ID}/g" .internal/aws/cloudformation/template.yaml > "${TMPDIR}/template.yaml" +sed -e "s|%codeUri%|${CODE_URI}|g" -e "s/%sarAppName%/${SAR_APP_NAME}/g" -e "s/%sarAuthorName%/${SAR_AUTHOR_NAME}/g" -e "s/%semanticVersion%/${SEMANTIC_VERSION}/g" -e "s/%awsRegion%/${REGION}/g" -e "s/%codeURIBucket%/${BUCKET}/g" .internal/aws/cloudformation/application.yaml > "${TMPDIR}/application.yaml" + +sam build --debug --use-container --build-dir "${TMPDIR}/.aws-sam/build/macro" --template-file "${TMPDIR}/macro.yaml" --region "${REGION}" +sam package --template-file "${TMPDIR}/.aws-sam/build/macro/template.yaml" --output-template-file "${TMPDIR}/.aws-sam/build/macro/packaged.yaml" --s3-bucket "${BUCKET}" --region "${REGION}" +sam publish --template "${TMPDIR}/.aws-sam/build/macro/packaged.yaml" --region "${REGION}" + +sam build --debug --use-container --build-dir "${TMPDIR}/.aws-sam/build/application" --template-file "${TMPDIR}/application.yaml" --region "${REGION}" +sam package --template-file "${TMPDIR}/.aws-sam/build/application/template.yaml" --output-template-file "${TMPDIR}/.aws-sam/build/application/packaged.yaml" --s3-bucket "${BUCKET}" --region "${REGION}" +sam publish --template "${TMPDIR}/.aws-sam/build/application/packaged.yaml" 
--region "${REGION}" +aws s3 cp "${TMPDIR}/.aws-sam/build/application/packaged.yaml" "s3://${BUCKET}/application.yaml" -sam build --debug --use-container --build-dir "${TMPDIR}/.aws-sam/build" --template-file "${TMPDIR}/template.yaml" --region "${REGION}" -sam package --template-file "${TMPDIR}/.aws-sam/build/template.yaml" --output-template-file "${TMPDIR}/.aws-sam/build/packaged.yaml" --s3-bucket "${BUCKET}" --region "${REGION}" -sam publish --template "${TMPDIR}/.aws-sam/build/packaged.yaml" --region "${REGION}" +sam build --debug --use-container --build-dir "${TMPDIR}/.aws-sam/build/template" --template-file "${TMPDIR}/template.yaml" --region "${REGION}" +sam package --template-file "${TMPDIR}/.aws-sam/build/template/template.yaml" --output-template-file "${TMPDIR}/.aws-sam/build/template/packaged.yaml" --s3-bucket "${BUCKET}" --region "${REGION}" +sam publish --template "${TMPDIR}/.aws-sam/build/template/packaged.yaml" --region "${REGION}" From 91b162e1de8600d67d2fc727b813ff15c8e8a8e9 Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Thu, 12 May 2022 19:58:51 +0900 Subject: [PATCH 04/17] safety checks on macro --- .internal/aws/cloudformation/macro.yaml | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/.internal/aws/cloudformation/macro.yaml b/.internal/aws/cloudformation/macro.yaml index 05a6de02..ecf5467c 100644 --- a/.internal/aws/cloudformation/macro.yaml +++ b/.internal/aws/cloudformation/macro.yaml @@ -124,47 +124,51 @@ Resources: ) if "ElasticServerlessForwarderSSMSecrets" in parameters: - if len(parameters["ElasticServerlessForwarderSSMSecrets"]) > 0: + ssm_secrets_arn = [x for x in parameters["ElasticServerlessForwarderSSMSecrets"] if len(x) > 0] + if len(ssm_secrets_arn) > 0: policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( { "Effect": "Allow", "Action": "secretsmanager:GetSecretValue", - "Resource": parameters["ElasticServerlessForwarderSSMSecrets"] + "Resource": ssm_secrets_arn } ) if "ElasticServerlessForwarderKMSKeys" in parameters: - if len(parameters["ElasticServerlessForwarderKMSKeys"]) > 0: + kms_keys_arn = [x for x in parameters["ElasticServerlessForwarderKMSKeys"] if len(x) > 0] + if len(kms_keys_arn) > 0: policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( { "Effect": "Allow", "Action": "kms:Decrypt", - "Resource": parameters["ElasticServerlessForwarderKMSKeys"] + "Resource": kms_keys_arn } ) if "ElasticServerlessForwarderCloudWatchLogsEvents" in parameters: - if len(parameters["ElasticServerlessForwarderCloudWatchLogsEvents"]) > 0: + cloudwatch_logs_arn = [x for x in parameters["ElasticServerlessForwarderCloudWatchLogsEvents"] if len(x) > 0] + if len(cloudwatch_logs_arn) > 0: policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( { "Effect": "Allow", "Action": "logs:DescribeLogGroups", - "Resource": parameters["ElasticServerlessForwarderCloudWatchLogsEvents"] + "Resource": cloudwatch_logs_arn } ) if "ElasticServerlessForwarderS3Buckets" in parameters: - if len(parameters["ElasticServerlessForwarderS3Buckets"]) > 0: + s3_buckets_arn = [x for x in parameters["ElasticServerlessForwarderS3Buckets"] if len(x) > 0] + if len(s3_buckets_arn) > 0: policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( { "Effect": "Allow", "Action": "s3:ListBucket", - "Resource": parameters["ElasticServerlessForwarderS3Buckets"] + "Resource": s3_buckets_arn } ) resources = [] - for s3_bucket_with_notification in parameters["ElasticServerlessForwarderS3Buckets"]: + for 
s3_bucket_with_notification in s3_buckets_arn: resources.append(f"{s3_bucket_with_notification}/*") if len(resources) > 0: From 5d2809262b55256e8d57710278a6ec0dcc7dfb42 Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Fri, 13 May 2022 18:23:59 +0900 Subject: [PATCH 05/17] make the macro more resilient and apply workaround for terraform and cloudformation bugs --- .internal/aws/cloudformation/macro.yaml | 55 ++++++++++++++----------- 1 file changed, 32 insertions(+), 23 deletions(-) diff --git a/.internal/aws/cloudformation/macro.yaml b/.internal/aws/cloudformation/macro.yaml index ecf5467c..bd207109 100644 --- a/.internal/aws/cloudformation/macro.yaml +++ b/.internal/aws/cloudformation/macro.yaml @@ -10,19 +10,23 @@ Resources: Type: AWS::Serverless::Function Properties: InlineCode: | - import boto3 + import random + import string - cloudformation_client = boto3.client("cloudformation") + + def random_suffix(): + return "".join(random.choices(string.ascii_letters + string.digits, k=10)) def create_events(event): events_fragment = {} parameters = event["templateParameterValues"] if "ElasticServerlessForwarderKinesisEvents" in parameters: - for kinesis_event_n, kinesis_event in enumerate(parameters["ElasticServerlessForwarderKinesisEvents"]): + for kinesis_event in parameters["ElasticServerlessForwarderKinesisEvents"]: + kinesis_event = kinesis_event.strip() if len(kinesis_event) == 0: continue - kinesis_event_name = f"KinesisEvent{int(kinesis_event_n)}" + kinesis_event_name = f"KinesisEvent{random_suffix()}" events_fragment[kinesis_event_name] = { "Type": "Kinesis", "Properties": { @@ -35,11 +39,12 @@ Resources: } if "ElasticServerlessForwarderSQSEvents" in parameters: - for sqs_event_n, sqs_event in enumerate(parameters["ElasticServerlessForwarderSQSEvents"]): + for sqs_event in parameters["ElasticServerlessForwarderSQSEvents"]: + sqs_event = sqs_event.strip() if len(sqs_event) == 0: continue - sqs_event_name = f"SQSEvent{int(sqs_event_n)}" + sqs_event_name = f"SQSEvent{random_suffix()}" events_fragment[sqs_event_name] = { "Type": "SQS", "Properties": { @@ -50,11 +55,12 @@ Resources: } if "ElasticServerlessForwarderS3SQSEvents" in parameters: - for s3_sqs_event_n, s3_sqs_event in enumerate(parameters["ElasticServerlessForwarderS3SQSEvents"]): + for s3_sqs_event in parameters["ElasticServerlessForwarderS3SQSEvents"]: + s3_sqs_event = s3_sqs_event.strip() if len(s3_sqs_event) == 0: continue - s3_sqs_event_name = f"S3SQSEvent{int(s3_sqs_event_n)}" + s3_sqs_event_name = f"S3SQSEvent{random_suffix()}" events_fragment[s3_sqs_event_name] = { "Type": "SQS", "Properties": { @@ -65,13 +71,15 @@ Resources: } if "ElasticServerlessForwarderCloudWatchLogsEvents" in parameters: - for cloudwatch_logs_event_n, cloudwatch_logs_event in enumerate(parameters["ElasticServerlessForwarderCloudWatchLogsEvents"]): + for cloudwatch_logs_event in parameters["ElasticServerlessForwarderCloudWatchLogsEvents"]: + cloudwatch_logs_event = cloudwatch_logs_event.strip() + if len(cloudwatch_logs_event) == 0: + continue + arn_components = cloudwatch_logs_event.split(":") cloudwatch_logs_group_name = arn_components[6] - if len(cloudwatch_logs_group_name) == 0: - continue - cloudwatch_logs_event_name = f"CloudWatchLogsEvent{int(cloudwatch_logs_event_n)}" + cloudwatch_logs_event_name = f"CloudWatchLogsEvent{random_suffix()}" events_fragment[cloudwatch_logs_event_name] = { "Type": "CloudWatchLogs", "Properties": { @@ -87,7 +95,7 @@ Resources: policy_fragment = { "Type": "AWS::IAM::Policy", "Properties": { - "PolicyName": { + 
"PolicyName": { "Fn::Join": ["-", ["elastic-serverless-forwarder-policy", { "Fn::Select": [4, { "Fn::Split": ["-", { @@ -112,8 +120,8 @@ Resources: parameters = event["templateParameterValues"] if "ElasticServerlessForwarderS3ConfigFile" in parameters: - bucket_name_and_object_key = parameters["ElasticServerlessForwarderS3ConfigFile"].split("/", 1) - resource = f"arn:aws:s3:::{bucket_name_and_object_key[0]}{'/'.join(bucket_name_and_object_key[1:])}" + bucket_name_and_object_key = parameters["ElasticServerlessForwarderS3ConfigFile"].replace("s3://", "") + resource = f"arn:aws:s3:::{bucket_name_and_object_key}" if len(resource) > 0: policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( { @@ -124,7 +132,8 @@ Resources: ) if "ElasticServerlessForwarderSSMSecrets" in parameters: - ssm_secrets_arn = [x for x in parameters["ElasticServerlessForwarderSSMSecrets"] if len(x) > 0] + ssm_secrets_arn = [x for x in parameters["ElasticServerlessForwarderSSMSecrets"] if len(x.strip()) > 0] + if len(ssm_secrets_arn) > 0: policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( { @@ -135,7 +144,7 @@ Resources: ) if "ElasticServerlessForwarderKMSKeys" in parameters: - kms_keys_arn = [x for x in parameters["ElasticServerlessForwarderKMSKeys"] if len(x) > 0] + kms_keys_arn = [x for x in parameters["ElasticServerlessForwarderKMSKeys"] if len(x.strip()) > 0] if len(kms_keys_arn) > 0: policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( { @@ -146,18 +155,18 @@ Resources: ) if "ElasticServerlessForwarderCloudWatchLogsEvents" in parameters: - cloudwatch_logs_arn = [x for x in parameters["ElasticServerlessForwarderCloudWatchLogsEvents"] if len(x) > 0] - if len(cloudwatch_logs_arn) > 0: + cloudwatch_logs_group_arn = [f"{':'.join(x.split(':')[0:-1])}:*:*" for x in parameters["ElasticServerlessForwarderCloudWatchLogsEvents"] if len(x.strip()) > 0] + if len(cloudwatch_logs_group_arn) > 0: policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( { "Effect": "Allow", "Action": "logs:DescribeLogGroups", - "Resource": cloudwatch_logs_arn + "Resource": cloudwatch_logs_group_arn[0] } ) if "ElasticServerlessForwarderS3Buckets" in parameters: - s3_buckets_arn = [x for x in parameters["ElasticServerlessForwarderS3Buckets"] if len(x) > 0] + s3_buckets_arn = [x for x in parameters["ElasticServerlessForwarderS3Buckets"] if len(x.strip()) > 0] if len(s3_buckets_arn) > 0: policy_fragment["Properties"]["PolicyDocument"]["Statement"].append( { @@ -185,8 +194,8 @@ Resources: "Effect": "Allow", "Action": "sqs:SendMessage", "Resource": [ - { "Fn::GetAtt": ["ElasticServerlessForwarderReplayQueue", "Arn"] }, - { "Fn::GetAtt": ["ElasticServerlessForwarderContinuingQueue", "Arn"] }, + {"Fn::GetAtt": ["ElasticServerlessForwarderReplayQueue", "Arn"]}, + {"Fn::GetAtt": ["ElasticServerlessForwarderContinuingQueue", "Arn"]}, ] } ) From d3532999b3ba1a91b25486103948712eee809861 Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Mon, 16 May 2022 18:01:53 +0900 Subject: [PATCH 06/17] fix documentation --- README.md | 2 +- docs/README-AWS.md | 519 ++++++++------------------------------------- 2 files changed, 88 insertions(+), 433 deletions(-) diff --git a/README.md b/README.md index c2e51fae..50978833 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ [![Build Status](https://beats-ci.elastic.co/job/Library/job/elastic-serverless-forwarder-mbp/job/main/badge/icon)](https://beats-ci.elastic.co/job/Library/job/elastic-serverless-forwarder-mbp/job/main/) # elastic-serverless-forwarder -Elastic 
Forwarder for Serverless +Elastic Serverless Forwarder ### For AWS documentation, [go here](https://github.com/elastic/elastic-serverless-forwarder/blob/main/docs/README-AWS.md) diff --git a/docs/README-AWS.md b/docs/README-AWS.md index 263446af..66fffc31 100644 --- a/docs/README-AWS.md +++ b/docs/README-AWS.md @@ -23,21 +23,21 @@ Lambda function also supports writing directly to an index, alias or a custom da **Direct SQS message payload input:** -The Lambda function supports ingesting logs contained in the payload of a SQS body record and sends them to Elastic. The SQS queue serves as a trigger for the Lambda function. When a new record gets written to an SQS queue the Lambda function gets triggered. Users will set up separate SQS queues for each type of logs, The config param for Elasticsearch output `es_index_or_datastream_name` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different SQS queues that match specific log types. +The Lambda function supports ingesting logs contained in the payload of a SQS body record and sends them to Elastic. The SQS queue serves as a trigger for the Lambda function. When a new record gets written to an SQS queue the Lambda function gets triggered. Users will set up separate SQS queues for each type of logs, The config param for Elasticsearch output `datastream` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different SQS queues that match specific log types. **S3 SQS Event Notifications input:** The Lambda function supports ingesting logs contained in the S3 bucket through an SQS notification (s3:ObjectCreated) and sends them to Elastic. The SQS queue serves as a trigger for the Lambda function. When a new log file gets written to an S3 bucket and meets the criteria (as configured including prefix/suffix), a notification to SQS is generated that triggers the Lambda function. Users will set up separate SQS queues for each type of logs (i.e. aws.vpcflow, aws.cloudtrail, aws.waf and so on). A single configuration file can have many input sections, pointing to different SQS queues that match specific log types. -The `es_index_or_datastream_name` parameter in the config file is optional. Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types the users can optionally set the `es_index_or_datastream_name` value in the configuration file according to the naming convention of Elasticsearch datastream and existing integrations. If the `es_index_or_datastream_name` is not specified and it cannot be matched with any of the above AWS services then the dataset will be set to "generic" and the namespace to "default" pointing to the data stream name "logs-generic-default". +The `datastream` parameter in the config file is optional. Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. 
It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types the users can optionally set the `datastream` value in the configuration file according to the naming convention of Elasticsearch datastream and existing integrations. If the `es_index_or_datastream_name` is not specified and it cannot be matched with any of the above AWS services then the dataset will be set to "generic" and the namespace to "default" pointing to the data stream name "logs-generic-default". For more information, read the AWS [documentation](https://docs.aws.amazon.com/AmazonS3/latest/userguide/ways-to-add-notification-config-to-bucket.html) about creating an SQS event notifications for S3 buckets. **Kinesis Data Stream input:** -The Lambda function supports ingesting logs contained in the payload of a Kinesis data stream record and sends them to Elastic. The Kinesis data stream serves as a trigger for the Lambda function. When a new record gets written to a Kinesis data stream the Lambda function gets triggered. Users will set up separate Kinesis data streams for each type of logs, The config param for Elasticsearch output `es_index_or_datastream_name` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different Kinesis data streams that match specific log types. +The Lambda function supports ingesting logs contained in the payload of a Kinesis data stream record and sends them to Elastic. The Kinesis data stream serves as a trigger for the Lambda function. When a new record gets written to a Kinesis data stream the Lambda function gets triggered. Users will set up separate Kinesis data streams for each type of logs, The config param for Elasticsearch output `datastream` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different Kinesis data streams that match specific log types. **CloudWatch Logs subscription filter input:** -The Lambda function supports ingesting logs contained in the message payload of CloudWatch Logs events. The CloudWatch Logs serves as a trigger for the Lambda function. Users will set up separate Cloudwatch log groups for each type of logs, The config param for Elasticsearch output `es_index_or_datastream_name` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different CloudWatch Logs log groups that match specific log types. +The Lambda function supports ingesting logs contained in the message payload of CloudWatch Logs events. The CloudWatch Logs serves as a trigger for the Lambda function. Users will set up separate Cloudwatch log groups for each type of logs, The config param for Elasticsearch output `datastream` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different CloudWatch Logs log groups that match specific log types. 
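
All of these inputs are declared in the same config yaml file referenced by `S3_CONFIG_FILE`. As an illustration only, the following is a minimal sketch of such a file combining an S3 SQS Event Notifications input and a CloudWatch Logs subscription filter input: the queue and log group ARNs, the Elasticsearch endpoint and the credentials are placeholders, and the exact input `type` identifiers and output keys (this document refers to the output setting both as `datastream` and as `es_index_or_datastream_name`) should be checked against the configuration reference for the version you deploy.

```yaml
inputs:
  # S3 SQS Event Notifications input: id is the ARN of the SQS queue
  # receiving the s3:ObjectCreated notifications (placeholder ARN).
  - type: "s3-sqs"
    id: "arn:aws:sqs:eu-central-1:123456789012:s3-sqs-queue-name"
    outputs:
      - type: "elasticsearch"
        args:
          elasticsearch_url: "https://elasticsearch.example.com:9243"
          username: "username"
          password: "password"
          # Optional for this input: well-known AWS service logs are routed automatically.
          es_index_or_datastream_name: "logs-aws.vpcflow-default"
  # CloudWatch Logs subscription filter input: id is the ARN of the log group (placeholder ARN).
  - type: "cloudwatch-logs"
    id: "arn:aws:logs:eu-central-1:123456789012:log-group:my-log-group:*"
    outputs:
      - type: "elasticsearch"
        args:
          elasticsearch_url: "https://elasticsearch.example.com:9243"
          username: "username"
          password: "password"
          # Mandatory for this input type.
          es_index_or_datastream_name: "logs-generic-default"
```

The file is then uploaded to an S3 bucket, and its url in the format `s3://bucket-name/config-file-name` is the value expected by the `ElasticServerlessForwarderS3ConfigFile` deployment parameter (and by the resulting `S3_CONFIG_FILE` environment variable) described in the Deployment section below.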
### Deployment: @@ -57,36 +57,27 @@ At a high level the deployment consists of the following steps: * Select "Public applications" tab * In the search box type "elastic-serverless-forwarder" and submit * Look for "elastic-serverless-forwarder" in the results and click on it - * On the "Application settings" fill the input `ElasticServerlessForwarderS3ConfigFile` with the value of the S3 url in the format "s3://bucket-name/config-file-name" pointing to the configuration file for your Elastic Forwarder for Serverless (see below), this will populate the `S3_CONFIG_FILE` environment variable of the deployed Lambda. + * On the "Application settings" fill the following parameters + * `ElasticServerlessForwarderS3ConfigFile` with the value of the S3 url in the format "s3://bucket-name/config-file-name" pointing to the configuration file for your deployment of Elastic Serverless Forwarder (see below), this will populate the `S3_CONFIG_FILE` environment variable of the deployed Lambda. + * `ElasticServerlessForwarderSSMSecrets` with a comma delimited list of AWS SSM Secrets ARNs referenced in the config yaml file (if any). + * `ElasticServerlessForwarderKMSKeys` with a comma delimited list of AWS KMS Keys ARNs to be used for decrypting AWS SSM Secrets referenced in the config yaml file (if any). + * `ElasticServerlessForwarderSQSEvents` with a comma delimited list of Direct SQS queues ARNs to set as event triggers for the Lambda (if any). + * `ElasticServerlessForwarderS3SQSEvents` with a comma delimited list of S3 SQS Event Notifications ARNs to set as event triggers for the Lambda (if any). + * `ElasticServerlessForwarderKinesisEvents` with a comma delimited list of Kinesis Data Stream ARNs to set as event triggers for the Lambda (if any). + * `ElasticServerlessForwarderCloudWatchLogsEvents` with a comma delimited list of Cloudwatch Logs Log Groups ARNs to set subscription filters on the Lambda for (if any). + * `ElasticServerlessForwarderS3Buckets` with a comma delimited list of S3 buckets ARNs that are the sources of the S3 SQS Event Notifications (if any). 
* Click on the "Deploy" button in the bottom right corner * Once the Applications page for "serverlessrepo-elastic-serverless-forwarder" is loaded * Click on "Deployments" tab * Monitor the "Deployment history" refreshing its status until the Status shows as "Create complete * Go to "Lambda > Functions" page in the AWS console and look for the Function Name with prefix "serverlessrepo-elastic-se-ElasticServerlessForward-" and click on it * Go to "Configuration" tab and select "Environment Variables" - * You can additionally add the following environment variables to enable Elastic APM instrumentation to your deployment of Elastic Forwarder for Serverless + * You can additionally add the following environment variables to enable Elastic APM instrumentation for your deployment of Elastic Serverless Forwarder * | Key | Value | |---------------------------|--------| |`ELASTIC_APM_ACTIVE` | `true` | |`ELASTIC_APM_SECRET_TOKEN` | token | |`ELASTIC_APM_SERVER_URL` | url | - * Still in the "Configuration" tab select "Permissions" - * Click on the link of the IAM role for the Lambda under *Execution role* -> *Role name* - * In the new window add a new policy to the role, as described at [Lambda IAM permissions and policies](#lambda-iam-permissions-and-policies) - * Back to the "Configuration" tab in the Lambda window select "Triggers" - * You can see an already defined SQS trigger for a queue with the prefix `elastic-serverless-forwarder-continuing-queue-`. This is an internal queue and should not be modified, disabled or removed. - * Click on "Add trigger" - - When using S3 SQS event notification or direct SQS message payload input: - * From "Trigger configuration" dropdown select "SQS" - * In the "SQS queue" field chose the queue or insert the ARN of the queue you want to use as trigger for your Elastic Serverless Forwarder - * The SQS queue you want to use as trigger must have a visibility timeout of 910 seconds, 10 seconds more than the Elastic Forwarder for Serverless Lambda timeout. - - When using Kinesis data stream input: - * From "Trigger configuration" dropdown select "Kinesis" - * In the "Kinesis stream" field chose the stream name you want to use as trigger for your Elastic Serverless Forwarder - - When using CloudWatch Logs events input: - * From "Trigger configuration" dropdown select "CloudWatch Logs" - * In the "Log group" field chose the log group you want to use as trigger for your Elastic Serverless Forwarder - * Click on "Add" ### Cloudformation @@ -105,437 +96,101 @@ Resources: Location: ApplicationId: 'arn:aws:serverlessrepo:eu-central-1:267093732750:applications/elastic-serverless-forwarder' SemanticVersion: '%SEMANTICVERSION%' ## UPDATE USING THE SEMANTIC VERSION + Parameters: + ElasticServerlessForwarderS3ConfigFile: "" ## FILL WITH THE VALUE OF THE S3 URL IN THE FORMAT "s3://bucket-name/config-file-name" POINTING TO THE CONFIGURATION FILE FOR YOUR DEPLOYMENT OF THE ELASTIC SERVERLESS FORWARDER + ElasticServerlessForwarderSSMSecrets: "" ## FILL WITH A COMMA DELIMITED LIST OF AWS SSM SECRETS ARNS REFERENCED IN THE CONFIG YAML FILE (IF ANY). + ElasticServerlessForwarderKMSKeys: "" ## FILL WITH A COMMA DELIMITED LIST OF AWS KMS KEYS ARNS TO BE USED FOR DECRYPTING AWS SSM SECRETS REFERENCED IN THE CONFIG YAML FILE (IF ANY). + ElasticServerlessForwarderSQSEvents: "" ## FILL WITH A COMMA DELIMITED LIST OF DIRECT SQS QUEUES ARNS TO SET AS EVENT TRIGGERS FOR THE LAMBDA (IF ANY). 
+ ElasticServerlessForwarderS3SQSEvents: "" ## FILL WITH A COMMA DELIMITED LIST OF S3 SQS EVENT NOTIFICATIONS ARNS TO SET AS EVENT TRIGGERS FOR THE LAMBDA (IF ANY). + ElasticServerlessForwarderKinesisEvents: "" ## FILL WITH A COMMA DELIMITED LIST OF KINESIS DATA STREAM ARNS TO SET AS EVENT TRIGGERS FOR THE LAMBDA (IF ANY). + ElasticServerlessForwarderCloudWatchLogsEvents: "" ## FILL WITH A COMMA DELIMITED LIST OF CLOUDWATCH LOGS LOG GROUPS ARNS TO SET SUBSCRIPTION FILTERS ON THE LAMBDA FOR (IF ANY). + ElasticServerlessForwarderS3Buckets: "" ## FILL WITH A COMMA DELIMITED LIST OF S3 BUCKETS ARNS THAT ARE THE SOURCES OF THE S3 SQS EVENT NOTIFICATIONS (IF ANY). ``` - - * Deploy the Lambda from SAR running the following command: * ```commandline aws cloudformation deploy --template-file sar-application.yaml --stack-name esf-cloudformation-deployment --capabilities CAPABILITY_IAM CAPABILITY_AUTO_EXPAND ``` -* Import json template from deployed stack running the following commands: +### Terraform +* Save the following yaml content as `sar-application.tf` +``` +provider "aws" { + region = "" ## FILL WITH THE AWS REGION WHERE YOU WANT TO DEPLOY THE ELASTIC SERVERLESS FORWARDER +} + +data "aws_serverlessapplicationrepository_application" "esf_sar" { + application_id = "arn:aws:serverlessrepo:eu-central-1:627286350134:applications/elastic-serverless-forwarder-andrea" +} + +resource "aws_serverlessapplicationrepository_cloudformation_stack" "esf_cf_stak" { + name = "terraform-elastic-serverless-forwarder" + application_id = data.aws_serverlessapplicationrepository_application.esf_sar.application_id + semantic_version = data.aws_serverlessapplicationrepository_application.esf_sar.semantic_version + capabilities = data.aws_serverlessapplicationrepository_application.esf_sar.required_capabilities + + parameters = { + ElasticServerlessForwarderS3ConfigFile = "" ## FILL WITH THE VALUE OF THE S3 URL IN THE FORMAT "s3://bucket-name/config-file-name" POINTING TO THE CONFIGURATION FILE FOR YOUR DEPLOYMENT OF THE ELASTIC SERVERLESS FORWARDER + ElasticServerlessForwarderSSMSecrets = "" ## FILL WITH A COMMA DELIMITED LIST OF AWS SSM SECRETS ARNS REFERENCED IN THE CONFIG YAML FILE (IF ANY). + ElasticServerlessForwarderKMSKeys = "" ## FILL WITH A COMMA DELIMITED LIST OF AWS KMS KEYS ARNS TO BE USED FOR DECRYPTING AWS SSM SECRETS REFERENCED IN THE CONFIG YAML FILE (IF ANY). + ElasticServerlessForwarderSQSEvents = "" ## FILL WITH A COMMA DELIMITED LIST OF DIRECT SQS QUEUES ARNS TO SET AS EVENT TRIGGERS FOR THE LAMBDA (IF ANY). + ElasticServerlessForwarderS3SQSEvents = "" ## FILL WITH A COMMA DELIMITED LIST OF S3 SQS EVENT NOTIFICATIONS ARNS TO SET AS EVENT TRIGGERS FOR THE LAMBDA (IF ANY). + ElasticServerlessForwarderKinesisEvents = "" ## FILL WITH A COMMA DELIMITED LIST OF KINESIS DATA STREAM ARNS TO SET AS EVENT TRIGGERS FOR THE LAMBDA (IF ANY). + ElasticServerlessForwarderCloudWatchLogsEvents = "" ## FILL WITH A COMMA DELIMITED LIST OF CLOUDWATCH LOGS LOG GROUPS ARNS TO SET SUBSCRIPTION FILTERS ON THE LAMBDA FOR (IF ANY). + ElasticServerlessForwarderS3Buckets = "" ## FILL WITH A COMMA DELIMITED LIST OF S3 BUCKETS ARNS THAT ARE THE SOURCES OF THE S3 SQS EVENT NOTIFICATIONS (IF ANY). 
+ } +} +``` +* Deploy the Lambda from SAR running the following command: * ```commandline - PARENT_STACK_ARN=$(aws cloudformation describe-stacks --stack-name esf-cloudformation-deployment --query "Stacks[0].StackId" --output text) - LAMBDA_STACK_ARN=$(aws cloudformation list-stacks --stack-status-filter CREATE_COMPLETE --query "StackSummaries[?ParentId==\`${PARENT_STACK_ARN}\`].StackId" --output text) - aws cloudformation get-template --stack-name "${LAMBDA_STACK_ARN}" --query TemplateBody > sar-lambda.json + terrafrom init ``` - -* Edit sar-lambda.json to add required permissions for the Lambda to run: - * Add `Policies` to `Resources.ElasticServerlessForwarderFunctionRole.Properties` - ```json - "Policies": [ - { - "PolicyName": "ElasticServerlessForwarderFunctionRolePolicySQSContinuingQueue", ## ADD AS IT IS FOR THE CONTINUING QUEUE - "PolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Action": [ - "sqs:SendMessage" - ], - "Resource": { - "Fn::GetAtt": [ - "ElasticServerlessForwarderContinuingQueue", - "Arn" - ] - }, - "Effect": "Allow" - } - ] - } - }, - { - "PolicyName": "ElasticServerlessForwarderFunctionRolePolicySQSReplayQueue", ## ADD AS IT IS FOR THE REPLAY QUEUE - "PolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Action": [ - "sqs:SendMessage" - ], - "Resource": { - "Fn::GetAtt": [ - "ElasticServerlessForwarderReplayQueue", - "Arn" - ] - }, - "Effect": "Allow" - } - ] - } - }, - { - "PolicyName": "ElasticServerlessForwarderFunctionRolePolicyS3Configfile", ## ADAPT TO THE CONFIG FILE IN THE S3 BUCKET - "PolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Action": [ - "s3:GetObject" - ], - "Resource": "arn:aws:s3:::%CONFIG_FILE_BUCKET_NAME%/%CONFIG_FILE_OBJECT_KEY%", - "Effect": "Allow" - } - ] - } - }, - { - "PolicyName": "ElasticServerlessForwarderFunctionRolePolicySQS", ## ADD FOR YOUR SQS QUEUES - "PolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Action": [ - "sqs:GetQueueUrl" - ], - "Resource": [ - "arn:aws:sqs:%AWS_REGION%:%AWS_ACCOUNT_ID%:%QUEUE_NAME%", - ... - ], - "Effect": "Allow" - } - ] - } - }, - { - "PolicyName": "ElasticServerlessForwarderFunctionRolePolicyKinesis", ## ADD FOR YOUR KINESIS STREAMS - "PolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Action": [ - "kinesis:GetRecords", - "kinesis:GetShardIterator", - "kinesis:DescribeStream", - "kinesis:ListShards", - "kinesis:ListStreams" - ], - "Resource": [ - "arn:aws:kinesis:%AWS_REGION%:%AWS_ACCOUNT_ID%:stream/%STREAM_NAME%", - ... - ], - "Effect": "Allow" - } - ] - } - }, - { - "PolicyName": "ElasticServerlessForwarderFunctionRolePolicyS3", ## ADD FOR YOUR S3 BUCKETS - "PolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Action": [ - "s3:ListBucket" - ], - "Resource": [ - "arn:aws:s3:::%BUCKET_NAME%", - ... - ], - "Effect": "Allow" - }, - { - "Action": [ - "s3:GetObject" - ], - "Resource": [ - "arn:aws:s3:::%BUCKET_NAME%/*", - ... - ], - "Effect": "Allow" - } - ] - } - }, - { - "PolicyName": "ElasticServerlessForwarderFunctionRolePolicySM", ## ADD FOR YOUR SECRET MANAGER SECRETS - "PolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Action": [ - "secretsmanager:GetSecretValue" - ], - "Resource": [ - "arn:aws:secretsmanager:%AWS_REGION%:%AWS_ACCOUNT_ID%:secret:%SECRET_NAME%", - ... 
- ], - "Effect": "Allow" - } - ] - } - }, - { - "PolicyName": "ElasticServerlessForwarderFunctionRolePolicyKMS", ## ADD FOR YOUR KMS DECRYPT KEYS - "PolicyDocument": { - "Version": "2012-10-17", - "Statement": [ - { - "Action": [ - "kms:Decrypt" - ], - "Resource": [ - "arn:aws:kms:%AWS_REGION%:%AWS_ACCOUNT_ID%:key/%KEY_ID%", - ... - ], - "Effect": "Allow" - } - ] - } - } - ] - ``` - - * Add an `AWS::Lambda::Permission` entry to `Resources` for every CloudWatch Logs log group you will use as trigger: - ```json - "ElasticServerlessForwarderCloudWatchPolicy1": { - "Type": "AWS::Lambda::Permission", - "Properties": { - "FunctionName": { - "Ref": "ElasticServerlessForwarderFunction" - }, - "Action": "lambda:InvokeFunction", - "Principal": "logs.%AWS_REGION%.amazonaws.com", - "SourceAccount": "%AWS_ACCOUNT_ID%", - "SourceArn": "arn:aws:logs:%AWS_REGION%:%AWS_ACCOUNT_ID%:log-group:%LOG_GROUP_NAME_1%:*" - } - }, - "ElasticServerlessForwarderCloudWatchPolicy2": { - "Type": "AWS::Lambda::Permission", - "Properties": { - "FunctionName": { - "Ref": "ElasticServerlessForwarderFunction" - }, - "Action": "lambda:InvokeFunction", - "Principal": "logs.%AWS_REGION%.amazonaws.com", - "SourceAccount": "%AWS_ACCOUNT_ID%", - "SourceArn": "arn:aws:logs:%AWS_REGION%:%AWS_ACCOUNT_ID%:log-group:%LOG_GROUP_NAME_2%:*" - } - } - ``` - -* Edit sar-lambda.json to further customise your deployment of Elastic Forwarder for Serverless - * Examples: - * Adding environment variables: add entries in `Resources.ElasticServerlessForwarderFunction.Environment.Variables` - ```json - "Environment": { - "Variables": { - "SQS_CONTINUE_URL": { # Do not remove this - "Ref": "ElasticServerlessForwarderContinuingQueue" - }, - "SQS_REPLAY_URL": { # Do not remove this - "Ref": "ElasticServerlessForwarderReplayQueue" - }, - "ELASTIC_APM_ACTIVE": "true", - "ELASTIC_APM_SECRET_TOKEN": "%ELASTIC_APM_SECRET_TOKEN%", - "ELASTIC_APM_SERVER_URL": "%ELASTIC_APM_SERVER_URL%", - "S3_CONFIG_FILE": "s3://bucket-name/config-file-name" - } - }, - ``` - * Adding an Event Source Mapping when using S3 SQS Event Notifications or direct SQS message payload input: - ```json - "S3SQSEventSource": { - "Type": "AWS::Lambda::EventSourceMapping", - "Properties": { - "Enabled": true, - "FunctionName": { - "Ref": "ElasticServerlessForwarderFunction" - }, - "EventSourceArn": "%SQS_ARN%" ## ADD YOUR SQS QUEUE - } - } - ``` - * Adding an Event Source Mapping when using Kinesis data stream input - ```json - "KinesisStreamEventSource": { - "Type": "AWS::Lambda::EventSourceMapping", - "Properties": { - "FunctionName": { - "Ref": "ElasticServerlessForwarderFunction" - }, - "Enabled": true, - "EventSourceArn": "arn:aws:kinesis:%AWS_REGION%:%AWS_ACCOUNT_ID%:stream/%STREAM_NAME%", ## ADD YOUR KINESIS ARN - "StartingPosition": "TRIM_HORIZON" - } - } - ``` - * Adding a subscription filter when using CloudWatch Logs subscription filter input - ```json - "CloudwatchLogsSubscriptionFilter": { - "Type": "AWS::Logs::SubscriptionFilter", - "Properties": { - "DestinationArn": { - "Fn::GetAtt": [ - "ElasticServerlessForwarderFunction", - "Arn" - ] - }, - "FilterPattern": "", ## CUSTOMISE IF YOU NEED TO FILTER EVENTS - "LogGroupName": "%LOG_GROUP_NAME%", ## ADD YOUR CLOUDWATCH LOGS LOG GROUP NAME - "StartingPosition": "TRIM_HORIZON" - } - } - ``` - - * Adding an Event Source Mapping when using the SQS replay queue - ```json - "ESFReplayQueueEventSource": { - "Type": "AWS::Lambda::EventSourceMapping", - "Properties": { - "Enabled": true, - "FunctionName": { - "Ref": 
"ElasticServerlessForwarderFunction" - }, - "EventSourceArn": "%ESF_REPLAY_QUEUE_ARN%" - } - } - ``` - -* Update the stack running the following command: * ```commandline - aws cloudformation update-stack --stack-name "${LAMBDA_STACK_ARN}" --template-body file://./sar-lambda.json --capabilities CAPABILITY_IAM + terrafrom apply ``` +#### Notes +The SQS queues you want to use as trigger must have a visibility timeout of 910 seconds, 10 seconds more than the Elastic Serverless Forwarder Lambda timeout. + #### Lambda IAM permissions and policies -A Lambda function has a policy, called an execution role, that grants it permission to access AWS services and resources. Lambda assumes the role when the function is invoked. The role is automatically created when the Function is deployed. The Execution role associated with your function can be seen in the Configuration->Permissions section and by default starts with the name “serverlessrepo-elastic-se-ElasticServerlessForward-”. You can add additional policies to grant minimum permission to the Lambda to be able to use configured continuing SQS queue, S3 buckets, Secrets manager (if using) and replay SQS queue. +A Lambda function has a policy, called an execution role, that grants it permission to access AWS services and resources. Lambda assumes the role when the function is invoked. The role is automatically created when the Function is deployed. The Execution role associated with your function can be seen in the Configuration->Permissions section and by default starts with the name “serverlessrepo-elastic-se-ElasticServerlessForward-”. An custom policy is added to grant minimum permissions to the Lambda to be able to use configured continuing SQS queue, S3 buckets, Kinesis data stream, CloudWatch Logs Log Groups, Secrets manager (if using) and replay SQS queue. -Verify the Lambda is given AssumeRole permission to the following `ManagedPolicyArns`. By default this is automatically created: +The Lambda is given the following `ManagedPolicyArns`. By default, these are automatically added if relevant to the Events set up: `ManagedPolicyArns`: * `arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole` +* `arn:aws:iam::aws:policy/service-role/AWSLambdaKinesisExecutionRole` * `arn:aws:iam::aws:policy/service-role/AWSLambdaSQSQueueExecutionRole` -On top of this basic permission the following policies must be provided: -* For the SQS queues resources that are reported in the `SQS_CONTINUE_URL` and `SQS_REPLAY_URL` environment variable the following action must be allowed: +On top of this basic permission the following ones are added: +* For the SQS queues resources that are reported in the `SQS_CONTINUE_URL` and `SQS_REPLAY_URL` environment variable the following action is allowed: * `sqs:SendMessage` -* For SQS queue resources that you want to use as triggers of the Lambda the proper permissions are already included by `arn:aws:iam::aws:policy/service-role/AWSLambdaSQSQueueExecutionRole`. 
- Only the following extra action must be allowed: - * `sqs:GetQueueUrl` - -* For Kinesis data stream resources that you want to use as triggers of the Lambda the following action must be allowed on the Kinesis data streams: - * `kinesis:GetRecords` - * `kinesis:GetShardIterator` - * `kinesis:DescribeStream` - * `kinesis:ListShards` - * `kinesis:ListStreams` - -* For the S3 bucket resource that's reported in the `S3_CONFIG_FILE` environment variable the following action must be allowed on the S3 buckets' config file object key: +* For the S3 bucket resource that's reported in the `S3_CONFIG_FILE` environment variable the following action is allowed on the S3 buckets' config file object key: * `s3:GetObject` -* For every S3 bucket resource that SQS queues are receiving notification from used by triggers of the Lambda the following action must be allowed on the S3 buckets: +* For every S3 bucket resource that SQS queues are receiving notification from used by triggers of the Lambda the following action is allowed on the S3 buckets: * `s3:ListBucket` -* For every S3 bucket resource that SQS queues are receiving notification from used by triggers of the Lambda the following action must be allowed on the S3 buckets' keys: +* For every S3 bucket resource that SQS queues are receiving notification from used by triggers of the Lambda the following action is allowed on the S3 buckets' keys: * `s3:GetObject` -* For every Secret Manager secret that you want to refer in the yaml configuration file (see below) the following action must be allowed: +* For every Secret Manager secret that you want to refer in the yaml configuration file (see below) the following action is allowed: * `secretsmanager:GetSecretValue` -* For every decrypt key that's not the default one that you used to encrypt your Secret Manager secrets with, the following action must be allowed: +* For every decrypt key that's not the default one that you used to encrypt your Secret Manager secrets with, the following action is allowed: * `kms:Decrypt` -#### Sample policy: - ```json - { - "Version": "2012-10-17", - "Statement": [ - { - "Sid": "AllowReadESFConfigFile", - "Effect": "Allow", - "Action": "s3:GetObject", - ## ADAPT TO THE CONFIG FILE IN THE S3 BUCKET - "Resource": "arn:aws:s3:::%CONFIG_FILE_BUCKET_NAME%/%CONFIG_FILE_OBJECT_KEY%" - }, - { - "Sid": "AllowWriteMessagesInSQS", - "Effect": "Allow", - "Action": "sqs:SendMessage", - "Resource": [ - ## ADAPT TO THE VALUE OF ENV VARIABLES `SQS_CONTINUE_URL` AND `SQS_REPLAY_URL` - "arn:aws:sqs:%AWS_REGION%:%AWS_ACCOUNT_ID%:%SQS_CONTINUE_URL_NAME%", - "arn:aws:sqs:%AWS_REGION%:%AWS_ACCOUNT_ID%:%SQS_REPLAY_URL_NAME%" - ] - }, - { - "Sid": "AllowAccessS3DataSourcesBuckets", - "Effect": "Allow", - "Action": "s3:ListBucket", - "Resource": [ - ## ADD FOR YOUR S3 BUCKET, - "arn:aws:s3:::%BUCKET_NAME%", - ... - ] - }, - { - "Sid": "AllowAccessS3DataSourcesObjectKeys", - "Effect": "Allow", - "Action": "s3:GetObject", - "Resource": [ - ## ADD FOR YOUR S3 BUCKET'S OBJECT KEYS, - "arn:aws:s3:::%BUCKET_NAME%/*", - ... - ] - }, - { - "Sid": "AllowAccessS3-SQSQueue", - "Effect": "Allow", - "Action": "sqs:GetQueueUrl", - "Resource": [ - ## ADD FOR YOUR SQS QUEUES - "arn:aws:sqs:%AWS_REGION%:%AWS_ACCOUNT_ID%:%QUEUE_NAME%", - ... 
-        ]
-      },
-      {
-        "Sid": "AllowAccessKinesisDataSources",
-        "Effect": "Allow",
-        "Action": [
-          "kinesis:GetRecords",
-          "kinesis:GetShardIterator",
-          "kinesis:DescribeStream",
-          "kinesis:ListShards",
-          "kinesis:ListStreams"
-        ],
-        "Resource": [
-          ## ADD FOR YOUR KINESIS DATA STREAMS
-          "arn:aws:kinesis:%AWS_REGION%:%AWS_ACCOUNT_ID%:stream/%STREAM_NAME%",
-          ...
-        ]
-      },
-      {
-        "Sid": "AllowAccessSecrets",
-        "Effect": "Allow",
-        "Action": "secretsmanager:GetSecretValue",
-        "Resource": [
-          ## ADD FOR YOUR SECRET MANAGER SECRETS
-          "arn:aws:secretsmanager:%AWS_REGION%:%AWS_ACCOUNT_ID%:secret:%SECRET_NAME%",
-          ...
-        ]
-      },
-      {
-        "Sid": "AllowAccessKMSKey",
-        "Effect": "Allow",
-        "Action": "kms:Decrypt",
-        "Resource": [
-          ## ADD FOR YOUR KMS DECRYPT KEYS
-          "arn:aws:kms:%AWS_REGION%:%AWS_ACCOUNT_ID%:key/%KEY_ID%",
-          ...
-        ]
-      }
-    ]
-  }
-  ```
+* If any CloudWatch Logs log group is set as input of the Lambda, the following action is allowed for the resource `arn:aws:logs:%AWS_REGION%:%AWS_ACCOUNT_ID%:log-group:*:*`:
+  * `logs:DescribeLogGroups`
 
 #### Lambda Resource-based policy for CloudWatch Logs subscription filter input
-* For CloudWatch Logs subscription filter log group resources that you want to use as triggers of the Lambda the following must be allowed as Resource-based policy in separated Policy statements:
+* For CloudWatch Logs subscription filter log group resources that you want to use as triggers of the Lambda the following is allowed as a Resource-based policy in separate Policy statements:
   * Principal: `logs.%AWS_REGION%.amazonaws.com`
   * Action: `lambda:InvokeFunction`
   * Source ARN: `arn:aws:logs:%AWS_REGION%:%AWS_ACCOUNT_ID%:log-group:%LOG_GROUP_NAME%:*`
 
 ## S3_CONFIG_FILE
-The Elastic Forwarder for Serverless Lambda rely on a config yaml file to be uploaded to an S3 bucket and referenced by the `S3_CONFIG_FILE` environment variable.
+The Elastic Serverless Forwarder Lambda relies on a config yaml file uploaded to an S3 bucket and referenced by the `S3_CONFIG_FILE` environment variable.
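+
+For example, one way to make the config file available to the Lambda (a minimal sketch; `my-esf-config-bucket` and `config.yaml` are placeholder names to adapt to your own bucket and object key):
+```commandline
+# upload the config file to an S3 bucket the Lambda is allowed to read (placeholder names)
+aws s3 cp config.yaml s3://my-esf-config-bucket/config.yaml
+# then set the S3_CONFIG_FILE environment variable of the Lambda to "s3://my-esf-config-bucket/config.yaml"
+```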
This is the format of the config yaml file ```yaml @@ -552,7 +207,7 @@ inputs: api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo=" username: "username" password: "password" - es_index_or_datastream_name: "logs-generic-default" + datastream: "logs-generic-default" batch_max_actions: 500 batch_max_bytes: 10485760 - type: "sqs" @@ -567,7 +222,7 @@ inputs: api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo=" username: "username" password: "password" - es_index_or_datastream_name: "logs-generic-default" + datastream: "logs-generic-default" batch_max_actions: 500 batch_max_bytes: 10485760 - type: "kinesis-data-stream" @@ -582,7 +237,7 @@ inputs: api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo=" username: "username" password: "password" - es_index_or_datastream_name: "logs-generic-default" + datastream: "logs-generic-default" batch_max_actions: 500 batch_max_bytes: 10485760 - type: "cloudwatch-logs" @@ -597,14 +252,14 @@ inputs: api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo=" username: "username" password: "password" - es_index_or_datastream_name: "logs-generic-default" + datastream: "logs-generic-default" batch_max_actions: 500 batch_max_bytes: 10485760 ``` #### Fields `inputs.[]`: -A list of inputs (ie: triggers) for the Elastic Forwarder for Serverless Lambda +A list of inputs (ie: triggers) for the Elastic Serverless Forwarder Lambda `inputs.[].type`: The type of the trigger input (currently `cloudwatch-logs`, `kinesis-data-stream`, `sqs` and`s3-sqs` supported) @@ -613,7 +268,7 @@ The type of the trigger input (currently `cloudwatch-logs`, `kinesis-data-stream The arn of the trigger input according to the type. Multiple input entries can have different unique ids with the same type. `inputs.[].outputs`: -A list of outputs (ie: forwarding targets) for the Elastic Forwarder for Serverless Lambda. Only one output per type can be defined +A list of outputs (ie: forwarding targets) for the Elastic Serverless Forwarder Lambda. Only one output per type can be defined `inputs.[].outputs.[].type`: The type of the forwarding target output (currently only `elasticsearch` supported) @@ -626,9 +281,9 @@ Custom init arguments for the given forwarding target output * `args.username`: Username of the elasticsearch instance to connect to. Mandatory in case `args.api_key` is not provided. Will be ignored if `args.api_key` is defined as well. * `args.password` Password of the elasticsearch instance to connect to. Mandatory in case `args.api_key` is not provided. Will be ignored if `args.api_key` is defined as well. * `args.api_key`: Api key of elasticsearch endpoint in the format **base64encode(api_key_id:api_key_secret)**. Mandatory in case `args.username` and `args.password ` are not provided. Will take precedence over `args.username`/`args.password` if both are defined. - * `args.es_index_or_datastream_name`: Name of the index or data stream where to forward the logs to. Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types, if using data stream, the users can optionally set its value in the configuration file according to the naming convention for data streams and available integrations. 
If the `es_index_or_datastream_name` is not specified and it cannot be matched with any of the above AWS services then the value will be set to "logs-generic-default". + * `args.datastream`: Name of data stream or the index where to forward the logs to. Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types, if using data stream, the users can optionally set its value in the configuration file according to the naming convention for data streams and available integrations. If the `datastream` is not specified and it cannot be matched with any of the above AWS services then the value will be set to "logs-generic-default". Before **v0.30.0** this param was named `es_index_or_datastream_name`, that's now deprecated. It can still be used until the release of **v1.0.0**, when it will be finally removed. * `args.batch_max_actions`: Maximum number of actions to send in a single bulk request. Default value: 500 - * `args.batch_max_bytes`: Maximum size in bytes to send in a sigle bulk request. Default value: 10485760 (10MB) + * `args.batch_max_bytes`: Maximum size in bytes to send in a single bulk request. Default value: 10485760 (10MB) ## Secrets Manager Support ```yaml @@ -641,7 +296,7 @@ inputs: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_url" username: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-generic-default" + datastream: "logs-generic-default" ``` There are 2 types of secrets that can be used: - SecretString (plain text or key/value pairs) @@ -679,7 +334,7 @@ inputs: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_url" username: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-generic-default" + datastream: "logs-generic-default" ``` Using the above configuration, the tags will be set in the following way`["forwarded", "generic", "tag1", "tag2", "tag3"]` @@ -705,7 +360,7 @@ inputs: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_url" username: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-generic-default" + datastream: "logs-generic-default" ``` #### Notes @@ -725,21 +380,21 @@ When the regular expression is compiled no flags are used, please refer to [inli ## Routing support for AWS Services Logs When using Elastic integrations, as a first step users should install appropriate [integration](https://docs.elastic.co/en/integrations) assets using the Kibana UI. This sets up appropriate pre-built dashboards, ingest node configurations, and other assets that help you get the most out of the data you ingest. 
The integrations use [data streams](https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html) with specific [naming conventions](https://www.elastic.co/blog/an-introduction-to-the-elastic-data-stream-naming-scheme) providing users with more granular controls and flexibility on managing the ingested data. -For `S3 SQS Event Notifications input` the Lambda function supports automatic routing of several AWS service logs to the corresponding [integration](https://docs.elastic.co/en/integrations) [data streams](https://docs.elastic.co/en/integrations) for further processing and storage in the Elasticsearch cluster. It supports automatic routing of AWS CloudTrail (`aws.cloudtrail`), Amazon CloudWatch Logs (`aws.cloudwatch_logs`), Elastic Load Balancing(`aws.elb_logs`), AWS Network Firewall (`aws.firewall_logs`), Amazon VPC Flow (`aws.vpcflow`) & AWS Web Application Firewall (`aws.waf`) logs to corresponding default integrations data streams. Setting the `es_index_or_datastream_name` field in the configuration file is optional for this use case. +For `S3 SQS Event Notifications input` the Lambda function supports automatic routing of several AWS service logs to the corresponding [integration](https://docs.elastic.co/en/integrations) [data streams](https://docs.elastic.co/en/integrations) for further processing and storage in the Elasticsearch cluster. It supports automatic routing of AWS CloudTrail (`aws.cloudtrail`), Amazon CloudWatch Logs (`aws.cloudwatch_logs`), Elastic Load Balancing(`aws.elb_logs`), AWS Network Firewall (`aws.firewall_logs`), Amazon VPC Flow (`aws.vpcflow`) & AWS Web Application Firewall (`aws.waf`) logs to corresponding default integrations data streams. Setting the `datastream` field in the configuration file is optional for this use case. -For most of the other use cases, the user will need to set the `es_index_or_datastream_name` field in the configuration file to route the data to a specific data stream or an index. This value should be set in the following use cases: +For most of the other use cases, the user will need to set the `datastream` field in the configuration file to route the data to a specific data stream or an index. This value should be set in the following use cases: - Users want to write the data to a specific index, alias or a custom data stream and not to the default integration data streams. This can help some users to use the existing Elasticsearch setup like index templates, ingest pipelines or dashboards that you may have already set up and may have developed a business process around it and don’t want to change it. - When using `Kinesis Data Stream`, `CloudWatch Logs subscription filter` or `Direct SQS message payload` input. Only `S3 SQS Event Notifications input` method supports automatic routing to default integrations data streams for several AWS services logs. - When using `S3 SQS Event Notifications input` but the log types is something other than AWS CloudTrail (`aws.cloudtrail`), Amazon CloudWatch Logs (`aws.cloudwatch_logs`), Elastic Load Balancing (`aws.elb_logs`), AWS Network Firewall (`aws.firewall_logs`), Amazon VPC Flow (`aws.vpcflow`) & AWS Web Application Firewall (`aws.waf`). -If the `es_index_or_datastream_name` is not specified and it cannot be matched with any of the above AWS services then the dataset will be set to "generic" and the namespace to "default" pointing to the data stream name "logs-generic-default". 
+If the `datastream` is not specified and it cannot be matched with any of the above AWS services then the dataset will be set to "generic" and the namespace to "default" pointing to the data stream name "logs-generic-default". ## Setting up S3 event notification to SQS In order to set up an S3 event notification to SQS please look at the official documentation: https://docs.aws.amazon.com/AmazonS3/latest/userguide/NotificationHowTo.html The event type to set up in the notification should be `s3:ObjectCreated:*` -The Elastic Forwarder for Serverless Lambda needs to be provided extra IAM policies in order to access S3 and SQS resources in your account: please refer to [Lambda IAM permissions and policies](#lambda-iam-permissions-and-policies). +The Elastic Serverless Forwarder Lambda needs to be provided extra IAM policies in order to access S3 and SQS resources in your account: please refer to [Lambda IAM permissions and policies](#lambda-iam-permissions-and-policies). ## Error handling There are two kind of errors that can happen during the execution of the Lambda: From 8eb84829d2aef01259032ad4ab5f8fa62854b84b Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Mon, 16 May 2022 18:02:01 +0900 Subject: [PATCH 07/17] changelog --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0830136b..04acacb0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,9 @@ +### v0.30.0 - 2022/05/16 +##### Features +* Add support for AWS IaC deployment with CloudFormation and terraform: [#](https://github.com/elastic/elastic-serverless-forwarder/pull/) +##### Breaking changes +* Replace `es_index_or_datastream_name` config param with `datastream` one: [#](https://github.com/elastic/elastic-serverless-forwarder/pull/) + ### v0.29.1 - 2022/05/04 ##### Bug fixes * Handle properly `cloudwatch-logs` content payload: [#113](https://github.com/elastic/elastic-serverless-forwarder/pull/113) From 5115e1a1495d9dabec391ad36d7e8b10778777bc Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Mon, 16 May 2022 18:02:35 +0900 Subject: [PATCH 08/17] replace es_index_or_datastream_name with datastream --- .ci/jobs/elastic-serverless-agent-mbp.yml | 2 +- handlers/aws/replay_trigger.py | 2 +- share/config.py | 25 ++- shippers/es.py | 23 +- shippers/factory.py | 2 +- tests/handlers/aws/test_handler.py | 28 +-- tests/share/test_config.py | 246 ++++++++++++++++------ tests/share/test_secretsmanager.py | 36 ++-- tests/shippers/test_es.py | 24 +-- tests/shippers/test_factory.py | 10 +- 10 files changed, 254 insertions(+), 144 deletions(-) diff --git a/.ci/jobs/elastic-serverless-agent-mbp.yml b/.ci/jobs/elastic-serverless-agent-mbp.yml index 46ba31bd..ea82d06f 100644 --- a/.ci/jobs/elastic-serverless-agent-mbp.yml +++ b/.ci/jobs/elastic-serverless-agent-mbp.yml @@ -2,7 +2,7 @@ - job: name: Library/elastic-serverless-forwarder-mbp display-name: Elastic Serverless Forwarder - description: Elastic Forwarder for Serverless + description: Elastic Serverless Forwarder view: BEATS-CI project-type: multibranch concurrent: true diff --git a/handlers/aws/replay_trigger.py b/handlers/aws/replay_trigger.py index 93025674..80f8985b 100644 --- a/handlers/aws/replay_trigger.py +++ b/handlers/aws/replay_trigger.py @@ -54,7 +54,7 @@ def _handle_replay_event( if output_type == "elasticsearch": assert isinstance(output, ElasticsearchOutput) - output.es_index_or_datastream_name = output_args["es_index_or_datastream_name"] + output.datastream = output_args["datastream"] shared_logger.info("setting 
ElasticSearch shipper") elasticsearch: ElasticsearchShipper = ShipperFactory.create_from_output(output_type=output_type, output=output) elasticsearch.set_replay_handler(replay_handler=replay_handler.replay_handler) diff --git a/share/config.py b/share/config.py index 3358c4cd..86b22b8b 100644 --- a/share/config.py +++ b/share/config.py @@ -45,7 +45,7 @@ def __init__( username: str = "", password: str = "", api_key: str = "", - es_index_or_datastream_name: str = "", + datastream: str = "", tags: list[str] = [], batch_max_actions: int = 500, batch_max_bytes: int = 10 * 1024 * 1024, @@ -57,7 +57,7 @@ def __init__( self.username = username self.password = password self.api_key = api_key - self.es_index_or_datastream_name = es_index_or_datastream_name + self.datastream = datastream self.tags = tags self.batch_max_actions = batch_max_actions self.batch_max_bytes = batch_max_bytes @@ -80,8 +80,8 @@ def __init__( if self.username and not self.password: raise ValueError("Elasticsearch Output password must be set when using username") - if not self.es_index_or_datastream_name: - shared_logger.info("no es_index_or_datastream_name set in config") + if not self.datastream: + shared_logger.info("no datastream set in config") shared_logger.debug("tags: ", extra={"tags": self.tags}) @@ -141,15 +141,15 @@ def api_key(self, value: str) -> None: self._api_key = value @property - def es_index_or_datastream_name(self) -> str: - return self._es_index_or_datastream_name + def datastream(self) -> str: + return self._datastream - @es_index_or_datastream_name.setter - def es_index_or_datastream_name(self, value: str) -> None: + @datastream.setter + def datastream(self, value: str) -> None: if not isinstance(value, str): - raise ValueError("Elasticsearch Output es_index_or_datastream_name must be of type str") + raise ValueError("Elasticsearch Output datastream must be of type str") - self._es_index_or_datastream_name = value + self._datastream = value @property def batch_max_actions(self) -> int: @@ -290,6 +290,11 @@ def add_output(self, output_type: str, **kwargs: Any) -> None: output: Optional[Output] = None if output_type == "elasticsearch": + if "es_index_or_datastream_name" in kwargs: + if "datastream" not in kwargs: + kwargs["datastream"] = kwargs["es_index_or_datastream_name"] + + del kwargs["es_index_or_datastream_name"] output = ElasticsearchOutput(**kwargs) else: output = Output(output_type=output_type) diff --git a/shippers/es.py b/shippers/es.py index bd5fa3f8..43946171 100644 --- a/shippers/es.py +++ b/shippers/es.py @@ -29,7 +29,7 @@ def __init__( password: str = "", cloud_id: str = "", api_key: str = "", - es_index_or_datastream_name: str = "", + datastream: str = "", tags: list[str] = [], batch_max_actions: int = 500, batch_max_bytes: int = 10 * 1024 * 1024, @@ -71,7 +71,7 @@ def __init__( self._replay_handler: Optional[ReplayHandlerCallable] = None self._event_id_generator: Optional[EventIdGeneratorCallable] = None - self._es_index_or_datastream_name = es_index_or_datastream_name + self._datastream = datastream self._tags = tags self._es_index = "" @@ -156,7 +156,7 @@ def set_replay_handler(self, replay_handler: ReplayHandlerCallable) -> None: self._replay_handler = replay_handler def send(self, event: dict[str, Any]) -> str: - self._replay_args["es_index_or_datastream_name"] = self._es_index_or_datastream_name + self._replay_args["datastream"] = self._datastream if not hasattr(self, "_es_index") or self._es_index == "": self._discover_dataset(event_payload=event) @@ -194,23 +194,20 @@ def 
flush(self) -> None: return def _discover_dataset(self, event_payload: Dict[str, Any]) -> None: - if self._es_index_or_datastream_name != "": - if self._es_index_or_datastream_name.startswith("logs-"): - datastream_components = self._es_index_or_datastream_name.split("-") + if self._datastream != "": + if self._datastream.startswith("logs-"): + datastream_components = self._datastream.split("-") if len(datastream_components) == 3: self._dataset = datastream_components[1] self._namespace = datastream_components[2] else: shared_logger.debug( - "es_index_or_datastream_name not matching logs datastream pattern, no dataset " - "and namespace set" + "datastream not matching logs datastream pattern, no dataset " "and namespace set" ) else: - shared_logger.debug( - "es_index_or_datastream_name not matching logs datastream pattern, no dataset and namespace set" - ) + shared_logger.debug("datastream not matching logs datastream pattern, no dataset and namespace set") - self._es_index = self._es_index_or_datastream_name + self._es_index = self._datastream return else: self._namespace = "default" @@ -227,4 +224,4 @@ def _discover_dataset(self, event_payload: Dict[str, Any]) -> None: shared_logger.debug("dataset", extra={"dataset": self._dataset}) self._es_index = f"logs-{self._dataset}-{self._namespace}" - self._es_index_or_datastream_name = self._es_index + self._datastream = self._es_index diff --git a/shippers/factory.py b/shippers/factory.py index 48ff7511..a2e24751 100644 --- a/shippers/factory.py +++ b/shippers/factory.py @@ -39,7 +39,7 @@ def create_from_output(output_type: str, output: Output) -> CommonShipperType: password=output.password, cloud_id=output.cloud_id, api_key=output.api_key, - es_index_or_datastream_name=output.es_index_or_datastream_name, + datastream=output.datastream, tags=output.tags, batch_max_actions=output.batch_max_actions, batch_max_bytes=output.batch_max_bytes, diff --git a/tests/handlers/aws/test_handler.py b/tests/handlers/aws/test_handler.py index 02b75587..aa3222e8 100644 --- a/tests/handlers/aws/test_handler.py +++ b/tests/handlers/aws/test_handler.py @@ -779,7 +779,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) event["Records"][0]["messageAttributes"]["config"]["stringValue"] = config_yml @@ -803,7 +803,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -829,7 +829,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = 
deepcopy(event_with_config) @@ -855,7 +855,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -882,7 +882,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -907,7 +907,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -933,7 +933,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -959,7 +959,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -985,7 +985,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -1011,7 +1011,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -1037,7 +1037,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: 
"arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -1062,7 +1062,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -1084,7 +1084,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -1111,7 +1111,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "logs-redis.log-default" + datastream: "logs-redis.log-default" """ event = deepcopy(event_with_config) diff --git a/tests/share/test_config.py b/tests/share/test_config.py index 76aa5c0d..1114dd66 100644 --- a/tests/share/test_config.py +++ b/tests/share/test_config.py @@ -40,7 +40,7 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -51,7 +51,7 @@ def test_init(self) -> None: assert elasticsearch.password == "password" assert not elasticsearch.cloud_id assert not elasticsearch.api_key - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -61,7 +61,7 @@ def test_init(self) -> None: cloud_id="cloud_id", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -72,7 +72,7 @@ def test_init(self) -> None: assert elasticsearch.password == "password" assert not elasticsearch.elasticsearch_url assert not elasticsearch.api_key - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -81,7 +81,7 @@ def test_init(self) -> None: elasticsearch = ElasticsearchOutput( elasticsearch_url="elasticsearch_url", api_key="api_key", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -92,7 +92,7 @@ def test_init(self) -> None: assert not elasticsearch.cloud_id assert not elasticsearch.username assert not elasticsearch.password - assert 
elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -101,7 +101,7 @@ def test_init(self) -> None: elasticsearch = ElasticsearchOutput( cloud_id="cloud_id", api_key="api_key", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -112,7 +112,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -126,7 +126,7 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", cloud_id="cloud_id", api_key="api_key", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -137,7 +137,7 @@ def test_init(self) -> None: assert not elasticsearch.cloud_id assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -148,7 +148,7 @@ def test_init(self) -> None: ): ElasticsearchOutput( elasticsearch_url="elasticsearch_url", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) with self.subTest("both username and api_key"): @@ -157,7 +157,7 @@ def test_init(self) -> None: api_key="api_key", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -168,7 +168,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -179,7 +179,7 @@ def test_init(self) -> None: api_key="api_key", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", tags=["tag1", "tag2", "tag3"], batch_max_actions=1, batch_max_bytes=1, @@ -191,7 +191,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -202,10 +202,10 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) - with self.subTest("empty es_index_or_datastream_name"): + with self.subTest("empty datastream"): elasticsearch = ElasticsearchOutput( cloud_id="cloud_id", 
api_key="api_key", @@ -221,7 +221,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.es_index_or_datastream_name == "" + assert elasticsearch.datastream == "" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -232,7 +232,7 @@ def test_init(self) -> None: api_key="api_key", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -243,7 +243,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -254,7 +254,7 @@ def test_init(self) -> None: api_key="api_key", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_bytes=1, ) @@ -264,7 +264,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 1 @@ -275,7 +275,7 @@ def test_init(self) -> None: api_key="api_key", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, ) @@ -285,7 +285,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 10485760 @@ -298,7 +298,7 @@ def test_init(self) -> None: elasticsearch_url=0, # type:ignore username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) with self.subTest("username not str"): @@ -307,7 +307,7 @@ def test_init(self) -> None: elasticsearch_url="", username=0, # type:ignore password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) with self.subTest("password not str"): @@ -316,7 +316,7 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password=0, # type:ignore - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) with self.subTest("cloud_id not str"): @@ -325,7 +325,7 @@ def test_init(self) -> None: cloud_id=0, # type:ignore username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) with self.subTest("api_key not str"): @@ -333,18 +333,16 @@ def test_init(self) -> None: ElasticsearchOutput( cloud_id="cloud_id", api_key=0, # type:ignore - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) - with 
self.subTest("es_index_or_datastream_name not str"): - with self.assertRaisesRegex( - ValueError, "Elasticsearch Output es_index_or_datastream_name must be of type str" - ): + with self.subTest("datastream not str"): + with self.assertRaisesRegex(ValueError, "Elasticsearch Output datastream must be of type str"): ElasticsearchOutput( elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name=0, # type:ignore + datastream=0, # type:ignore ) with self.subTest("batch_max_actions not int"): @@ -353,7 +351,7 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions="test", # type:ignore ) @@ -363,7 +361,7 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_bytes="test", # type:ignore ) @@ -445,7 +443,7 @@ def test_get_output_by_type(self) -> None: input_sqs = Input(input_type="s3-sqs", input_id="id") assert input_sqs.get_output_by_type(output_type="test") is None - with self.subTest("elasticsearch output"): + with self.subTest("elasticsearch output with legacy es_index_or_datastream_name"): input_sqs = Input(input_type="s3-sqs", input_id="id") input_sqs.add_output( output_type="elasticsearch", @@ -459,6 +457,35 @@ def test_get_output_by_type(self) -> None: assert isinstance(input_sqs.get_output_by_type(output_type="elasticsearch"), ElasticsearchOutput) + with self.subTest("elasticsearch output with both legacy es_index_or_datastream_name and datastream"): + input_sqs = Input(input_type="s3-sqs", input_id="id") + input_sqs.add_output( + output_type="elasticsearch", + elasticsearch_url="elasticsearch_url", + username="username", + password="password", + es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datasream", + batch_max_actions=1, + batch_max_bytes=1, + ) + + assert isinstance(input_sqs.get_output_by_type(output_type="elasticsearch"), ElasticsearchOutput) + + with self.subTest("elasticsearch output"): + input_sqs = Input(input_type="s3-sqs", input_id="id") + input_sqs.add_output( + output_type="elasticsearch", + elasticsearch_url="elasticsearch_url", + username="username", + password="password", + datastream="datastream", + batch_max_actions=1, + batch_max_bytes=1, + ) + + assert isinstance(input_sqs.get_output_by_type(output_type="elasticsearch"), ElasticsearchOutput) + def test_add_output(self) -> None: with self.subTest("elasticsearch output"): input_sqs = Input(input_type="s3-sqs", input_id="id") @@ -467,7 +494,7 @@ def test_add_output(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -491,7 +518,7 @@ def test_add_output(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -502,7 +529,7 @@ def test_add_output(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -519,7 +546,7 @@ 
def test_get_output_types(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -534,7 +561,7 @@ def test_delete_output_by_type(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", batch_max_actions=1, batch_max_bytes=1, ) @@ -782,7 +809,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" batch_max_actions: "test" """ ) @@ -799,7 +826,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" batch_max_bytes: "test" """ ) @@ -817,7 +844,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ ) @@ -833,7 +860,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ ) @@ -855,11 +882,52 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ ) - with self.subTest("valid input valid elasticsearch output with elasticsearch_url and http auth"): + with self.subTest("valid input valid elasticsearch output with legacy es_index_or_datastream_name"): + config = parse_config( + config_yaml=""" + inputs: + - type: s3-sqs + id: id + tags: + - "tag1" + - "tag2" + - "tag3" + outputs: + - type: elasticsearch + args: + elasticsearch_url: "elasticsearch_url" + username: "username" + password: "password" + es_index_or_datastream_name: "es_index_or_datastream_name" + """ + ) + + input_sqs = config.get_input_by_id(input_id="id") + assert input_sqs is not None + assert input_sqs.type == "s3-sqs" + assert input_sqs.id == "id" + assert input_sqs.tags == ["tag1", "tag2", "tag3"] + + elasticsearch = input_sqs.get_output_by_type(output_type="elasticsearch") + + assert elasticsearch is not None + assert isinstance(elasticsearch, ElasticsearchOutput) + assert elasticsearch.type == "elasticsearch" + assert elasticsearch.elasticsearch_url == "elasticsearch_url" + assert elasticsearch.username == "username" + assert elasticsearch.password == "password" + assert elasticsearch.datastream == "es_index_or_datastream_name" + assert elasticsearch.tags == ["tag1", "tag2", "tag3"] + assert elasticsearch.batch_max_actions == 500 + assert elasticsearch.batch_max_bytes == 10485760 + + with self.subTest( + "valid input valid elasticsearch output with both legacy es_index_or_datastream_name and datastream" + ): config = parse_config( config_yaml=""" inputs: @@ -875,6 +943,7 @@ def test_parse_config(self) -> None: elasticsearch_url: "elasticsearch_url" username: "username" password: "password" + datastream: "datastream" es_index_or_datastream_name: "es_index_or_datastream_name" """ ) @@ -893,7 +962,46 @@ def test_parse_config(self) -> None: assert elasticsearch.elasticsearch_url == "elasticsearch_url" assert elasticsearch.username == "username" assert elasticsearch.password == "password" - assert 
elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" + assert elasticsearch.tags == ["tag1", "tag2", "tag3"] + assert elasticsearch.batch_max_actions == 500 + assert elasticsearch.batch_max_bytes == 10485760 + + with self.subTest("valid input valid elasticsearch output with elasticsearch_url and http auth"): + config = parse_config( + config_yaml=""" + inputs: + - type: s3-sqs + id: id + tags: + - "tag1" + - "tag2" + - "tag3" + outputs: + - type: elasticsearch + args: + elasticsearch_url: "elasticsearch_url" + username: "username" + password: "password" + datastream: "datastream" + """ + ) + + input_sqs = config.get_input_by_id(input_id="id") + assert input_sqs is not None + assert input_sqs.type == "s3-sqs" + assert input_sqs.id == "id" + assert input_sqs.tags == ["tag1", "tag2", "tag3"] + + elasticsearch = input_sqs.get_output_by_type(output_type="elasticsearch") + + assert elasticsearch is not None + assert isinstance(elasticsearch, ElasticsearchOutput) + assert elasticsearch.type == "elasticsearch" + assert elasticsearch.elasticsearch_url == "elasticsearch_url" + assert elasticsearch.username == "username" + assert elasticsearch.password == "password" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -913,7 +1021,7 @@ def test_parse_config(self) -> None: args: elasticsearch_url: "elasticsearch_url" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ ) @@ -930,7 +1038,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.elasticsearch_url == "elasticsearch_url" assert elasticsearch.api_key == "api_key" - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -951,7 +1059,7 @@ def test_parse_config(self) -> None: cloud_id: "cloud_id" username: "username" password: "password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ ) @@ -969,7 +1077,7 @@ def test_parse_config(self) -> None: assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.username == "username" assert elasticsearch.password == "password" - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -989,7 +1097,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ ) @@ -1006,7 +1114,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1022,7 +1130,7 @@ def 
test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" tags: - "tag1" - "tag2" @@ -1043,7 +1151,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1062,7 +1170,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" tags: - "tag1" - "tag2" @@ -1083,7 +1191,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == ["input_tag1", "input_tag2"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1103,7 +1211,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ ) @@ -1120,7 +1228,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1142,7 +1250,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ ) @@ -1168,7 +1276,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1190,7 +1298,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ ) @@ -1211,7 +1319,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ ) @@ -1226,7 +1334,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" batch_max_actions: 1 """ ) @@ -1244,7 +1352,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + 
assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 10485760 @@ -1260,7 +1368,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" batch_max_bytes: 1 """ ) @@ -1278,7 +1386,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.es_index_or_datastream_name == "es_index_or_datastream_name" + assert elasticsearch.datastream == "datastream" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 1 diff --git a/tests/share/test_secretsmanager.py b/tests/share/test_secretsmanager.py index 1c08270e..9215c970 100644 --- a/tests/share/test_secretsmanager.py +++ b/tests/share/test_secretsmanager.py @@ -93,7 +93,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaisesRegex( @@ -115,7 +115,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaisesRegex( ValueError, "Must be provided region in arn: arn:aws:secretsmanager::123456789:secret:plain_secret" @@ -134,7 +134,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaisesRegex( ValueError, @@ -154,7 +154,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaisesRegex( ValueError, @@ -175,7 +175,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaisesRegex( @@ -196,7 +196,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: 
"arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaises(ClientError): @@ -213,7 +213,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaisesRegex( @@ -233,7 +233,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaisesRegex( @@ -254,7 +254,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaisesRegex( @@ -276,7 +276,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaisesRegex( @@ -297,7 +297,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaisesRegex( @@ -318,7 +318,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ with self.assertRaisesRegex( @@ -341,7 +341,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secrets_manager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ mock_fetched_data = aws_sm_expander(config_yaml) @@ -355,7 +355,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secrets_manager:eu-central-1:123456789:secret:es_secrets:url" 
username: "mock_elastic_username" password: "mock_elastic_password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ assert mock_fetched_data == parsed_config_yaml @@ -372,7 +372,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ mock_fetched_data = aws_sm_expander(config_yaml) @@ -386,7 +386,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "mock_elastic_url" username: "mock_elastic_username" password: "mock_elastic_password" - es_index_or_datastream_name: "es_index_or_datastream_name" + datastream: "datastream" """ assert mock_fetched_data == parsed_config_yaml @@ -402,7 +402,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - es_index_or_datastream_name: "arn:aws:secretsmanager:eu-west-1:123456789:secret:binary_secret" + datastream: "arn:aws:secretsmanager:eu-west-1:123456789:secret:binary_secret" """ mock_fetched_data = aws_sm_expander(config_yaml) @@ -416,7 +416,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "mock_elastic_url" username: "mock_elastic_username" password: "mock_elastic_password" - es_index_or_datastream_name: "mock_nginx.log" + datastream: "mock_nginx.log" """ assert mock_fetched_data == parsed_config_yaml diff --git a/tests/shippers/test_es.py b/tests/shippers/test_es.py index 724cfb9f..accda7af 100644 --- a/tests/shippers/test_es.py +++ b/tests/shippers/test_es.py @@ -81,7 +81,7 @@ def test_send(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="", - es_index_or_datastream_name="logs-data.set-namespace", + datastream="logs-data.set-namespace", tags=["tag1", "tag2", "tag3"], batch_max_actions=0, ) @@ -127,7 +127,7 @@ def test_send_with_failure(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="", - es_index_or_datastream_name="data.set", + datastream="data.set", tags=["tag1", "tag2", "tag3"], batch_max_actions=0, ) @@ -149,7 +149,7 @@ def test_flush(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="", - es_index_or_datastream_name="logs-data.set-namespace", + datastream="logs-data.set-namespace", tags=["tag1", "tag2", "tag3"], batch_max_actions=2, ) @@ -191,7 +191,7 @@ def test_flush(self) -> None: @mock.patch("shippers.es.es_bulk", mock_bulk) @mock.patch("shippers.es.Elasticsearch", new=MockClient) def test_send_with_dataset_discovery(self) -> None: - with self.subTest("empty es_index_or_datastream_name"): + with self.subTest("empty datastream"): shipper = ElasticsearchShipper( elasticsearch_url="elasticsearch_url", username="username", @@ -237,12 +237,12 @@ def test_send_with_dataset_discovery(self) -> None: assert shipper._bulk_actions == [] - with self.subTest("es_index_or_datastream_name as `logs-unit-test"): + with self.subTest("datastream as `logs-unit-test"): shipper = ElasticsearchShipper( elasticsearch_url="elasticsearch_url", username="username", password="password", - 
es_index_or_datastream_name="logs-unit-test", + datastream="logs-unit-test", tags=["tag1", "tag2", "tag3"], batch_max_actions=0, ) @@ -284,12 +284,12 @@ def test_send_with_dataset_discovery(self) -> None: assert shipper._bulk_actions == [] - with self.subTest("es_index_or_datastream_name not matching logs datastream naming conventation"): + with self.subTest("datastream not matching logs datastream naming conventation"): shipper = ElasticsearchShipper( elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", tags=["tag1", "tag2", "tag3"], batch_max_actions=0, ) @@ -298,12 +298,12 @@ def test_send_with_dataset_discovery(self) -> None: assert shipper._dataset == "" assert shipper._namespace == "" - assert shipper._es_index == "es_index_or_datastream_name" + assert shipper._es_index == "datastream" assert _documents[0] == [ { "@timestamp": _now, - "_index": "es_index_or_datastream_name", + "_index": "datastream", "_op_type": "create", "aws": { "s3": { @@ -336,7 +336,7 @@ def test_no_datastream(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="logs-es-index-no-datastream", + datastream="logs-es-index-no-datastream", tags=["tag1", "tag2", "tag3"], ) @@ -350,7 +350,7 @@ def test_custom_dataset(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="logs-dataset-namespace", + datastream="logs-dataset-namespace", tags=["tag1", "tag2", "tag3"], ) diff --git a/tests/shippers/test_factory.py b/tests/shippers/test_factory.py index 13272455..1a20264b 100644 --- a/tests/shippers/test_factory.py +++ b/tests/shippers/test_factory.py @@ -17,7 +17,7 @@ def test_create(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) assert isinstance(shipper, ElasticsearchShipper) @@ -27,7 +27,7 @@ def test_create(self) -> None: output_type="elasticsearch", elasticsearch_url="elasticsearch_url", api_key="api_key", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) assert isinstance(shipper, ElasticsearchShipper) @@ -38,7 +38,7 @@ def test_create(self) -> None: cloud_id="cloud_id:bG9jYWxob3N0OjkyMDAkMA==", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) assert isinstance(shipper, ElasticsearchShipper) @@ -48,7 +48,7 @@ def test_create(self) -> None: output_type="elasticsearch", cloud_id="cloud_id:bG9jYWxob3N0OjkyMDAkMA==", api_key="api_key", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) assert isinstance(shipper, ElasticsearchShipper) @@ -83,7 +83,7 @@ def test_create_from_output(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - es_index_or_datastream_name="es_index_or_datastream_name", + datastream="datastream", ) with self.subTest("create output type elasticsearch"): From fc2c66e17f5920fea08dfe64c2ee35dbf367d79e Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Mon, 16 May 2022 18:05:36 +0900 Subject: [PATCH 09/17] changelog --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 04acacb0..18ed6f4a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,8 @@ 
### v0.30.0 - 2022/05/16 ##### Features -* Add support for AWS IaC deployment with CloudFormation and terraform: [#](https://github.com/elastic/elastic-serverless-forwarder/pull/) +* Add support for AWS IaC deployment with CloudFormation and terraform: [#115](https://github.com/elastic/elastic-serverless-forwarder/pull/115) ##### Breaking changes -* Replace `es_index_or_datastream_name` config param with `datastream` one: [#](https://github.com/elastic/elastic-serverless-forwarder/pull/) +* Replace `es_index_or_datastream_name` config param with `datastream` one: [#115](https://github.com/elastic/elastic-serverless-forwarder/pull/115) ### v0.29.1 - 2022/05/04 ##### Bug fixes From 4589b29060aa8b5057e27d48bbc363907237d35c Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Mon, 16 May 2022 18:10:30 +0900 Subject: [PATCH 10/17] use production application_id for terraform in docs --- docs/README-AWS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/README-AWS.md b/docs/README-AWS.md index 66fffc31..f7c3d4fc 100644 --- a/docs/README-AWS.md +++ b/docs/README-AWS.md @@ -119,7 +119,7 @@ provider "aws" { } data "aws_serverlessapplicationrepository_application" "esf_sar" { - application_id = "arn:aws:serverlessrepo:eu-central-1:627286350134:applications/elastic-serverless-forwarder-andrea" + application_id = "arn:aws:serverlessrepo:eu-central-1:267093732750:applications/elastic-serverless-forwarder" } resource "aws_serverlessapplicationrepository_cloudformation_stack" "esf_cf_stak" { From 3460fa6ad7156875a955dc7a2c885e877cb1ba07 Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Mon, 16 May 2022 18:20:04 +0900 Subject: [PATCH 11/17] fix readme --- docs/README-AWS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/README-AWS.md b/docs/README-AWS.md index f7c3d4fc..1b2bf6c8 100644 --- a/docs/README-AWS.md +++ b/docs/README-AWS.md @@ -28,7 +28,7 @@ The Lambda function supports ingesting logs contained in the payload of a SQS bo **S3 SQS Event Notifications input:** The Lambda function supports ingesting logs contained in the S3 bucket through an SQS notification (s3:ObjectCreated) and sends them to Elastic. The SQS queue serves as a trigger for the Lambda function. When a new log file gets written to an S3 bucket and meets the criteria (as configured including prefix/suffix), a notification to SQS is generated that triggers the Lambda function. Users will set up separate SQS queues for each type of logs (i.e. aws.vpcflow, aws.cloudtrail, aws.waf and so on). A single configuration file can have many input sections, pointing to different SQS queues that match specific log types. -The `datastream` parameter in the config file is optional. Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types the users can optionally set the `datastream` value in the configuration file according to the naming convention of Elasticsearch datastream and existing integrations. If the `es_index_or_datastream_name` is not specified and it cannot be matched with any of the above AWS services then the dataset will be set to "generic" and the namespace to "default" pointing to the data stream name "logs-generic-default". +The `datastream` parameter in the config file is optional. 
Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types the users can optionally set the `datastream` value in the configuration file according to the naming convention of Elasticsearch datastream and existing integrations. If the `datastream` is not specified and it cannot be matched with any of the above AWS services then the dataset will be set to "generic" and the namespace to "default" pointing to the data stream name "logs-generic-default". For more information, read the AWS [documentation](https://docs.aws.amazon.com/AmazonS3/latest/userguide/ways-to-add-notification-config-to-bucket.html) about creating an SQS event notifications for S3 buckets. From 866f9f6c24b35ba43811aa45fc685ff71c4026fa Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Tue, 17 May 2022 10:06:35 +0900 Subject: [PATCH 12/17] improve docs --- docs/README-AWS.md | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/README-AWS.md b/docs/README-AWS.md index 1b2bf6c8..a0f7c744 100644 --- a/docs/README-AWS.md +++ b/docs/README-AWS.md @@ -32,6 +32,9 @@ The `datastream` parameter in the config file is optional. Lambda supports autom For more information, read the AWS [documentation](https://docs.aws.amazon.com/AmazonS3/latest/userguide/ways-to-add-notification-config-to-bucket.html) about creating an SQS event notifications for S3 buckets. +#### Notes on SQS queues +The SQS queues you want to use as trigger must have a visibility timeout of 910 seconds, 10 seconds more than the Elastic Serverless Forwarder Lambda timeout. + **Kinesis Data Stream input:** The Lambda function supports ingesting logs contained in the payload of a Kinesis data stream record and sends them to Elastic. The Kinesis data stream serves as a trigger for the Lambda function. When a new record gets written to a Kinesis data stream the Lambda function gets triggered. Users will set up separate Kinesis data streams for each type of logs, The config param for Elasticsearch output `datastream` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different Kinesis data streams that match specific log types. @@ -111,6 +114,9 @@ Resources: aws cloudformation deploy --template-file sar-application.yaml --stack-name esf-cloudformation-deployment --capabilities CAPABILITY_IAM CAPABILITY_AUTO_EXPAND ``` +#### Notes +Due to a limitation in AWS CloudFormation, if you want to update the Events settings for the deployed Lambda, you will have to execute a deployment deleting the existing ones before actually apply the new updated ones. + ### Terraform * Save the following yaml content as `sar-application.tf` ``` @@ -147,9 +153,10 @@ resource "aws_serverlessapplicationrepository_cloudformation_stack" "esf_cf_stak * ```commandline terrafrom apply ``` - #### Notes -The SQS queues you want to use as trigger must have a visibility timeout of 910 seconds, 10 seconds more than the Elastic Serverless Forwarder Lambda timeout. 
+* Due to a limitation in AWS CloudFormation, if you want to update the Events settings for the deployed Lambda, you will have to execute a deployment deleting the existing ones before actually applying the new updated ones. +* Due to a limitation in Terraform related to `aws_serverlessapplicationrepository_application` resource, in order to delete existing Events you have to set the related `aws_serverlessapplicationrepository_cloudformation_stack.parameters` to a blank space value (`" "`) instead of an empty string (`""`), otherwise the parameter won't be deleted. + #### Lambda IAM permissions and policies A Lambda function has a policy, called an execution role, that grants it permission to access AWS services and resources. Lambda assumes the role when the function is invoked. The role is automatically created when the Function is deployed. The Execution role associated with your function can be seen in the Configuration->Permissions section and by default starts with the name “serverlessrepo-elastic-se-ElasticServerlessForward-”. A custom policy is added to grant minimum permissions to the Lambda to be able to use configured continuing SQS queue, S3 buckets, Kinesis data stream, CloudWatch Logs Log Groups, Secrets manager (if using) and replay SQS queue. From caf13658bfb5f0ab63b1ddc81eec70b69b6085db Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Tue, 17 May 2022 10:27:47 +0900 Subject: [PATCH 13/17] reference bugs links on IaC notes --- docs/README-AWS.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/README-AWS.md b/docs/README-AWS.md index a0f7c744..246ed052 100644 --- a/docs/README-AWS.md +++ b/docs/README-AWS.md @@ -115,7 +115,7 @@ Resources: ``` #### Notes -Due to a limitation in AWS CloudFormation, if you want to update the Events settings for the deployed Lambda, you will have to execute a deployment deleting the existing ones before actually apply the new updated ones. +Due to a [bug](https://github.com/aws/serverless-application-model/issues/1320) in AWS CloudFormation, if you want to update the Events settings for the deployed Lambda, you will have to execute a deployment deleting the existing ones before actually applying the new updated ones. ### Terraform * Save the following yaml content as `sar-application.tf` ``` @@ -154,8 +154,8 @@ resource "aws_serverlessapplicationrepository_cloudformation_stack" "esf_cf_stak terrafrom apply ``` #### Notes -* Due to a limitation in AWS CloudFormation, if you want to update the Events settings for the deployed Lambda, you will have to execute a deployment deleting the existing ones before actually applying the new updated ones. -* Due to a limitation in Terraform related to `aws_serverlessapplicationrepository_application` resource, in order to delete existing Events you have to set the related `aws_serverlessapplicationrepository_cloudformation_stack.parameters` to a blank space value (`" "`) instead of an empty string (`""`), otherwise the parameter won't be deleted. +* Due to a [bug](https://github.com/aws/serverless-application-model/issues/1320) in AWS CloudFormation, if you want to update the Events settings for the deployed Lambda, you will have to execute a deployment deleting the existing ones before actually applying the new updated ones. 
+* Due to a [bug](https://github.com/hashicorp/terraform-provider-aws/issues/24771) in Terraform related to `aws_serverlessapplicationrepository_application` resource, in order to delete existing Events you have to set the related `aws_serverlessapplicationrepository_cloudformation_stack.parameters` to a blank space value (`" "`) instead that to an empty string (`""`), otherwise the parameter won't be deleted. #### Lambda IAM permissions and policies From f12b2285299ac3eefee32bf7553bef6bf2975bbb Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Tue, 17 May 2022 11:46:19 +0900 Subject: [PATCH 14/17] remove reference to dev artifacts --- .internal/aws/cloudformation/template.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.internal/aws/cloudformation/template.yaml b/.internal/aws/cloudformation/template.yaml index 43f319ca..3fb4c54b 100644 --- a/.internal/aws/cloudformation/template.yaml +++ b/.internal/aws/cloudformation/template.yaml @@ -43,13 +43,13 @@ Resources: Type: AWS::Serverless::Application Properties: Location: - ApplicationId: arn:aws:serverlessrepo:%awsRegion%:%accountID%:applications/elastic-serverless-forwarder-andrea-macro + ApplicationId: arn:aws:serverlessrepo:%awsRegion%:%accountID%:applications/%sarAppName%-macro SemanticVersion: %semanticVersion% ElasticServerlessForwarderApplication: Type: AWS::Serverless::Application Properties: Location: - ApplicationId: arn:aws:serverlessrepo:%awsRegion%:%accountID%:applications/elastic-serverless-forwarder-andrea-application + ApplicationId: arn:aws:serverlessrepo:%awsRegion%:%accountID%:applications/%sarAppName%-application SemanticVersion: %semanticVersion% Parameters: ElasticServerlessForwarderS3ConfigFile: !Ref ElasticServerlessForwarderS3ConfigFile From b4e72326177daa78d2202f812606bedbc78de02d Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Wed, 18 May 2022 11:42:03 +0900 Subject: [PATCH 15/17] use es_datastream_name as config param --- CHANGELOG.md | 2 +- docs/README-AWS.md | 30 +++--- handlers/aws/replay_trigger.py | 2 +- share/config.py | 24 ++--- shippers/es.py | 22 +++-- shippers/factory.py | 2 +- tests/handlers/aws/test_handler.py | 28 +++--- tests/share/test_config.py | 144 ++++++++++++++--------------- tests/share/test_secretsmanager.py | 36 ++++---- tests/shippers/test_es.py | 24 ++--- tests/shippers/test_factory.py | 10 +- 11 files changed, 163 insertions(+), 161 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 18ed6f4a..c6845a59 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,7 @@ ##### Features * Add support for AWS IaC deployment with CloudFormation and terraform: [#115](https://github.com/elastic/elastic-serverless-forwarder/pull/115) ##### Breaking changes -* Replace `es_index_or_datastream_name` config param with `datastream` one: [#115](https://github.com/elastic/elastic-serverless-forwarder/pull/115) +* Replace `es_index_or_datastream_name` config param with `es_datastream_name` one: [#115](https://github.com/elastic/elastic-serverless-forwarder/pull/115) ### v0.29.1 - 2022/05/04 ##### Bug fixes diff --git a/docs/README-AWS.md b/docs/README-AWS.md index 246ed052..232edf8c 100644 --- a/docs/README-AWS.md +++ b/docs/README-AWS.md @@ -23,12 +23,12 @@ Lambda function also supports writing directly to an index, alias or a custom da **Direct SQS message payload input:** -The Lambda function supports ingesting logs contained in the payload of a SQS body record and sends them to Elastic. The SQS queue serves as a trigger for the Lambda function. 
When a new record gets written to an SQS queue the Lambda function gets triggered. Users will set up separate SQS queues for each type of logs, The config param for Elasticsearch output `datastream` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different SQS queues that match specific log types. +The Lambda function supports ingesting logs contained in the payload of a SQS body record and sends them to Elastic. The SQS queue serves as a trigger for the Lambda function. When a new record gets written to an SQS queue the Lambda function gets triggered. Users will set up separate SQS queues for each type of logs, The config param for Elasticsearch output `es_datastream_name` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different SQS queues that match specific log types. **S3 SQS Event Notifications input:** The Lambda function supports ingesting logs contained in the S3 bucket through an SQS notification (s3:ObjectCreated) and sends them to Elastic. The SQS queue serves as a trigger for the Lambda function. When a new log file gets written to an S3 bucket and meets the criteria (as configured including prefix/suffix), a notification to SQS is generated that triggers the Lambda function. Users will set up separate SQS queues for each type of logs (i.e. aws.vpcflow, aws.cloudtrail, aws.waf and so on). A single configuration file can have many input sections, pointing to different SQS queues that match specific log types. -The `datastream` parameter in the config file is optional. Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types the users can optionally set the `datastream` value in the configuration file according to the naming convention of Elasticsearch datastream and existing integrations. If the `datastream` is not specified and it cannot be matched with any of the above AWS services then the dataset will be set to "generic" and the namespace to "default" pointing to the data stream name "logs-generic-default". +The `es_datastream_name` parameter in the config file is optional. Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types the users can optionally set the `es_datastream_name` value in the configuration file according to the naming convention of Elasticsearch datastream and existing integrations. If the `datastream` is not specified and it cannot be matched with any of the above AWS services then the dataset will be set to "generic" and the namespace to "default" pointing to the data stream name "logs-generic-default". 
For more information, read the AWS [documentation](https://docs.aws.amazon.com/AmazonS3/latest/userguide/ways-to-add-notification-config-to-bucket.html) about creating an SQS event notifications for S3 buckets. @@ -37,10 +37,10 @@ The SQS queues you want to use as trigger must have a visibility timeout of 910 **Kinesis Data Stream input:** -The Lambda function supports ingesting logs contained in the payload of a Kinesis data stream record and sends them to Elastic. The Kinesis data stream serves as a trigger for the Lambda function. When a new record gets written to a Kinesis data stream the Lambda function gets triggered. Users will set up separate Kinesis data streams for each type of logs, The config param for Elasticsearch output `datastream` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different Kinesis data streams that match specific log types. +The Lambda function supports ingesting logs contained in the payload of a Kinesis data stream record and sends them to Elastic. The Kinesis data stream serves as a trigger for the Lambda function. When a new record gets written to a Kinesis data stream the Lambda function gets triggered. Users will set up separate Kinesis data streams for each type of logs, The config param for Elasticsearch output `es_datastream_name` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different Kinesis data streams that match specific log types. **CloudWatch Logs subscription filter input:** -The Lambda function supports ingesting logs contained in the message payload of CloudWatch Logs events. The CloudWatch Logs serves as a trigger for the Lambda function. Users will set up separate Cloudwatch log groups for each type of logs, The config param for Elasticsearch output `datastream` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different CloudWatch Logs log groups that match specific log types. +The Lambda function supports ingesting logs contained in the message payload of CloudWatch Logs events. The CloudWatch Logs serves as a trigger for the Lambda function. Users will set up separate Cloudwatch log groups for each type of logs, The config param for Elasticsearch output `es_datastream_name` is mandatory. If the value is set to an Elasticsearch datastream, the type of logs must be defined with proper value configuration param. A single configuration file can have many input sections, pointing to different CloudWatch Logs log groups that match specific log types. 
### Deployment: @@ -214,7 +214,7 @@ inputs: api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo=" username: "username" password: "password" - datastream: "logs-generic-default" + es_datastream_name: "logs-generic-default" batch_max_actions: 500 batch_max_bytes: 10485760 - type: "sqs" @@ -229,7 +229,7 @@ inputs: api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo=" username: "username" password: "password" - datastream: "logs-generic-default" + es_datastream_name: "logs-generic-default" batch_max_actions: 500 batch_max_bytes: 10485760 - type: "kinesis-data-stream" @@ -244,7 +244,7 @@ inputs: api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo=" username: "username" password: "password" - datastream: "logs-generic-default" + es_datastream_name: "logs-generic-default" batch_max_actions: 500 batch_max_bytes: 10485760 - type: "cloudwatch-logs" @@ -259,7 +259,7 @@ inputs: api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo=" username: "username" password: "password" - datastream: "logs-generic-default" + es_datastream_name: "logs-generic-default" batch_max_actions: 500 batch_max_bytes: 10485760 ``` @@ -288,7 +288,7 @@ Custom init arguments for the given forwarding target output * `args.username`: Username of the elasticsearch instance to connect to. Mandatory in case `args.api_key` is not provided. Will be ignored if `args.api_key` is defined as well. * `args.password` Password of the elasticsearch instance to connect to. Mandatory in case `args.api_key` is not provided. Will be ignored if `args.api_key` is defined as well. * `args.api_key`: Api key of elasticsearch endpoint in the format **base64encode(api_key_id:api_key_secret)**. Mandatory in case `args.username` and `args.password ` are not provided. Will take precedence over `args.username`/`args.password` if both are defined. - * `args.datastream`: Name of data stream or the index where to forward the logs to. Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types, if using data stream, the users can optionally set its value in the configuration file according to the naming convention for data streams and available integrations. If the `datastream` is not specified and it cannot be matched with any of the above AWS services then the value will be set to "logs-generic-default". Before **v0.30.0** this param was named `es_index_or_datastream_name`, that's now deprecated. It can still be used until the release of **v1.0.0**, when it will be finally removed. + * `args.es_datastream_name`: Name of data stream or the index where to forward the logs to. Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types, if using data stream, the users can optionally set its value in the configuration file according to the naming convention for data streams and available integrations. If the `es_datastream_name` is not specified and it cannot be matched with any of the above AWS services then the value will be set to "logs-generic-default". Before **v0.30.0** this param was named `es_index_or_datastream_name`, that's now deprecated. 
It can still be used until the release of **v1.0.0**, when it will be finally removed. * `args.batch_max_actions`: Maximum number of actions to send in a single bulk request. Default value: 500 * `args.batch_max_bytes`: Maximum size in bytes to send in a single bulk request. Default value: 10485760 (10MB) @@ -303,7 +303,7 @@ inputs: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_url" username: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:password" - datastream: "logs-generic-default" + es_datastream_name: "logs-generic-default" ``` There are 2 types of secrets that can be used: - SecretString (plain text or key/value pairs) @@ -341,7 +341,7 @@ inputs: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_url" username: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:password" - datastream: "logs-generic-default" + es_datastream_name: "logs-generic-default" ``` Using the above configuration, the tags will be set in the following way`["forwarded", "generic", "tag1", "tag2", "tag3"]` @@ -367,7 +367,7 @@ inputs: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_url" username: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-west-1:123456789:secret:es_secrets:password" - datastream: "logs-generic-default" + es_datastream_name: "logs-generic-default" ``` #### Notes @@ -387,14 +387,14 @@ When the regular expression is compiled no flags are used, please refer to [inli ## Routing support for AWS Services Logs When using Elastic integrations, as a first step users should install appropriate [integration](https://docs.elastic.co/en/integrations) assets using the Kibana UI. This sets up appropriate pre-built dashboards, ingest node configurations, and other assets that help you get the most out of the data you ingest. The integrations use [data streams](https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html) with specific [naming conventions](https://www.elastic.co/blog/an-introduction-to-the-elastic-data-stream-naming-scheme) providing users with more granular controls and flexibility on managing the ingested data. -For `S3 SQS Event Notifications input` the Lambda function supports automatic routing of several AWS service logs to the corresponding [integration](https://docs.elastic.co/en/integrations) [data streams](https://docs.elastic.co/en/integrations) for further processing and storage in the Elasticsearch cluster. It supports automatic routing of AWS CloudTrail (`aws.cloudtrail`), Amazon CloudWatch Logs (`aws.cloudwatch_logs`), Elastic Load Balancing(`aws.elb_logs`), AWS Network Firewall (`aws.firewall_logs`), Amazon VPC Flow (`aws.vpcflow`) & AWS Web Application Firewall (`aws.waf`) logs to corresponding default integrations data streams. Setting the `datastream` field in the configuration file is optional for this use case. +For `S3 SQS Event Notifications input` the Lambda function supports automatic routing of several AWS service logs to the corresponding [integration](https://docs.elastic.co/en/integrations) [data streams](https://docs.elastic.co/en/integrations) for further processing and storage in the Elasticsearch cluster. 
It supports automatic routing of AWS CloudTrail (`aws.cloudtrail`), Amazon CloudWatch Logs (`aws.cloudwatch_logs`), Elastic Load Balancing(`aws.elb_logs`), AWS Network Firewall (`aws.firewall_logs`), Amazon VPC Flow (`aws.vpcflow`) & AWS Web Application Firewall (`aws.waf`) logs to corresponding default integrations data streams. Setting the `es_datastream_name` field in the configuration file is optional for this use case. -For most of the other use cases, the user will need to set the `datastream` field in the configuration file to route the data to a specific data stream or an index. This value should be set in the following use cases: +For most of the other use cases, the user will need to set the `es_datastream_name` field in the configuration file to route the data to a specific data stream or an index. This value should be set in the following use cases: - Users want to write the data to a specific index, alias or a custom data stream and not to the default integration data streams. This can help some users to use the existing Elasticsearch setup like index templates, ingest pipelines or dashboards that you may have already set up and may have developed a business process around it and don’t want to change it. - When using `Kinesis Data Stream`, `CloudWatch Logs subscription filter` or `Direct SQS message payload` input. Only `S3 SQS Event Notifications input` method supports automatic routing to default integrations data streams for several AWS services logs. - When using `S3 SQS Event Notifications input` but the log types is something other than AWS CloudTrail (`aws.cloudtrail`), Amazon CloudWatch Logs (`aws.cloudwatch_logs`), Elastic Load Balancing (`aws.elb_logs`), AWS Network Firewall (`aws.firewall_logs`), Amazon VPC Flow (`aws.vpcflow`) & AWS Web Application Firewall (`aws.waf`). -If the `datastream` is not specified and it cannot be matched with any of the above AWS services then the dataset will be set to "generic" and the namespace to "default" pointing to the data stream name "logs-generic-default". +If the `es_datastream_name` is not specified and it cannot be matched with any of the above AWS services then the dataset will be set to "generic" and the namespace to "default" pointing to the data stream name "logs-generic-default". 
## Setting up S3 event notification to SQS In order to set up an S3 event notification to SQS please look at the official documentation: https://docs.aws.amazon.com/AmazonS3/latest/userguide/NotificationHowTo.html diff --git a/handlers/aws/replay_trigger.py b/handlers/aws/replay_trigger.py index 80f8985b..19b08727 100644 --- a/handlers/aws/replay_trigger.py +++ b/handlers/aws/replay_trigger.py @@ -54,7 +54,7 @@ def _handle_replay_event( if output_type == "elasticsearch": assert isinstance(output, ElasticsearchOutput) - output.datastream = output_args["datastream"] + output.es_datastream_name = output_args["es_datastream_name"] shared_logger.info("setting ElasticSearch shipper") elasticsearch: ElasticsearchShipper = ShipperFactory.create_from_output(output_type=output_type, output=output) elasticsearch.set_replay_handler(replay_handler=replay_handler.replay_handler) diff --git a/share/config.py b/share/config.py index 86b22b8b..7f16aff2 100644 --- a/share/config.py +++ b/share/config.py @@ -45,7 +45,7 @@ def __init__( username: str = "", password: str = "", api_key: str = "", - datastream: str = "", + es_datastream_name: str = "", tags: list[str] = [], batch_max_actions: int = 500, batch_max_bytes: int = 10 * 1024 * 1024, @@ -57,7 +57,7 @@ def __init__( self.username = username self.password = password self.api_key = api_key - self.datastream = datastream + self.es_datastream_name = es_datastream_name self.tags = tags self.batch_max_actions = batch_max_actions self.batch_max_bytes = batch_max_bytes @@ -80,8 +80,8 @@ def __init__( if self.username and not self.password: raise ValueError("Elasticsearch Output password must be set when using username") - if not self.datastream: - shared_logger.info("no datastream set in config") + if not self.es_datastream_name: + shared_logger.info("no es_datastream_name set in config") shared_logger.debug("tags: ", extra={"tags": self.tags}) @@ -141,15 +141,15 @@ def api_key(self, value: str) -> None: self._api_key = value @property - def datastream(self) -> str: - return self._datastream + def es_datastream_name(self) -> str: + return self._es_datastream_name - @datastream.setter - def datastream(self, value: str) -> None: + @es_datastream_name.setter + def es_datastream_name(self, value: str) -> None: if not isinstance(value, str): - raise ValueError("Elasticsearch Output datastream must be of type str") + raise ValueError("Elasticsearch Output es_datastream_name must be of type str") - self._datastream = value + self._es_datastream_name = value @property def batch_max_actions(self) -> int: @@ -291,8 +291,8 @@ def add_output(self, output_type: str, **kwargs: Any) -> None: output: Optional[Output] = None if output_type == "elasticsearch": if "es_index_or_datastream_name" in kwargs: - if "datastream" not in kwargs: - kwargs["datastream"] = kwargs["es_index_or_datastream_name"] + if "es_datastream_name" not in kwargs: + kwargs["es_datastream_name"] = kwargs["es_index_or_datastream_name"] del kwargs["es_index_or_datastream_name"] output = ElasticsearchOutput(**kwargs) diff --git a/shippers/es.py b/shippers/es.py index 43946171..ae16d65a 100644 --- a/shippers/es.py +++ b/shippers/es.py @@ -29,7 +29,7 @@ def __init__( password: str = "", cloud_id: str = "", api_key: str = "", - datastream: str = "", + es_datastream_name: str = "", tags: list[str] = [], batch_max_actions: int = 500, batch_max_bytes: int = 10 * 1024 * 1024, @@ -71,7 +71,7 @@ def __init__( self._replay_handler: Optional[ReplayHandlerCallable] = None self._event_id_generator: 
Optional[EventIdGeneratorCallable] = None - self._datastream = datastream + self._es_datastream_name = es_datastream_name self._tags = tags self._es_index = "" @@ -156,7 +156,7 @@ def set_replay_handler(self, replay_handler: ReplayHandlerCallable) -> None: self._replay_handler = replay_handler def send(self, event: dict[str, Any]) -> str: - self._replay_args["datastream"] = self._datastream + self._replay_args["es_datastream_name"] = self._es_datastream_name if not hasattr(self, "_es_index") or self._es_index == "": self._discover_dataset(event_payload=event) @@ -194,20 +194,22 @@ def flush(self) -> None: return def _discover_dataset(self, event_payload: Dict[str, Any]) -> None: - if self._datastream != "": - if self._datastream.startswith("logs-"): - datastream_components = self._datastream.split("-") + if self._es_datastream_name != "": + if self._es_datastream_name.startswith("logs-"): + datastream_components = self._es_datastream_name.split("-") if len(datastream_components) == 3: self._dataset = datastream_components[1] self._namespace = datastream_components[2] else: shared_logger.debug( - "datastream not matching logs datastream pattern, no dataset " "and namespace set" + "es_datastream_name not matching logs datastream pattern, no dataset and namespace set" ) else: - shared_logger.debug("datastream not matching logs datastream pattern, no dataset and namespace set") + shared_logger.debug( + "es_datastream_name not matching logs datastream pattern, no dataset and namespace set" + ) - self._es_index = self._datastream + self._es_index = self._es_datastream_name return else: self._namespace = "default" @@ -224,4 +226,4 @@ def _discover_dataset(self, event_payload: Dict[str, Any]) -> None: shared_logger.debug("dataset", extra={"dataset": self._dataset}) self._es_index = f"logs-{self._dataset}-{self._namespace}" - self._datastream = self._es_index + self._es_datastream_name = self._es_index diff --git a/shippers/factory.py b/shippers/factory.py index a2e24751..a0318b71 100644 --- a/shippers/factory.py +++ b/shippers/factory.py @@ -39,7 +39,7 @@ def create_from_output(output_type: str, output: Output) -> CommonShipperType: password=output.password, cloud_id=output.cloud_id, api_key=output.api_key, - datastream=output.datastream, + es_datastream_name=output.es_datastream_name, tags=output.tags, batch_max_actions=output.batch_max_actions, batch_max_bytes=output.batch_max_bytes, diff --git a/tests/handlers/aws/test_handler.py b/tests/handlers/aws/test_handler.py index aa3222e8..6118d9b1 100644 --- a/tests/handlers/aws/test_handler.py +++ b/tests/handlers/aws/test_handler.py @@ -779,7 +779,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) event["Records"][0]["messageAttributes"]["config"]["stringValue"] = config_yml @@ -803,7 +803,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: 
"logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -829,7 +829,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -855,7 +855,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -882,7 +882,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -907,7 +907,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -933,7 +933,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -959,7 +959,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -985,7 +985,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -1011,7 +1011,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - 
datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -1037,7 +1037,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -1062,7 +1062,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -1084,7 +1084,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) @@ -1111,7 +1111,7 @@ def test_lambda_handler_failure(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "logs-redis.log-default" + es_datastream_name: "logs-redis.log-default" """ event = deepcopy(event_with_config) diff --git a/tests/share/test_config.py b/tests/share/test_config.py index 1114dd66..8d680a2b 100644 --- a/tests/share/test_config.py +++ b/tests/share/test_config.py @@ -40,7 +40,7 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -51,7 +51,7 @@ def test_init(self) -> None: assert elasticsearch.password == "password" assert not elasticsearch.cloud_id assert not elasticsearch.api_key - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -61,7 +61,7 @@ def test_init(self) -> None: cloud_id="cloud_id", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -72,7 +72,7 @@ def test_init(self) -> None: assert elasticsearch.password == "password" assert not elasticsearch.elasticsearch_url assert not elasticsearch.api_key - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -81,7 +81,7 @@ def test_init(self) -> None: elasticsearch = ElasticsearchOutput( elasticsearch_url="elasticsearch_url", api_key="api_key", - datastream="datastream", + 
es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -92,7 +92,7 @@ def test_init(self) -> None: assert not elasticsearch.cloud_id assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -101,7 +101,7 @@ def test_init(self) -> None: elasticsearch = ElasticsearchOutput( cloud_id="cloud_id", api_key="api_key", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -112,7 +112,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -126,7 +126,7 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", cloud_id="cloud_id", api_key="api_key", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -137,7 +137,7 @@ def test_init(self) -> None: assert not elasticsearch.cloud_id assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -148,7 +148,7 @@ def test_init(self) -> None: ): ElasticsearchOutput( elasticsearch_url="elasticsearch_url", - datastream="datastream", + es_datastream_name="es_datastream_name", ) with self.subTest("both username and api_key"): @@ -157,7 +157,7 @@ def test_init(self) -> None: api_key="api_key", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -168,7 +168,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -179,7 +179,7 @@ def test_init(self) -> None: api_key="api_key", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", tags=["tag1", "tag2", "tag3"], batch_max_actions=1, batch_max_bytes=1, @@ -191,7 +191,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -202,10 +202,10 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="", - datastream="datastream", + es_datastream_name="es_datastream_name", ) - with self.subTest("empty datastream"): + with self.subTest("empty es_datastream_name"): elasticsearch = 
ElasticsearchOutput( cloud_id="cloud_id", api_key="api_key", @@ -221,7 +221,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.datastream == "" + assert elasticsearch.es_datastream_name == "" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -232,7 +232,7 @@ def test_init(self) -> None: api_key="api_key", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -243,7 +243,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 1 @@ -254,7 +254,7 @@ def test_init(self) -> None: api_key="api_key", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_bytes=1, ) @@ -264,7 +264,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 1 @@ -275,7 +275,7 @@ def test_init(self) -> None: api_key="api_key", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, ) @@ -285,7 +285,7 @@ def test_init(self) -> None: assert not elasticsearch.elasticsearch_url assert not elasticsearch.username assert not elasticsearch.password - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 10485760 @@ -298,7 +298,7 @@ def test_init(self) -> None: elasticsearch_url=0, # type:ignore username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", ) with self.subTest("username not str"): @@ -307,7 +307,7 @@ def test_init(self) -> None: elasticsearch_url="", username=0, # type:ignore password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", ) with self.subTest("password not str"): @@ -316,7 +316,7 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password=0, # type:ignore - datastream="datastream", + es_datastream_name="es_datastream_name", ) with self.subTest("cloud_id not str"): @@ -325,7 +325,7 @@ def test_init(self) -> None: cloud_id=0, # type:ignore username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", ) with self.subTest("api_key not str"): @@ -333,16 +333,16 @@ def test_init(self) -> None: ElasticsearchOutput( cloud_id="cloud_id", api_key=0, # type:ignore - datastream="datastream", + es_datastream_name="es_datastream_name", ) - with self.subTest("datastream not str"): - with self.assertRaisesRegex(ValueError, "Elasticsearch Output datastream must be of type str"): + with self.subTest("es_datastream_name 
not str"): + with self.assertRaisesRegex(ValueError, "Elasticsearch Output es_datastream_name must be of type str"): ElasticsearchOutput( elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream=0, # type:ignore + es_datastream_name=0, # type:ignore ) with self.subTest("batch_max_actions not int"): @@ -351,7 +351,7 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions="test", # type:ignore ) @@ -361,7 +361,7 @@ def test_init(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_bytes="test", # type:ignore ) @@ -465,7 +465,7 @@ def test_get_output_by_type(self) -> None: username="username", password="password", es_index_or_datastream_name="es_index_or_datastream_name", - datastream="datasream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -479,7 +479,7 @@ def test_get_output_by_type(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -494,7 +494,7 @@ def test_add_output(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -518,7 +518,7 @@ def test_add_output(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -529,7 +529,7 @@ def test_add_output(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -546,7 +546,7 @@ def test_get_output_types(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -561,7 +561,7 @@ def test_delete_output_by_type(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", batch_max_actions=1, batch_max_bytes=1, ) @@ -809,7 +809,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" batch_max_actions: "test" """ ) @@ -826,7 +826,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" batch_max_bytes: "test" """ ) @@ -844,7 +844,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ ) @@ -860,7 +860,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ ) @@ -882,7 +882,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ ) @@ 
-920,13 +920,13 @@ def test_parse_config(self) -> None: assert elasticsearch.elasticsearch_url == "elasticsearch_url" assert elasticsearch.username == "username" assert elasticsearch.password == "password" - assert elasticsearch.datastream == "es_index_or_datastream_name" + assert elasticsearch.es_datastream_name == "es_index_or_datastream_name" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 with self.subTest( - "valid input valid elasticsearch output with both legacy es_index_or_datastream_name and datastream" + "valid input valid elasticsearch output with both legacy es_index_or_datastream_name and es_datastream_name" ): config = parse_config( config_yaml=""" @@ -943,7 +943,7 @@ def test_parse_config(self) -> None: elasticsearch_url: "elasticsearch_url" username: "username" password: "password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" es_index_or_datastream_name: "es_index_or_datastream_name" """ ) @@ -962,7 +962,7 @@ def test_parse_config(self) -> None: assert elasticsearch.elasticsearch_url == "elasticsearch_url" assert elasticsearch.username == "username" assert elasticsearch.password == "password" - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -983,7 +983,7 @@ def test_parse_config(self) -> None: elasticsearch_url: "elasticsearch_url" username: "username" password: "password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ ) @@ -1001,7 +1001,7 @@ def test_parse_config(self) -> None: assert elasticsearch.elasticsearch_url == "elasticsearch_url" assert elasticsearch.username == "username" assert elasticsearch.password == "password" - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1021,7 +1021,7 @@ def test_parse_config(self) -> None: args: elasticsearch_url: "elasticsearch_url" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ ) @@ -1038,7 +1038,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.elasticsearch_url == "elasticsearch_url" assert elasticsearch.api_key == "api_key" - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1059,7 +1059,7 @@ def test_parse_config(self) -> None: cloud_id: "cloud_id" username: "username" password: "password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ ) @@ -1077,7 +1077,7 @@ def test_parse_config(self) -> None: assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.username == "username" assert elasticsearch.password == "password" - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1097,7 +1097,7 @@ 
def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ ) @@ -1114,7 +1114,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1130,7 +1130,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" tags: - "tag1" - "tag2" @@ -1151,7 +1151,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1170,7 +1170,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" tags: - "tag1" - "tag2" @@ -1191,7 +1191,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == ["input_tag1", "input_tag2"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1211,7 +1211,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ ) @@ -1228,7 +1228,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == ["tag1", "tag2", "tag3"] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1250,7 +1250,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ ) @@ -1276,7 +1276,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 10485760 @@ -1298,7 +1298,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ ) @@ -1319,7 +1319,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ ) @@ -1334,7 +1334,7 @@ def 
test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" batch_max_actions: 1 """ ) @@ -1352,7 +1352,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 1 assert elasticsearch.batch_max_bytes == 10485760 @@ -1368,7 +1368,7 @@ def test_parse_config(self) -> None: args: cloud_id: "cloud_id" api_key: "api_key" - datastream: "datastream" + es_datastream_name: "es_datastream_name" batch_max_bytes: 1 """ ) @@ -1386,7 +1386,7 @@ def test_parse_config(self) -> None: assert elasticsearch.type == "elasticsearch" assert elasticsearch.cloud_id == "cloud_id" assert elasticsearch.api_key == "api_key" - assert elasticsearch.datastream == "datastream" + assert elasticsearch.es_datastream_name == "es_datastream_name" assert elasticsearch.tags == [] assert elasticsearch.batch_max_actions == 500 assert elasticsearch.batch_max_bytes == 1 diff --git a/tests/share/test_secretsmanager.py b/tests/share/test_secretsmanager.py index 9215c970..acc6f2f8 100644 --- a/tests/share/test_secretsmanager.py +++ b/tests/share/test_secretsmanager.py @@ -93,7 +93,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaisesRegex( @@ -115,7 +115,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaisesRegex( ValueError, "Must be provided region in arn: arn:aws:secretsmanager::123456789:secret:plain_secret" @@ -134,7 +134,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaisesRegex( ValueError, @@ -154,7 +154,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaisesRegex( ValueError, @@ -175,7 +175,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets" password: 
"arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaisesRegex( @@ -196,7 +196,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaises(ClientError): @@ -213,7 +213,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaisesRegex( @@ -233,7 +233,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaisesRegex( @@ -254,7 +254,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaisesRegex( @@ -276,7 +276,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaisesRegex( @@ -297,7 +297,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaisesRegex( @@ -318,7 +318,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ with self.assertRaisesRegex( @@ -341,7 +341,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secrets_manager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ 
mock_fetched_data = aws_sm_expander(config_yaml) @@ -355,7 +355,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secrets_manager:eu-central-1:123456789:secret:es_secrets:url" username: "mock_elastic_username" password: "mock_elastic_password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ assert mock_fetched_data == parsed_config_yaml @@ -372,7 +372,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ mock_fetched_data = aws_sm_expander(config_yaml) @@ -386,7 +386,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "mock_elastic_url" username: "mock_elastic_username" password: "mock_elastic_password" - datastream: "datastream" + es_datastream_name: "es_datastream_name" """ assert mock_fetched_data == parsed_config_yaml @@ -402,7 +402,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:url" username: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:username" password: "arn:aws:secretsmanager:eu-central-1:123456789:secret:es_secrets:password" - datastream: "arn:aws:secretsmanager:eu-west-1:123456789:secret:binary_secret" + es_datastream_name: "arn:aws:secretsmanager:eu-west-1:123456789:secret:binary_secret" """ mock_fetched_data = aws_sm_expander(config_yaml) @@ -416,7 +416,7 @@ def test_parse_secrets_manager(self) -> None: elasticsearch_url: "mock_elastic_url" username: "mock_elastic_username" password: "mock_elastic_password" - datastream: "mock_nginx.log" + es_datastream_name: "mock_nginx.log" """ assert mock_fetched_data == parsed_config_yaml diff --git a/tests/shippers/test_es.py b/tests/shippers/test_es.py index accda7af..db8113cb 100644 --- a/tests/shippers/test_es.py +++ b/tests/shippers/test_es.py @@ -81,7 +81,7 @@ def test_send(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="", - datastream="logs-data.set-namespace", + es_datastream_name="logs-data.set-namespace", tags=["tag1", "tag2", "tag3"], batch_max_actions=0, ) @@ -127,7 +127,7 @@ def test_send_with_failure(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="", - datastream="data.set", + es_datastream_name="data.set", tags=["tag1", "tag2", "tag3"], batch_max_actions=0, ) @@ -149,7 +149,7 @@ def test_flush(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="", - datastream="logs-data.set-namespace", + es_datastream_name="logs-data.set-namespace", tags=["tag1", "tag2", "tag3"], batch_max_actions=2, ) @@ -191,7 +191,7 @@ def test_flush(self) -> None: @mock.patch("shippers.es.es_bulk", mock_bulk) @mock.patch("shippers.es.Elasticsearch", new=MockClient) def test_send_with_dataset_discovery(self) -> None: - with self.subTest("empty datastream"): + with self.subTest("empty es_datastream_name"): shipper = ElasticsearchShipper( elasticsearch_url="elasticsearch_url", username="username", @@ -237,12 +237,12 @@ def test_send_with_dataset_discovery(self) -> None: assert shipper._bulk_actions == [] - with self.subTest("datastream as `logs-unit-test"): + with self.subTest("es_datastream_name as `logs-unit-test"): shipper = ElasticsearchShipper( 
elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="logs-unit-test", + es_datastream_name="logs-unit-test", tags=["tag1", "tag2", "tag3"], batch_max_actions=0, ) @@ -284,12 +284,12 @@ def test_send_with_dataset_discovery(self) -> None: assert shipper._bulk_actions == [] - with self.subTest("datastream not matching logs datastream naming conventation"): + with self.subTest("es_datastream_name not matching logs datastream naming convention"): shipper = ElasticsearchShipper( elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", tags=["tag1", "tag2", "tag3"], batch_max_actions=0, ) @@ -298,12 +298,12 @@ def test_send_with_dataset_discovery(self) -> None: assert shipper._dataset == "" assert shipper._namespace == "" - assert shipper._es_index == "datastream" + assert shipper._es_index == "es_datastream_name" assert _documents[0] == [ { "@timestamp": _now, - "_index": "datastream", + "_index": "es_datastream_name", "_op_type": "create", "aws": { "s3": { @@ -336,7 +336,7 @@ def test_no_datastream(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="logs-es-index-no-datastream", + es_datastream_name="logs-es-index-no-datastream", tags=["tag1", "tag2", "tag3"], ) @@ -350,7 +350,7 @@ def test_custom_dataset(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="logs-dataset-namespace", + es_datastream_name="logs-dataset-namespace", tags=["tag1", "tag2", "tag3"], ) diff --git a/tests/shippers/test_factory.py b/tests/shippers/test_factory.py index 1a20264b..cfdd8435 100644 --- a/tests/shippers/test_factory.py +++ b/tests/shippers/test_factory.py @@ -17,7 +17,7 @@ def test_create(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", ) assert isinstance(shipper, ElasticsearchShipper) @@ -27,7 +27,7 @@ def test_create(self) -> None: output_type="elasticsearch", elasticsearch_url="elasticsearch_url", api_key="api_key", - datastream="datastream", + es_datastream_name="es_datastream_name", ) assert isinstance(shipper, ElasticsearchShipper) @@ -38,7 +38,7 @@ def test_create(self) -> None: cloud_id="cloud_id:bG9jYWxob3N0OjkyMDAkMA==", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", ) assert isinstance(shipper, ElasticsearchShipper) @@ -48,7 +48,7 @@ def test_create(self) -> None: output_type="elasticsearch", cloud_id="cloud_id:bG9jYWxob3N0OjkyMDAkMA==", api_key="api_key", - datastream="datastream", + es_datastream_name="es_datastream_name", ) assert isinstance(shipper, ElasticsearchShipper) @@ -83,7 +83,7 @@ def test_create_from_output(self) -> None: elasticsearch_url="elasticsearch_url", username="username", password="password", - datastream="datastream", + es_datastream_name="es_datastream_name", ) with self.subTest("create output type elasticsearch"): From d88b5c2247f6acc8688f9c58d5ff1acddf3c2456 Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Wed, 18 May 2022 11:57:37 +0900 Subject: [PATCH 16/17] template and readme cr fix --- .internal/aws/cloudformation/application.yaml | 6 ++++-- .internal/aws/cloudformation/macro.yaml | 6 ++++-- docs/README-AWS.md | 2 +- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/.internal/aws/cloudformation/application.yaml 
b/.internal/aws/cloudformation/application.yaml index bff8dab6..9566016d 100644 --- a/.internal/aws/cloudformation/application.yaml +++ b/.internal/aws/cloudformation/application.yaml @@ -76,8 +76,10 @@ Resources: Name: %sarAppName%-macro Metadata: AWS::ServerlessRepo::Application: - Name: %sarAppName%-application - Description: SAM Template for the application, not intended to be deployed on its own + Name: helper-application-%sarAppName% + Description: | + NOTE: DO NOT DEPLOY + Deploy https://serverlessrepo.aws.amazon.com/applications/eu-central-1/267093732750/elastic-serverless-forwarder instead. This is a helper SAM template for the application and not intended to be deployed on its own. Author: %sarAuthorName% SemanticVersion: %semanticVersion% LicenseUrl: %codeUri%/LICENSE.txt diff --git a/.internal/aws/cloudformation/macro.yaml b/.internal/aws/cloudformation/macro.yaml index bd207109..dc2a7ccf 100644 --- a/.internal/aws/cloudformation/macro.yaml +++ b/.internal/aws/cloudformation/macro.yaml @@ -222,8 +222,10 @@ Resources: Name: %sarAppName%-macro Metadata: AWS::ServerlessRepo::Application: - Name: %sarAppName%-macro - Description: SAM Template for the macro, not intended to be deployed on its own + Name: helper-macro-%sarAppName% + Description: | + NOTE: DO NOT DEPLOY + Deploy https://serverlessrepo.aws.amazon.com/applications/eu-central-1/267093732750/elastic-serverless-forwarder instead. This is a helper SAM template for the macro and not intended to be deployed on its own. Author: %sarAuthorName% SemanticVersion: %semanticVersion% LicenseUrl: %codeUri%/LICENSE.txt diff --git a/docs/README-AWS.md b/docs/README-AWS.md index 232edf8c..c8ae2534 100644 --- a/docs/README-AWS.md +++ b/docs/README-AWS.md @@ -288,7 +288,7 @@ Custom init arguments for the given forwarding target output * `args.username`: Username of the elasticsearch instance to connect to. Mandatory in case `args.api_key` is not provided. Will be ignored if `args.api_key` is defined as well. * `args.password` Password of the elasticsearch instance to connect to. Mandatory in case `args.api_key` is not provided. Will be ignored if `args.api_key` is defined as well. * `args.api_key`: Api key of elasticsearch endpoint in the format **base64encode(api_key_id:api_key_secret)**. Mandatory in case `args.username` and `args.password ` are not provided. Will take precedence over `args.username`/`args.password` if both are defined. - * `args.es_datastream_name`: Name of data stream or the index where to forward the logs to. Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types, if using data stream, the users can optionally set its value in the configuration file according to the naming convention for data streams and available integrations. If the `es_datastream_name` is not specified and it cannot be matched with any of the above AWS services then the value will be set to "logs-generic-default". Before **v0.30.0** this param was named `es_index_or_datastream_name`, that's now deprecated. It can still be used until the release of **v1.0.0**, when it will be finally removed. + * `args.es_datastream_name`: Name of data stream or the index where to forward the logs to. 
Lambda supports automatic routing of various AWS service logs to the corresponding data streams for further processing and storage in the Elasticsearch cluster. It supports automatic routing of `aws.cloudtrail`, `aws.cloudwatch_logs`, `aws.elb_logs`, `aws.firewall_logs`, `aws.vpcflow`, and `aws.waf` logs. For other log types, if using a data stream, users can optionally set its value in the configuration file according to the naming convention for data streams and available integrations. If the `es_datastream_name` is not specified and it cannot be matched with any of the above AWS services, then the value will be set to "logs-generic-default". In version **v0.29.1** and earlier, this configuration parameter was named `es_index_or_datastream_name`. To keep using it in future versions, rename the configuration parameter to `es_datastream_name` in your configuration YAML file in the S3 bucket. The older name `es_index_or_datastream_name` is deprecated as of version **v0.30.0** and related backward compatibility code will be removed in version **v1.0.0**. * `args.batch_max_actions`: Maximum number of actions to send in a single bulk request. Default value: 500 * `args.batch_max_bytes`: Maximum size in bytes to send in a single bulk request. Default value: 10485760 (10MB) From 1d8b39b7cdd2740e17cf2923b14547ce0485ca38 Mon Sep 17 00:00:00 2001 From: Andrea Spacca Date: Thu, 19 May 2022 08:28:22 +0900 Subject: [PATCH 17/17] use new application name for helpers in cf template --- .internal/aws/cloudformation/application.yaml | 2 +- .internal/aws/cloudformation/macro.yaml | 2 +- .internal/aws/cloudformation/template.yaml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.internal/aws/cloudformation/application.yaml b/.internal/aws/cloudformation/application.yaml index 9566016d..177b684e 100644 --- a/.internal/aws/cloudformation/application.yaml +++ b/.internal/aws/cloudformation/application.yaml @@ -79,7 +79,7 @@ Metadata: Name: helper-application-%sarAppName% Description: | NOTE: DO NOT DEPLOY - Deploy https://serverlessrepo.aws.amazon.com/applications/eu-central-1/267093732750/elastic-serverless-forwarder instead. This is a helper SAM template for the application and not intended to be deployed on its own. + Deploy elastic-serverless-forwarder instead. This is a helper SAM template for the application and not intended to be deployed on its own. Author: %sarAuthorName% SemanticVersion: %semanticVersion% LicenseUrl: %codeUri%/LICENSE.txt diff --git a/.internal/aws/cloudformation/macro.yaml b/.internal/aws/cloudformation/macro.yaml index dc2a7ccf..9c3f4c4d 100644 --- a/.internal/aws/cloudformation/macro.yaml +++ b/.internal/aws/cloudformation/macro.yaml @@ -225,7 +225,7 @@ Metadata: Name: helper-macro-%sarAppName% Description: | NOTE: DO NOT DEPLOY - Deploy https://serverlessrepo.aws.amazon.com/applications/eu-central-1/267093732750/elastic-serverless-forwarder instead. This is a helper SAM template for the macro and not intended to be deployed on its own.
Author: %sarAuthorName% SemanticVersion: %semanticVersion% LicenseUrl: %codeUri%/LICENSE.txt diff --git a/.internal/aws/cloudformation/template.yaml b/.internal/aws/cloudformation/template.yaml index 3fb4c54b..8b6a6e4e 100644 --- a/.internal/aws/cloudformation/template.yaml +++ b/.internal/aws/cloudformation/template.yaml @@ -43,13 +43,13 @@ Resources: Type: AWS::Serverless::Application Properties: Location: - ApplicationId: arn:aws:serverlessrepo:%awsRegion%:%accountID%:applications/%sarAppName%-macro + ApplicationId: arn:aws:serverlessrepo:%awsRegion%:%accountID%:applications/helper-macro-%sarAppName% SemanticVersion: %semanticVersion% ElasticServerlessForwarderApplication: Type: AWS::Serverless::Application Properties: Location: - ApplicationId: arn:aws:serverlessrepo:%awsRegion%:%accountID%:applications/%sarAppName%-application + ApplicationId: arn:aws:serverlessrepo:%awsRegion%:%accountID%:applications/helper-application-%sarAppName% SemanticVersion: %semanticVersion% Parameters: ElasticServerlessForwarderS3ConfigFile: !Ref ElasticServerlessForwarderS3ConfigFile
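Note (editorial, not part of the patch series): for readers following the `es_index_or_datastream_name` → `es_datastream_name` rename documented in README-AWS.md above, a minimal sketch of a forwarder configuration file stored in the S3 bucket after the rename could look like the following. The queue ARN, endpoint, credentials and data stream name are placeholders, not values taken from the repository; the batch values shown are the defaults stated in the README.

```yaml
# Illustrative sketch only - all IDs, credentials and URLs below are placeholders.
inputs:
  - type: "s3-sqs"
    id: "arn:aws:sqs:eu-central-1:123456789:example-queue"   # hypothetical trigger queue
    outputs:
      - type: "elasticsearch"
        args:
          elasticsearch_url: "https://elasticsearch.example.com:9243"  # or cloud_id instead
          username: "username"                                         # or api_key instead
          password: "password"
          es_datastream_name: "logs-generic-default"   # previously: es_index_or_datastream_name
          batch_max_actions: 500                       # optional, default 500
          batch_max_bytes: 10485760                    # optional, default 10485760 (10MB)
```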