fixing small bugs with terraform 12 code #1053

Merged · 2 commits · Nov 20, 2019
8 changes: 3 additions & 5 deletions docs/source/clusters.rst
@@ -114,11 +114,9 @@ Example: CloudTrail via S3 Events
"enable_kinesis": false,
"enable_logging": true
},
"s3_events": [
{
"bucket_id": "PREFIX.CLUSTER.streamalert.cloudtrail"
}
],
"s3_events": {
"PREFIX.CLUSTER.streamalert.cloudtrail": []
},
"stream_alert": {
"classifier_config": {
"enable_custom_metrics": true,
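For context, the cluster config now maps each bucket name to a (possibly empty) list of notification filters rather than a list of objects with a bucket_id key. A minimal sketch of the new shape, written as a Python dict; the second bucket and its filter key names are illustrative assumptions, not values taken from this PR:

```python
# Sketch of the new s3_events shape consumed by the CLI when generating Terraform.
# Keys are bucket names; values are lists of filters (an empty list means the
# classifier is notified for every object created in the bucket).
s3_events = {
    "PREFIX.CLUSTER.streamalert.cloudtrail": [],  # from the docs example above
    "example-data-bucket": [                      # hypothetical second bucket
        {"filter_prefix": "AWSLogs/", "filter_suffix": ".json"},  # assumed key names
    ],
}
```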
6 changes: 5 additions & 1 deletion streamalert_cli/terraform/s3_events.py
@@ -13,6 +13,8 @@
See the License for the specific language governing permissions and
limitations under the License.
"""
+ import re
+
from streamalert.shared.logger import get_logger

LOGGER = get_logger(__name__)
@@ -36,7 +38,9 @@ def generate_s3_events(cluster_name, cluster_dict, config):

# Add each configured S3 bucket module
for bucket_name, info in s3_event_buckets.items():
- cluster_dict['module']['s3_events_{}_{}_{}'.format(prefix, cluster_name, bucket_name)] = {
+ # Replace all invalid module characters with underscores
+ mod_suffix = re.sub('[^a-zA-Z0-9_-]', '_', bucket_name)
+ cluster_dict['module']['s3_events_{}_{}_{}'.format(prefix, cluster_name, mod_suffix)] = {
'source': './modules/tf_s3_events',
'lambda_role_id': '${{{}.role_id}}'.format(lambda_module_path),
'lambda_function_alias': '${{{}.function_alias}}'.format(lambda_module_path),
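A small standalone sketch of the naming behavior this change introduces (the helper name is mine, not part of the codebase): Terraform 0.12 module names must be valid identifiers (letters, digits, underscores, hyphens), so the dots in a bucket name would previously have produced an invalid module block.

```python
import re

def s3_events_module_name(prefix, cluster_name, bucket_name):
    # Mirror the re.sub() above: replace every character outside
    # [a-zA-Z0-9_-] (e.g. the dots in a bucket name) with an underscore
    # so the resulting Terraform module name is a valid identifier.
    mod_suffix = re.sub('[^a-zA-Z0-9_-]', '_', bucket_name)
    return 's3_events_{}_{}_{}'.format(prefix, cluster_name, mod_suffix)

# Matches the expectations in the updated unit tests below:
# s3_events_module_name('unit-test', 'advanced', 'unit-test-bucket.data')
#   -> 's3_events_unit-test_advanced_unit-test-bucket_data'
```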
2 changes: 0 additions & 2 deletions terraform/modules/tf_kinesis_events/variables.tf
@@ -16,5 +16,3 @@ variable "lambda_production_enabled" {
variable "lambda_function_alias_arn" {
type = string
}
-
- variable "lambda_function_alias_arn" {}
10 changes: 1 addition & 9 deletions terraform/modules/tf_lookup_tables_dynamodb/main.tf
@@ -5,15 +5,7 @@ data "aws_iam_policy_document" "streamalert_read_items_from_lookup_tables_dynamo
"dynamodb:DescribeTable",
]

- # TF-UPGRADE-TODO: In Terraform v0.10 and earlier, it was sometimes necessary to
- # force an interpolation expression to be interpreted as a list by wrapping it
- # in an extra set of list brackets. That form was supported for compatibility in
- # v0.11, but is no longer supported in Terraform v0.12.
- #
- # If the expression in the following list itself returns a list, remove the
- # brackets to avoid interpretation as a list of lists. If the expression
- # returns a single list item then leave it as-is and remove this TODO comment.
- resources = [local.dynamodb_table_arns]
+ resources = local.dynamodb_table_arns
}
}

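The deleted TF-UPGRADE-TODO note is the reasoning behind this one-line fix: local.dynamodb_table_arns already evaluates to a list, so wrapping it in an extra set of brackets under Terraform 0.12 yields a list of lists rather than a flat list of ARNs. A rough Python analogue, with made-up ARNs for illustration:

```python
dynamodb_table_arns = [
    "arn:aws:dynamodb:us-east-1:123456789012:table/example-a",  # hypothetical
    "arn:aws:dynamodb:us-east-1:123456789012:table/example-b",  # hypothetical
]

resources_before = [dynamodb_table_arns]  # [[...]]: a nested list, not what the policy expects
resources_after = dynamodb_table_arns     # [...]:  a flat list of table ARNs
```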
16 changes: 13 additions & 3 deletions terraform/modules/tf_s3_events/main.tf
@@ -1,19 +1,29 @@
+ locals {
+   sanitized_bucket_name = replace(var.bucket_name, "/[^a-zA-Z0-9_-]/", "_")
+ }
+
// Lambda Permission: Allow S3 Event Notifications to invoke Lambda
resource "aws_lambda_permission" "allow_bucket" {
- statement_id = "${var.prefix}_${var.cluster}_InvokeFromS3Bucket_${var.bucket_name}"
+ statement_id = "InvokeFromS3Bucket_${local.sanitized_bucket_name}"
action = "lambda:InvokeFunction"
function_name = var.lambda_function_name
principal = "s3.amazonaws.com"
source_arn = "arn:aws:s3:::${var.bucket_name}"
qualifier = var.lambda_function_alias
}

+ // This hack ensures that the lambda_function block is still created
+ // even if no filters are provided
+ locals {
+   filters = coalescelist(var.filters, [{}])
+ }
+
// S3 Bucket Notification: Invoke the StreamAlert Classifier
resource "aws_s3_bucket_notification" "bucket_notification" {
bucket = var.bucket_name

dynamic "lambda_function" {
- for_each = var.filters
+ for_each = local.filters

content {
events = ["s3:ObjectCreated:*"]
@@ -26,7 +36,7 @@ resource "aws_s3_bucket_notification" "bucket_notification" {

// IAM Policy: Allow Lambda to GetObjects from S3
resource "aws_iam_role_policy" "lambda_s3_permission" {
- name = "${var.prefix}_${var.cluster}_S3GetObjects_${var.bucket_name}"
+ name = "S3GetObjects_${var.bucket_name}"
[Review thread on the name attribute above]

Contributor: Why don't we also use local.sanitized_bucket_name instead of var.bucket_name?

@ryandeivert (Contributor, Author), Nov 20, 2019: Because the name attribute for these resources supports special characters (i.e., characters that are NOT in the set [a-zA-Z0-9_-]), while the statement_id attribute above only supports those characters: https://docs.aws.amazon.com/lambda/latest/dg/API_AddPermission.html#SSS-AddPermission-request-StatementId

Contributor: Aha, it's annoying that we have to know exactly which resource names need to be sanitized 😒 Thanks for the explanation!

role = var.lambda_role_id
policy = data.aws_iam_policy_document.s3_read_only.json
}
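To restate the thread above in code form: only the Lambda permission's statement_id is restricted to [a-zA-Z0-9_-] (per the AddPermission API linked above), so it is the only name built from the sanitized bucket name, while the IAM policy name can keep the raw bucket name. A small illustrative sketch; the helper names are mine, not from the module:

```python
import re

def statement_id(bucket_name):
    # AddPermission's StatementId rejects characters outside [a-zA-Z0-9_-],
    # so mirror the Terraform replace() and substitute underscores.
    return "InvokeFromS3Bucket_" + re.sub(r"[^a-zA-Z0-9_-]", "_", bucket_name)

def policy_name(bucket_name):
    # IAM role policy names tolerate dots, so the raw bucket name is fine here.
    return "S3GetObjects_" + bucket_name

# statement_id("unit-test.cloudtrail.data") -> "InvokeFromS3Bucket_unit-test_cloudtrail_data"
# policy_name("unit-test.cloudtrail.data")  -> "S3GetObjects_unit-test.cloudtrail.data"
```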
8 changes: 0 additions & 8 deletions terraform/modules/tf_s3_events/variables.tf
@@ -1,11 +1,3 @@
- variable "prefix" {
-   type = string
- }
-
- variable "cluster" {
-   type = string
- }
-
variable "bucket_name" {
type = string
}
4 changes: 2 additions & 2 deletions tests/unit/streamalert_cli/terraform/test_generate.py
@@ -489,8 +489,8 @@ def test_generate_cluster_advanced(self):
'kinesis_events_advanced',
'flow_logs_advanced',
'cloudtrail_advanced',
- 's3_events_unit-test_advanced_unit-test-bucket.data',
- 's3_events_unit-test_advanced_unit-test.cloudtrail.data'
+ 's3_events_unit-test_advanced_unit-test-bucket_data',
+ 's3_events_unit-test_advanced_unit-test_cloudtrail_data'
}

assert_equal(set(tf_cluster['module'].keys()), advanced_modules)
20 changes: 6 additions & 14 deletions tests/unit/streamalert_cli/terraform/test_s3_events.py
@@ -28,17 +28,13 @@ def test_generate_s3_events():

expected_config = {
'module': {
- 's3_events_unit-test_advanced_unit-test-bucket.data': {
+ 's3_events_unit-test_advanced_unit-test-bucket_data': {
'source': './modules/tf_s3_events',
- 'lambda_function_alias': (
-     '${module.classifier_advanced_lambda.function_alias}'
- ),
+ 'lambda_function_alias': '${module.classifier_advanced_lambda.function_alias}',
'lambda_function_alias_arn': (
'${module.classifier_advanced_lambda.function_alias_arn}'
),
- 'lambda_function_name': (
-     '${module.classifier_advanced_lambda.function_name}'
- ),
+ 'lambda_function_name': '${module.classifier_advanced_lambda.function_name}',
'bucket_name': 'unit-test-bucket.data',
'lambda_role_id': '${module.classifier_advanced_lambda.role_id}',
'filters': [
@@ -48,17 +44,13 @@
}
]
},
- 's3_events_unit-test_advanced_unit-test.cloudtrail.data': {
+ 's3_events_unit-test_advanced_unit-test_cloudtrail_data': {
'source': './modules/tf_s3_events',
- 'lambda_function_alias': (
-     '${module.classifier_advanced_lambda.function_alias}'
- ),
+ 'lambda_function_alias': '${module.classifier_advanced_lambda.function_alias}',
'lambda_function_alias_arn': (
'${module.classifier_advanced_lambda.function_alias_arn}'
),
- 'lambda_function_name': (
-     '${module.classifier_advanced_lambda.function_name}'
- ),
+ 'lambda_function_name': '${module.classifier_advanced_lambda.function_name}',
'bucket_name': 'unit-test.cloudtrail.data',
'lambda_role_id': '${module.classifier_advanced_lambda.role_id}',
'filters': []