Skip to content

Commit

Permalink
Using ARM64 Architecture
Browse files Browse the repository at this point in the history
  • Loading branch information
amarouane-ABDELHAK committed Jul 8, 2024
1 parent 790f868 commit 0c23901
Show file tree
Hide file tree
Showing 8 changed files with 68 additions and 164 deletions.
26 changes: 8 additions & 18 deletions infrastructure/ecs_services/airflow_metrics.tf
Original file line number Diff line number Diff line change
Expand Up @@ -34,16 +34,6 @@ resource "aws_ecs_task_definition" "airflow_metrics" {
requires_compatibilities = ["FARGATE"]
volume {
name = "efs-${var.prefix}"
efs_volume_configuration {
file_system_id = aws_efs_file_system.efs.id
root_directory = "/mnt/data"
transit_encryption = "ENABLED"
transit_encryption_port = 2999
authorization_config {
access_point_id = aws_efs_access_point.access.id
iam = "ENABLED"
}
}
}
container_definitions = jsonencode([
{
Expand All @@ -55,13 +45,6 @@ resource "aws_ecs_task_definition" "airflow_metrics" {
entryPoint = [
"python"
]
mountPoints : [
{
"containerPath" : "/opt/airflow/dags_efs",
"sourceVolume" : "efs-${var.prefix}"

}
]
command = [
"scripts/put_airflow_worker_autoscaling_metric_data.py",
"--cluster-name",
Expand All @@ -75,7 +58,14 @@ resource "aws_ecs_task_definition" "airflow_metrics" {
"--period",
"30"
]
environment = var.airflow_task_common_environment
environment = concat(var.airflow_task_common_environment,
[
{
name = "SERVICES_HASH"
value = local.services_hashes
}

])
user = "50000:0"
logConfiguration = {
logDriver = "awslogs"
Expand Down
32 changes: 10 additions & 22 deletions infrastructure/ecs_services/airflow_scheduler.tf
Original file line number Diff line number Diff line change
Expand Up @@ -48,33 +48,14 @@ resource "aws_ecs_task_definition" "airflow_scheduler" {
cpu_architecture = "ARM64"
}
requires_compatibilities = ["FARGATE"]
# volume {
# name = "efs-${var.prefix}"
# efs_volume_configuration {
# file_system_id = aws_efs_file_system.efs.id
# root_directory = "/mnt/data"
# transit_encryption = "ENABLED"
# transit_encryption_port = 2999
# authorization_config {
# access_point_id = aws_efs_access_point.access.id
# iam = "ENABLED"
# }
# }
# }

container_definitions = jsonencode([
{
name = "scheduler"
image = join(":", [aws_ecr_repository.airflow.repository_url, "latest"])
cpu = var.scheduler_cpu
memory = var.scheduler_memory
# mountPoints : [
# {
# "containerPath" : "/opt/airflow/dags_efs",
# "sourceVolume" : "efs-${var.prefix}"
#
# }
# ]

healthcheck = {
command = [
"CMD-SHELL",
Expand All @@ -92,7 +73,14 @@ resource "aws_ecs_task_definition" "airflow_scheduler" {
linuxParameters = {
initProcessEnabled = true
}
environment = var.airflow_task_common_environment
environment = concat(var.airflow_task_common_environment,
[
{
name = "SERVICES_HASH"
value = local.services_hashes
}

])
user = "50000:0"
logConfiguration = {
logDriver = "awslogs"
Expand Down Expand Up @@ -159,7 +147,7 @@ resource "aws_ecs_service" "airflow_scheduler" {
platform_version = "1.4.0"
scheduling_strategy = "REPLICA"
# Update from requirements
force_new_deployment = var.force_new_ecs_service_deployment
#force_new_deployment = var.force_new_ecs_service_deployment
}


11 changes: 9 additions & 2 deletions infrastructure/ecs_services/airflow_server.tf
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,14 @@ resource "aws_ecs_task_definition" "airflow_webserver" {
}
essential = true
command = ["webserver"]
environment = var.airflow_task_common_environment
environment = concat(var.airflow_task_common_environment,
[
{
name = "SERVICES_HASH"
value = [local.config_folder_hash, local.services_build_path_hash]
}

])
user = "50000:0"
logConfiguration = {
logDriver = "awslogs"
Expand Down Expand Up @@ -109,7 +116,7 @@ resource "aws_ecs_service" "airflow_webserver" {
container_port = 8080
}
# Update from services folder
force_new_deployment = var.force_new_ecs_service_deployment
#force_new_deployment = var.force_new_ecs_service_deployment
# This can be used to update tasks to use a newer container image with same
# image/tag combination (e.g., myimage:latest)
}
21 changes: 1 addition & 20 deletions infrastructure/ecs_services/airflow_standalone_task.tf
Original file line number Diff line number Diff line change
Expand Up @@ -34,32 +34,13 @@ resource "aws_ecs_task_definition" "airflow_standalone_task" {
cpu_architecture = "ARM64"
}
requires_compatibilities = ["FARGATE"]
# volume {
# name = "efs-${var.prefix}"
# efs_volume_configuration {
# file_system_id = aws_efs_file_system.efs.id
# root_directory = "/mnt/data"
# transit_encryption = "ENABLED"
# transit_encryption_port = 2999
# authorization_config {
# access_point_id = aws_efs_access_point.access.id
# iam = "ENABLED"
# }
# }
# }

container_definitions = jsonencode([
{
name = "airflow"
image = join(":", [aws_ecr_repository.airflow.repository_url, "latest"])
cpu = 256
memory = 512
# mountPoints : [
# {
# "containerPath" : "/opt/airflow/dags_efs",
# "sourceVolume" : "efs-${var.prefix}"
#
# }
# ]
essential = true
command = ["version"]
environment = var.airflow_task_common_environment
Expand Down
27 changes: 5 additions & 22 deletions infrastructure/ecs_services/airflow_worker.tf
Original file line number Diff line number Diff line change
Expand Up @@ -19,20 +19,6 @@ resource "aws_ecs_task_definition" "airflow_worker" {
cpu_architecture = "ARM64"
}
requires_compatibilities = ["FARGATE"]
volume {
name = "efs-${var.prefix}"
efs_volume_configuration {
file_system_id = aws_efs_file_system.efs.id
#root_directory = "/mnt/data"
transit_encryption = "ENABLED"
transit_encryption_port = 2999
authorization_config {
access_point_id = aws_efs_access_point.access.id
iam = "ENABLED"
}
}
}

container_definitions = jsonencode([
{
name = "worker"
Expand All @@ -41,13 +27,6 @@ resource "aws_ecs_task_definition" "airflow_worker" {
memory = var.worker_memory
essential = true
command = var.worker_cmd != [] ? var.worker_cmd : ["celery", "worker"]
mountPoints : [
{
"containerPath" : "/opt/airflow/dags_efs",
"sourceVolume" : "efs-${var.prefix}"

}
]
linuxParameters = {
initProcessEnabled = true
}
Expand All @@ -59,6 +38,10 @@ resource "aws_ecs_task_definition" "airflow_worker" {
{
name = "DUMB_INIT_SETSID"
value = "0"
},
{
name = "WORKER_HASHES"
value = local.workers_hashes
}
]
)
Expand Down Expand Up @@ -116,7 +99,7 @@ resource "aws_ecs_service" "airflow_worker" {
weight = 1
}
# Update from workers folder
force_new_deployment = var.force_new_ecs_service_deployment
# force_new_deployment = var.force_new_ecs_service_deployment

}

Expand Down
24 changes: 6 additions & 18 deletions infrastructure/ecs_services/ecr.tf
Original file line number Diff line number Diff line change
Expand Up @@ -37,24 +37,12 @@ resource "aws_ecr_lifecycle_policy" "ecr_policy" {
})

}
locals {

services_build_path = "../${path.root}/airflow_services"
dag_folder_path = "../${path.root}/dags"
scripts_path = "../${path.root}/scripts"
config_path = "../${path.root}/configuration"
worker_build_path = "../${path.root}/airflow_worker"
}


resource "null_resource" "build_ecr_image" {
triggers = {
build_path = sha1(join("", [for f in fileset(local.services_build_path, "**") : filesha1("${local.services_build_path}/${f}")]))
scripts_path = sha1(join("", [for f in fileset(local.scripts_path, "**") : filesha1("${local.scripts_path}/${f}")]))
dag_folder_path = sha1(join("", [for f in fileset(local.dag_folder_path, "**") : filesha1("${local.dag_folder_path}/${f}")]))
config_folder_path = sha1(join("", [for f in fileset(local.config_path, "**") : filesha1("${local.config_path}/${f}")]))


services_build_path_hash = local.services_build_path_hash
scripts_folder_hash = local.scripts_folder_hash
dag_folder_hash = local.dag_folder_hash
config_folder_hash = local.config_folder_hash
}

provisioner "local-exec" {
Expand All @@ -71,8 +59,8 @@ resource "null_resource" "build_ecr_image" {

resource "null_resource" "build_worker_ecr_image" {
triggers = {
build_path_worker = sha1(join("", [for f in fileset(local.worker_build_path, "**") : filesha1("${local.worker_build_path}/${f}")]))
dag_folder_path = sha1(join("", [for f in fileset(local.dag_folder_path, "**") : filesha1("${local.dag_folder_path}/${f}")]))
worker_folder_hash = local.worker_folder_hash
dag_folder_hash = local.dag_folder_hash
}

provisioner "local-exec" {
Expand Down
62 changes: 0 additions & 62 deletions infrastructure/ecs_services/efs.tf

This file was deleted.

29 changes: 29 additions & 0 deletions infrastructure/ecs_services/locals.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# Source folders (relative to the root module) whose contents drive the
# Airflow container images and runtime assets. These paths feed the
# fileset()/filesha1() hashing locals below, which in turn trigger image
# rebuilds and service redeployments when any file changes.
locals {

services_build_path = "../${path.root}/airflow_services"   # Docker build context for the Airflow services image
dag_folder_path = "../${path.root}/dags"                   # Airflow DAG definitions
scripts_path = "../${path.root}/scripts"                   # helper scripts baked into the image
config_path = "../${path.root}/configuration"              # Airflow configuration files
worker_build_path = "../${path.root}/airflow_worker"       # Docker build context for the worker image
}



# Content hash per source folder: sha1 of the concatenation of each
# file's sha1, over every file in the folder (recursive via "**").
# A hash changes whenever any file in its folder is added, removed, or
# modified — used as null_resource triggers and service redeploy signals.
locals {

services_build_path_hash = sha1(join("", [for f in fileset(local.services_build_path, "**") : filesha1("${local.services_build_path}/${f}")]))
dag_folder_hash = sha1(join("", [for f in fileset(local.dag_folder_path, "**") : filesha1("${local.dag_folder_path}/${f}")]))
scripts_folder_hash = sha1(join("", [for f in fileset(local.scripts_path, "**") : filesha1("${local.scripts_path}/${f}")]))
config_folder_hash = sha1(join("", [for f in fileset(local.config_path, "**") : filesha1("${local.config_path}/${f}")]))
worker_folder_hash = sha1(join("", [for f in fileset(local.worker_build_path, "**") : filesha1("${local.worker_build_path}/${f}")]))
}


# Aggregate content hashes injected into ECS task definitions as the
# SERVICES_HASH / WORKER_HASHES environment variables, so that a change
# in any relevant source folder produces a new task definition revision
# (and therefore a redeploy).
#
# NOTE: the ECS API requires container environment variable values to be
# strings. These were previously lists, which jsonencode() serializes as
# JSON arrays and ECS rejects at registration time — so the per-folder
# hashes are joined into a single comma-separated string instead.
locals {

services_hashes = join(",", [local.scripts_folder_hash, local.dag_folder_hash, local.config_folder_hash, local.services_build_path_hash])
workers_hashes = join(",", [local.dag_folder_hash, local.config_folder_hash, local.worker_folder_hash])

}


0 comments on commit 0c23901

Please sign in to comment.