diff --git a/infrastructure/ecs_services/airflow_metrics.tf b/infrastructure/ecs_services/airflow_metrics.tf
index b39d541..0d0b9b7 100644
--- a/infrastructure/ecs_services/airflow_metrics.tf
+++ b/infrastructure/ecs_services/airflow_metrics.tf
@@ -32,6 +32,19 @@ resource "aws_ecs_task_definition" "airflow_metrics" {
     cpu_architecture = "X86_64"
   }
   requires_compatibilities = ["FARGATE"]
+  volume {
+    name = "efs-${var.prefix}"
+    efs_volume_configuration {
+      file_system_id          = aws_efs_file_system.efs.id
+      root_directory          = "/mnt/data"
+      transit_encryption      = "ENABLED"
+      transit_encryption_port = 2999
+      authorization_config {
+        access_point_id = aws_efs_access_point.access.id
+        iam             = "ENABLED"
+      }
+    }
+  }
   container_definitions = jsonencode([
     {
       name = "metrics"
@@ -42,6 +55,13 @@ resource "aws_ecs_task_definition" "airflow_metrics" {
       entryPoint = [
         "python"
       ]
+      mountPoints : [
+        {
+          "containerPath" : "/opt/airflow/dags_efs",
+          "sourceVolume" : "efs-${var.prefix}"
+
+        }
+      ]
       command = [
         "scripts/put_airflow_worker_autoscaling_metric_data.py",
         "--cluster-name",
@@ -89,5 +109,5 @@ resource "aws_ecs_service" "airflow_metrics" {
   }
   platform_version    = "1.4.0"
   scheduling_strategy = "REPLICA"
-  force_new_deployment = var.force_new_ecs_service_deployment
+  # force_new_deployment = var.force_new_ecs_service_deployment
 }
diff --git a/infrastructure/ecs_services/airflow_scheduler.tf b/infrastructure/ecs_services/airflow_scheduler.tf
index eaf8cff..1a8c774 100644
--- a/infrastructure/ecs_services/airflow_scheduler.tf
+++ b/infrastructure/ecs_services/airflow_scheduler.tf
@@ -48,12 +48,33 @@ resource "aws_ecs_task_definition" "airflow_scheduler" {
     cpu_architecture = "X86_64"
   }
   requires_compatibilities = ["FARGATE"]
+  volume {
+    name = "efs-${var.prefix}"
+    efs_volume_configuration {
+      file_system_id          = aws_efs_file_system.efs.id
+      root_directory          = "/mnt/data"
+      transit_encryption      = "ENABLED"
+      transit_encryption_port = 2999
+      authorization_config {
+        access_point_id = aws_efs_access_point.access.id
+        iam             = "ENABLED"
+      }
+    }
+  }
+
   container_definitions = jsonencode([
     {
       name   = "scheduler"
       image  = join(":", [aws_ecr_repository.airflow.repository_url, "latest"])
       cpu    = var.scheduler_cpu
       memory = var.scheduler_memory
+      mountPoints : [
+        {
+          "containerPath" : "/opt/airflow/dags_efs",
+          "sourceVolume" : "efs-${var.prefix}"
+
+        }
+      ]
       healthcheck = {
         command = [
           "CMD-SHELL",
@@ -138,7 +159,7 @@ resource "aws_ecs_service" "airflow_scheduler" {
   }
   platform_version    = "1.4.0"
   scheduling_strategy = "REPLICA"
-  force_new_deployment = var.force_new_ecs_service_deployment
+  # force_new_deployment = var.force_new_ecs_service_deployment
 }
diff --git a/infrastructure/ecs_services/airflow_server.tf b/infrastructure/ecs_services/airflow_server.tf
index 2ae061c..bd7c55a 100644
--- a/infrastructure/ecs_services/airflow_server.tf
+++ b/infrastructure/ecs_services/airflow_server.tf
@@ -110,7 +110,7 @@ resource "aws_ecs_service" "airflow_webserver" {
     container_name   = "webserver"
     container_port   = 8080
   }
-  force_new_deployment = var.force_new_ecs_service_deployment
+  # force_new_deployment = var.force_new_ecs_service_deployment
   # This can be used to update tasks to use a newer container image with same
   # image/tag combination (e.g., myimage:latest)
 }
diff --git a/infrastructure/ecs_services/airflow_standalone_task.tf b/infrastructure/ecs_services/airflow_standalone_task.tf
index 55c8cef..d7dae0e 100644
--- a/infrastructure/ecs_services/airflow_standalone_task.tf
+++ b/infrastructure/ecs_services/airflow_standalone_task.tf
@@ -34,12 +34,32 @@ resource "aws_ecs_task_definition" "airflow_standalone_task" {
resource "aws_ecs_task_definition" "airflow_standalone_task" { cpu_architecture = "X86_64" } requires_compatibilities = ["FARGATE"] + volume { + name = "efs-${var.prefix}" + efs_volume_configuration { + file_system_id = aws_efs_file_system.efs.id + root_directory = "/mnt/data" + transit_encryption = "ENABLED" + transit_encryption_port = 2999 + authorization_config { + access_point_id = aws_efs_access_point.access.id + iam = "ENABLED" + } + } + } container_definitions = jsonencode([ { name = "airflow" image = join(":", [aws_ecr_repository.airflow.repository_url, "latest"]) cpu = 256 memory = 512 + mountPoints : [ + { + "containerPath" : "/opt/airflow/dags_efs", + "sourceVolume" : "efs-${var.prefix}" + + } + ] essential = true command = ["version"] environment = var.airflow_task_common_environment diff --git a/infrastructure/ecs_services/airflow_worker.tf b/infrastructure/ecs_services/airflow_worker.tf index 94ae2ee..ad2c4a1 100644 --- a/infrastructure/ecs_services/airflow_worker.tf +++ b/infrastructure/ecs_services/airflow_worker.tf @@ -18,6 +18,19 @@ resource "aws_ecs_task_definition" "airflow_worker" { cpu_architecture = "X86_64" } requires_compatibilities = ["FARGATE"] + volume { + name = "efs-${var.prefix}" + efs_volume_configuration { + file_system_id = aws_efs_file_system.efs.id + root_directory = "/mnt/data" + transit_encryption = "ENABLED" + transit_encryption_port = 2999 + authorization_config { + access_point_id = aws_efs_access_point.access.id + iam = "ENABLED" + } + } + } container_definitions = jsonencode([ { @@ -27,6 +40,13 @@ resource "aws_ecs_task_definition" "airflow_worker" { memory = var.worker_memory essential = true command = var.worker_cmd != [] ? var.worker_cmd : ["celery", "worker"] + mountPoints : [ + { + "containerPath" : "/opt/airflow/dags_efs", + "sourceVolume" : "efs-${var.prefix}" + + } + ] linuxParameters = { initProcessEnabled = true } @@ -94,7 +114,7 @@ resource "aws_ecs_service" "airflow_worker" { capacity_provider = "FARGATE" weight = 1 } - force_new_deployment = var.force_new_ecs_service_deployment + # force_new_deployment = var.force_new_ecs_service_deployment } diff --git a/infrastructure/ecs_services/efs.tf b/infrastructure/ecs_services/efs.tf new file mode 100644 index 0000000..ba536b1 --- /dev/null +++ b/infrastructure/ecs_services/efs.tf @@ -0,0 +1,49 @@ +##### +# EFS +##### + +locals { + task_security_group_ids = [ + aws_security_group.airflow_worker_service.id, + aws_security_group.airflow_scheduler_service.id, + aws_security_group.airflow_standalone_task.id, + aws_security_group.airflow_metrics_service.id, + aws_security_group.airflow_webserver_service.id + ] +} +resource "aws_efs_file_system" "efs" { + creation_token = "${var.prefix}-efs" + + tags = { + Name = "${var.prefix}-efs" + } +} + +resource "aws_efs_access_point" "access" { + file_system_id = aws_efs_file_system.efs.id +} +resource "aws_security_group" "efs" { + name = "${var.prefix}-efs-sg" + vpc_id = var.vpc_id + + ingress { + protocol = "tcp" + from_port = 2999 + to_port = 2999 + security_groups = local.task_security_group_ids + cidr_blocks = ["10.0.0.0/16"] + } + ingress { + description = "NFS traffic from VPC" + from_port = 2049 + to_port = 2049 + protocol = "tcp" + cidr_blocks = ["10.0.0.0/16"] + } +} +resource "aws_efs_mount_target" "mount" { + count = 2 + file_system_id = aws_efs_file_system.efs.id + subnet_id = var.private_subnet_ids[count.index] + security_groups = [aws_security_group.efs.id] +} \ No newline at end of file