feat: migrates example on eks-cluster-aws-4.x #173

Merged · 3 commits · May 8, 2024
2 changes: 1 addition & 1 deletion examples/complete/fixtures.us-east-2.tfvars
@@ -10,7 +10,7 @@ stage = "test"

name = "eks-node-group"

- kubernetes_version = "1.25"
+ kubernetes_version = "1.29"

oidc_provider_enabled = true

46 changes: 33 additions & 13 deletions examples/complete/main.tf
@@ -15,6 +15,12 @@ module "label" {
context = module.this.context
}

data "aws_caller_identity" "current" {}

data "aws_iam_session_context" "current" {
arn = data.aws_caller_identity.current.arn
}

locals {
# The usage of the specific kubernetes.io/cluster/* resource tags below are required
# for EKS and Kubernetes to discover and manage networking resources
@@ -45,18 +51,34 @@ locals {
}

extra_policy_arn = "arn:aws:iam::aws:policy/job-function/ViewOnlyAccess"

+ # Enable the IAM user creating the cluster to administer it,
+ # without using the bootstrap_cluster_creator_admin_permissions option,
+ # as a way to test the access_entry_map feature.
+ # In general, this is not recommended. Instead, you should
+ # create the access_entry_map statically, with the ARNs you want to
+ # have access to the cluster. We do it dynamically here just for testing purposes.
+ # See the original PR for more information:
+ # https://github.com/cloudposse/terraform-aws-eks-cluster/pull/206
+ access_entry_map = {
+   (data.aws_iam_session_context.current.issuer_arn) = {
+     access_policy_associations = {
+       ClusterAdmin = {}
+     }
+   }
+ }
}

module "vpc" {
source = "cloudposse/vpc/aws"
version = "2.1.0"
version = "2.2.0"
ipv4_primary_cidr_block = var.vpc_cidr_block
context = module.this.context
}

module "subnets" {
source = "cloudposse/dynamic-subnets/aws"
version = "2.4.1"
version = "2.4.2"
availability_zones = var.availability_zones
vpc_id = module.vpc.vpc_id
igw_id = [module.vpc.igw_id]
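
Note on the access_entry_map above: as the inline comment says, real deployments should define the map statically, with the ARNs of the principals that should have access. A minimal sketch of the static form, using a hypothetical admin-role ARN purely for illustration:

locals {
  # Hypothetical principal; replace with the role(s) that should administer the cluster.
  admin_role_arn = "arn:aws:iam::111111111111:role/eks-admins"

  access_entry_map = {
    (local.admin_role_arn) = {
      access_policy_associations = {
        # Grants cluster-wide admin, mirroring the dynamic example above.
        ClusterAdmin = {}
      }
    }
  }
}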
@@ -68,11 +90,10 @@

module "ssh_source_access" {
source = "cloudposse/security-group/aws"
version = "0.4.3"
version = "2.2.0"

attributes = ["ssh", "source"]
security_group_description = "Test source security group ssh access only"
- create_before_destroy = true
allow_all_egress = true

rules = [local.allow_all_ingress_rule]
@@ -85,11 +106,10 @@ module "ssh_source_access" {

module "https_sg" {
source = "cloudposse/security-group/aws"
version = "0.4.3"
version = "2.2.0"

attributes = ["http"]
security_group_description = "Allow http access"
- create_before_destroy = true
allow_all_egress = true

rules = [local.allow_http_ingress_rule]
@@ -101,21 +121,21 @@ module "https_sg" {

module "eks_cluster" {
source = "cloudposse/eks-cluster/aws"
version = "2.9.0"
version = "4.1.0"
region = var.region
vpc_id = module.vpc.vpc_id
subnet_ids = module.subnets.public_subnet_ids
kubernetes_version = var.kubernetes_version
- local_exec_interpreter = var.local_exec_interpreter
oidc_provider_enabled = var.oidc_provider_enabled
enabled_cluster_log_types = var.enabled_cluster_log_types
cluster_log_retention_period = var.cluster_log_retention_period

- # data auth has problems destroying the auth-map
- kube_data_auth_enabled = false
- kube_exec_auth_enabled = true
+ access_config = {
+   authentication_mode                         = "API"
+   bootstrap_cluster_creator_admin_permissions = false
+ }

- context = module.this.context
+ access_entry_map = local.access_entry_map
+ context          = module.this.context
}

module "eks_node_group" {
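For context on the change above: eks-cluster 4.x drops management of the aws-auth ConfigMap (hence the removal of kube_data_auth_enabled and kube_exec_auth_enabled) in favor of the EKS access entries API, which is why versions.tf below now requires AWS provider >= 5.34. As a rough sketch only — the module manages these resources itself, so do not add them alongside it — one access_entry_map entry corresponds approximately to the raw provider resources:

# Illustrative equivalent of the ClusterAdmin entry in access_entry_map.
resource "aws_eks_access_entry" "admin" {
  cluster_name  = module.eks_cluster.eks_cluster_id
  principal_arn = data.aws_iam_session_context.current.issuer_arn
  type          = "STANDARD"
}

resource "aws_eks_access_policy_association" "admin" {
  cluster_name  = module.eks_cluster.eks_cluster_id
  principal_arn = data.aws_iam_session_context.current.issuer_arn

  # "ClusterAdmin" maps to this EKS-managed access policy.
  policy_arn = "arn:aws:eks::aws:cluster-access-policy/AmazonEKSClusterAdminPolicy"

  access_scope {
    type = "cluster"
  }
}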
15 changes: 0 additions & 15 deletions examples/complete/outputs.tf
@@ -13,21 +13,6 @@ output "vpc_cidr" {
description = "VPC ID"
}

output "eks_cluster_security_group_id" {
description = "ID of the EKS cluster Security Group"
value = module.eks_cluster.security_group_id
}

output "eks_cluster_security_group_arn" {
description = "ARN of the EKS cluster Security Group"
value = module.eks_cluster.security_group_arn
}

output "eks_cluster_security_group_name" {
description = "Name of the EKS cluster Security Group"
value = module.eks_cluster.security_group_name
}

output "eks_cluster_id" {
description = "The name of the cluster"
value = module.eks_cluster.eks_cluster_id
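The deleted security_group_* outputs have no direct counterpart in 4.x, which no longer creates the deprecated additional security group for the cluster. Downstream code that needs a security group to reference would typically use the EKS-managed one instead — a sketch, assuming the module still exposes it via the eks_cluster_managed_security_group_id output as the 2.x versions did:

output "eks_cluster_managed_security_group_id" {
  description = "ID of the security group created and managed by EKS for this cluster"
  value       = module.eks_cluster.eks_cluster_managed_security_group_id
}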
2 changes: 1 addition & 1 deletion examples/complete/versions.tf
@@ -4,7 +4,7 @@ terraform {
required_providers {
aws = {
source = "hashicorp/aws"
version = ">= 3.0"
version = ">= 5.34"
}
template = {
source = "cloudposse/template"