diff --git a/README.md b/README.md
index 7ceeffa..47d7d46 100644
--- a/README.md
+++ b/README.md
@@ -13,10 +13,16 @@ coredns
kube-proxy
# CHANGELOG
-- created content in the README
-
-
-## Requirements
+* 0.0.3 -- 2024-07-30
+ - updated to use karpenter
+ - misc cleanup
+ - add hack dns for today until modules work
+* 0.0.2 -- 2024-07-22
+ - updated version.tf to 0.0.2
+ - add kube.config update after cluster create
+ - update ami_type to AL2023
+ - update upstream cluster module to 20.20.0
+ - created changelog
| Name | Version |
|------|---------|
@@ -131,4 +137,4 @@ kube-proxy
| [vpc\_cidr\_block](#output\_vpc\_cidr\_block) | The CIDR block associated with the VPC. |
| [vpc\_cni\_irsa\_role](#output\_vpc\_cni\_irsa\_role) | The arn/name/unique\_id of the irsa role for the vpc-cni addon |
| [vpc\_id](#output\_vpc\_id) | The VPC id where the EKS cluster was deployed. |
-
\ No newline at end of file
+
diff --git a/aws_data.tf b/aws_data.tf
index c2dfb22..7dead47 100644
--- a/aws_data.tf
+++ b/aws_data.tf
@@ -5,10 +5,31 @@ data "aws_region" "current" {}
data "aws_arn" "current" {
arn = data.aws_caller_identity.current.arn
}
-
+data "aws_subnets" "container-subnets" {
+ filter {
+ name = "tag:Name"
+ values = [local.container_subnets_name]
+ }
+ filter {
+ name = "vpc-id"
+ values = [data.aws_vpc.eks_vpc.id]
+ }
+}
+data "aws_subnets" "lb-subnets" {
+ filter {
+ name = "tag:Name"
+ values = [local.lb_subnets_name]
+ }
+ filter {
+ name = "vpc-id"
+ values = [data.aws_vpc.eks_vpc.id]
+ }
+}
locals {
- base_arn = format("arn:%v:%%v:%v:%v:%%v:%%v", data.aws_arn.current.partition, data.aws_region.current.name, data.aws_caller_identity.current.account_id)
- iam_arn = format("arn:%v:iam::%v:%%v", data.aws_arn.current.partition, data.aws_caller_identity.current.account_id)
+ container_subnets_name = var.subnets_name
+ lb_subnets_name = var.lb_subnets_name
+ base_arn = format("arn:%v:%%v:%v:%v:%%v:%%v", data.aws_arn.current.partition, data.aws_region.current.name, data.aws_caller_identity.current.account_id)
+ iam_arn = format("arn:%v:iam::%v:%%v", data.aws_arn.current.partition, data.aws_caller_identity.current.account_id)
common_arn = format("arn:%v:%%v:%v:%v:%%v",
data.aws_arn.current.partition,
data.aws_region.current.name,
diff --git a/dns_zones.tf b/dns_zones.tf
new file mode 100644
index 0000000..c54d080
--- /dev/null
+++ b/dns_zones.tf
@@ -0,0 +1,394 @@
+#-------------------------------------------------
+# DNS Zone for EKS
+#-------------------------------------------------
+locals {
+ cluster_domain_name = format("%v.%v", var.cluster_name, var.vpc_domain_name)
+ cluster_domain_description = format("%v EKS Cluster DNS Zone", var.cluster_name)
+ account_environment = data.aws_arn.current.partition == "aws-us-gov" ? "gov" : "ew"
+ region_short = join("", [for c in split("-", var.region) : substr(c, 0, 1)])
+ zone_ids = compact(var.zone_ids)
+}
+#-------------------------------------------------
+# Providers for Cross Account DNS Action
+#-------------------------------------------------
+provider "aws" {
+ alias = "route53_main_east"
+ region = var.region_map["east"]
+ assume_role {
+ role_arn = format("arn:%v:iam::%v:role/r-inf-terraform-route53", data.aws_arn.current.partition, var.route53_endpoints["route53_main"].account_id)
+ session_name = var.os_username
+ }
+}
+
+provider "aws" {
+ alias = "route53_main_west"
+ region = var.region_map["west"]
+ assume_role {
+ role_arn = format("arn:%v:iam::%v:role/r-inf-terraform-route53", data.aws_arn.current.partition, var.route53_endpoints["route53_main"].account_id)
+ session_name = var.os_username
+ }
+}
+
+provider "aws" {
+ alias = "self"
+ # assume_role {
+ # role_arn = format("arn:%v:iam::%v:role/r-inf-terraform", data.aws_arn.current.partition, data.aws_caller_identity.current.account_id)
+ # session_name = var.os_username
+ # }
+}
+
+#-------------------------------------------------
+# network prod for shared vpcs zones
+#-------------------------------------------------
+
+## Associate between self (vpc8) and network-prod-west
+resource "aws_route53_vpc_association_authorization" "self_zone" {
+ provider = aws.self
+ for_each = toset(local.zone_ids)
+ zone_id = each.key
+ vpc_region = var.region_map["west"]
+ vpc_id = data.aws_vpc.eks_vpc.id
+}
+
+resource "aws_route53_zone_association" "self_zone_west" {
+ provider = aws.route53_main_west
+ for_each = toset(local.zone_ids)
+ zone_id = each.key
+ vpc_id = data.aws_vpc.eks_vpc.id
+ vpc_region = var.region_map["west"]
+ depends_on = [aws_route53_vpc_association_authorization.self_zone]
+}
+
+## Associate between self (vpc8) and network-prod-east
+resource "aws_route53_vpc_association_authorization" "self_zone_east" {
+ provider = aws.self
+ for_each = toset(local.zone_ids)
+ zone_id = each.key
+ vpc_region = var.region_map["east"]
+ vpc_id = data.aws_vpc.eks_vpc.id
+}
+
+resource "aws_route53_zone_association" "self_zone_east" {
+ provider = aws.route53_main_east
+ for_each = toset(local.zone_ids)
+ zone_id = each.key
+ vpc_id = data.aws_vpc.eks_vpc.id
+ vpc_region = var.region_map["east"]
+ depends_on = [aws_route53_vpc_association_authorization.self_zone_east]
+}
+
+#---
+# zone list
+#---
+data "aws_route53_zone" "zones" {
+ provider = aws.self
+ for_each = toset(local.zone_ids)
+ zone_id = each.key
+ private_zone = true
+}
+
+resource "aws_route53_zone" "cluster_domain" {
+ name = local.cluster_domain_name
+ comment = local.cluster_domain_description
+ force_destroy = false
+ depends_on = [
+ data.aws_vpc.dummy_vpc
+ ]
+ vpc {
+ vpc_id = !(var.shared_vpc_label == null || var.shared_vpc_label == "") ? try(data.aws_vpc.dummy_vpc[0].id, data.aws_vpc.eks_vpc.id) : data.aws_vpc.eks_vpc.id
+ vpc_region = var.region
+ }
+
+ lifecycle {
+ ignore_changes = [vpc]
+ }
+
+ tags = merge(
+ var.tags,
+ { "Name" = local.cluster_domain_name },
+ )
+}
+
+## Dummy VPC
+
+#---
+# dummy vpc, so we can associate the zone to this account
+#---
+data "aws_vpc" "dummy_vpc" {
+ depends_on = [aws_vpc.vpc]
+ count = !(var.shared_vpc_label == null || var.shared_vpc_label == "") ? 1 : 0
+ filter {
+ name = "tag:Name"
+ values = ["vpc0-dummy"]
+ }
+ # filter {
+ # name = "tag:eks-cluster-name"
+ # values = [var.cluster_name]
+ # }
+}
+
+resource "aws_vpc" "vpc" {
+ cidr_block = "192.168.0.0/24"
+ enable_dns_support = false
+ enable_dns_hostnames = false
+ tags = merge(
+ var.tags,
+ { "Name" = "vpc0-dummy" },
+ )
+}
+# Tag existing subnets for EKS
+# Container subnets under data.aws_subnets.container-subnets
+# Load Balance subnets under data.aws_subnets.lb-subnets
+resource "aws_ec2_tag" "container-subnets" {
+ for_each = toset(data.aws_subnets.container-subnets.ids)
+ resource_id = each.value
+ key = "kubernetes.io/cluster/${var.cluster_name}"
+ value = "shared"
+}
+
+resource "aws_ec2_tag" "lb-subnets" {
+ for_each = toset(data.aws_subnets.lb-subnets.ids)
+ resource_id = each.value
+ key = "kubernetes.io/role/internal-elb"
+ value = "1"
+}
+
+#### This is the correct way, it's commented because
+#### the module is throwing an error on the for_each
+#### in the module.
+# locals {
+# vpc_domain_name = var.vpc_domain_name
+# cluster_domain_name = format("%v.%v", var.cluster_name, local.vpc_domain_name)
+# cluster_domain_description = format("%v EKS Cluster DNS Zone", var.cluster_name)
+# region = var.region
+# zone_ids = compact(var.zone_ids)
+# }
+
+# #---
+# # network prod
+# #---
+# provider "aws" {
+# alias = "route53_main_east"
+# region = var.region_map["east"]
+# assume_role {
+# role_arn = format("arn:%v:iam::%v:role/r-inf-terraform-route53", data.aws_arn.current.partition, var.route53_endpoints["route53_main"].account_id)
+# session_name = var.os_username
+# }
+# }
+
+# provider "aws" {
+# alias = "route53_main_west"
+# region = var.region_map["west"]
+# assume_role {
+# role_arn = format("arn:%v:iam::%v:role/r-inf-terraform-route53", data.aws_arn.current.partition, var.route53_endpoints["route53_main"].account_id)
+# session_name = var.os_username
+# }
+# }
+
+# provider "aws" {
+# alias = "self"
+# assume_role {
+# role_arn = format("arn:%v:iam::%v:role/r-inf-terraform", data.aws_arn.current.partition, data.aws_caller_identity.current.account_id)
+# session_name = var.os_username
+# }
+# }
+# #---
+# # dummy vpc, so we can associate the zone to this account
+# #---
+# data "aws_vpc" "dummy_vpc" {
+# provider = aws.self
+# depends_on = [aws_vpc.vpc]
+# count = !(var.shared_vpc_label == null || var.shared_vpc_label == "") ? 1 : 0
+# filter {
+# name = "tag:Name"
+# values = ["vpc0-dummy"]
+# }
+# filter {
+# name = "tag:eks-cluster-name"
+# values = [var.cluster_name]
+# }
+# }
+
+# ## Dummy VPC
+# resource "aws_vpc" "vpc" {
+# provider = aws.self
+# cidr_block = "192.168.0.0/24"
+# enable_dns_support = false
+# enable_dns_hostnames = false
+# tags = merge(
+# var.tags,
+# { "Name" = "vpc0-dummy" },
+# )
+# }
+
+# #---
+# # zone list
+# #---
+# data "aws_route53_zone" "zones" {
+# provider = aws.self
+# for_each = toset(local.zone_ids)
+# zone_id = each.key
+# private_zone = true
+# }
+
+# resource "aws_route53_zone" "cluster_domain" {
+# provider = aws.self
+# name = local.cluster_domain_name
+# comment = local.cluster_domain_description
+# force_destroy = false
+
+# vpc {
+# vpc_id = !(var.shared_vpc_label == null || var.shared_vpc_label == "") ? try(data.aws_vpc.dummy_vpc[0].id, null) : data.aws_vpc.eks_vpc.id
+# vpc_region = local.region
+# }
+
+# lifecycle {
+# ignore_changes = [vpc]
+# precondition {
+# condition = (var.shared_vpc_label == null || var.shared_vpc_label == "") || (!(var.shared_vpc_label == null || var.shared_vpc_label == "") && !(var.vpc_domain_name == null || var.vpc_domain_name == ""))
+# error_message = "var.vpc_domain_name must be provided when shared VPCs are in use."
+# }
+# }
+
+# tags = merge(
+# var.tags,
+# { "Name" = local.cluster_domain_name },
+# )
+# }
+
+# #---
+# # need to also associate with network-prod account and this vpc
+# #---
+# module "route53_cluster_domain_east" {
+
+# count = local.region == "us-gov-east-1" && !(var.shared_vpc_label == null || var.shared_vpc_label == "") ? 1 : 0
+# providers = {
+# aws.self = aws
+# aws.peer = aws.route53_main_east
+# }
+
+# source = "git@github.e.it.census.gov:terraform-modules/aws-vpc-setup.git//route53-zone-association/zone?ref=tf-upgrade"
+# region = "us-gov-east-1"
+# vpc_id = data.aws_vpc.eks_vpc.id
+# zone_ids = [aws_route53_zone.cluster_domain.zone_id]
+
+# tags = var.tags
+# }
+
+# module "route53_cluster_domain_west" {
+
+# count = local.region == "us-gov-west-1" && !(var.shared_vpc_label == null || var.shared_vpc_label == "") ? 1 : 0
+# providers = {
+# aws.self = aws
+# aws.peer = aws.route53_main_west
+# }
+
+# source = "git@github.e.it.census.gov:terraform-modules/aws-vpc-setup.git//route53-zone-association/zone?ref=tf-upgrade"
+# region = "us-gov-west-1"
+# vpc_id = data.aws_vpc.eks_vpc.id
+# zone_ids = [aws_route53_zone.cluster_domain.zone_id]
+
+# tags = var.tags
+# }
+
+# output "cluster_domain_name" {
+# description = "DNS Zone Name"
+# value = local.cluster_domain_name
+# }
+
+# output "cluster_domain_id" {
+# description = "DNS Zone ID"
+# value = aws_route53_zone.cluster_domain.zone_id
+# }
+
+# output "cluster_domain_ns" {
+# description = "DNS Zone Nameservers"
+# value = aws_route53_zone.cluster_domain.name_servers
+# }
+
+# #---
+# # associate to main do2-govcloud vpc1-services east and west for inbound resolution
+# # and to vpc7-endpoints in network prod
+# #---
+
+# # #---
+# # # network prod
+# # #---
+# # provider "aws" {
+# # alias = "route53_main"
+# # region = var.region_map["east"]
+# # profile = var.profile
+# # assume_role {
+# # role_arn = format("arn:%v:iam::%v:role/r-inf-terraform-route53", data.aws_arn.current.partition, var.route53_endpoints["route53_main"].account_id)
+# # session_name = var.os_username
+# # }
+# # }
+
+# # module "route53_main_east" {
+# # providers = {
+# # aws.self = aws
+# # aws.peer = aws.route53_main
+# # }
+
+# # source = "git@github.e.it.census.gov:terraform-modules/aws-vpc-setup.git//route53-zone-association/zone?ref=tf-upgrade"
+# # region = "us-gov-east-1"
+# # vpc_id = var.route53_endpoints["route53_main"]["us-gov-east-1"]
+# # zone_ids = [aws_route53_zone.cluster_domain.zone_id]
+
+# # tags = var.tags
+# # }
+
+# # module "route53_main_west" {
+# # providers = {
+# # aws.self = aws
+# # aws.peer = aws.route53_main
+# # }
+
+# # source = "git@github.e.it.census.gov:terraform-modules/aws-vpc-setup.git//route53-zone-association/zone?ref=tf-upgrade"
+# # region = "us-gov-west-1"
+# # vpc_id = var.route53_endpoints["route53_main"]["us-gov-west-1"]
+# # zone_ids = [aws_route53_zone.cluster_domain.zone_id]
+
+# # tags = var.tags
+# # }
+
+# #---
+# # do2-gov ("legacy")
+# #---
+# # provider "aws" {
+# # alias = "route53_main_legacy"
+# # region = var.region_map["east"]
+# # profile = var.profile
+# # assume_role {
+# # role_arn = format("arn:%v:iam::%v:role/r-inf-terraform-route53", data.aws_arn.current.partition, var.route53_endpoints["route53_main_legacy"].account_id)
+# # session_name = var.os_username
+# # }
+# # }
+
+# # module "route53_main_legacy_east" {
+# # providers = {
+# # aws.self = aws
+# # aws.peer = aws.route53_main_legacy
+# # }
+
+# # source = "git@github.e.it.census.gov:terraform-modules/aws-vpc-setup.git//route53-zone-association/zone?ref=tf-upgrade"
+# # region = "us-gov-east-1"
+# # vpc_id = var.route53_endpoints["route53_main_legacy"]["us-gov-east-1"]
+# # zone_ids = [aws_route53_zone.cluster_domain.zone_id]
+
+# # tags = var.tags
+# # }
+
+# # module "route53_main_legacy_west" {
+# # providers = {
+# # aws.self = aws
+# # aws.peer = aws.route53_main_legacy
+# # }
+
+# # source = "git@github.e.it.census.gov:terraform-modules/aws-vpc-setup.git//route53-zone-association/zone?ref=tf-upgrade"
+# # region = "us-gov-west-1"
+# # vpc_id = var.route53_endpoints["route53_main_legacy"]["us-gov-west-1"]
+# # zone_ids = [aws_route53_zone.cluster_domain.zone_id]
+
+# # tags = var.tags
+# # }
diff --git a/main.tf b/main.tf
index 074159b..60f4400 100644
--- a/main.tf
+++ b/main.tf
@@ -72,7 +72,7 @@ locals {
}
module "cluster" {
- source = "git@github.e.it.census.gov:SCT-Engineering/terraform-aws-eks.git?ref=v20.8.5"
+ source = "git@github.e.it.census.gov:SCT-Engineering/terraform-aws-eks.git?ref=v20.20.0"
#version = "19.16.0"
cluster_name = var.cluster_name
@@ -114,10 +114,10 @@ module "cluster" {
}
eks_managed_node_group_defaults = {
- ami_type = "AL2_x86_64"
+ ami_type = "BOTTLEROCKET_x86_64"
}
- node_security_group_enable_recommended_rules = false
+ node_security_group_enable_recommended_rules = true
node_security_group_additional_rules = local.node_security_group_additional_rules
@@ -139,10 +139,10 @@ module "cluster" {
xvda = {
device_name = "/dev/xvda"
ebs = {
- volume_size = var.eks_instance_disk_size
- volume_type = "gp3"
- iops = 3000
- throughput = 125
+ volume_size = var.eks_instance_disk_size
+ volume_type = "gp3"
+ # iops = 3000
+ # throughput = 125
encrypted = true
delete_on_termination = true
kms_key_id = data.aws_kms_key.ebs_key.arn
@@ -176,14 +176,3 @@ resource "null_resource" "kube_config_create" {
command = "aws eks --region ${data.aws_region.current.name} update-kubeconfig --name ${module.cluster.cluster_name} --profile=${var.profile} && export KUBE_CONFIG_PATH=~/.kube/config && export KUBERNETES_MASTER=~/.kube/config"
}
}
-
-
-# resource "kubernetes_namespace" "operators" {
-# depends_on = [
-# module.cluster.eks_managed_node_groups,
-# ]
-
-# metadata {
-# name = var.operators_ns
-# }
-# }
diff --git a/outputs.tf b/outputs.tf
index a17cf6b..d67e519 100644
--- a/outputs.tf
+++ b/outputs.tf
@@ -42,7 +42,7 @@ output "security_group_all_worker_mgmt_id" {
output "cluster_fqdn" {
description = "The cluster_name.domain"
- value = format("%v.%v", var.cluster_name, var.domain)
+ value = format("%v.%v", var.cluster_name, var.vpc_domain_name)
}
################################################################################
@@ -277,6 +277,6 @@ output "self_managed_node_groups_autoscaling_group_names" {
################################################################################
# Additional
################################################################################
-output "cluster_autoscaler_role_name" {
- value = module.cluster_autoscaler_irsa_role.iam_role_name
-}
+# output "cluster_autoscaler_role_name" {
+# value = module.cluster_autoscaler_irsa_role.iam_role_name
+# }
diff --git a/variables.tf b/variables.tf
index eb16b7f..2481239 100644
--- a/variables.tf
+++ b/variables.tf
@@ -32,8 +32,14 @@ variable "subnets_name" {
default = "*-container-*"
}
-variable "domain" {
- description = "The DNS domain name of the cluster."
+variable "lb_subnets_name" {
+ description = "Define the name of the load balancer subnets to be used by this cluster"
+ type = string
+ default = "*-private-lb-*"
+}
+
+variable "vpc_domain_name" {
+ description = "The DNS domain name of the vpc the cluster is in."
type = string
}
@@ -107,8 +113,76 @@ variable "profile" {
default = ""
}
+variable "account_id" {
+ description = "AWS account id"
+ type = string
+ default = ""
+}
+
+variable "region" {
+ description = "AWS config region"
+ type = string
+ default = ""
+}
+
variable "aws_environment" {
description = "AWS Environment (govcloud | east-west)"
type = string
default = ""
}
+
+variable "os_username" {
+ description = "OS username from environment variable, ideally as $USER"
+ type = string
+ default = null
+}
+
+###################################################################
+# DNS variables
+###################################################################
+
+variable "main_dns_vpcs" {
+ description = "Map of region and VPC ids of the vpc1-services in us-gov-west-1 and us-gov-east-1 for centralized DNS"
+ type = map(string)
+ default = {
+ "us-gov-east-1" = "vpc-070595c5b133243dd"
+ "us-gov-west-1" = "vpc-08b7b4db6a5ddf9c1"
+ }
+}
+
+variable "main_dns_profile" {
+ description = "Profile name for AWS for the main DNS central account"
+ type = string
+ default = "269244441389-lab-gov-network-nonprod"
+}
+
+variable "shared_vpc_label" {
+ description = "Label to use for shared VPC for flowlogs and other things"
+ type = string
+ default = null
+}
+
+variable "region_map" {
+ description = "AWS region map"
+ type = map(string)
+ default = { "east" : "us-gov-east-1", "west" : "us-gov-west-1" }
+}
+
+variable "route53_endpoints" {
+ description = "Map of target route53 endpoints (for inbound) central VPCs"
+ type = map(map(string))
+ default = {
+ route53_main = {
+ "account_id" = "269244441389"
+ "alias" = "lab-gov-network-nonprod"
+ "us-gov-east-1" = "vpc-070595c5b133243dd"
+ "us-gov-west-1" = "vpc-08b7b4db6a5ddf9c1"
+ }
+ }
+}
+
+variable "zone_ids" {
+ description = "List of Route53 PHZ IDs to associate with a (local/remote) VPC"
+ type = list(string)
+ default = []
+}
diff --git a/version.tf b/version.tf
index c703b7b..04110bc 100644
--- a/version.tf
+++ b/version.tf
@@ -1,4 +1,4 @@
locals {
_module_name = "tfmod-eks"
- _module_version = "0.0.1"
+ _module_version = "0.0.3"
}