From 398b9b787a27d00b06030b99a326bbf083843914 Mon Sep 17 00:00:00 2001 From: badra001 Date: Tue, 23 Aug 2022 09:21:20 -0400 Subject: [PATCH] start re-working for example to work into module --- examples/efk/copy_images.tf | 43 ++++ examples/efk/expose-kibana.yaml | 72 +++++++ examples/efk/main.tf | 241 ++++++++++++++++++++++ examples/efk/providers.tf | 34 +++ examples/efk/restrict-access.yaml | 41 ++++ examples/efk/variables.common-services.tf | 41 ++++ examples/efk/variables.eks.tf | 15 ++ examples/efk/variables.elk.tf | 28 +++ 8 files changed, 515 insertions(+) create mode 100644 examples/efk/copy_images.tf create mode 100644 examples/efk/expose-kibana.yaml create mode 100644 examples/efk/main.tf create mode 100644 examples/efk/providers.tf create mode 100644 examples/efk/restrict-access.yaml create mode 100644 examples/efk/variables.common-services.tf create mode 100644 examples/efk/variables.eks.tf create mode 100644 examples/efk/variables.elk.tf diff --git a/examples/efk/copy_images.tf b/examples/efk/copy_images.tf new file mode 100644 index 0000000..0fa4a50 --- /dev/null +++ b/examples/efk/copy_images.tf @@ -0,0 +1,43 @@ +data "aws_ecr_authorization_token" "token" {} + +locals { + account_id = data.aws_caller_identity.current.account_id + repo_parent_name = format("eks/%v", var.cluster_name) + + account_ecr = format("%v.dkr.ecr.%v.amazonaws.com/%v", local.account_id, var.region, local.repo_parent_name) + + images = [ + # logging stack related images: + { + name = "elastic/elasticsearch" + image = "docker.elastic.co/elasticsearch/elasticsearch" + tag = var.elasticsearch_tag + }, + { + name = "elastic/kibana" + image = "docker.elastic.co/kibana/kibana" + tag = var.kibana_tag + }, + { + name = "fluent/fluentd-kubernetes-daemonset" + image = "docker.io/fluent/fluentd-kubernetes-daemonset" + tag = var.fluentd_tag + }, + ] + image_repos = { for image in local.images : image.name => format("%v/%v", local.account_ecr, image.name) } +} + +resource "null_resource" 
"copy_images" { + for_each = { for image in local.images : image.name => image } + + provisioner "local-exec" { + command = "${path.module}/copy_image.sh" + environment = { + SOURCE_IMAGE = format("%v:%v", each.value.image, each.value.tag) + DESTINATION_IMAGE = format("%v/%v:%v", local.account_ecr, each.value.name, each.value.tag) + DESTINATION_USERNAME = data.aws_ecr_authorization_token.token.user_name + DESTINATION_PASSWORD = data.aws_ecr_authorization_token.token.password + } + } +} + diff --git a/examples/efk/expose-kibana.yaml b/examples/efk/expose-kibana.yaml new file mode 100644 index 0000000..98772f0 --- /dev/null +++ b/examples/efk/expose-kibana.yaml @@ -0,0 +1,72 @@ +apiVersion: cert-manager.io/v1 +kind: Certificate +metadata: + name: kibana-cert + namespace: istio-system +spec: + secretName: 'kibana-cert' + subject: + organizations: + - census.gov + dnsNames: + - 'kibana.test4.sandbox.csp2.census.gov' + issuerRef: + kind: 'ClusterIssuer' + name: 'clusterissuer' + +--- + +apiVersion: networking.istio.io/v1beta1 +kind: Gateway +metadata: + name: kibana + namespace: istio-system +spec: + selector: + istio: ingressgateway + servers: + - port: + number: 80 + name: http + protocol: HTTP + tls: + httpsRedirect: true + hosts: + - 'kibana.test4.sandbox.csp2.census.gov' + - port: + number: 443 + name: https + protocol: HTTPS + tls: + mode: SIMPLE + credentialName: "kibana-cert" + hosts: + - 'kibana.test4.sandbox.csp2.census.gov' + +--- + +apiVersion: networking.istio.io/v1beta1 +kind: VirtualService +metadata: + name: kibana-routes + namespace: istio-system +spec: + gateways: + - 'kibana' + hosts: + - 'kibana.test4.sandbox.csp2.census.gov' + http: + - name: "kibana-route" + match: + - uri: + prefix: "/" + headers: + request: + set: + X-Forwarded-Port: "443" + route: + - destination: + host: kibana-kibana.logging.svc.cluster.local + port: + number: 5601 + diff --git a/examples/efk/main.tf b/examples/efk/main.tf new file mode 100644 index 0000000..77f7a38 --- 
/dev/null +++ b/examples/efk/main.tf @@ -0,0 +1,241 @@ +resource "kubernetes_namespace" "logging" { + depends_on = [null_resource.copy_images] + + metadata { + name = "logging" + labels = { + istio-injection = "enabled" + } + } +} + +resource "helm_release" "elasticsearch" { + chart = "elasticsearch" + version = var.elasticsearch_chart_version + name = "elasticsearch" + namespace = kubernetes_namespace.logging.metadata[0].name + repository = "https://helm.elastic.co" + + set { + name = "image" + value = local.image_repos["elastic/elasticsearch"] + } + set { + name = "imageTag" + value = var.elasticsearch_tag + } + + # set { + # name = "master.livenessProbe.enabled" + # value = "false" + # } + # set { + # name = "master.readinessProbe.enabled" + # value = "false" + # } + # set { + # name = "coordinating.livenessProbe.enabled" + # value = "false" + # } + # set { + # name = "coordinating.readinessProbe.enabled" + # value = "false" + # } + # set { + # name = "data.livenessProbe.enabled" + # value = "false" + # } + # set { + # name = "data.readinessProbe.enabled" + # value = "false" + # } + + timeout = 300 +} + +resource "helm_release" "kibana" { + chart = "kibana" + version = var.kibana_chart_version + name = "kibana" + namespace = kubernetes_namespace.logging.metadata[0].name + repository = "https://helm.elastic.co" + + depends_on = [helm_release.elasticsearch] + + set { + name = "image" + value = local.image_repos["elastic/kibana"] + } + set { + name = "imageTag" + value = var.kibana_tag + } + + set { + name = "persistence.storageClass" + value = "efs" + } + + set { + name = "elasticsearch.enabled" + value = "false" + } + set { + name = "elasticsearch.external.hosts[0]" + value = "elasticsearch-coordinating-only.logging.svc.cluster.local" + } + set { + name = "elasticsearch.external.port" + value = "9200" + } + set { + name = "elasticsearch.hosts[0]" + value = "elasticsearch-coordinating-only.logging.svc.cluster.local" + } + set { + name = "elasticsearch.port" + 
value = "9200" + } + + # set { + # name = "livenessProbe.enabled" + # value = "false" + # } + # set { + # name = "readinessProbe.enabled" + # value = "false" + # } + + timeout = 180 +} + +resource "kubernetes_config_map" "elasticsearch-output" { + metadata { + name = "elasticsearch-output" + namespace = kubernetes_namespace.logging.metadata[0].name + } + + data = { + "fluentd.conf" = < + @type null + + + # TCP input to receive logs from the forwarders + + @type forward + bind 0.0.0.0 + port 24224 + + + # HTTP input for the liveness and readiness probes + + @type http + bind 0.0.0.0 + port 9880 + + + # Throw the healthcheck to the standard output instead of forwarding it + + @type stdout + + + # Send the logs to the standard output + + @type elasticsearch + include_tag_key true + host "#{ENV['ELASTICSEARCH_HOST']}" + port "#{ENV['ELASTICSEARCH_PORT']}" + logstash_format true + + @type file + path /opt/bitnami/fluentd/logs/buffers/logs.buffer + flush_thread_count 2 + flush_interval 5s + + +EOF + } +} + +resource "kubernetes_config_map" "apache-log-parser" { + metadata { + name = "apache-log-parser" + namespace = kubernetes_namespace.logging.metadata[0].name + } + + data = { + "fluentd.conf" = < + @type null + + + # HTTP input for the liveness and readiness probes + + @type http + port 9880 + + + # Throw the healthcheck to the standard output instead of forwarding it + + @type stdout + + + # Get the logs from the containers running in the cluster + # This block parses logs using an expression valid for the Apache log format + # Update this depending on your application log format + + @type tail + path /var/log/containers/*.log + pos_file /opt/bitnami/fluentd/logs/buffers/fluentd-docker.pos + tag www.log + + @type regexp + expression /^(?[^ ]*) [^ ]* (?[^ ]*) \[(? 
+ + + # Forward all logs to the aggregators + + @type forward + + host fluentd-0.fluentd-headless.logging.svc.cluster.local + port 24224 + + + + @type file + path /opt/bitnami/fluentd/logs/buffers/logs.buffer + flush_thread_count 2 + flush_interval 5s + + +EOF + } +} + +resource "helm_release" "fluentd" { + chart = "fluentd" + version = var.fluentd_chart_version + name = "fluentd" + namespace = kubernetes_namespace.logging.metadata[0].name + repository = "https://fluent.github.io/helm-charts" + + depends_on = [helm_release.elasticsearch] + + set { + name = "image.repository" + value = local.image_repos["fluent/fluentd-kubernetes-daemonset"] + } + set { + name = "image.tag" + value = var.fluentd_tag + } + + timeout = 180 +} + + diff --git a/examples/efk/providers.tf b/examples/efk/providers.tf new file mode 100644 index 0000000..830cf67 --- /dev/null +++ b/examples/efk/providers.tf @@ -0,0 +1,34 @@ +terraform { + required_version = ">= 0.12.31" +} + +data "aws_caller_identity" "current" {} + +data "aws_eks_cluster" "cluster" { + name = var.cluster_name +} + +data "aws_eks_cluster_auth" "cluster" { + name = var.cluster_name +} + +provider "aws" { + region = var.region + profile = var.profile +} + +provider "kubernetes" { + host = data.aws_eks_cluster.cluster.endpoint + + cluster_ca_certificate = base64decode(data.aws_eks_cluster.cluster.certificate_authority[0].data) + token = data.aws_eks_cluster_auth.cluster.token +} + +provider "helm" { + kubernetes { + host = data.aws_eks_cluster.cluster.endpoint + + cluster_ca_certificate = base64decode(data.aws_eks_cluster.cluster.certificate_authority[0].data) + token = data.aws_eks_cluster_auth.cluster.token + } +} diff --git a/examples/efk/restrict-access.yaml b/examples/efk/restrict-access.yaml new file mode 100644 index 0000000..422ee7e --- /dev/null +++ b/examples/efk/restrict-access.yaml @@ -0,0 +1,41 @@ +apiVersion: security.istio.io/v1beta1 +kind: AuthorizationPolicy +metadata: + name: allow-nothing + namespace: 
logging +spec: + {} + +--- + +apiVersion: security.istio.io/v1beta1 +kind: AuthorizationPolicy +metadata: + name: elasticsearch-access + namespace: logging +spec: + action: ALLOW + selector: + matchLabels: + app: elasticsearch-master + rules: + - from: + - source: + namespaces: ["logging"] + +--- + +apiVersion: security.istio.io/v1beta1 +kind: AuthorizationPolicy +metadata: + name: kibana-access + namespace: logging +spec: + action: ALLOW + selector: + matchLabels: + app: kibana + rules: + - to: + - operation: + paths: ["*"] diff --git a/examples/efk/variables.common-services.tf b/examples/efk/variables.common-services.tf new file mode 100644 index 0000000..b2466cf --- /dev/null +++ b/examples/efk/variables.common-services.tf @@ -0,0 +1,41 @@ +# https://github.com/elastic/helm-charts/releases +variable "elasticsearch_chart_version" { + description = "Which version of the elastic/elasticsearch chart to install" + type = string + default = "7.14.0" +} + +variable "elasticsearch_tag" { + description = "Which tag of docker.elastic.co/elasticsearch/elasticsearch to install" + type = string + default = "7.14.0" +} + +# https://github.com/elastic/helm-charts/releases +variable "kibana_chart_version" { + description = "Which version of the elastic/kibana chart to install" + type = string + default = "7.14.0" +} + +variable "kibana_tag" { + description = "Which tag of docker.elastic.co/kibana/kibana to install" + type = string + default = "7.14.0" +} + + +# https://github.com/fluent/helm-charts/releases +variable "fluentd_chart_version" { + description = "Which version of the fluent/fluentd chart to install" + type = string + default = "0.2.10" +} + +# https://hub.docker.com/r/fluent/fluentd-kubernetes-daemonset +variable "fluentd_tag" { + description = "Which tag of docker.io/fluent/fluentd-kubernetes-daemonset to install" + type = string + default = "v1.13.3-debian-elasticsearch7-1.2" +} + diff --git a/examples/efk/variables.eks.tf b/examples/efk/variables.eks.tf new file mode 100644 index 
0000000..ec7b0d9 --- /dev/null +++ b/examples/efk/variables.eks.tf @@ -0,0 +1,15 @@ +variable "region" { + description = "AWS region" + type = string +} + +variable "profile" { + description = "AWS config profile" + type = string + default = "" +} + +variable "cluster_name" { + description = "The name of the EKS cluster into which the EFK logging stack is to be installed." + type = string +} diff --git a/examples/efk/variables.elk.tf b/examples/efk/variables.elk.tf new file mode 100644 index 0000000..09c375b --- /dev/null +++ b/examples/efk/variables.elk.tf @@ -0,0 +1,28 @@ +variable "image_config" { + description = "List of image configuration objects to copy from SOURCE to DESTINATION" + type = list(object({ + name = string, + tag = string, + dest_path = string, + source_registry = string, + source_image = string, + source_tag = string, + enabled = bool, + })) + default = [] +} + + +variable "chart_config" { + description = "List of helm chart configuration objects" + type = list(object({ + name = string, + chart_name = string, + tag = string, + source_repository = string, + image_reference = string + image_tag = string + enabled = bool, + })) + default = []