I'm trying to provision a working EKS cluster. I've got the AWS resources up with Terraform, but can't get Terraform to provision the kubernetes configuration. I have this. All the references to the remote state return the correct values.
provider "aws" {}
terraform {
backend "s3" {
# Maintained by ansible in ./state/
bucket = "example-terraform-eks-gir01"
key = "k8s"
region = "us-east-1"
}
}
# Remote backend for EKS
data "terraform_remote_state" "eks" {
backend = "s3"
config = {
bucket = "example-terraform-eks-gir01"
key = "eks"
region = "us-east-1"
}
}
provider "kubernetes" {
host = "${data.terraform_remote_state.eks.cluster_endpoint}"
token = "${data.external.aws_iam_authenticator.result.token}"
load_config_file = false
cluster_ca_certificate
= "${base64decode(data.terraform_remote_state.eks.cluster_cert.0.data)}"
}
data "external" "aws_iam_authenticator" {
program = ["${path.module}/get_token.sh", "cluster"
, "${data.terraform_remote_state.eks.cluster_name}"]
}
resource "kubernetes_config_map" "aws_auth" {
metadata {
name = "aws-auth"
namespace = "kube-system"
}
data {
"mapRoles" = <<MAPROLES
- rolearn: ${data.terraform_remote_state.eks.node_iam_role}
username: system:node:{{EC2PrivateDNSName}}
groups:
- system:bootstrappers
- system:nodes
MAPROLES
}
}
resource "kubernetes_service_account" "tiller" {
depends_on = [ "kubernetes_config_map.aws_auth" ]
metadata {
name = "tiller"
namespace = "kube-system"
}
}
Here's the external authentication helper. Run by hand the output looks good.
#!/bin/sh
set -e
eval "$(jq -r '@sh "cluster=\(.cluster)"')"
token=$(aws-iam-authenticator token -i $cluster |jq -r '.status.token')
jq -n --arg token "$token" '{"token":$token}'
When I run terraform apply:
Error: Error applying plan:
1 error(s) occurred:
* kubernetes_config_map.aws_auth: 1 error(s) occurred:
* kubernetes_config_map.aws_auth: Unauthorized
What have I done wrong?