diff --git a/infra/aws/terraform/prow-build-cluster/.terraform.lock.hcl b/infra/aws/terraform/prow-build-cluster/.terraform.lock.hcl new file mode 100644 index 00000000000..e0ae040c28e --- /dev/null +++ b/infra/aws/terraform/prow-build-cluster/.terraform.lock.hcl @@ -0,0 +1,125 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/aws" { + version = "4.61.0" + constraints = ">= 3.72.0, >= 3.73.0, >= 4.0.0, >= 4.47.0" + hashes = [ + "h1:qyBawxoNN6EpiiX5h5ZG5P2dHsBeA5Z67xESl2c1HRk=", + "zh:051e2588410b7448a5c4c30d668948dd6fdfa8037700bfc00fb228986ccbf3a5", + "zh:082fbcf9706b48d0880ba552a11c29527e228dadd6d83668d0789abda24e5922", + "zh:0e0e72f214fb24f4f9c601cab088a2d8e00ec3327c451bc753911951d773214a", + "zh:3af6d38ca733ca66cce15c6a5735ded7c18348ad26040ebd9a59778b2cd9cf6c", + "zh:404898bc2258bbb9527fa06c72cb927ca011fd9bc3f4b90931c0912652c3f9e9", + "zh:4f617653b0f17a7708bc896f029c4ab0b677a1a1c987bd77166acad1d82db469", + "zh:5dbe393355ac137aa3fd329e3d24871f27012d3ba93d714485b55820df240349", + "zh:6067c2127eb5c879227aca671f101de6dcba909d0d8d15d5711480351962a248", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:a939f94461f91aa3b7ec7096271e2714309bd917fe9a03e02f68afb556d65e0f", + "zh:b21227b9082e5fafe8b7c415dc6a99c0d82da05492457377a5fe7d4acaed80e2", + "zh:b8d9f09ed5fc8c654b768b7bee1237eaf1e2287c898249e740695055fb0fe072", + "zh:d360e1e185b148ff6b1d0ed4f7d574e08f2391697ab43df62085b04a1a5b1284", + "zh:da962da17ddda744911cb1e92b983fa3874d73a28f3ee72faa9ddb6680a63774", + "zh:e2f1c4f5ebeb4fd7ef690178168a4c529025b54a91bb7a087dcea48e0b82737a", + ] +} + +provider "registry.terraform.io/hashicorp/cloudinit" { + version = "2.3.2" + constraints = ">= 2.0.0" + hashes = [ + "h1:ocyv0lvfyvzW4krenxV5CL4Jq5DiA3EUfoy8DR6zFMw=", + "zh:2487e498736ed90f53de8f66fe2b8c05665b9f8ff1506f751c5ee227c7f457d1", + 
"zh:3d8627d142942336cf65eea6eb6403692f47e9072ff3fa11c3f774a3b93130b3", + "zh:434b643054aeafb5df28d5529b72acc20c6f5ded24decad73b98657af2b53f4f", + "zh:436aa6c2b07d82aa6a9dd746a3e3a627f72787c27c80552ceda6dc52d01f4b6f", + "zh:458274c5aabe65ef4dbd61d43ce759287788e35a2da004e796373f88edcaa422", + "zh:54bc70fa6fb7da33292ae4d9ceef5398d637c7373e729ed4fce59bd7b8d67372", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:893ba267e18749c1a956b69be569f0d7bc043a49c3a0eb4d0d09a8e8b2ca3136", + "zh:95493b7517bce116f75cdd4c63b7c82a9d0d48ec2ef2f5eb836d262ef96d0aa7", + "zh:9ae21ab393be52e3e84e5cce0ef20e690d21f6c10ade7d9d9d22b39851bfeddc", + "zh:cc3b01ac2472e6d59358d54d5e4945032efbc8008739a6d4946ca1b621a16040", + "zh:f23bfe9758f06a1ec10ea3a81c9deedf3a7b42963568997d84a5153f35c5839a", + ] +} + +provider "registry.terraform.io/hashicorp/helm" { + version = "2.9.0" + constraints = "2.9.0" + hashes = [ + "h1:fEDID5J/9ret/sLpOSNAu98F/ZBEZhOmL0Leut7m5JU=", + "zh:1471cb45908b426104687c962007b2980cfde294fa3530fabc4798ce9fb6c20c", + "zh:1572e9cec20591ec08ece797b3630802be816a5adde36ca91a93359f2430b130", + "zh:1b10ae03cf5ab1ae21ffaac2251de99797294ae4242b156b3b0beebbdbcb7e0f", + "zh:3bd043b68de967d8d0b549d3f71485193d81167d5656f5507d743dedfe60e352", + "zh:538911921c729185900176cc22eb8edcb822bc8d22b9ebb48103a1d9bb53cc38", + "zh:69a6a2d40c0463662c3fb1621e37a3ee65024ea4479adf4d5f7f19fb0dea48c2", + "zh:94b58daa0c351a49d01f6d8f1caae46c95c2d6c3f29753e2b9ea3e3c0e7c9ab4", + "zh:9d0543331a4a32241e1ab5457f30b41df745acb235a0391205c725a5311e4809", + "zh:a6789306524ca121512a95e873e3949b4175114a6c5db32bed2df2551a79368f", + "zh:d146b94cd9502cca7f2044797a328d71c7ec2a98e2d138270d8a28c872f04289", + "zh:d14ccd14511f0446eacf43a9243f22de7c1427ceb059cf67d7bf9803be2cb15d", + "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", + ] +} + +provider "registry.terraform.io/hashicorp/kubernetes" { + version = "2.19.0" + constraints = ">= 2.10.0" + hashes = [ + 
"h1:ID/u9YOv00w+Z8iG+592oyuV7HcqRmPiZpEC9hnyTMY=", + "zh:028d346460de2d1d19b4c863dfc36be51c7bcd97d372b54a3a946bcb19f3f613", + "zh:391d0b38c455437d0a2ab1beb6ce6e1230aa4160bbae11c58b2810b258b44280", + "zh:40ea742f91b67f66e71d7091cfd40cc604528c4947651924bd6d8bd8d9793708", + "zh:48a99d341c8ba3cadaafa7cb99c0f11999f5e23f5cfb0f8469b4e352d9116e74", + "zh:4a5ade940eff267cbf7dcd52c1a7ac3999e7cc24996a409bd8b37bdb48a97f02", + "zh:5063742016a8249a4be057b9cc0ef24a684ec76d0ae5463d4b07e9b2d21e047e", + "zh:5d36b3a5662f840a6788f5e2a19d02139e87318feb3c5d82c7d076be1366fec4", + "zh:75edd9960cb30e54ef7de1b7df2761a274f17d4d41f54e72f86b43f41af3eb6d", + "zh:b85cadef3e6f25f1a10a617472bf5e8449decd61626733a1bc723de5edc08f64", + "zh:dc565b17b4ea6dde6bd1b92bc37e5e850fcbf9400540eec00ad3d9552a76ac2e", + "zh:deb665cc2123f2701aa3d653987b2ca35fb035a08a76a2382efb215c209f19a5", + "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", + ] +} + +provider "registry.terraform.io/hashicorp/time" { + version = "0.9.1" + constraints = ">= 0.9.0" + hashes = [ + "h1:VxyoYYOCaJGDmLz4TruZQTSfQhvwEcMxvcKclWdnpbs=", + "zh:00a1476ecf18c735cc08e27bfa835c33f8ac8fa6fa746b01cd3bcbad8ca84f7f", + "zh:3007f8fc4a4f8614c43e8ef1d4b0c773a5de1dcac50e701d8abc9fdc8fcb6bf5", + "zh:5f79d0730fdec8cb148b277de3f00485eff3e9cf1ff47fb715b1c969e5bbd9d4", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:8c8094689a2bed4bb597d24a418bbbf846e15507f08be447d0a5acea67c2265a", + "zh:a6d9206e95d5681229429b406bc7a9ba4b2d9b67470bda7df88fa161508ace57", + "zh:aa299ec058f23ebe68976c7581017de50da6204883950de228ed9246f309e7f1", + "zh:b129f00f45fba1991db0aa954a6ba48d90f64a738629119bfb8e9a844b66e80b", + "zh:ef6cecf5f50cda971c1b215847938ced4cb4a30a18095509c068643b14030b00", + "zh:f1f46a4f6c65886d2dd27b66d92632232adc64f92145bf8403fe64d5ffa5caea", + "zh:f79d6155cda7d559c60d74883a24879a01c4d5f6fd7e8d1e3250f3cd215fb904", + "zh:fd59fa73074805c3575f08cd627eef7acda14ab6dac2c135a66e7a38d262201c", + ] +} + +provider 
"registry.terraform.io/hashicorp/tls" { + version = "4.0.4" + constraints = ">= 3.0.0" + hashes = [ + "h1:GZcFizg5ZT2VrpwvxGBHQ/hO9r6g0vYdQqx3bFD3anY=", + "zh:23671ed83e1fcf79745534841e10291bbf34046b27d6e68a5d0aab77206f4a55", + "zh:45292421211ffd9e8e3eb3655677700e3c5047f71d8f7650d2ce30242335f848", + "zh:59fedb519f4433c0fdb1d58b27c210b27415fddd0cd73c5312530b4309c088be", + "zh:5a8eec2409a9ff7cd0758a9d818c74bcba92a240e6c5e54b99df68fff312bbd5", + "zh:5e6a4b39f3171f53292ab88058a59e64825f2b842760a4869e64dc1dc093d1fe", + "zh:810547d0bf9311d21c81cc306126d3547e7bd3f194fc295836acf164b9f8424e", + "zh:824a5f3617624243bed0259d7dd37d76017097dc3193dac669be342b90b2ab48", + "zh:9361ccc7048be5dcbc2fafe2d8216939765b3160bd52734f7a9fd917a39ecbd8", + "zh:aa02ea625aaf672e649296bce7580f62d724268189fe9ad7c1b36bb0fa12fa60", + "zh:c71b4cd40d6ec7815dfeefd57d88bc592c0c42f5e5858dcc88245d371b4b8b1e", + "zh:dabcd52f36b43d250a3d71ad7abfa07b5622c69068d989e60b79b2bb4f220316", + "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", + ] +} diff --git a/infra/aws/terraform/prow-build-cluster/Makefile b/infra/aws/terraform/prow-build-cluster/Makefile new file mode 100644 index 00000000000..42ee318a57f --- /dev/null +++ b/infra/aws/terraform/prow-build-cluster/Makefile @@ -0,0 +1,58 @@ +# Copyright 2023 The Kubernetes Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +TF ?= terraform +ASSUME_ROLE ?= true + +# Valid values are: canary, prod +WORKSPACE_NAME ?= canary + +.PHONY: workspace-select +workspace-select: + $(TF) workspace select $(WORKSPACE_NAME) + +.PHONY: init +init: + $(TF) $@ + +.PHONY: plan +plan: workspace-select + $(TF) $@ \ + -var-file=./terraform.$(WORKSPACE_NAME).tfvars \ + -var="assume_role=$(ASSUME_ROLE)" + +.PHONY: apply +apply: workspace-select + $(TF) $@ \ + -var-file=./terraform.$(WORKSPACE_NAME).tfvars \ + -var="assume_role=$(ASSUME_ROLE)" + +.PHONY: destroy +destroy: workspace-select + $(TF) $@ \ + -var-file=./terraform.$(WORKSPACE_NAME).tfvars \ + -var="assume_role=$(ASSUME_ROLE)" + +.PHONY: fmt +fmt: + $(TF) $@ + +.PHONY: output +output: + $(TF) $@ + +.PHONY: clean +clean: + rm -rf ./.terraform + diff --git a/infra/aws/terraform/prow-build-cluster/README.md b/infra/aws/terraform/prow-build-cluster/README.md new file mode 100644 index 00000000000..8c9bbd580bc --- /dev/null +++ b/infra/aws/terraform/prow-build-cluster/README.md @@ -0,0 +1,109 @@ +# Provisioning EKS clusters + +## Prod vs Canary + +These scripts support provisioning two types of EKS clusters. One is meant for hosting prow jobs +on production and the other one is for testing infrastructure changes before promoting them to +production. + +Here are some differences between canary and production setups: +* cluster name, +* cluster admin IAM role name, +* secrets-manager IAM policy name, +* canary is missing k8s prow OIDC provider and corresponding role, +* subnet setup is different, +* instance type and autoscaling parameters (mainly for saving), + +## Provisioning Cluster + +Running installation from scratch is different than consecutive invocations of Terraform. +First run creates a role that can be later assumed by other users.
Because of that, an additional +variable has to be set: + +```bash +# For provisioning Prod: +export WORKSPACE_NAME=prod +# For provisioning Canary: +export WORKSPACE_NAME=canary + +# Just making sure we don't have state cached locally. +ASSUME_ROLE=false make init +ASSUME_ROLE=false make apply +``` + +Once the infrastructure is provisioned, next step is RBAC setup: + +```bash +# Fetch & update kubeconfig. +# For Prod: +aws eks update-kubeconfig --region us-east-2 --name prow-build-cluster +# For Canary: +aws eks update-kubeconfig --region us-east-2 --name prow-build-canary-cluster + +# create cluster role bindings +kubectl apply -f ./resources/rbac +``` + +Lastly, run the Terraform script again without the additional variable. This time, it will implicitly assume +previously created role and provision resources on top of EKS cluster. + +```bash +make apply +``` + +From here, all consecutive runs should be possible with command from above. + +## Using cluster + +### Fetch kubeconfig + +```bash +# Prod: +aws eks update-kubeconfig --region us-east-2 --name prow-build-cluster +# Canary: +aws eks update-kubeconfig --region us-east-2 --name prow-build-canary-cluster +``` + +### Open kubeconfig and add assume role argument + +For Prod: +```yaml +args: + - --region + - us-east-2 + - eks + - get-token + - --cluster-name + - prow-build-cluster + - --role-arn + - arn:aws:iam::468814281478:role/Prow-Cluster-Admin +``` + +For Canary: +```yaml +args: + - --region + - us-east-2 + - eks + - get-token + - --cluster-name + - prow-build-canary-cluster + - --role-arn + - arn:aws:iam::468814281478:role/canary-Prow-Cluster-Admin +``` + +## Removing cluster + +Same as for installation, cluster removal requires running Terraform twice. +**IMPORTANT**: It's possible only for users with assigned `AdministratorAccess` policy. + +```bash +export WORKSPACE_NAME= # choose between canary/prod + +# First remove resources running on the cluster and IAM role. This fails once assumed role gets deleted.
+make destroy + +# Clean up the rest. +ASSUME_ROLE=false make destroy +``` + diff --git a/infra/aws/terraform/prow-build-cluster/eks.tf b/infra/aws/terraform/prow-build-cluster/eks.tf index 7da650d1dd7..50956180ea0 100644 --- a/infra/aws/terraform/prow-build-cluster/eks.tf +++ b/infra/aws/terraform/prow-build-cluster/eks.tf @@ -18,6 +18,31 @@ limitations under the License. # EKS Cluster ############################################### +locals { + aws_auth_roles = concat( + terraform.workspace == "prod" ? [ + # Allow access to the Prow-EKS-Admin IAM role (used by Prow directly). + { + "rolearn" = aws_iam_role.eks_admin[0].arn + "username" = "eks-admin" + "groups" = [ + "eks-prow-cluster-admin" + ] + } + ] : [], + [ + # Allow access to the Prow-Cluster-Admin IAM role (used with assume role with other IAM accounts). + { + "rolearn" = aws_iam_role.iam_cluster_admin.arn + "username" = "eks-cluster-admin" + "groups" = [ + "eks-cluster-admin" + ] + } + ] + ) +} + module "eks" { source = "terraform-aws-modules/eks/aws" version = "19.10.0" @@ -31,24 +56,8 @@ module "eks" { manage_aws_auth_configmap = true # Configure aws-auth - aws_auth_roles = [ - # Allow access to the Prow-EKS-Admin IAM role (used by Prow directly). - { - "rolearn" = aws_iam_role.eks_admin.arn - "username" = "eks-admin" - "groups" = [ - "eks-prow-cluster-admin" - ] - }, - # Allow access to the Prow-Cluster-Admin IAM role (used with assume role with other IAM accounts). - { - "rolearn" = aws_iam_role.iam_cluster_admin.arn - "username" = "eks-cluster-admin" - "groups" = [ - "eks-cluster-admin" - ] - }, - ] + aws_auth_roles = local.aws_auth_roles + # Allow EKS access to the root account. 
aws_auth_users = [ { diff --git a/infra/aws/terraform/prow-build-cluster/iam.tf b/infra/aws/terraform/prow-build-cluster/iam.tf index 0d8d9737120..005237d9cdc 100644 --- a/infra/aws/terraform/prow-build-cluster/iam.tf +++ b/infra/aws/terraform/prow-build-cluster/iam.tf @@ -26,8 +26,8 @@ data "aws_iam_user" "user_pprzekwa" { } resource "aws_iam_role" "iam_cluster_admin" { - name = "Prow-Cluster-Admin" - description = "IAM role used to delegate access to prow-build-cluster" + name = "${local.canary_prefix}Prow-Cluster-Admin" + description = "IAM role used to delegate access to ${local.canary_prefix}prow-build-cluster" assume_role_policy = jsonencode({ Version = "2012-10-17" diff --git a/infra/aws/terraform/prow-build-cluster/kubernetes.tf b/infra/aws/terraform/prow-build-cluster/kubernetes.tf index c406e03bb78..02ffd589e69 100644 --- a/infra/aws/terraform/prow-build-cluster/kubernetes.tf +++ b/infra/aws/terraform/prow-build-cluster/kubernetes.tf @@ -15,6 +15,8 @@ limitations under the License. */ module "cluster_autoscaler" { + count = var.assume_role ? 1 : 0 + source = "./modules/cluster-autoscaler" providers = { kubernetes = kubernetes @@ -30,6 +32,8 @@ module "cluster_autoscaler" { } module "metrics_server" { + count = var.assume_role ? 1 : 0 + source = "./modules/metrics-server" providers = { kubernetes = kubernetes @@ -42,6 +46,8 @@ module "metrics_server" { # AWS Load Balancer Controller (ALB/NLB integration). resource "helm_release" "aws_lb_controller" { + count = var.assume_role ? 1 : 0 + name = "aws-load-balancer-controller" namespace = "kube-system" repository = "https://aws.github.io/eks-charts" @@ -75,6 +81,8 @@ resource "helm_release" "aws_lb_controller" { # AWS Secrets Manager integration resource "helm_release" "secrets_store_csi_driver" { + count = var.assume_role ? 
1 : 0 + name = "secrets-store-csi-driver" namespace = "kube-system" repository = "https://kubernetes-sigs.github.io/secrets-store-csi-driver/charts" @@ -87,6 +95,8 @@ resource "helm_release" "secrets_store_csi_driver" { } resource "helm_release" "secrets_store_csi_driver_provider_aws" { + count = var.assume_role ? 1 : 0 + name = "aws-secrets-manager" namespace = "kube-system" repository = "https://aws.github.io/secrets-store-csi-driver-provider-aws" diff --git a/infra/aws/terraform/prow-build-cluster/main.tf b/infra/aws/terraform/prow-build-cluster/main.tf index e53da7d19ec..f23fd615047 100644 --- a/infra/aws/terraform/prow-build-cluster/main.tf +++ b/infra/aws/terraform/prow-build-cluster/main.tf @@ -18,15 +18,42 @@ limitations under the License. # INITIALIZATION ############################################### +data "aws_caller_identity" "current" {} +data "aws_availability_zones" "available" {} + +locals { + canary_prefix = terraform.workspace != "prod" ? "canary-" : "" + + root_account_arn = "arn:aws:iam::${data.aws_caller_identity.current.account_id}:root" + aws_cli_base_args = ["eks", "get-token", "--cluster-name", module.eks.cluster_name] + aws_cli_args = var.assume_role != true ? local.aws_cli_base_args : concat( + local.aws_cli_base_args, ["--role-arn", aws_iam_role.iam_cluster_admin.arn] + ) + + tags = { + Cluster = var.cluster_name + } + auto_scaling_tags = { + "k8s.io/cluster-autoscaler/${var.cluster_name}" = "owned" + "k8s.io/cluster-autoscaler/enabled" = true + } + node_group_tags = merge(local.tags, local.auto_scaling_tags) + azs = slice(data.aws_availability_zones.available.names, 0, 3) +} + provider "aws" { region = var.cluster_region # We have a chicken-egg problem here. This role is not going to exist - # when creating the cluster for the first time. In that case, this must - # be commented, than uncommented afterwards. 
- assume_role { - role_arn = "arn:aws:iam::468814281478:role/Prow-Cluster-Admin" - session_name = "prow-build-cluster-terraform" + # when creating the cluster for the first time. In that case, `assume_role` var + # has to be set to false. + dynamic "assume_role" { + for_each = var.assume_role ? [null] : [] + + content { + role_arn = "arn:aws:iam::468814281478:role/${local.canary_prefix}Prow-Cluster-Admin" + session_name = "prow-build-cluster-terraform" + } } } @@ -38,7 +65,7 @@ provider "kubernetes" { exec { api_version = "client.authentication.k8s.io/v1beta1" command = "aws" - args = ["eks", "get-token", "--cluster-name", module.eks.cluster_name, "--role-arn", aws_iam_role.iam_cluster_admin.arn] + args = local.aws_cli_args } } @@ -51,24 +78,7 @@ provider "helm" { exec { api_version = "client.authentication.k8s.io/v1beta1" command = "aws" - args = ["eks", "get-token", "--cluster-name", module.eks.cluster_name, "--role-arn", aws_iam_role.iam_cluster_admin.arn] + args = local.aws_cli_args } } } - -data "aws_caller_identity" "current" {} -data "aws_availability_zones" "available" {} - -locals { - root_account_arn = "arn:aws:iam::${data.aws_caller_identity.current.account_id}:root" - - tags = { - Cluster = var.cluster_name - } - auto_scaling_tags = { - "k8s.io/cluster-autoscaler/${var.cluster_name}" = "owned" - "k8s.io/cluster-autoscaler/enabled" = true - } - node_group_tags = merge(local.tags, local.auto_scaling_tags) - azs = slice(data.aws_availability_zones.available.names, 0, 3) -} diff --git a/infra/aws/terraform/prow-build-cluster/prow.tf b/infra/aws/terraform/prow-build-cluster/prow.tf index 6b78d9c9474..9cad9120c9d 100644 --- a/infra/aws/terraform/prow-build-cluster/prow.tf +++ b/infra/aws/terraform/prow-build-cluster/prow.tf @@ -14,11 +14,13 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -# This IAM configuration allows Prow GKE Clusters to assume a role on AWS - +# This IAM configuration allows Prow GKE Clusters to assume a role on AWS. +# Provisioning those resources for canary installation is skipped. # Recognize federated identities from the prow trusted cluster resource "aws_iam_openid_connect_provider" "k8s_prow" { + count = terraform.workspace == "prod" ? 1 : 0 + url = "https://container.googleapis.com/v1/projects/k8s-prow/locations/us-central1-f/clusters/prow" client_id_list = ["sts.amazonaws.com"] thumbprint_list = ["08745487e891c19e3078c1f2a07e452950ef36f6"] @@ -26,14 +28,17 @@ resource "aws_iam_openid_connect_provider" "k8s_prow" { # We allow Prow Pods with specific service acccounts on the a particular cluster to assume this role resource "aws_iam_role" "eks_admin" { + count = terraform.workspace == "prod" ? 1 : 0 + name = "Prow-EKS-Admin" + assume_role_policy = jsonencode({ Version = "2012-10-17" Statement = [ { "Effect" : "Allow", "Principal" : { - "Federated" : aws_iam_openid_connect_provider.k8s_prow.arn + "Federated" : aws_iam_openid_connect_provider.k8s_prow[0].arn }, "Action" : "sts:AssumeRoleWithWebIdentity", "Condition" : { diff --git a/infra/aws/terraform/prow-build-cluster/secrets_manager.tf b/infra/aws/terraform/prow-build-cluster/secrets_manager.tf index 1772b7ef6c7..cdc482acf19 100644 --- a/infra/aws/terraform/prow-build-cluster/secrets_manager.tf +++ b/infra/aws/terraform/prow-build-cluster/secrets_manager.tf @@ -28,7 +28,7 @@ data "aws_iam_policy_document" "secretsmanager_read" { } resource "aws_iam_policy" "secretsmanager_read" { - name = "secretsmanager_read" + name = "${local.canary_prefix}secretsmanager_read" path = "/" policy = data.aws_iam_policy_document.secretsmanager_read.json } diff --git a/infra/aws/terraform/prow-build-cluster/terraform.canary.tfvars b/infra/aws/terraform/prow-build-cluster/terraform.canary.tfvars new file mode 100644 index 00000000000..3c3a322f5e6 --- /dev/null +++ 
b/infra/aws/terraform/prow-build-cluster/terraform.canary.tfvars @@ -0,0 +1,40 @@ +/* +Copyright 2023 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +assume_role = true + +cluster_name = "prow-build-canary-cluster" +cluster_region = "us-east-2" +cluster_version = "1.25" + +vpc_cidr = "10.3.0.0/16" +vpc_secondary_cidr_blocks = ["10.4.0.0/16", "10.5.0.0/16"] +vpc_public_subnet = ["10.3.0.0/18", "10.3.64.0/18", "10.3.128.0/18"] +vpc_private_subnet = ["10.4.0.0/18", "10.4.64.0/18", "10.4.128.0/18"] +vpc_intra_subnet = ["10.5.0.0/18", "10.5.64.0/18", "10.5.128.0/18"] + +# Ubuntu EKS optimized AMI: https://cloud-images.ubuntu.com/aws-eks/ +node_ami = "ami-03de35fda144b3672" +node_instance_types = ["r5d.xlarge"] +node_volume_size = 100 + +node_min_size = 1 +node_max_size = 10 +node_desired_size = 1 +node_max_unavailable_percentage = 100 # To ease testing + +cluster_autoscaler_version = "v1.25.0" + diff --git a/infra/aws/terraform/prow-build-cluster/terraform.tfvars b/infra/aws/terraform/prow-build-cluster/terraform.prod.tfvars similarity index 98% rename from infra/aws/terraform/prow-build-cluster/terraform.tfvars rename to infra/aws/terraform/prow-build-cluster/terraform.prod.tfvars index 20c5879c0f4..e0dfba31a85 100644 --- a/infra/aws/terraform/prow-build-cluster/terraform.tfvars +++ b/infra/aws/terraform/prow-build-cluster/terraform.prod.tfvars @@ -14,6 +14,8 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ +assume_role = true + cluster_name = "prow-build-cluster" cluster_region = "us-east-2" cluster_version = "1.25" @@ -36,3 +38,4 @@ node_desired_size = 20 node_max_unavailable_percentage = 100 # To ease testing cluster_autoscaler_version = "v1.25.0" + diff --git a/infra/aws/terraform/prow-build-cluster/variables.tf b/infra/aws/terraform/prow-build-cluster/variables.tf index 0ce8beb8ef2..6f1dba6d0ce 100644 --- a/infra/aws/terraform/prow-build-cluster/variables.tf +++ b/infra/aws/terraform/prow-build-cluster/variables.tf @@ -14,6 +14,14 @@ See the License for the specific language governing permissions and limitations under the License. */ +# This variable is required in the installation process as we cannot +# assume a role that is yet to be created. +variable "assume_role" { + type = bool + description = "Assumes role to get access to EKS cluster after provisioning." + default = true +} + variable "vpc_cidr" { type = string description = "CIDR of the VPC"