feat: finally working with proxmox-ccm

Tine 2024-07-20 13:09:30 +02:00
parent 1ba29ddc04
commit 7c6c07b155
Signed by: mentos1386
SSH key fingerprint: SHA256:MNtTsLbihYaWF8j1fkOHfkKNlnN1JQfxEU/rBU8nCGw
12 changed files with 13858 additions and 304 deletions

@@ -1,6 +1,28 @@
 # This file is maintained automatically by "tofu init".
 # Manual edits may be lost in future updates.
 
+provider "registry.opentofu.org/alekc/kubectl" {
+  version     = "2.0.4"
+  constraints = "2.0.4"
+  hashes = [
+    "h1:6xRO3WlBsOTbeJ90QFjxGbc4BjnoGdEaeSCdWI/B1jU=",
+    "zh:15c227886bac78c8b8827f85595648212574ec81febc39e1055e1a6bf048fe65",
+    "zh:2211ebeeb0918dbb3587d206e32adca9e1f343a93bbffcd37d8d99bf4d8dea9a",
+    "zh:2303836cdea12ece8dbe39c2d7d30a9378fd06e9c2ebda66cbe5e01cc096ee2e",
+    "zh:3687f69e531c70845682b214888a9959b93f2be3c2531801228a4b1965d59921",
+    "zh:4dd686b4c55e2eedd80464984c9bb736c2df7a96d9dd59a692d91d09173f5f64",
+    "zh:51e29c13a87e56867b4be0b0c68da874149bf6d4014d7259b62d91162142c1bd",
+    "zh:5d9d99260f2adfb8867068a3d7644336d57cfa7710062c5221dcbb5a7ec90c7d",
+    "zh:901c19d73da6688437b19a85e3cd60e8f2090c84699e108b31953bb87f6d3141",
+    "zh:9547743606a36fa6b6748c5e2e1959b6f185730a1da53a3c351cfa0d8c096687",
+    "zh:9772a30704e69b54de5a332858a39591f52286121cffcba702346830b1c6e362",
+    "zh:b44792f99d7c90b9a364dd922f861e459ae1b1edc039f6b3078549021fec4511",
+    "zh:b5eb871ed2e39b9236dce06170b1fd5dda29f3c1d53f8e08285ccb9a4f574201",
+    "zh:e8bb4c3d9f680977b560e9dec24662650f790259b2c1311ee07a72157f6492b3",
+    "zh:f4772cfa0f9c73fdef008bb917cd268620009dc7ff270a4d819125c642b5acce",
+  ]
+}
+
 provider "registry.opentofu.org/bpg/proxmox" {
   version     = "0.61.1"
   constraints = "0.61.1"
@@ -115,28 +137,6 @@ provider "registry.opentofu.org/hashicorp/random" {
   ]
 }
 
-provider "registry.opentofu.org/ivoronin/macaddress" {
-  version     = "0.3.2"
-  constraints = "0.3.2"
-  hashes = [
-    "h1:yk0ASl2cAoc/22tvpi9Kke+WvowgXGq0QwaP93IQ+S0=",
-    "zh:00cb168d9210ed88cfa7de8a33d5666b2cf6660a5d20a7a96348b8b902833eca",
-    "zh:1366458320df0b6f1132e59b5410931c0c5626bbf27b05b29dd311311a710e9b",
-    "zh:2e8102c7f6046665c95b806752d692843f2e846554f7eba85690cd2087c9048a",
-    "zh:3c1ae52f855d0e694ad28eb34ec41c553344aaa7bd51adaa48cf15e3ee842e17",
-    "zh:496d8db2055cead9d264fdad83534318e3ab77ce06e38d43674a4ec25c0e860d",
-    "zh:54c5eeae7cc61d706080256e06aaf509869b1d86297b9e99948a2fe2af6d455b",
-    "zh:5f26e851048be3c56f3706b7fde25fe76dd30003ef6356216dc9ecff400218bb",
-    "zh:5fc1debcd0fe043dfce00ab110e180b896a1a9958edea7d81d05aacc9b630e5e",
-    "zh:650045261b382b4559fd1bd190d6cabbeb022b53d7e240eb6b66f6824ca81bf4",
-    "zh:7203dea017883e8fdd7ba66c9b1a9aac0cab101133e4eeab365c4d0995194272",
-    "zh:726a9222d15f11316587c199ee367bae1d5495ff16ebdfc41635f7628834a8d6",
-    "zh:c9f3bcaa073a0921189bd74ef6b2b57cad34b3eb01788c010df8a15fd9d8045c",
-    "zh:d3fba491b0ff0d3d64162216159232398a75ad81c31e4304335d6b76b74a864a",
-    "zh:e80011c6e3af4eeafdeda9bd118a774f8b7cdf1f133953abf827f313653ec184",
-  ]
-}
-
 provider "registry.opentofu.org/siderolabs/talos" {
   version     = "0.5.0"
   constraints = "0.5.0"

@@ -1,26 +0,0 @@
-apiVersion: v1
-kind: Config
-clusters:
-  - name: tjo-cloud
-    cluster:
-      server: https://api.k8s.tjo.cloud:6443
-      certificate-authority-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJpVENDQVMrZ0F3SUJBZ0lRQ2o1ekNPSDg2ajN2bXVtcmVNYU9QekFLQmdncWhrak9QUVFEQWpBVk1STXcKRVFZRFZRUUtFd3ByZFdKbGNtNWxkR1Z6TUI0WERUSTBNRGN4T1RJd016VTFPRm9YRFRNME1EY3hOekl3TXpVMQpPRm93RlRFVE1CRUdBMVVFQ2hNS2EzVmlaWEp1WlhSbGN6QlpNQk1HQnlxR1NNNDlBZ0VHQ0NxR1NNNDlBd0VICkEwSUFCSElQUkIzNFdHOHNVNTZrWlFYVDV0VnpjQXlKRXpwd1VpRmpjUkJDQlJXY3JjZ1RiZ3hsaSs4RDNOVEsKV1ZLNUh6bkZnM25kV2VVQzl3S0l4SCs4bHQyallUQmZNQTRHQTFVZER3RUIvd1FFQXdJQ2hEQWRCZ05WSFNVRQpGakFVQmdnckJnRUZCUWNEQVFZSUt3WUJCUVVIQXdJd0R3WURWUjBUQVFIL0JBVXdBd0VCL3pBZEJnTlZIUTRFCkZnUVVjKzFYNDU2QTZLSWZJb3FNL3ZiU0dEU0VFQzB3Q2dZSUtvWkl6ajBFQXdJRFNBQXdSUUloQUpNWkhHZ2UKSUdacHdmbFpQRnl2ZlVrUi9xTDhsWjhXQVJpRFYwWXdoQWNDQWlBQStaNzJNelQ5c2RTS3RUSmRRVmR2WGhZKwpWTkVWb2JicDQ5WjFPeUNTL1E9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
-contexts:
-  - name: oidc@tjo-cloud
-    context:
-      cluster: tjo-cloud
-      namespace: default
-      user: oidc
-current-context: oidc@tjo-cloud
-users:
-  - name: oidc
-    user:
-      exec:
-        apiVersion: client.authentication.k8s.io/v1beta1
-        command: kubectl
-        args:
-          - oidc-login
-          - get-token
-          - --oidc-issuer-url=https://id.tjo.space/application/o/k8stjocloud/
-          - --oidc-client-id=HAI6rW0EWtgmSPGKAJ3XXzubQTUut2GMeTRS2spg
-          - --oidc-extra-scope=profile

@@ -85,24 +85,21 @@ resource "local_file" "kubeconfig" {
   filename = "${path.module}/kubeconfig"
 }
 
-resource "kubernetes_manifest" "hetzner-nodes-as-loadbalancers" {
-  manifest = {
-    apiVersion = "cilium.io/v2alpha1"
-    kind       = "CiliumLoadBalancerIPPool"
-    metadata = {
-      name = "hetzner-nodes"
-    }
-    spec = {
-      blocks = concat(
-        [for k, node in module.cluster.nodes : { start : node.ipv4 } if node.public],
-        # [for k, node in module.cluster.nodes : { start : node.ipv6 } if node.public],
-      )
-    }
-  }
-}
-
-resource "kubernetes_namespace" "tjo-cloud" {
-  metadata {
-    name = "tjo-cloud"
+module "cluster_components" {
+  source = "../modules/cluster-components"
+
+  oidc_issuer_url = var.oidc_issuer_url
+  oidc_client_id  = var.oidc_client_id
+
+  digitalocean_token = var.digitalocean_token
+
+  cluster_name   = module.cluster.name
+  cluster_domain = module.cluster.domain
+
+  loadbalancer_ips = {
+    hetzner-public = {
+      ipv4 = [for k, node in module.cluster.nodes : node.ipv4 if node.public]
+      ipv6 = [for k, node in module.cluster.nodes : node.ipv6 if node.public]
+    }
   }
 }

@@ -26,7 +26,7 @@ resource "kubernetes_manifest" "dashoard-http-route" {
         }
       ]
       hostnames = [
-        "dashboard.${module.cluster.domain}"
+        "dashboard.${var.cluster_domain}"
       ]
       rules = [
         {
@@ -129,7 +129,7 @@ resource "kubernetes_manifest" "dashboard-oidc" {
       scopes : ["openid", "email", "profile"]
       forwardAccessToken : true
-      redirectURL : "https://dashboard.${module.cluster.domain}/login"
+      redirectURL : "https://dashboard.${var.cluster_domain}/login"
     }
   }
 }

@@ -8,29 +8,7 @@ resource "kubernetes_secret" "digitalocean-token" {
   }
 }
 
-resource "helm_release" "cert-manager" {
-  depends_on = [helm_release.envoy]
-
-  name       = "cert-manager"
-  chart      = "cert-manager"
-  repository = "https://charts.jetstack.io"
-  version    = "v1.15.1"
-
-  namespace = kubernetes_namespace.tjo-cloud.metadata[0].name
-
-  set {
-    name  = "crds.enabled"
-    value = true
-  }
-
-  set_list {
-    name  = "extraArgs"
-    value = ["--enable-gateway-api"]
-  }
-}
-
 resource "kubernetes_manifest" "tjo-cloud-issuer" {
-  depends_on = [helm_release.cert-manager]
-
   manifest = {
     apiVersion = "cert-manager.io/v1"
     kind       = "Issuer"
@@ -62,41 +40,7 @@ resource "kubernetes_manifest" "tjo-cloud-issuer" {
   }
 }
 
-resource "helm_release" "envoy" {
-  name       = "envoy"
-  chart      = "gateway-helm"
-  repository = "oci://docker.io/envoyproxy"
-  version    = "v1.1.0-rc.1"
-
-  namespace = "kube-system"
-
-  values = [
-    yamlencode({
-      config = {
-        envoyGateway = {
-          provider = {
-            type = "Kubernetes"
-            kubernetes = {
-              envoyDaemonSet  = {}
-              envoyDeployment = null
-            }
-          }
-          gateway = {
-            controllerName = "gateway.envoyproxy.io/gatewayclass-controller"
-          }
-          logging = {
-            level = {
-              default = "info"
-            }
-          }
-        }
-      }
-    })
-  ]
-}
-
 resource "kubernetes_manifest" "gateway_class" {
-  depends_on = [helm_release.envoy]
-
   manifest = {
     apiVersion = "gateway.networking.k8s.io/v1"
     kind       = "GatewayClass"
@@ -125,7 +69,7 @@ resource "kubernetes_manifest" "gateway" {
       listeners = [
         {
           name : "http"
-          hostname : "*.${module.cluster.name}.${module.cluster.domain}"
+          hostname : "*.${var.cluster_name}.${var.cluster_domain}"
           protocol : "HTTPS"
           port : 443
           allowedRoutes : {

@@ -0,0 +1,20 @@
+resource "kubernetes_namespace" "tjo-cloud" {
+  metadata {
+    name = "tjo-cloud"
+  }
+}
+
+resource "kubernetes_manifest" "loadbalancer_ips" {
+  for_each = var.loadbalancer_ips
+
+  manifest = {
+    apiVersion = "cilium.io/v2alpha1"
+    kind       = "CiliumLoadBalancerIPPool"
+    metadata = {
+      name = each.key
+    }
+    spec = {
+      blocks = [for ip in each.value.ipv4 : { start : ip }]
+    }
+  }
+}

@@ -0,0 +1,26 @@
+variable "loadbalancer_ips" {
+  description = "Map of loadbalancer IPs."
+  type        = map(object({ ipv4 = set(string), ipv6 = set(string) }))
+}
+
+variable "cluster_name" {
+  description = "Name of the cluster."
+  type        = string
+}
+
+variable "cluster_domain" {
+  description = "Domain of the cluster."
+  type        = string
+}
+
+variable "oidc_client_id" {
+  type = string
+}
+
+variable "oidc_issuer_url" {
+  type = string
+}
+
+variable "digitalocean_token" {
+  type      = string
+  sensitive = true
+}

@@ -0,0 +1,20 @@
+terraform {
+  required_providers {
+    digitalocean = {
+      source  = "digitalocean/digitalocean"
+      version = "~> 2.0"
+    }
+    random = {
+      source  = "hashicorp/random"
+      version = "3.6.2"
+    }
+    helm = {
+      source  = "hashicorp/helm"
+      version = "2.14.0"
+    }
+    kubernetes = {
+      source  = "hashicorp/kubernetes"
+      version = "2.31.0"
+    }
+  }
+}

@@ -0,0 +1,220 @@
+data "helm_template" "cilium" {
+  provider = helm.template
+
+  name       = "cilium"
+  chart      = "cilium"
+  repository = "https://helm.cilium.io/"
+  version    = "1.15.6"
+
+  namespace    = "kube-system"
+  kube_version = var.talos.kubernetes
+
+  values = [yamlencode({
+    ipam : {
+      mode : "kubernetes"
+    },
+    nodeIPAM : {
+      enabled : true
+    },
+    kubeProxyReplacement : "true"
+    securityContext : {
+      capabilities : {
+        ciliumAgent : [
+          "CHOWN",
+          "KILL",
+          "NET_ADMIN",
+          "NET_RAW",
+          "IPC_LOCK",
+          "SYS_ADMIN",
+          "SYS_RESOURCE",
+          "DAC_OVERRIDE",
+          "FOWNER",
+          "SETGID",
+          "SETUID"
+        ],
+        cleanCiliumState : [
+          "NET_ADMIN",
+          "SYS_ADMIN",
+          "SYS_RESOURCE"
+        ]
+      }
+    },
+    cgroup : {
+      autoMount : {
+        enabled : false
+      },
+      hostRoot : "/sys/fs/cgroup"
+    },
+    k8sServiceHost : local.cluster_api_domain
+    k8sServicePort : var.cluster.api.port
+    ipv4 : {
+      enabled : true
+    },
+    #ipv6 : {
+    #  enabled : true
+    #},
+    hubble : {
+      tls : {
+        auto : {
+          enabled : true
+          method : "cronJob"
+          schedule : "0 0 1 */4 *"
+        }
+      }
+      ui : {
+        enabled : true
+      }
+      relay : {
+        enabled : true
+      }
+    },
+    gatewayAPI : {
+      enabled : false
+    }
+    envoy : {
+      enabled : false
+    }
+  })]
+}
+
+data "helm_template" "proxmox-csi" {
+  provider = helm.template
+
+  name       = "proxmox-csi-plugin"
+  chart      = "proxmox-csi-plugin"
+  repository = "oci://ghcr.io/sergelogvinov/charts"
+  version    = "0.2.5"
+
+  namespace    = "kube-system"
+  kube_version = var.talos.kubernetes
+
+  values = [<<-EOF
+    config:
+      clusters:
+        - url: ${var.proxmox.url}
+          insecure: ${var.proxmox.insecure}
+          token_id: "${proxmox_virtual_environment_user_token.csi.id}"
+          token_secret: "${split("=", proxmox_virtual_environment_user_token.csi.value)[1]}"
+          region: "${var.proxmox.name}"
+    storageClass:
+      - name: proxmox
+        storage: local-zfs
+        reclaimPolicy: Delete
+        fstype: ext4
+        cache: none
+    replicaCount: 1
+    nodeSelector:
+      node-role.kubernetes.io/control-plane: ""
+      node.cloudprovider.kubernetes.io/platform: nocloud
+    tolerations:
+      - key: node-role.kubernetes.io/control-plane
+        effect: NoSchedule
+    node:
+      nodeSelector:
+        node.cloudprovider.kubernetes.io/platform: nocloud
+      tolerations:
+        - operator: Exists
+  EOF
+  ]
+}
+
+data "helm_template" "proxmox-ccm" {
+  provider = helm.template
+
+  name       = "proxmox-cloud-controller-manager"
+  chart      = "proxmox-cloud-controller-manager"
+  repository = "oci://ghcr.io/sergelogvinov/charts"
+  version    = "0.2.3"
+
+  namespace    = "kube-system"
+  kube_version = var.talos.kubernetes
+
+  values = [<<-EOF
+    nodeSelector:
+      node-role.kubernetes.io/control-plane: ""
+    enabledControllers:
+      - cloud-node-lifecycle
+    config:
+      clusters:
+        - url: ${var.proxmox.url}
+          insecure: ${var.proxmox.insecure}
+          token_id: ${proxmox_virtual_environment_user_token.ccm.id}
+          token_secret: ${split("=", proxmox_virtual_environment_user_token.ccm.value)[1]}
+          region: ${var.proxmox.name}
+  EOF
+  ]
+}
+
+data "helm_template" "talos-ccm" {
+  provider = helm.template
+
+  name       = "talos-cloud-controller-manager"
+  chart      = "talos-cloud-controller-manager"
+  repository = "oci://ghcr.io/siderolabs/charts"
+  version    = "0.3.1"
+
+  namespace    = "kube-system"
+  kube_version = var.talos.kubernetes
+}
+
+data "helm_template" "cert-manager" {
+  provider = helm.template
+
+  name       = "cert-manager"
+  chart      = "cert-manager"
+  repository = "https://charts.jetstack.io"
+  version    = "v1.15.1"
+
+  namespace    = "kube-system"
+  kube_version = var.talos.kubernetes
+  api_versions = [
+    "gateway.networking.k8s.io/v1/GatewayClass",
+  ]
+  include_crds = true
+
+  set {
+    name  = "crds.enabled"
+    value = true
+  }
+
+  set_list {
+    name  = "extraArgs"
+    value = ["--enable-gateway-api"]
+  }
+}
+
+data "helm_template" "envoy" {
+  provider = helm.template
+
+  name       = "envoy"
+  chart      = "gateway-helm"
+  repository = "oci://docker.io/envoyproxy"
+  version    = "v1.1.0-rc.1"
+
+  namespace    = "kube-system"
+  kube_version = var.talos.kubernetes
+  api_versions = [
+    "gateway.networking.k8s.io/v1/GatewayClass",
+  ]
+  include_crds = true
+
+  values = [
+    yamlencode({
+      config = {
+        envoyGateway = {
+          provider = {
+            type = "Kubernetes"
+            kubernetes = {
+              envoyDaemonSet  = {}
+              envoyDeployment = null
+            }
+          }
+          gateway = {
+            controllerName = "gateway.envoyproxy.io/gatewayclass-controller"
+          }
+        }
+      }
+    })
+  ]
+}

@@ -19,6 +19,21 @@ locals {
   ]
 
   talos_controlplane_config = {
+    machine : {
+      features : {
+        rbac : true
+        apidCheckExtKeyUsage : true
+        kubernetesTalosAPIAccess : {
+          enabled : true
+          allowedRoles : [
+            "os:reader"
+          ]
+          allowedKubernetesNamespaces : [
+            "kube-system"
+          ]
+        }
+      }
+    }
    cluster : {
      etcd : {
        advertisedSubnets : local.tailscaleSubnets
@@ -37,7 +52,39 @@ locals {
       }
       inlineManifests : [
         {
-          name : "oidc-groups"
+          name : "proxmox-cloud-controller-manager"
+          contents : data.helm_template.proxmox-ccm.manifest
+        },
+        {
+          name : "talos-cloud-controller-manager"
+          contents : data.helm_template.talos-ccm.manifest
+        },
+        {
+          name : "promxmox-csi-plugin"
+          contents : data.helm_template.proxmox-csi.manifest
+        },
+        {
+          name : "gateway-api-crds"
+          contents : file("${path.module}/manifests/gateway-api-crds.yaml")
+        },
+        {
+          name : "metrics-server"
+          contents : file("${path.module}/manifests/metrics-server.yaml")
+        },
+        {
+          name : "cilium"
+          contents : data.helm_template.cilium.manifest
+        },
+        {
+          name : "envoy"
+          contents : data.helm_template.envoy.manifest
+        },
+        {
+          name : "cert-manager"
+          contents : data.helm_template.cert-manager.manifest
+        },
+        {
+          name : "oidc-admins"
           contents : <<-EOF
             apiVersion: rbac.authorization.k8s.io/v1
             kind: ClusterRoleBinding
@@ -53,31 +100,15 @@ locals {
             apiGroup: rbac.authorization.k8s.io
           EOF
         },
-        {
-          name : "cilium"
-          contents : data.helm_template.cilium.manifest
-        },
-        {
-          name : "promxmox-csi-plugin"
-          contents : data.helm_template.csi.manifest
-        },
-        {
-          name : "proxmox-cloud-controller-manager"
-          contents : data.helm_template.ccm.manifest
-        }
-      ]
-      extraManifests : [
-        "https://raw.githubusercontent.com/alex1989hu/kubelet-serving-cert-approver/v0.8.5/deploy/standalone-install.yaml",
-        "https://github.com/kubernetes-sigs/metrics-server/releases/download/v0.7.1/components.yaml",
       ]
     }
   }
 
   talos_worker_config = {
     cluster : {
-      #externalCloudProvider : {
-      #  enabled : true
-      #}
+      externalCloudProvider : {
+        enabled : true
+      }
       controlPlane : {
         endpoint : local.cluster_endpoint
         localAPIServerPort : var.cluster.api.port
@@ -100,7 +131,7 @@ locals {
       }
       extraArgs : {
         rotate-server-certificates : true
-        #cloud-provider : "external"
+        cloud-provider : "external"
       }
     }
     install = {
@@ -121,6 +152,8 @@ locals {
           "k8s.tjo.cloud/public"  = node.public ? "true" : "false"
           "k8s.tjo.cloud/host"    = node.host
           "k8s.tjo.cloud/proxmox" = var.proxmox.name
+          # TODO: Can we remove this?
+          "node.cloudprovider.kubernetes.io/platform" = "proxmox"
         }
       }
     }),
@@ -192,164 +225,6 @@ data "talos_machine_configuration" "worker" {
   ]
 }
 
-data "helm_template" "cilium" {
-  provider = helm.template
-
-  name       = "cilium"
-  chart      = "cilium"
-  repository = "https://helm.cilium.io/"
-  version    = "1.15.6"
-
-  namespace    = "kube-system"
-  kube_version = var.talos.kubernetes
-  api_versions = [
-    "gateway.networking.k8s.io/v1/GatewayClass",
-  ]
-
-  values = [yamlencode({
-    ipam : {
-      mode : "kubernetes"
-    },
-    nodeIPAM : {
-      enabled : true
-    },
-    kubeProxyReplacement : "true"
-    securityContext : {
-      capabilities : {
-        ciliumAgent : [
-          "CHOWN",
-          "KILL",
-          "NET_ADMIN",
-          "NET_RAW",
-          "IPC_LOCK",
-          "SYS_ADMIN",
-          "SYS_RESOURCE",
-          "DAC_OVERRIDE",
-          "FOWNER",
-          "SETGID",
-          "SETUID"
-        ],
-        cleanCiliumState : [
-          "NET_ADMIN",
-          "SYS_ADMIN",
-          "SYS_RESOURCE"
-        ]
-      }
-    },
-    cgroup : {
-      autoMount : {
-        enabled : false
-      },
-      hostRoot : "/sys/fs/cgroup"
-    },
-    k8sServiceHost : local.cluster_api_domain
-    k8sServicePort : var.cluster.api.port
-    ipv4 : {
-      enabled : true
-    },
-    #ipv6 : {
-    #  enabled : true
-    #},
-    hubble : {
-      tls : {
-        auto : {
-          enabled : true
-          method : "cronJob"
-          schedule : "0 0 1 */4 *"
-        }
-      }
-      ui : {
-        enabled : true
-      }
-      relay : {
-        enabled : true
-      }
-    },
-    gatewayAPI : {
-      enabled : false
-    }
-    envoy : {
-      enabled : false
-    }
-  })]
-}
-
-data "helm_template" "csi" {
-  provider = helm.template
-
-  name       = "proxmox-csi-plugin"
-  chart      = "proxmox-csi-plugin"
-  repository = "oci://ghcr.io/sergelogvinov/charts"
-  version    = "0.2.5"
-
-  namespace    = "kube-system"
-  kube_version = var.talos.kubernetes
-
-  values = [<<-EOF
-    config:
-      clusters:
-        - url: ${var.proxmox.url}
-          insecure: ${var.proxmox.insecure}
-          token_id: "${proxmox_virtual_environment_user_token.csi.id}"
-          token_secret: "${split("=", proxmox_virtual_environment_user_token.csi.value)[1]}"
-          region: "${var.proxmox.name}"
-    storageClass:
-      - name: proxmox
-        storage: local-storage
-        reclaimPolicy: Delete
-        fstype: ext4
-        ssd: true
-        cache: none
-    replicaCount: 1
-    nodeSelector:
-      node-role.kubernetes.io/control-plane: ""
-      node.cloudprovider.kubernetes.io/platform: nocloud
-    tolerations:
-      - key: node-role.kubernetes.io/control-plane
-        effect: NoSchedule
-    node:
-      nodeSelector:
-        node.cloudprovider.kubernetes.io/platform: nocloud
-      tolerations:
-        - operator: Exists
-  EOF
-  ]
-}
-
-data "helm_template" "ccm" {
-  provider = helm.template
-
-  name       = "proxmox-cloud-controller-manager"
-  chart      = "proxmox-cloud-controller-manager"
-  repository = "oci://ghcr.io/sergelogvinov/charts"
-  version    = "0.2.3"
-
-  namespace    = "kube-system"
-  kube_version = var.talos.kubernetes
-
-  values = [<<-EOF
-    affinity:
-      nodeAffinity:
-        requiredDuringSchedulingIgnoredDuringExecution:
-          nodeSelectorTerms:
-            - matchExpressions:
-                - key: node-role.kubernetes.io/control-plane
-                  operator: Exists
-    enabledControllers:
-      - cloud-node-lifecycle
-    config:
-      clusters:
-        - url: ${var.proxmox.url}
-          insecure: ${var.proxmox.insecure}
-          token_id: ${proxmox_virtual_environment_user_token.ccm.id}
-          token_secret: ${split("=", proxmox_virtual_environment_user_token.ccm.value)[1]}
-          region: ${var.proxmox.name}
-  EOF
-  ]
-}
-
 resource "talos_machine_configuration_apply" "controlplane" {
   for_each = { for k, v in local.nodes_with_address : k => v if v.type == "controlplane" }

File diff suppressed because it is too large.