feat: working k8s
Some checks are pending
/ lint (push) Waiting to run

This commit is contained in:
Tine 2024-12-22 13:32:22 +01:00
parent 840c9c1429
commit 784d40a30d
Signed by: mentos1386
SSH key fingerprint: SHA256:MNtTsLbihYaWF8j1fkOHfkKNlnN1JQfxEU/rBU8nCGw
10 changed files with 221 additions and 111 deletions

26
k8s.tjo.cloud/kubeconfig Executable file
View file

@ -0,0 +1,26 @@
# kubeconfig for the k8s-tjo-cloud cluster.
# No static credentials are stored here: the user entry delegates
# authentication to an OIDC provider via the kubelogin exec plugin
# ("kubectl oidc-login"), so a valid token is fetched on each use.
apiVersion: v1
kind: Config
clusters:
  - name: k8s-tjo-cloud
    cluster:
      server: https://api.internal.k8s.tjo.cloud:6443
      certificate-authority-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJpekNDQVRDZ0F3SUJBZ0lSQUpUTlZleXdmU3N0WVh0YTFkU0NjWVF3Q2dZSUtvWkl6ajBFQXdJd0ZURVQKTUJFR0ExVUVDaE1LYTNWaVpYSnVaWFJsY3pBZUZ3MHlOREV5TWpJd09UVXhNakphRncwek5ERXlNakF3T1RVeApNakphTUJVeEV6QVJCZ05WQkFvVENtdDFZbVZ5Ym1WMFpYTXdXVEFUQmdjcWhrak9QUUlCQmdncWhrak9QUU1CCkJ3TkNBQVIxSS9NQUVmZmkrZzcvaUVPc1lhbVRpc1dzTVBlWCtKc0VWN21DV0o5YkEyMUVSck84eU9EcWVTdDkKWlpJekZ4ZVRoM0hEU1NGWFJURU9mSm03TytoWG8yRXdYekFPQmdOVkhROEJBZjhFQkFNQ0FvUXdIUVlEVlIwbApCQll3RkFZSUt3WUJCUVVIQXdFR0NDc0dBUVVGQndNQ01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0hRWURWUjBPCkJCWUVGT3hEZHpIckVuSVBDa3Rucnc2bjdZZHFXNHgrTUFvR0NDcUdTTTQ5QkFNQ0Ewa0FNRVlDSVFDbDFQZkYKUzQwUXdxSndTQVh5R29sVU9kRmRBeHd6UnhyZkY1OGVXd3k5VkFJaEFPYnhrYVl0bzBHTUZPK1VZaUNObG9pVgp0UmhpUEZOVHEwaG9Oa2ppaSt3WgotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
contexts:
  - name: oidc@k8s-tjo-cloud
    context:
      cluster: k8s-tjo-cloud
      namespace: default
      user: oidc
current-context: oidc@k8s-tjo-cloud
users:
  - name: oidc
    user:
      exec:
        apiVersion: client.authentication.k8s.io/v1beta1
        # Runs "kubectl oidc-login get-token" (kubelogin plugin) to obtain
        # an ID token from the issuer below; requires the plugin installed.
        command: kubectl
        args:
          - oidc-login
          - get-token
          - --oidc-issuer-url=https://id.tjo.space/application/o/k8stjocloud/
          - --oidc-client-id=HAI6rW0EWtgmSPGKAJ3XXzubQTUut2GMeTRS2spg
          - --oidc-extra-scope=profile

View file

@ -21,12 +21,16 @@ module "cluster" {
issuer_url = var.oidc_issuer_url
}
pod_cidr = {
ipv4 = "10.0.240.0/21"
ipv6 = "fd74:6a6f:0:f000::/53"
ipv4 = "10.0.240.0/22"
ipv6 = "fd74:6a6f:0:f000::/54"
}
service_cidr = {
ipv4 = "10.0.244.0/22"
ipv6 = "fd74:6a6f:0:f400::/108"
}
load_balancer_cidr = {
ipv4 = "10.0.248.0/22"
ipv6 = "fd74:6a6f:0:f800::/108"
ipv6 = "fd74:6a6f:0:f800::/54"
}
}
@ -54,8 +58,24 @@ module "cluster" {
cores = 4
memory = 4096
}
nevaroo-2 = {
id = 6003
type = "worker"
host = "nevaroo"
storage = "local-nvme-lvm"
cores = 4
memory = 4096
}
mustafar-1 = {
id = 6000
type = "controlplane"
host = "mustafar"
storage = "local"
cores = 4
memory = 4096
}
mustafar-2 = {
id = 6002
type = "worker"
host = "mustafar"
storage = "local"

View file

@ -2,7 +2,7 @@ resource "helm_release" "external-dns-privileged" {
name = "external-dns-privileged"
chart = "external-dns"
repository = "https://kubernetes-sigs.github.io/external-dns/"
version = "v1.14.5"
version = "v1.15.0"
namespace = kubernetes_namespace.tjo-cloud.metadata[0].name
values = [<<-EOF
@ -16,6 +16,10 @@ resource "helm_release" "external-dns-privileged" {
sources:
- ingress
- service
- gateway-httproute
- gateway-grpcroute
- gateway-tlsroute
- gateway-tcproute
domainFilters:
- k8s.tjo.cloud
- internal.k8s.tjo.cloud
@ -27,7 +31,7 @@ resource "helm_release" "external-dns-user-content" {
name = "external-dns-user-content"
chart = "external-dns"
repository = "https://kubernetes-sigs.github.io/external-dns/"
version = "v1.14.5"
version = "v1.15.0"
namespace = kubernetes_namespace.tjo-cloud.metadata[0].name
values = [<<-EOF
@ -41,6 +45,10 @@ resource "helm_release" "external-dns-user-content" {
sources:
- ingress
- service
- gateway-httproute
- gateway-grpcroute
- gateway-tlsroute
- gateway-tcproute
domainFilters:
- user-content.tjo.cloud
EOF

View file

@ -49,25 +49,10 @@ resource "kubernetes_manifest" "gateway_class_config" {
type = "Kubernetes"
kubernetes = {
envoyService = {
type = "ClusterIP"
externalTrafficPolicy = "Local"
annotations = {
"external-dns.alpha.kubernetes.io/internal-hostname" = "envoy.internal.k8s.tjo.cloud"
}
}
envoyDaemonSet = {
pod = {
nodeSelector = {
"node-role.kubernetes.io/control-plane" = ""
}
tolerations = [
{
key = "node-role.kubernetes.io/control-plane"
effect = "NoSchedule"
}
]
}
}
}
}
}
@ -109,7 +94,7 @@ resource "kubernetes_manifest" "gateway" {
listeners = [
{
name = "http"
hostname = "*.${var.cluster_name}.${var.cluster_domain}"
hostname = "*.${var.cluster_domain}"
protocol = "HTTPS"
port = 443
allowedRoutes = {
@ -130,3 +115,22 @@ resource "kubernetes_manifest" "gateway" {
}
}
}
# Envoy Gateway ClientTrafficPolicy attached to the cluster Gateway,
# controlling whether downstream connections are expected to carry the
# PROXY protocol header.
resource "kubernetes_manifest" "enable-proxy-protocol-policy" {
  manifest = {
    apiVersion = "gateway.envoyproxy.io/v1alpha1"
    kind       = "ClientTrafficPolicy"
    metadata = {
      name      = "enable-proxy-protocol-policy"
      namespace = kubernetes_namespace.tjo-cloud.metadata[0].name
    }
    spec = {
      # Bind the policy to the Gateway managed elsewhere in this module.
      targetRef = {
        group = "gateway.networking.k8s.io"
        kind  = "Gateway"
        name  = kubernetes_manifest.gateway.object.metadata.name
      }
      # NOTE(review): the resource is named "enable-proxy-protocol-policy"
      # but explicitly sets enableProxyProtocol = false — confirm whether
      # disabling PROXY protocol is intended here.
      enableProxyProtocol = false
    }
  }
}

View file

@ -0,0 +1,34 @@
# NATS messaging deployment (official Helm chart) with clustering and
# JetStream persistence enabled, exposed externally via a LoadBalancer
# Service and spread across nodes by hostname.
resource "helm_release" "nats" {
  name            = "nats"
  repository      = "https://nats-io.github.io/k8s/helm/charts/"
  chart           = "nats"
  version         = "1.2.8"
  namespace       = kubernetes_namespace.tjo-cloud.metadata[0].name
  atomic          = true
  cleanup_on_fail = true

  values = [<<-EOF
    config:
      cluster:
        enabled: true
        # NOTE(review): 2 replicas leaves no quorum margin for JetStream
        # raft groups — confirm whether 3 was intended.
        replicas: 2
      jetstream:
        enabled: true
        fileStore:
          pvc:
            size: 10Gi
    # Spread pods across distinct nodes; DoNotSchedule makes this a hard
    # constraint rather than best-effort.
    podTemplate:
      topologySpreadConstraints:
        kubernetes.io/hostname:
          maxSkew: 1
          whenUnsatisfiable: DoNotSchedule
    service:
      merge:
        spec:
          type: LoadBalancer
    EOF
  ]
}

View file

@ -2,7 +2,7 @@ resource "helm_release" "cert-manager" {
name = "cert-manager"
chart = "cert-manager"
repository = "https://charts.jetstack.io"
version = "v1.15.1"
version = "v1.16.2"
namespace = "kube-system"
atomic = true
cleanup_on_fail = true
@ -11,8 +11,10 @@ resource "helm_release" "cert-manager" {
crds:
enabled: true
extraArgs:
- --enable-gateway-api
config:
apiVersion: controller.config.cert-manager.io/v1alpha1
kind: ControllerConfiguration
enableGatewayAPI: true
EOF
]
}
@ -21,7 +23,7 @@ resource "helm_release" "envoy" {
name = "envoy"
chart = "gateway-helm"
repository = "oci://docker.io/envoyproxy"
version = "v1.1.0"
version = "v1.2.4"
namespace = "kube-system"
atomic = true
cleanup_on_fail = true
@ -31,7 +33,7 @@ resource "helm_release" "metrics-server" {
name = "metrics-server"
chart = "metrics-server"
repository = "https://kubernetes-sigs.github.io/metrics-server/"
version = "3.11.0"
version = "3.12.2"
namespace = "kube-system"
atomic = true
cleanup_on_fail = true

View file

@ -46,10 +46,12 @@ resource "helm_release" "kube-state-metrics" {
resource "helm_release" "monitoring" {
depends_on = [kubernetes_manifest.prometheus-pod-monitors, kubernetes_manifest.prometheus-service-monitors]
count = 0
name = "monitoring"
chart = "k8s-monitoring"
repository = "https://grafana.github.io/helm-charts"
version = "1.4.6"
version = "2.0.0-rc.10"
namespace = kubernetes_namespace.monitoring-system.metadata[0].name
atomic = true
cleanup_on_fail = true
@ -58,73 +60,52 @@ resource "helm_release" "monitoring" {
cluster:
name: "${var.cluster_name}"
prometheus-operator-crds:
enabled: false
prometheus-node-exporter:
enabled: true
kube-state-metrics:
enabled: false
opencost:
enabled: false
metrics:
enabled: true
serviceMonitors:
enabled: true
probes:
enabled: true
podMonitors:
enabled: true
node-exporter:
enabled: true
kubelet:
enabled: true
kube-state-metrics:
enabled: true
cost:
enabled: false
apiserver:
enabled: true
autoDiscover:
enabled: true
cadvisor:
enabled: true
kubeControllerManager:
enabled: true
kubeScheduler:
clusterMetrics:
enabled: true
logs:
clusterEvents:
enabled: true
profiles:
enabled: false
podLogs:
enabled: true
receivers:
deployGrafanaAgentService: false
nodeLogs:
enabled: true
externalServices:
prometheus:
host: "https://prometheus.monitor.tjo.cloud"
writeEndpoint: "/api/v1/write"
authMode: "oauth2"
prometheusOperatorObjects:
enabled: true
annotationAutodiscovery:
enabled: true
alloy-logs:
enabled: true
alloy-metrics:
enabled: true
alloy-singleton:
enabled: true
destinations:
- name: monitor-tjo-cloud
type: otlp
url: "grpc.otel.monitor.tjo.cloud:443"
auth:
type: oauth2
oauth2:
tokenURL: "https://id.tjo.space/application/o/token/"
clientId: "o6Tz2215HLvhvZ4RCZCR8oMmCapTu30iwkoMkz6m"
clientSecretFile: "/var/run/secrets/kubernetes.io/serviceaccount/token"
endpointParams:
grant_type: "client_credentials"
client_assertion_type: "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"
loki:
host: "https://loki.monitor.tjo.cloud"
authMode: "oauth2"
oauth2:
tokenURL: "https://id.tjo.space/application/o/token/"
clientId: "56TYXtgg7QwLjh4lPl1PTu3C4iExOvO1d6b15WuC"
clientSecretFile: "/var/run/secrets/kubernetes.io/serviceaccount/token"
endpointParams:
grant_type: "client_credentials"
client_assertion_type: "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"
grant_type:
- "client_credentials"
client_assertion_type:
- "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"
logs:
enabled: true
metrics:
enabled: true
traces:
enabled: false
EOF
]
}

View file

@ -15,6 +15,8 @@ data "helm_template" "cilium" {
operator:
priorityClassName: "system-cluster-critical"
prometheus:
enabled: true
routingMode: "native"
autoDirectNodeRoutes: true
@ -23,6 +25,9 @@ data "helm_template" "cilium" {
bgpControlPlane:
enabled: true
bpf:
datapathMode: netkit
ipv4:
enabled: true
enableIPv4Masquerade: false
@ -63,11 +68,21 @@ data "helm_template" "cilium" {
k8sServiceHost: localhost
k8sServicePort: 7445
prometheus:
enabled: true
hubble:
ui:
enabled: false
enabled: true
relay:
enabled: false
enabled: true
tls:
auto:
enabled: true
method: cronJob
certValidityDuration: 1095
schedule: "0 0 1 */4 *"
gatewayAPI:
enabled: false
envoy:

View file

@ -58,6 +58,23 @@ locals {
name = "gateway-api-crds"
contents = file("${path.module}/manifests/gateway-api.crds.yaml")
},
{
name = "oidc-admins"
contents = <<-EOF
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: id-tjo-space:admins
subjects:
- kind: Group
name: oidc:groups:k8s.tjo.cloud admin
apiGroup: rbac.authorization.k8s.io
roleRef:
kind: ClusterRole
name: cluster-admin
apiGroup: rbac.authorization.k8s.io
EOF
},
{
name = "cilium"
contents = data.helm_template.cilium.manifest
@ -77,9 +94,11 @@ locals {
- advertisementType: "Service"
service:
addresses:
- ClusterIP
- ExternalIP
- LoadBalancerIP
selector:
matchExpressions:
- {key: somekey, operator: NotIn, values: ['never-used-value']} # match all services
EOF
},
{
@ -104,20 +123,16 @@ locals {
EOF
},
{
name = "oidc-admins"
name = "cilium-load-balancer-ip-pool"
contents = <<-EOF
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
apiVersion: cilium.io/v2alpha1
kind: CiliumLoadBalancerIPPool
metadata:
name: id-tjo-space:admins
subjects:
- kind: Group
name: oidc:groups:k8s.tjo.cloud admin
apiGroup: rbac.authorization.k8s.io
roleRef:
kind: ClusterRole
name: cluster-admin
apiGroup: rbac.authorization.k8s.io
name: default
spec:
blocks:
- cidr: "${var.cluster.load_balancer_cidr.ipv4}"
- cidr: "${var.cluster.load_balancer_cidr.ipv6}"
EOF
},
],
@ -183,12 +198,13 @@ locals {
image = "factory.talos.dev/installer/${var.talos.schematic_id}:${var.talos.version}"
disk = "/dev/vda"
}
#features = {
# hostDNS = {
# enabled = true
# forwardKubeDNSToHost = false
# }
#}
features = {
hostDNS = {
enabled = false
resolveMemberNames = false
forwardKubeDNSToHost = false
}
}
}
}

View file

@ -61,6 +61,10 @@ variable "cluster" {
ipv4 = string
ipv6 = string
})
load_balancer_cidr = object({
ipv4 = string
ipv6 = string
})
})
}