Files
buun-stack/airflow/airflow-values.gomplate.yaml
2025-11-23 14:59:47 +09:00

270 lines
6.4 KiB
YAML

# Use the chart's standard resource-naming scheme
useStandardNaming: true
# Quoted so an empty or special-character expansion cannot break YAML parsing
webserverSecretKey: "{{ .Env.AIRFLOW_WEBSERVER_SECRET_KEY }}"
{{- if eq (.Env.AIRFLOW_ENV_SECRETS_EXIST | default "false") "true" }}
# Extra envFrom for all Airflow containers (the chart expects a YAML string, hence `|`)
extraEnvFrom: |
  - secretRef:
      name: airflow-env-secret
{{- end }}
executor: CeleryExecutor
# Custom Airflow configuration (rendered into airflow.cfg sections)
config:
  logging:
    # Uncomment to enable debug logging for troubleshooting
    # logging_level: DEBUG
  scheduler:
    # Minimum seconds between re-parses of the same DAG file
    # (30 is also the Airflow default; kept explicit for visibility)
    min_file_process_interval: 30
    # Number of processes used to parse DAG files in parallel
    parsing_processes: 2
    # Scan the DAG folder for new files every 10 seconds instead of the default 300
    dag_dir_list_interval: 10
apiServer:
  replicas: 1
  # ConfigMap holding the custom API-server config (Keycloak OAuth integration)
  apiServerConfigConfigMapName: airflow-api-server-config
  env:
    # OAuth client credentials come from a pre-created Secret, never from values
    - name: AIRFLOW_OAUTH_CLIENT_ID
      valueFrom:
        secretKeyRef:
          name: airflow-oauth-secret
          key: client_id
    - name: AIRFLOW_OAUTH_CLIENT_SECRET
      valueFrom:
        secretKeyRef:
          name: airflow-oauth-secret
          key: client_secret
    - name: KEYCLOAK_HOST
      value: "{{ .Env.KEYCLOAK_HOST }}"
    - name: KEYCLOAK_REALM
      value: "{{ .Env.KEYCLOAK_REALM }}"
webserver:
  enabled: true
  replicas: 1
# Run the user-creation and DB-migration jobs as plain Jobs (no Helm hooks,
# e.g. for GitOps tooling) and skip the chart's custom env injection
createUserJob:
  useHelmHooks: false
  applyCustomEnv: false
migrateDatabaseJob:
  useHelmHooks: false
  applyCustomEnv: false
images:
  # Wait up to 180s for database migrations before dependent pods start
  migrationsWaitTimeout: 180
# Install additional Python packages at pod startup using an init container
workers:
  extraInitContainers:
    - name: install-packages
      image: apache/airflow:3.0.2
      command:
        - /bin/bash
        - -c
        - |
          pip install --target /opt/airflow/site-packages {{ .Env.AIRFLOW_EXTRA_PACKAGES }}
      volumeMounts:
        - name: extra-packages
          mountPath: /opt/airflow/site-packages
      # Satisfies the "restricted" Pod Security Standard
      securityContext:
        allowPrivilegeEscalation: false
        runAsNonRoot: true
        runAsUser: 1000
        runAsGroup: 0
        seccompProfile:
          type: RuntimeDefault
        capabilities:
          drop:
            - ALL
  extraVolumes:
    - name: extra-packages
      emptyDir: {}
  extraVolumeMounts:
    - name: extra-packages
      mountPath: /opt/airflow/site-packages
  env:
    # Make the init-container-installed packages importable.
    # NOTE(review): Kubernetes only expands $(VAR), so "$PYTHONPATH" stays a
    # literal path segment here — harmless, but confirm this is intentional.
    - name: PYTHONPATH
      value: "/opt/airflow/site-packages:$PYTHONPATH"
  # Override args to fix the Celery worker hostname issue;
  # HOSTNAME is set automatically by Kubernetes for StatefulSet pods
  args:
    - bash
    - -c
    - |
      exec airflow celery worker --celery-hostname=${HOSTNAME}
scheduler:
  # Same package-install init container as the workers, so scheduler-side
  # imports resolve identically
  extraInitContainers:
    - name: install-packages
      image: apache/airflow:3.0.2
      command:
        - /bin/bash
        - -c
        - |
          pip install --target /opt/airflow/site-packages {{ .Env.AIRFLOW_EXTRA_PACKAGES }}
      volumeMounts:
        - name: extra-packages
          mountPath: /opt/airflow/site-packages
      # Satisfies the "restricted" Pod Security Standard
      securityContext:
        allowPrivilegeEscalation: false
        runAsNonRoot: true
        runAsUser: 1000
        runAsGroup: 0
        seccompProfile:
          type: RuntimeDefault
        capabilities:
          drop:
            - ALL
  extraVolumes:
    - name: extra-packages
      emptyDir: {}
  extraVolumeMounts:
    - name: extra-packages
      mountPath: /opt/airflow/site-packages
  env:
    # Make the init-container-installed packages importable
    - name: PYTHONPATH
      value: "/opt/airflow/site-packages:$PYTHONPATH"
dagProcessor:
  # Same package-install init container as workers/scheduler, so DAG parsing
  # sees the same extra packages
  extraInitContainers:
    - name: install-packages
      image: apache/airflow:3.0.2
      command:
        - /bin/bash
        - -c
        - |
          pip install --target /opt/airflow/site-packages {{ .Env.AIRFLOW_EXTRA_PACKAGES }}
      volumeMounts:
        - name: extra-packages
          mountPath: /opt/airflow/site-packages
      # Satisfies the "restricted" Pod Security Standard
      securityContext:
        allowPrivilegeEscalation: false
        runAsNonRoot: true
        runAsUser: 1000
        runAsGroup: 0
        seccompProfile:
          type: RuntimeDefault
        capabilities:
          drop:
            - ALL
  extraVolumes:
    - name: extra-packages
      emptyDir: {}
  extraVolumeMounts:
    - name: extra-packages
      mountPath: /opt/airflow/site-packages
  env:
    # Make the init-container-installed packages importable
    - name: PYTHONPATH
      value: "/opt/airflow/site-packages:$PYTHONPATH"
# Flower (Celery monitoring UI) is not deployed
flower:
  enabled: false
# StatsD configuration with Prometheus exporter
statsd:
  enabled: true
  # 65534 = nobody/nogroup, matching the exporter image's unprivileged user
  securityContexts:
    pod:
      runAsNonRoot: true
      runAsUser: 65534
      runAsGroup: 65534
      fsGroup: 65534
      seccompProfile:
        type: RuntimeDefault
    container:
      allowPrivilegeEscalation: false
      runAsNonRoot: true
      runAsUser: 65534
      runAsGroup: 65534
      seccompProfile:
        type: RuntimeDefault
      capabilities:
        drop:
          - ALL
{{- /* Match the file's other flags: a bare truthiness test would treat the
       string "false" as enabled, so compare against "true" explicitly */}}
{{- if eq (.Env.MONITORING_ENABLED | default "false") "true" }}
# Prometheus metrics configuration
metrics:
  enabled: true
  serviceMonitor:
    enabled: true
    interval: 30s
    # Must match the Prometheus operator's ServiceMonitor selector
    selector:
      release: kube-prometheus-stack
{{- end }}
# Redis security context for restricted Pod Security Standard
redis:
  securityContexts:
    pod:
      runAsNonRoot: true
      runAsUser: 999
      runAsGroup: 999
      fsGroup: 999
      seccompProfile:
        type: RuntimeDefault
    container:
      allowPrivilegeEscalation: false
      runAsNonRoot: true
      runAsUser: 999
      runAsGroup: 999
      seccompProfile:
        type: RuntimeDefault
      capabilities:
        drop:
          - ALL
# The chart's bundled PostgreSQL is disabled; an external metadata DB is used
postgresql:
  enabled: false
data:
  # Secret containing the external metadata-database connection details
  metadataSecretName: airflow-metadata-connection
# DAG persistence configuration
dags:
  persistence:
    # Rendered unquoted on purpose: the chart expects a YAML boolean here
    enabled: {{ .Env.AIRFLOW_DAGS_PERSISTENCE_ENABLED | default "true" }}
    {{- if eq (.Env.AIRFLOW_DAGS_STORAGE_TYPE | default "default") "nfs" }}
    existingClaim: airflow-dags-nfs-pvc
    {{- else }}
    existingClaim: airflow-dags-pvc
    {{- end }}
ingress:
  apiServer:
    enabled: true
    annotations:
      # Legacy class annotation kept alongside ingressClassName for older tooling
      kubernetes.io/ingress.class: traefik
      traefik.ingress.kubernetes.io/router.entrypoints: websecure
    ingressClassName: traefik
    hosts:
      # Quoted so the rendered hostname is always parsed as a string
      - name: "{{ .Env.AIRFLOW_HOST }}"
        tls:
          enabled: true
# Security contexts for restricted Pod Security Standard.
# Also compatible with shared file system access (JupyterHub) via fsGroup 101.
securityContexts:
  pod:
    runAsNonRoot: true
    runAsUser: 1000
    runAsGroup: 0
    fsGroup: 101
    seccompProfile:
      type: RuntimeDefault
  container:
    allowPrivilegeEscalation: false
    runAsNonRoot: true
    runAsUser: 1000
    runAsGroup: 0
    seccompProfile:
      type: RuntimeDefault
    capabilities:
      drop:
        - ALL