-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathvalues.localdev.yaml
221 lines (186 loc) · 6.76 KB
/
values.localdev.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
# Default values for workbench.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.

### Kubernetes deployment options

# Extra Kubernetes manifests to deploy alongside the chart
extraDeploy: []

controller:
  strategy_type: "RollingUpdate"  # default: RollingUpdate

serviceAccount:
  create: false

# Workbench component images (develop tags for local development)
images:
  webui: "ndslabs/webui:develop"
  apiserver: "ndslabs/apiserver:develop"

# Ingress for the Workbench UI/API itself
ingress:
  class: "nginx"
  host: "kubernetes.docker.internal"
  tls:
    - hosts:
        - "kubernetes.docker.internal"
### Workbench config and customization
config:
  frontend:
    live_reload: false
    support_email: "[email protected]"
    domain: "https://kubernetes.docker.internal"
    analytics_tracking_id: ""
    signin_url: "https://kubernetes.docker.internal/oauth2/start?rd=https%3A%2F%2Fkubernetes.docker.internal%2Fmy-apps"
    signout_url: "https://kubernetes.docker.internal/oauth2/sign_out?rd=https%3A%2F%2Fkubernetes.docker.internal%2F"
  backend:
    # Point at internal mongodb
    mongo:
      uri: "mongodb://workbench:[email protected]:27017/ndslabs?authSource=admin"
      db: "ndslabs"
    # Point at internal Keycloak instance + imported realm
    keycloak:
      hostname: "https://kubernetes.docker.internal/auth"
      realmName: "workbench-dev"
      clientId: "workbench-local"
      clientSecret: ""
    # Define our own domain and config params
    domain: "kubernetes.docker.internal"
    insecure_ssl_verify: "false"  # default: true
    swagger_url: "openapi/swagger-v1.yml"
    namespace: "workbench"
    # Define parameters about the created userapp
    userapps:
      home_storage:
        enabled: true
        storage_class: "nfs"
      shared_storage:
        enabled: false
      ingress:
        annotations:
          ingress.kubernetes.io/ssl-redirect: "true"
          ingress.kubernetes.io/force-ssl-redirect: "true"
          # Auth annotations for Traefik
          #ingress.kubernetes.io/auth-type: forward
          #ingress.kubernetes.io/auth-url: "https://kubernetes.docker.internal/oauth2/auth"
          #ingress.kubernetes.io/signin-url: "https://kubernetes.docker.internal/oauth2/start?rd=https%3A%2F%2Fkubernetes.docker.internal%2Fmy-apps"
          # Auth annotations for NGINX
          nginx.ingress.kubernetes.io/auth-url: "https://kubernetes.docker.internal/oauth2/auth"
          nginx.ingress.kubernetes.io/signin-url: "https://kubernetes.docker.internal/oauth2/start?rd=https%3A%2F%2Fkubernetes.docker.internal%2Fmy-apps"
          nginx.ingress.kubernetes.io/auth-response-headers: "x-auth-request-user, x-auth-request-email, x-auth-request-access-token, x-auth-request-redirect, x-auth-request-preferred-username"
        class: nginx
        tls:
          hosts:
            - kubernetes.docker.internal
            - '*.kubernetes.docker.internal'
      # TODO: Legacy config options (currently ignored)
      timeout: 30
      inactivity_timeout: 480
### Optional dependency subcharts

# Enable this to run an NGINX ingress controller (if you aren't running another ingress controller)
ingress-nginx:
  enabled: true
  controller:
    # If you have an existing TLS secret, you can uncomment this to specify it here
    # Otherwise NGINX will generate a self-signed and use that instead
    #extraArgs:
    #  default-ssl-certificate: workbench/ndslabs-tls
    hostPort:
      enabled: true
# Enable this to use an external NFS server to provision user volumes (e.g. nfs-condo)
nfs-client-provisioner:
  enabled: false  # WARNING: experimental

# Enable this to run a local NFS server (development only)
nfs-server-provisioner:
  enabled: true
  persistence:
    enabled: true
    storageClass: "hostpath"
# Enable this to run a local Keycloak instance (development only)
keycloak:
  enabled: true
  httpRelativePath: "/auth/"
  auth:
    adminUser: "admin"
    adminPassword: "workbench"
  # Honor X-Forwarded-* headers since Keycloak sits behind the NGINX ingress
  proxyAddressForwarding: true
  global:
    storageClass: "hostpath"
  ingress:
    className: nginx
    tls: true
    annotations:
      kubernetes.io/ingress.class: nginx
      # without this, signups (and other large proxy bodies) will fail with a 502
      nginx.ingress.kubernetes.io/proxy-buffer-size: "128k"
    extraTls:
      - hosts:
          - kubernetes.docker.internal
oauth2-proxy:
  serviceAccount:
    create: false
    name: workbench
  # Need to define a custom role and binding to wait-for-keycloak
  initContainers:
    - name: wait-for-keycloak
      image: ghcr.io/groundnuty/k8s-wait-for:v1.6
      imagePullPolicy: Always
      args:
        - "pod"
        - "-lapp.kubernetes.io/component=keycloak"
  extraArgs:
    # Keycloak OIDC config:
    - --provider=keycloak-oidc  # "oidc" works as well, but this gives us roles too
    - --provider-display-name=Workbench Login
    - --redirect-url=https://kubernetes.docker.internal/oauth2/callback
    - --oidc-issuer-url=https://kubernetes.docker.internal/auth/realms/workbench-dev
    - --client-id=workbench-local
    # Authorization config:
    #- --email-domain=illinois.edu
    - --whitelist-domain=.docker.internal  # needed to use the "rd" query string parameter
    - --cookie-domain=.docker.internal  # forward your cookie automatically to subdomains
    #- --cookie-samesite=lax
    - --scope=email profile openid
    - --allowed-role=workbench-user
    # Local Development Only:
    - --insecure-oidc-skip-issuer-verification=true
    - --insecure-oidc-allow-unverified-email=true
    - --ssl-insecure-skip-verify=true
    - --ssl-upstream-insecure-skip-verify=true
    - --force-json-errors=true
  ingress:
    enabled: true
    ingressClassName: nginx
    path: /oauth2/
    # NOTE(review): the oauth2-proxy chart documents this key as "pathType" —
    # verify "pathtype" is actually honored by the chart version in use
    pathtype: Prefix
    hostname: kubernetes.docker.internal
    tls:
      - hosts:
          - kubernetes.docker.internal
mongodb:
  enabled: true
  # Import application specs into MongoDB after install/upgrade
  autoimport:
    enabled: true
    annotations:
      "helm.sh/hook": "post-install,post-upgrade"
      "helm.sh/hook-delete-policy": before-hook-creation
    env:
      - name: FORCE
        value: "true"
      - name: MONGO_URI
        value: "mongodb://workbench:[email protected]:27017/ndslabs?authSource=admin"
      - name: GIT_REPO
        value: "https://github.com/nds-org/ndslabs-specs"
      - name: GIT_BRANCH
        value: "develop"
  architecture: standalone  # WARNING: experimental
  #replicaCount: 3
  auth:
    # replicaSetKey is only used when architecture is "replicaset"
    replicaSetKey: changeme
    rootUser: workbench
    rootPassword: workbench
# TODO: Test AWS + GKE PVs
# NOTE(review): snake_case keys below match the workbench chart's own convention
# (not the Bitnami mongodb subchart) — reconstructed as top-level stanzas; verify
# against the chart templates.
persistent:
  # Values can be "false" for no persistent storage, "aws" for awsElasticBlockStore,
  # or "gce" for gcePersistentDisk
  type: false
  # If using awsElasticBlockStore enter the EBS volume id, if using gcePersistentDisk
  # enter the persistent disk name
  volume_id:

persistence:
  resourcePolicy: keep
  storage_class: "hostpath"
  access_mode: "ReadWriteOnce"  # default: ReadWriteOnce
  size: "1Gi"