From ef45c032b24708f2586d73503133d241133cbf26 Mon Sep 17 00:00:00 2001 From: adam Date: Mon, 19 Aug 2024 12:06:40 -0700 Subject: [PATCH 001/567] Nublado 7 / TMPDIR on /tmp (==tmpfs) --- applications/nublado/Chart.yaml | 2 +- applications/nublado/values-idfdemo.yaml | 1 + applications/nublado/values-idfdev.yaml | 1 + applications/nublado/values-idfint.yaml | 1 + applications/nublado/values-idfprod.yaml | 1 + 5 files changed, 5 insertions(+), 1 deletion(-) diff --git a/applications/nublado/Chart.yaml b/applications/nublado/Chart.yaml index 554c445ffd..1b0e3dad86 100644 --- a/applications/nublado/Chart.yaml +++ b/applications/nublado/Chart.yaml @@ -5,7 +5,7 @@ description: JupyterHub and custom spawner for the Rubin Science Platform sources: - https://github.com/lsst-sqre/nublado home: https://nublado.lsst.io/ -appVersion: 6.3.0 +appVersion: 7.0.0 dependencies: - name: jupyterhub diff --git a/applications/nublado/values-idfdemo.yaml b/applications/nublado/values-idfdemo.yaml index c36c3b5df1..49ea663521 100644 --- a/applications/nublado/values-idfdemo.yaml +++ b/applications/nublado/values-idfdemo.yaml @@ -23,6 +23,7 @@ controller: DAF_BUTLER_REPOSITORY_INDEX: "https://demo.lsst.cloud/api/butler/configs/idf-repositories.yaml" GOOGLE_APPLICATION_CREDENTIALS: "/opt/lsst/software/jupyterlab/secrets/butler-gcs-idf-creds.json" S3_ENDPOINT_URL: "https://storage.googleapis.com" + TMPDIR: "/tmp" initContainers: - name: "inithome" image: diff --git a/applications/nublado/values-idfdev.yaml b/applications/nublado/values-idfdev.yaml index 6432ec3fa2..94760a4c94 100644 --- a/applications/nublado/values-idfdev.yaml +++ b/applications/nublado/values-idfdev.yaml @@ -23,6 +23,7 @@ controller: DAF_BUTLER_REPOSITORY_INDEX: "https://data-dev.lsst.cloud/api/butler/configs/idf-repositories.yaml" GOOGLE_APPLICATION_CREDENTIALS: "/opt/lsst/software/jupyterlab/secrets/butler-gcs-idf-creds.json" S3_ENDPOINT_URL: "https://storage.googleapis.com" + TMPDIR: "/tmp" initContainers: - name: 
"inithome" image: diff --git a/applications/nublado/values-idfint.yaml b/applications/nublado/values-idfint.yaml index 0ce19538b5..74deab6857 100644 --- a/applications/nublado/values-idfint.yaml +++ b/applications/nublado/values-idfint.yaml @@ -33,6 +33,7 @@ controller: PANDAMON_URL: "https://usdf-panda-bigmon.slac.stanford.edu:8443/" PANDA_CONFIG_ROOT: "~" CULL_TERMINAL_INACTIVE_TIMEOUT: "432000" # 5 days + TMPDIR: "/tmp" initContainers: - name: "inithome" image: diff --git a/applications/nublado/values-idfprod.yaml b/applications/nublado/values-idfprod.yaml index 2f64319a2f..85d9196fef 100644 --- a/applications/nublado/values-idfprod.yaml +++ b/applications/nublado/values-idfprod.yaml @@ -18,6 +18,7 @@ controller: DAF_BUTLER_REPOSITORY_INDEX: "https://data.lsst.cloud/api/butler/configs/idf-repositories.yaml" S3_ENDPOINT_URL: "https://storage.googleapis.com" CULL_TERMINAL_INACTIVE_TIMEOUT: "432000" # 5 days + TMPDIR: "/tmp" initContainers: - name: "inithome" image: From 218a12f1f9a12e72504d9df22ebcff6acb1ddefb Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 19:12:04 +0000 Subject: [PATCH 002/567] chore(deps): update helm release strimzi-kafka-operator to v0.43.0 --- applications/strimzi/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/strimzi/Chart.yaml b/applications/strimzi/Chart.yaml index 342761cc8b..fc8ddc5460 100644 --- a/applications/strimzi/Chart.yaml +++ b/applications/strimzi/Chart.yaml @@ -7,5 +7,5 @@ home: https://strimzi.io appVersion: "0.39.0" dependencies: - name: strimzi-kafka-operator - version: "0.42.0" + version: "0.43.0" repository: https://strimzi.io/charts/ From 0350baa551eedad75aa6aac2b9556f2eeb30bcbf Mon Sep 17 00:00:00 2001 From: Dan Fuchs Date: Mon, 26 Aug 2024 13:56:50 -0500 Subject: [PATCH 003/567] DM-44635: `appmetrics` Kafka/Sasquatch user MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 
8bit Password is in 1pass: RSP data-dev.lsst.cloud/sasquatch/appmetrics-password Test: ``` ❯ echo "blah" | kcat -b sasquatch-dev-kafka-bootstrap.lsst.cloud:9094 -X security.protocol=SASL_SSL -X sasl.mechanism=SCRAM-SHA-512 -X sasl.username=appmetrics -X sasl.password=$KAFKA_PASSWORD -P -t lsst.square.metrics.dfuchs-test ❯ kcat -b sasquatch-dev-kafka-bootstrap.lsst.cloud:9094 -X security.protocol=SASL_SSL -X sasl.mechanism=SCRAM-SHA-512 -X sasl.username=appmetrics -X sasl.password=$KAFKA_PASSWORD -P -t lsst.square.metrics.dfuchs-test -C -o 0blah ``` --- .../charts/strimzi-kafka/templates/users.yaml | 38 +++++++++++++++++++ applications/sasquatch/secrets.yaml | 4 ++ applications/sasquatch/values-idfdev.yaml | 8 ++++ 3 files changed, 50 insertions(+) diff --git a/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml b/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml index 5b30f2a6a3..75b9433255 100644 --- a/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml @@ -300,3 +300,41 @@ spec: host: "*" operation: All {{- end }} +{{- if .Values.users.appmetrics.enabled }} +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaUser +metadata: + name: appmetrics + labels: + strimzi.io/cluster: {{ .Values.cluster.name }} +spec: + authentication: + type: scram-sha-512 + password: + valueFrom: + secretKeyRef: + name: sasquatch + key: appmetrics-password + authorization: + type: simple + acls: + - resource: + type: group + name: "*" + patternType: literal + operation: All + - resource: + type: topic + name: "lsst.square.metrics" + patternType: prefix + type: allow + host: "*" + operation: All + - resource: + type: cluster + operations: + - Describe + - DescribeConfigs + # TODO: Any quotas needed? 
+{{- end }} diff --git a/applications/sasquatch/secrets.yaml b/applications/sasquatch/secrets.yaml index 2a19674f17..8634cb3df1 100644 --- a/applications/sasquatch/secrets.yaml +++ b/applications/sasquatch/secrets.yaml @@ -69,6 +69,10 @@ ts-salkafka-password: description: >- ts-salkafka KafkaUser password. if: strimzi-kafka.users.tsSalKafka.enabled +appmetrics-password: + description: >- + appmetrics KafkaUser password. + if: strimzi-kafka.users.appmetrics.enabled connect-push-secret: description: >- Write token for pushing generated Strimzi Kafka Connect image to GitHub Container Registry. diff --git a/applications/sasquatch/values-idfdev.yaml b/applications/sasquatch/values-idfdev.yaml index 6519b85afb..4db585f4d5 100644 --- a/applications/sasquatch/values-idfdev.yaml +++ b/applications/sasquatch/values-idfdev.yaml @@ -32,6 +32,8 @@ strimzi-kafka: enabled: true kafkaConnectManager: enabled: true + appmetrics: + enabled: true kraft: enabled: true kafkaController: @@ -73,6 +75,12 @@ telegraf-kafka-consumer: replicaCount: 1 topicRegexps: | [ "lsst.Test.*" ] + appmetrics: + enabled: true + database: "metrics" + replicaCount: 1 + topicRegexps: | + [ "lsst.square.metrics.*" ] kafdrop: cmdArgs: "--message.format=AVRO --topic.deleteEnabled=true --topic.createEnabled=true" From f991ca276298e1ad2fb7210fd35f9cd657f7b710 Mon Sep 17 00:00:00 2001 From: Stelios Voutsinas Date: Mon, 26 Aug 2024 12:05:26 -0700 Subject: [PATCH 004/567] Enable cadcBaseUuid in gafaelfawr for roe environment --- applications/gafaelfawr/values-roe.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/applications/gafaelfawr/values-roe.yaml b/applications/gafaelfawr/values-roe.yaml index f53b9e0ead..f3914a1d96 100644 --- a/applications/gafaelfawr/values-roe.yaml +++ b/applications/gafaelfawr/values-roe.yaml @@ -8,6 +8,9 @@ config: github: clientId: "10172b4db1b67ee31620" + # Support generating user metadata for CADC authentication code. 
+ cadcBaseUuid: "4cb5f948-aad9-466c-837b-5eae565b0a77" + # Allow access by GitHub team. groupMapping: "exec:admin": From c4cb5f4587650f6ed0ad1dca4071b44f9f7f00a8 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 27 Aug 2024 11:56:38 -0700 Subject: [PATCH 005/567] Add default flag for appmetrics kafka user --- applications/sasquatch/README.md | 1 + applications/sasquatch/charts/strimzi-kafka/README.md | 1 + applications/sasquatch/charts/strimzi-kafka/values.yaml | 5 +++++ 3 files changed, 7 insertions(+) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index dc8b8488d3..b8a250eace 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -387,6 +387,7 @@ Rubin Observatory's telemetry service | strimzi-kafka.registry.resources | object | See `values.yaml` | Kubernetes requests and limits for the Schema Registry | | strimzi-kafka.registry.schemaTopic | string | `"registry-schemas"` | Name of the topic used by the Schema Registry | | strimzi-kafka.superusers | list | `["kafka-admin"]` | A list of usernames for users who should have global admin permissions. These users will be created, along with their credentials. | +| strimzi-kafka.users.appmetrics.enabled | bool | `false` | Enable user appmetrics | | strimzi-kafka.users.camera.enabled | bool | `false` | Enable user camera, used at the camera environments | | strimzi-kafka.users.consdb.enabled | bool | `false` | Enable user consdb | | strimzi-kafka.users.kafdrop.enabled | bool | `false` | Enable user Kafdrop (deployed by parent Sasquatch chart). | diff --git a/applications/sasquatch/charts/strimzi-kafka/README.md b/applications/sasquatch/charts/strimzi-kafka/README.md index ce4efaea25..4e844c02a3 100644 --- a/applications/sasquatch/charts/strimzi-kafka/README.md +++ b/applications/sasquatch/charts/strimzi-kafka/README.md @@ -65,6 +65,7 @@ A subchart to deploy Strimzi Kafka components for Sasquatch. 
| registry.resources | object | See `values.yaml` | Kubernetes requests and limits for the Schema Registry | | registry.schemaTopic | string | `"registry-schemas"` | Name of the topic used by the Schema Registry | | superusers | list | `["kafka-admin"]` | A list of usernames for users who should have global admin permissions. These users will be created, along with their credentials. | +| users.appmetrics.enabled | bool | `false` | Enable user appmetrics | | users.camera.enabled | bool | `false` | Enable user camera, used at the camera environments | | users.consdb.enabled | bool | `false` | Enable user consdb | | users.kafdrop.enabled | bool | `false` | Enable user Kafdrop (deployed by parent Sasquatch chart). | diff --git a/applications/sasquatch/charts/strimzi-kafka/values.yaml b/applications/sasquatch/charts/strimzi-kafka/values.yaml index 2ae8501f6f..f43fd60e4c 100644 --- a/applications/sasquatch/charts/strimzi-kafka/values.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/values.yaml @@ -285,6 +285,11 @@ users: # -- Enable user consdb enabled: false + appmetrics: + # -- Enable user appmetrics + enabled: false + + mirrormaker2: # -- Enable replication in the target (passive) cluster enabled: false From 136d663d256e04907e9d8b53b2a668ec8cc4ef1c Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 27 Aug 2024 11:06:18 -0700 Subject: [PATCH 006/567] Upgrade Kafka to version 3.8.0 --- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/strimzi-kafka/README.md | 2 +- applications/sasquatch/charts/strimzi-kafka/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index b8a250eace..3e53b27e34 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -363,7 +363,7 @@ Rubin Observatory's telemetry service | strimzi-kafka.kafka.storage.size | string | `"500Gi"` | Size of the backing storage disk for each of the Kafka 
brokers | | strimzi-kafka.kafka.storage.storageClassName | string | `""` | Name of a StorageClass to use when requesting persistent volumes | | strimzi-kafka.kafka.tolerations | list | `[]` | Tolerations for Kafka broker pod assignment | -| strimzi-kafka.kafka.version | string | `"3.7.1"` | Version of Kafka to deploy | +| strimzi-kafka.kafka.version | string | `"3.8.0"` | Version of Kafka to deploy | | strimzi-kafka.kafkaController.enabled | bool | `false` | Enable Kafka Controller | | strimzi-kafka.kafkaController.resources | object | See `values.yaml` | Kubernetes requests and limits for the Kafka Controller | | strimzi-kafka.kafkaController.storage.size | string | `"20Gi"` | Size of the backing storage disk for each of the Kafka controllers | diff --git a/applications/sasquatch/charts/strimzi-kafka/README.md b/applications/sasquatch/charts/strimzi-kafka/README.md index 4e844c02a3..fd425d5279 100644 --- a/applications/sasquatch/charts/strimzi-kafka/README.md +++ b/applications/sasquatch/charts/strimzi-kafka/README.md @@ -41,7 +41,7 @@ A subchart to deploy Strimzi Kafka components for Sasquatch. 
| kafka.storage.size | string | `"500Gi"` | Size of the backing storage disk for each of the Kafka brokers | | kafka.storage.storageClassName | string | `""` | Name of a StorageClass to use when requesting persistent volumes | | kafka.tolerations | list | `[]` | Tolerations for Kafka broker pod assignment | -| kafka.version | string | `"3.7.1"` | Version of Kafka to deploy | +| kafka.version | string | `"3.8.0"` | Version of Kafka to deploy | | kafkaController.enabled | bool | `false` | Enable Kafka Controller | | kafkaController.resources | object | See `values.yaml` | Kubernetes requests and limits for the Kafka Controller | | kafkaController.storage.size | string | `"20Gi"` | Size of the backing storage disk for each of the Kafka controllers | diff --git a/applications/sasquatch/charts/strimzi-kafka/values.yaml b/applications/sasquatch/charts/strimzi-kafka/values.yaml index f43fd60e4c..fa0deaa57b 100644 --- a/applications/sasquatch/charts/strimzi-kafka/values.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/values.yaml @@ -11,7 +11,7 @@ cluster: kafka: # -- Version of Kafka to deploy - version: "3.7.1" + version: "3.8.0" # -- Number of Kafka broker replicas to run replicas: 3 From d86df1c94ef7a1c2d03fac663b86d4a85b030d07 Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Tue, 27 Aug 2024 14:07:13 -0700 Subject: [PATCH 007/567] Increase Butler memory limit Butler server's memory limit was set too low previously -- it could easily result in an out-of-memory kill with just a couple in-flight requests. 
--- applications/butler/values.yaml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/applications/butler/values.yaml b/applications/butler/values.yaml index c59779c6a5..81264a34cf 100644 --- a/applications/butler/values.yaml +++ b/applications/butler/values.yaml @@ -41,10 +41,15 @@ podAnnotations: {} resources: limits: cpu: "1" - memory: "324Mi" + # Worst case peak usage for a single container would be something like all + # 40 threads in the thread pool running large queries costing ~35MB each. + memory: "1.5Gi" requests: cpu: "15m" - memory: "150Mi" + # Butler server uses around 200MB idle at startup, but under dynamic usage + # Python seems to want to hold onto another couple hundred megabytes of + # heap. + memory: "0.5Gi" # -- Node selection rules for the butler deployment pod nodeSelector: {} From 63a694db2e23b0f3be27dfac25480fae52c2ea5e Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Thu, 29 Aug 2024 16:19:57 -0700 Subject: [PATCH 008/567] BTS: Change Kafka broker message timestamp type. 
--- applications/sasquatch/values-base.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 257afa096b..0eb4939051 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -22,6 +22,7 @@ strimzi-kafka: config: auto.create.topics.enable: false log.cleaner.min.compaction.lag.ms: 259200000 + log.message.timestamp.type: LogAppendTime log.retention.hours: 72 log.retention.ms: 259200000 storage: From 5a13c5100f4391e6c26b4ea634fd03052799f570 Mon Sep 17 00:00:00 2001 From: Fritz Mueller Date: Fri, 23 Aug 2024 09:09:33 -0700 Subject: [PATCH 009/567] cm-service: remove redis --- applications/cm-service/Chart.yaml | 5 ----- applications/cm-service/README.md | 2 -- applications/cm-service/secrets.yaml | 14 +++----------- applications/cm-service/templates/deployment.yaml | 7 ------- .../cm-service/templates/vault-secrets.yaml | 13 ------------- .../cm-service/templates/worker-deployment.yaml | 8 -------- applications/cm-service/values.yaml | 9 --------- 7 files changed, 3 insertions(+), 55 deletions(-) diff --git a/applications/cm-service/Chart.yaml b/applications/cm-service/Chart.yaml index ad1b2ef5a1..ede07c7126 100644 --- a/applications/cm-service/Chart.yaml +++ b/applications/cm-service/Chart.yaml @@ -6,8 +6,3 @@ sources: - https://github.com/lsst-dm/cm-service type: application version: 1.0.0 - -dependencies: -- name: redis - version: 1.0.13 - repository: https://lsst-sqre.github.io/charts/ diff --git a/applications/cm-service/README.md b/applications/cm-service/README.md index 88b43969e7..9f720ea79d 100644 --- a/applications/cm-service/README.md +++ b/applications/cm-service/README.md @@ -30,8 +30,6 @@ Campaign Management for Rubin Data Release Production | image.repository | string | `"ghcr.io/lsst-dm/cm-service"` | Image to use for frontend containers | | image.tag | string | The appVersion of the chart | Tag of frontend image to use | | 
ingress.annotations | object | `{}` | Additional annotations for the frontend ingress rule | -| redis.config.secretKey | string | `"password"` | Key inside secret from which to get the Redis password (do not change) | -| redis.config.secretName | string | `"redis-secret"` | Name of secret containing Redis password | | worker.affinity | object | `{}` | Affinity rules for the worker pods | | worker.htcondor.config.contents | string | `nil` | If specified, contents of htcondor config file to be injected into worker containers | | worker.htcondor.config.mountPath | string | `nil` | If specified, location for htcondor config file to be injected into worker containers | diff --git a/applications/cm-service/secrets.yaml b/applications/cm-service/secrets.yaml index 414e59c34f..681ae4c8a3 100644 --- a/applications/cm-service/secrets.yaml +++ b/applications/cm-service/secrets.yaml @@ -1,16 +1,8 @@ -redis-password: - description: >- - Password used to authenticate cm-service to its internal Redis server, - deployed as part of the same Argo CD application. This secret can be - changed at any time, but both the Redis server and the cm-service - deployments will then have to be restarted to pick up the new value. - generate: - type: password postgres-password: description: >- Password used to authenticate cm-service to its internal cnpg Postgres - server, deployed as part of the same Argo CD application. This secret can - be changed at any time, but both the Redis server and the cm-service - deployments will then have to be restarted to pick up the new value. + server, deployed as part of the same Argo CD application. This secret can be + changed at any time, but the cm-service deployments will then have to be + restarted to pick up the new value. 
generate: type: password diff --git a/applications/cm-service/templates/deployment.yaml b/applications/cm-service/templates/deployment.yaml index b50e8277eb..bfe3b38d8a 100644 --- a/applications/cm-service/templates/deployment.yaml +++ b/applications/cm-service/templates/deployment.yaml @@ -28,11 +28,6 @@ spec: containers: - name: "cm-service" env: - - name: CM_ARQ_REDIS_PASSWORD - valueFrom: - secretKeyRef: - name: redis-secret - key: password - name: CM_DATABASE_PASSWORD valueFrom: secretKeyRef: @@ -48,8 +43,6 @@ spec: value: {{ .Values.config.logProfile | quote }} - name: CM_LOG_LEVEL value: {{ .Values.config.logLevel | quote }} - - name: CM_ARQ_REDIS_URL - value: "redis://cm-service-redis/1" image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy }} ports: diff --git a/applications/cm-service/templates/vault-secrets.yaml b/applications/cm-service/templates/vault-secrets.yaml index 26f72b46e6..996a6617d8 100644 --- a/applications/cm-service/templates/vault-secrets.yaml +++ b/applications/cm-service/templates/vault-secrets.yaml @@ -1,18 +1,5 @@ apiVersion: ricoberger.de/v1alpha1 kind: VaultSecret -metadata: - name: redis-secret - labels: - {{- include "cm-service.labels" . 
| nindent 4 }} -spec: - path: "{{ .Values.global.vaultSecretsPath }}/cm-service" - templates: - password: >- - {% index .Secrets "redis-password" %} - type: Opaque ---- -apiVersion: ricoberger.de/v1alpha1 -kind: VaultSecret metadata: name: postgres-secret labels: diff --git a/applications/cm-service/templates/worker-deployment.yaml b/applications/cm-service/templates/worker-deployment.yaml index e0bce6f8c1..3218cd651d 100644 --- a/applications/cm-service/templates/worker-deployment.yaml +++ b/applications/cm-service/templates/worker-deployment.yaml @@ -27,14 +27,6 @@ spec: automountServiceAccountToken: false containers: - name: "cm-service-worker" - env: - - name: CM_ARQ_REDIS_PASSWORD - valueFrom: - secretKeyRef: - name: redis-secret - key: password - - name: CM_ARQ_REDIS_URL - value: "redis://cm-service-redis/1" image: "{{ .Values.worker.image.repository }}:{{ .Values.worker.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy }} resources: diff --git a/applications/cm-service/values.yaml b/applications/cm-service/values.yaml index 363b8a4e25..ae260853f4 100644 --- a/applications/cm-service/values.yaml +++ b/applications/cm-service/values.yaml @@ -129,15 +129,6 @@ worker: # -- If specified, location for htcondor schedd address file to be injected into worker pods contents: null -redis: - config: - # -- Name of secret containing Redis password - secretName: "redis-secret" - - # -- Key inside secret from which to get the Redis password (do not - # change) - secretKey: "password" - # The following will be set by parameters injected by Argo CD and should not # be set in the individual environment values files. global: From c46d2021dabd92e7979ce20f1b8873558edba712 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 30 Aug 2024 08:28:24 -0700 Subject: [PATCH 010/567] BTS: Increase M1M3 LOVE producer memory resources. 
--- applications/love/values-base.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/applications/love/values-base.yaml b/applications/love/values-base.yaml index 3742727392..fecc6d4326 100644 --- a/applications/love/values-base.yaml +++ b/applications/love/values-base.yaml @@ -296,6 +296,13 @@ love-producer: csc: MTDomeTrajectory:0 --log-level 10 - name: mtm1m3 csc: MTM1M3:0 --log-level 10 + resources: + requests: + cpu: 10m + memory: 200Mi + limits: + cpu: 100m + memory: 600Mi - name: mtm2 csc: MTM2:0 --log-level 10 - name: mtmount From 89b5250286f5fb3a82b7b3f7ba249dd9428615fa Mon Sep 17 00:00:00 2001 From: A I Date: Fri, 30 Aug 2024 17:15:31 +0100 Subject: [PATCH 011/567] enabled ssotap --- environments/values-roe.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/environments/values-roe.yaml b/environments/values-roe.yaml index 8759014a8d..444f3bd295 100644 --- a/environments/values-roe.yaml +++ b/environments/values-roe.yaml @@ -11,3 +11,4 @@ applications: postgres: true squareone: true tap: true + ssotap: true From 003ac4ef7e9c67610a16d7e78f47fb0f62ce640a Mon Sep 17 00:00:00 2001 From: Fritz Mueller Date: Fri, 30 Aug 2024 09:48:54 -0700 Subject: [PATCH 012/567] cm-service: upgrade to v0.1.1 --- applications/cm-service/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/cm-service/Chart.yaml b/applications/cm-service/Chart.yaml index ede07c7126..9ea2b7b9f1 100644 --- a/applications/cm-service/Chart.yaml +++ b/applications/cm-service/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: 0.1.0 +appVersion: 0.1.1 description: Campaign Management for Rubin Data Release Production name: cm-service sources: From da60b21b885b4d074f0f82e2e241d28b5d8679f7 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Thu, 22 Aug 2024 10:22:50 -0700 Subject: [PATCH 013/567] Add config for raw microservice to Prompt Processing. The microservice provides an optional way to identify previously downloaded raw images. 
It is only expected to be deployed for Rubin cameras. --- applications/prompt-proto-service-hsc-gpu/README.md | 1 + applications/prompt-proto-service-hsc-gpu/values.yaml | 4 ++++ applications/prompt-proto-service-hsc/README.md | 1 + applications/prompt-proto-service-hsc/values.yaml | 4 ++++ applications/prompt-proto-service-latiss/README.md | 1 + applications/prompt-proto-service-latiss/values.yaml | 4 ++++ applications/prompt-proto-service-lsstcam/README.md | 1 + applications/prompt-proto-service-lsstcam/values.yaml | 4 ++++ applications/prompt-proto-service-lsstcomcam/README.md | 1 + applications/prompt-proto-service-lsstcomcam/values.yaml | 4 ++++ applications/prompt-proto-service-lsstcomcamsim/README.md | 1 + applications/prompt-proto-service-lsstcomcamsim/values.yaml | 4 ++++ charts/prompt-proto-service/README.md | 1 + .../prompt-proto-service/templates/prompt-proto-service.yaml | 2 ++ charts/prompt-proto-service/values.yaml | 4 ++++ 15 files changed, 37 insertions(+) diff --git a/applications/prompt-proto-service-hsc-gpu/README.md b/applications/prompt-proto-service-hsc-gpu/README.md index e244bf3f12..b97ddaa42a 100644 --- a/applications/prompt-proto-service-hsc-gpu/README.md +++ b/applications/prompt-proto-service-hsc-gpu/README.md @@ -45,6 +45,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). 
| | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | +| prompt-proto-service.raw_microservice | string | `""` | The URI to a microservice that maps image metadata to a file location. If empty, Prompt Processing does not use a microservice. | | prompt-proto-service.registry.centralRepoFile | bool | `false` | If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. | | prompt-proto-service.s3.auth_env | bool | `true` | If set, get S3 credentials from this application's Vault secret. | | prompt-proto-service.s3.disableBucketValidation | int | `0` | Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. | diff --git a/applications/prompt-proto-service-hsc-gpu/values.yaml b/applications/prompt-proto-service-hsc-gpu/values.yaml index 46c7db1e09..b8cc85249d 100644 --- a/applications/prompt-proto-service-hsc-gpu/values.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values.yaml @@ -73,6 +73,10 @@ prompt-proto-service: # -- Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. disableBucketValidation: 0 + # -- The URI to a microservice that maps image metadata to a file location. + # If empty, Prompt Processing does not use a microservice. + raw_microservice: "" + imageNotifications: # -- Hostname and port of the Kafka provider # @default -- None, must be set diff --git a/applications/prompt-proto-service-hsc/README.md b/applications/prompt-proto-service-hsc/README.md index 3c10244ada..1d6c810a2c 100644 --- a/applications/prompt-proto-service-hsc/README.md +++ b/applications/prompt-proto-service-hsc/README.md @@ -45,6 +45,7 @@ Prompt Proto Service is an event driven service for processing camera images. 
Th | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | +| prompt-proto-service.raw_microservice | string | `""` | The URI to a microservice that maps image metadata to a file location. If empty, Prompt Processing does not use a microservice. | | prompt-proto-service.registry.centralRepoFile | bool | `false` | If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. | | prompt-proto-service.s3.auth_env | bool | `true` | If set, get S3 credentials from this application's Vault secret. | | prompt-proto-service.s3.disableBucketValidation | int | `0` | Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. | diff --git a/applications/prompt-proto-service-hsc/values.yaml b/applications/prompt-proto-service-hsc/values.yaml index 931f3525b9..3f4b799c67 100644 --- a/applications/prompt-proto-service-hsc/values.yaml +++ b/applications/prompt-proto-service-hsc/values.yaml @@ -73,6 +73,10 @@ prompt-proto-service: # -- Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. disableBucketValidation: 0 + # -- The URI to a microservice that maps image metadata to a file location. 
+ # If empty, Prompt Processing does not use a microservice. + raw_microservice: "" + imageNotifications: # -- Hostname and port of the Kafka provider # @default -- None, must be set diff --git a/applications/prompt-proto-service-latiss/README.md b/applications/prompt-proto-service-latiss/README.md index 605ee8a88e..17da7029ab 100644 --- a/applications/prompt-proto-service-latiss/README.md +++ b/applications/prompt-proto-service-latiss/README.md @@ -45,6 +45,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | +| prompt-proto-service.raw_microservice | string | `""` | The URI to a microservice that maps image metadata to a file location. If empty, Prompt Processing does not use a microservice. | | prompt-proto-service.registry.centralRepoFile | bool | `false` | If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. | | prompt-proto-service.s3.auth_env | bool | `true` | If set, get S3 credentials from this application's Vault secret. 
| | prompt-proto-service.s3.disableBucketValidation | string | `"0"` | Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. | diff --git a/applications/prompt-proto-service-latiss/values.yaml b/applications/prompt-proto-service-latiss/values.yaml index 410e4e5225..9768a1c05d 100644 --- a/applications/prompt-proto-service-latiss/values.yaml +++ b/applications/prompt-proto-service-latiss/values.yaml @@ -73,6 +73,10 @@ prompt-proto-service: # -- Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. disableBucketValidation: '0' + # -- The URI to a microservice that maps image metadata to a file location. + # If empty, Prompt Processing does not use a microservice. + raw_microservice: "" + imageNotifications: # -- Hostname and port of the Kafka provider # @default -- None, must be set diff --git a/applications/prompt-proto-service-lsstcam/README.md b/applications/prompt-proto-service-lsstcam/README.md index abdafa6f39..20834485da 100644 --- a/applications/prompt-proto-service-lsstcam/README.md +++ b/applications/prompt-proto-service-lsstcam/README.md @@ -45,6 +45,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. 
| Annotations for the prompt-proto-service pod | +| prompt-proto-service.raw_microservice | string | `""` | The URI to a microservice that maps image metadata to a file location. If empty, Prompt Processing does not use a microservice. | | prompt-proto-service.registry.centralRepoFile | bool | `false` | If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. | | prompt-proto-service.s3.auth_env | bool | `true` | If set, get S3 credentials from this application's Vault secret. | | prompt-proto-service.s3.disableBucketValidation | int | `0` | Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. | diff --git a/applications/prompt-proto-service-lsstcam/values.yaml b/applications/prompt-proto-service-lsstcam/values.yaml index 6304b41272..6221360a93 100644 --- a/applications/prompt-proto-service-lsstcam/values.yaml +++ b/applications/prompt-proto-service-lsstcam/values.yaml @@ -73,6 +73,10 @@ prompt-proto-service: # -- Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. disableBucketValidation: 0 + # -- The URI to a microservice that maps image metadata to a file location. + # If empty, Prompt Processing does not use a microservice. + raw_microservice: "" + imageNotifications: # -- Hostname and port of the Kafka provider # @default -- None, must be set diff --git a/applications/prompt-proto-service-lsstcomcam/README.md b/applications/prompt-proto-service-lsstcomcam/README.md index 13cf5a2017..ca625a5b66 100644 --- a/applications/prompt-proto-service-lsstcomcam/README.md +++ b/applications/prompt-proto-service-lsstcomcam/README.md @@ -45,6 +45,7 @@ Prompt Proto Service is an event driven service for processing camera images. 
Th | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | +| prompt-proto-service.raw_microservice | string | `""` | The URI to a microservice that maps image metadata to a file location. If empty, Prompt Processing does not use a microservice. | | prompt-proto-service.registry.centralRepoFile | bool | `false` | If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. | | prompt-proto-service.s3.auth_env | bool | `true` | If set, get S3 credentials from this application's Vault secret. | | prompt-proto-service.s3.disableBucketValidation | int | `0` | Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. | diff --git a/applications/prompt-proto-service-lsstcomcam/values.yaml b/applications/prompt-proto-service-lsstcomcam/values.yaml index c4f253abe0..67fc0978a9 100644 --- a/applications/prompt-proto-service-lsstcomcam/values.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values.yaml @@ -73,6 +73,10 @@ prompt-proto-service: # -- Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. 
disableBucketValidation: 0 + # -- The URI to a microservice that maps image metadata to a file location. + # If empty, Prompt Processing does not use a microservice. + raw_microservice: "" + imageNotifications: # -- Hostname and port of the Kafka provider # @default -- None, must be set diff --git a/applications/prompt-proto-service-lsstcomcamsim/README.md b/applications/prompt-proto-service-lsstcomcamsim/README.md index ad995209a9..55d6b814c6 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/README.md +++ b/applications/prompt-proto-service-lsstcomcamsim/README.md @@ -45,6 +45,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | +| prompt-proto-service.raw_microservice | string | `""` | The URI to a microservice that maps image metadata to a file location. If empty, Prompt Processing does not use a microservice. | | prompt-proto-service.registry.centralRepoFile | bool | `false` | If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. | | prompt-proto-service.s3.auth_env | bool | `true` | If set, get S3 credentials from this application's Vault secret. 
| | prompt-proto-service.s3.disableBucketValidation | int | `0` | Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. | diff --git a/applications/prompt-proto-service-lsstcomcamsim/values.yaml b/applications/prompt-proto-service-lsstcomcamsim/values.yaml index 1cab015e99..47815b63cf 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values.yaml @@ -73,6 +73,10 @@ prompt-proto-service: # -- Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. disableBucketValidation: 0 + # -- The URI to a microservice that maps image metadata to a file location. + # If empty, Prompt Processing does not use a microservice. + raw_microservice: "" + imageNotifications: # -- Hostname and port of the Kafka provider # @default -- None, must be set diff --git a/charts/prompt-proto-service/README.md b/charts/prompt-proto-service/README.md index b046a4e24b..5f3f2efadb 100644 --- a/charts/prompt-proto-service/README.md +++ b/charts/prompt-proto-service/README.md @@ -51,6 +51,7 @@ Event-driven processing of camera images | nameOverride | string | `""` | Override the base name for resources | | nodeSelector | object | `{}` | | | podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | +| raw_microservice | string | `""` | The URI to a microservice that maps image metadata to a file location. If empty, Prompt Processing does not use a microservice. | | registry.centralRepoFile | bool | `false` | If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. | | s3.auth_env | bool | `true` | If set, get S3 credentials from this application's Vault secret. 
| | s3.disableBucketValidation | int | `0` | Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. | diff --git a/charts/prompt-proto-service/templates/prompt-proto-service.yaml b/charts/prompt-proto-service/templates/prompt-proto-service.yaml index f6572190d2..690c5b34aa 100644 --- a/charts/prompt-proto-service/templates/prompt-proto-service.yaml +++ b/charts/prompt-proto-service/templates/prompt-proto-service.yaml @@ -70,6 +70,8 @@ spec: value: {{ .Values.apdb.config }} - name: KAFKA_CLUSTER value: {{ .Values.imageNotifications.kafkaClusterAddress }} + - name: RAW_MICROSERVICE + value: {{ .Values.raw_microservice }} - name: SASQUATCH_URL value: {{ .Values.sasquatch.endpointUrl }} {{- if and .Values.sasquatch.endpointUrl .Values.sasquatch.auth_env }} diff --git a/charts/prompt-proto-service/values.yaml b/charts/prompt-proto-service/values.yaml index 8c4454ee27..7751ab89ea 100644 --- a/charts/prompt-proto-service/values.yaml +++ b/charts/prompt-proto-service/values.yaml @@ -75,6 +75,10 @@ s3: # -- Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. disableBucketValidation: 0 +# -- The URI to a microservice that maps image metadata to a file location. +# If empty, Prompt Processing does not use a microservice. +raw_microservice: "" + imageNotifications: # -- Hostname and port of the Kafka provider # @default -- None, must be set From 65d27fa0f45b4c5e052ff5ccd7c740dd167e96ca Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Thu, 22 Aug 2024 10:28:18 -0700 Subject: [PATCH 014/567] Use raw microservice with LATISS and ComCamSim Prompt Processing. These are the only two instances deployed at present. 
--- .../values-usdfprod-prompt-processing.yaml | 2 ++ .../values-usdfprod-prompt-processing.yaml | 2 ++ 2 files changed, 4 insertions(+) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index dafffeab21..b244df2ebc 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -49,6 +49,8 @@ prompt-proto-service: imageBucket: rubin-summit endpointUrl: https://s3dfrgw.slac.stanford.edu + raw_microservice: http://172.24.5.144:8080/presence + imageNotifications: kafkaClusterAddress: prompt-processing-2-kafka-bootstrap.kafka:9092 topic: rubin-prompt-processing-prod diff --git a/applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml index 9f0fdf75cb..ce3809fb7c 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml @@ -33,6 +33,8 @@ prompt-proto-service: imageBucket: rubin-summit endpointUrl: https://sdfembs3.sdf.slac.stanford.edu + raw_microservice: http://172.24.5.158:8080/presence + imageNotifications: kafkaClusterAddress: prompt-processing-2-kafka-bootstrap.kafka:9092 topic: rubin-summit-notification From e6af09ab3e0945414872041f3449681f1f6918ba Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 30 Aug 2024 15:31:59 -0700 Subject: [PATCH 015/567] Increase default flush_interval - Set flush_interval=10s by default to fix warning ["outputs.influxdb"] did not complete within its flush interval --- .../charts/telegraf-kafka-consumer/templates/configmap.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml index 5e55c1a59e..31d160897f 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml @@ -15,7 +15,7 @@ data: metric_batch_size = {{ default 1000 $value.metric_batch_size }} metric_buffer_limit = {{ default 10000 $value.metric_buffer_limit }} collection_jitter = {{ default "0s" $value.collection_jitter | quote }} - flush_interval = {{ default "1s" $value.flush_interval | quote }} + flush_interval = {{ default "10s" $value.flush_interval | quote }} flush_jitter = {{ default "0s" $value.flush_jitter | quote }} debug = {{ default false $value.debug }} omit_hostname = true From 52e220d0b14851ccbae76823925007de24516595 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 30 Aug 2024 15:33:26 -0700 Subject: [PATCH 016/567] Improve app labels - Use k8s recommended labels, allow to select connector pods using app.kubernetes.io/instance label --- .../telegraf-kafka-consumer/templates/configmap.yaml | 4 +++- .../telegraf-kafka-consumer/templates/deployment.yaml | 8 +++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml index 31d160897f..8e5e0aa29d 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml @@ -6,7 +6,9 @@ kind: ConfigMap metadata: name: sasquatch-telegraf-{{ $key }} labels: - app: sasquatch-telegraf-kakfa-consumer + app.kubernetes.io/name: sasquatch-telegraf + app.kubernetes.io/instance: sasquatch-telegraf-{{ $key }} + app.kubernetes.io/part-of: sasquatch data: telegraf.conf: |+ [agent] diff 
--git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/deployment.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/deployment.yaml index addd04a6e6..5408f4f93f 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/deployment.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/deployment.yaml @@ -6,16 +6,18 @@ kind: Deployment metadata: name: sasquatch-telegraf-{{ $key }} labels: - app: sasquatch-telegraf-kafka-consumer + app.kubernetes.io/name: sasquatch-telegraf + app.kubernetes.io/instance: sasquatch-telegraf-{{ $key }} + app.kubernetes.io/part-of: sasquatch spec: replicas: {{ default 1 $value.replicaCount }} selector: matchLabels: - app: sasquatch-telegraf-kafka-consumer + app.kubernetes.io/instance: sasquatch-telegraf-{{ $key }} template: metadata: labels: - app: sasquatch-telegraf-kafka-consumer + app.kubernetes.io/instance: sasquatch-telegraf-{{ $key }} {{- if $.Values.podAnnotations }} annotations: {{- toYaml $.Values.podAnnotations | nindent 8 }} From 3d63c92d890deef5fbfa8a6f4106b692e203fe42 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 30 Aug 2024 11:47:05 -0700 Subject: [PATCH 017/567] Remove kafka-connect-manager on BTS - These connectors will be replaced by the Telegraf based connectors --- applications/sasquatch/values-base.yaml | 50 ------------------------- 1 file changed, 50 deletions(-) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 0eb4939051..a60f95113f 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -131,56 +131,6 @@ influxdb: enabled: true hostname: base-lsp.lsst.codes -kafka-connect-manager: - influxdbSink: - # Based on the kafka producers configuration for the BTS - # https://github.com/lsst-ts/argocd-csc/blob/main/apps/kafka-producers/values-base-teststand.yaml - connectors: - auxtel: - enabled: true - topicsRegex: 
"lsst.sal.ATAOS|lsst.sal.ATDome|lsst.sal.ATDomeTrajectory|lsst.sal.ATHexapod|lsst.sal.ATPneumatics|lsst.sal.ATPtg|lsst.sal.ATMCS" - maintel: - enabled: true - topicsRegex: "lsst.sal.MTAOS|lsst.sal.MTDome|lsst.sal.MTDomeTrajectory|lsst.sal.MTPtg" - mtmount: - enabled: true - topicsRegex: "lsst.sal.MTMount" - tasksMax: "8" - eas: - enabled: true - topicsRegex: "lsst.sal.DIMM|lsst.sal.DSM|lsst.sal.EPM|lsst.sal.ESS|lsst.sal.HVAC|lsst.sal.WeatherForecast" - latiss: - enabled: true - topicsRegex: "lsst.sal.ATCamera|lsst.sal.ATHeaderService|lsst.sal.ATOODS|lsst.sal.ATSpectrograph" - m1m3: - enabled: true - topicsRegex: "lsst.sal.MTM1M3" - tasksMax: "8" - m2: - enabled: true - topicsRegex: "lsst.sal.MTHexapod|lsst.sal.MTM2|lsst.sal.MTRotator" - obssys: - enabled: true - topicsRegex: "lsst.sal.Scheduler|lsst.sal.Script|lsst.sal.ScriptQueue|lsst.sal.Watcher" - ocps: - enabled: true - topicsRegex: "lsst.sal.OCPS" - test: - enabled: true - topicsRegex: "lsst.sal.Test" - mtaircompressor: - enabled: true - topicsRegex: "lsst.sal.MTAirCompressor" - lasertracker: - enabled: true - topicsRegex: "lsst.sal.LaserTracker" - genericcamera: - enabled: true - topicsRegex: "lsst.sal.GCHeaderService|lsst.sal.GenericCamera" - lsstcam: - enabled: true - topicsRegex: "lsst.sal.MTCamera|lsst.sal.MTHeaderService|lsst.sal.MTOODS" - telegraf-kafka-consumer: enabled: false kafkaConsumers: From f2454340e0e10c7f9f1e59db2f7c68a6eece6f6d Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 30 Aug 2024 12:00:59 -0700 Subject: [PATCH 018/567] Review telegraf-based connector configuration at base - Enable telegraf-based connectors - Most of the parameters are now default, so we don't need to repeat them in the configuration - Run 8 connectors "tasks" for M1M3 and MTMount CSCs --- applications/sasquatch/values-base.yaml | 102 ++++-------------------- 1 file changed, 17 insertions(+), 85 deletions(-) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 
a60f95113f..f818146427 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -132,146 +132,78 @@ influxdb: hostname: base-lsp.lsst.codes telegraf-kafka-consumer: - enabled: false + enabled: true kafkaConsumers: auxtel: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] maintel: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.MTAOS", "lsst.sal.MTDome", "lsst.sal.MTDomeTrajectory", "lsst.sal.MTPtg" ] mtmount: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" + replicaCount: 8 topicRegexps: | [ "lsst.sal.MTMount" ] eas: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] latiss: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.ATCamera", "lsst.sal.ATHeaderService", "lsst.sal.ATOODS", "lsst.sal.ATSpectrograph" ] m1m3: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" + replicaCount: 8 topicRegexps: | [ 
"lsst.sal.MTM1M3" ] m2: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.MTHexapod", "lsst.sal.MTM2", "lsst.sal.MTRotator" ] obssys: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.Scheduler", "lsst.sal.Script", "lsst.sal.ScriptQueue", "lsst.sal.Watcher" ] ocps: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.OCPS" ] test: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.Test" ] mtaircompressor: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.MTAirCompressor" ] lasertracker: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.LaserTracker" ] genericcamera: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.GCHeaderService", "lsst.sal.GenericCamera" ] lsstcam: enabled: true - replicaCount: 1 - interval: "1s" - flush_interval: "1s" - union_mode: "nullable" - timestamp_format: "unix" - timestamp_field: "private_efdStamp" + database: "efd" topicRegexps: | [ "lsst.sal.MTCamera", 
"lsst.sal.MTHeaderService", "lsst.sal.MTOODS" ] From 602825f7f885dfd019f514d2d2b17169db0d2be7 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 09:44:28 +0000 Subject: [PATCH 019/567] Update Helm release argo-workflows to v0.42.1 --- applications/argo-workflows/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argo-workflows/Chart.yaml b/applications/argo-workflows/Chart.yaml index aa76df123b..285c51625d 100644 --- a/applications/argo-workflows/Chart.yaml +++ b/applications/argo-workflows/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-workflows - version: 0.42.0 + version: 0.42.1 repository: https://argoproj.github.io/argo-helm From 232f6a82fbfae619d0af6396bc4ba15a5a35eeb3 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 18:10:59 +0000 Subject: [PATCH 020/567] Update Helm release argo-cd to v7.5.2 --- applications/argocd/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argocd/Chart.yaml b/applications/argocd/Chart.yaml index 2956e31a97..81ca4bd156 100644 --- a/applications/argocd/Chart.yaml +++ b/applications/argocd/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-cd - version: 7.4.5 + version: 7.5.2 repository: https://argoproj.github.io/argo-helm From ec653e7527c3e80abfb58403c1b1c3372f26f7bc Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Fri, 31 May 2024 16:04:32 -0700 Subject: [PATCH 021/567] Add ir2 repo to idfdev Butler server We want to use the ir2 repo to test the "hybrid model" where the Butler's database is at Google but the data is at USDF. Added a configuration option to Butler to allow it to use two different sets of S3 credentials simultaneously. Added the ir2 repo to data-dev. 
--- applications/butler/README.md | 1 + applications/butler/secrets.yaml | 6 ++++++ applications/butler/templates/deployment.yaml | 7 +++++++ applications/butler/values-idfdev.yaml | 1 + applications/butler/values.yaml | 5 +++++ 5 files changed, 20 insertions(+) diff --git a/applications/butler/README.md b/applications/butler/README.md index ac80574a22..73d7c17812 100644 --- a/applications/butler/README.md +++ b/applications/butler/README.md @@ -15,6 +15,7 @@ Server for Butler data abstraction service | autoscaling.maxReplicas | int | `100` | Maximum number of butler deployment pods | | autoscaling.minReplicas | int | `1` | Minimum number of butler deployment pods | | autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization of butler deployment pods | +| config.additionalS3ProfileName | string | No second S3 profile is available. | Profile name identifying a second S3 endpoint and set of credentials to use for accessing files in the datastore. | | config.dp02PostgresUri | string | No configuration file for DP02 will be generated. | Postgres connection string pointing to the registry database hosting Data Preview 0.2 data. | | config.pathPrefix | string | `"/api/butler"` | The prefix of the path portion of the URL where the Butler service will be exposed. For example, if the service should be exposed at `https://data.lsst.cloud/api/butler`, this should be set to `/api/butler` | | config.pguser | string | Use values specified in per-repository Butler config files. | Postgres username used to connect to the Butler DB | diff --git a/applications/butler/secrets.yaml b/applications/butler/secrets.yaml index 1b2d88511e..96fc6efe1f 100644 --- a/applications/butler/secrets.yaml +++ b/applications/butler/secrets.yaml @@ -18,3 +18,9 @@ copy: application: nublado key: "postgres-credentials.txt" +"additional-s3-profile": + description: >- + Credentials and endpoint for a second S3 profile to use, in addition to the + default endpoint. 
For docs on format see + https://github.com/lsst/resources/blob/a34598e125919799d3db4bd8a2363087c3de434e/python/lsst/resources/s3utils.py#L201 + if: additionalS3ProfileName diff --git a/applications/butler/templates/deployment.yaml b/applications/butler/templates/deployment.yaml index 9ba64a4257..c7e3f06b4c 100644 --- a/applications/butler/templates/deployment.yaml +++ b/applications/butler/templates/deployment.yaml @@ -65,6 +65,13 @@ spec: - name: PGUSER value: {{ .Values.config.pguser | quote }} {{ end }} + {{ if .Values.config.additionalS3ProfileName }} + - name: LSST_RESOURCES_S3_PROFILE_{{ .Values.config.additionalS3ProfileName }} + valueFrom: + secretKeyRef: + name: {{ include "butler.fullname" . }} + key: additional-s3-profile + {{ end }} volumeMounts: - name: "butler-secrets" mountPath: "/opt/lsst/butler/secrets" diff --git a/applications/butler/values-idfdev.yaml b/applications/butler/values-idfdev.yaml index 08c73c983f..90d8a9db26 100644 --- a/applications/butler/values-idfdev.yaml +++ b/applications/butler/values-idfdev.yaml @@ -6,3 +6,4 @@ config: s3EndpointUrl: "https://storage.googleapis.com" repositories: dp02: "file:///opt/lsst/butler/config/dp02.yaml" + ir2: "s3://butler-us-central1-panda-dev/ir2/butler-ir2.yaml" diff --git a/applications/butler/values.yaml b/applications/butler/values.yaml index 81264a34cf..18086ea1c0 100644 --- a/applications/butler/values.yaml +++ b/applications/butler/values.yaml @@ -92,6 +92,11 @@ config: # -- URL for the S3 service where files for datasets are stored by Butler. s3EndpointUrl: "" + # -- Profile name identifying a second S3 endpoint and set of credentials + # to use for accessing files in the datastore. + # @default -- No second S3 profile is available. + additionalS3ProfileName: "" + # -- The prefix of the path portion of the URL where the Butler service will # be exposed. 
For example, if the service should be exposed at # `https://data.lsst.cloud/api/butler`, this should be set to `/api/butler` From c7aaad3b94656d01191ab5cec5e6add59e1459f0 Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Fri, 31 May 2024 16:24:32 -0700 Subject: [PATCH 022/567] Add conditional secret for Butler idfdev --- applications/butler/secrets.yaml | 2 +- applications/butler/values-idfdev.yaml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/applications/butler/secrets.yaml b/applications/butler/secrets.yaml index 96fc6efe1f..23ee59d217 100644 --- a/applications/butler/secrets.yaml +++ b/applications/butler/secrets.yaml @@ -23,4 +23,4 @@ Credentials and endpoint for a second S3 profile to use, in addition to the default endpoint. For docs on format see https://github.com/lsst/resources/blob/a34598e125919799d3db4bd8a2363087c3de434e/python/lsst/resources/s3utils.py#L201 - if: additionalS3ProfileName + if: config.additionalS3ProfileName diff --git a/applications/butler/values-idfdev.yaml b/applications/butler/values-idfdev.yaml index 90d8a9db26..e70e31b433 100644 --- a/applications/butler/values-idfdev.yaml +++ b/applications/butler/values-idfdev.yaml @@ -4,6 +4,7 @@ image: config: dp02PostgresUri: postgresql://postgres@sqlproxy-butler-int.sqlproxy-cross-project:5432/dp02 s3EndpointUrl: "https://storage.googleapis.com" + additionalS3ProfileName: "ir2" repositories: dp02: "file:///opt/lsst/butler/config/dp02.yaml" ir2: "s3://butler-us-central1-panda-dev/ir2/butler-ir2.yaml" From af6920938282cd5c57e684f1daf75477a21fa688 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 3 Sep 2024 18:38:15 -0700 Subject: [PATCH 023/567] Update dependencies --- .pre-commit-config.yaml | 4 +- requirements/dev.txt | 438 ++++++++++++++++++++-------------------- requirements/main.txt | 94 +++++---- requirements/tox.txt | 44 ++-- 4 files changed, 294 insertions(+), 286 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 
772024b059..3c7869cff9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,7 +14,7 @@ repos: - -c=.yamllint.yml - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.29.1 + rev: 0.29.2 hooks: - id: check-jsonschema files: ^applications/.*/secrets(-[^./-]+)?\.yaml @@ -46,7 +46,7 @@ repos: - --template-files=../helm-docs.md.gotmpl - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.7 + rev: v0.6.3 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/requirements/dev.txt b/requirements/dev.txt index d1d038b509..8658c4de3d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -10,6 +10,10 @@ annotated-types==0.7.0 \ # via # -c requirements/main.txt # pydantic +appnope==0.1.4 \ + --hash=sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee \ + --hash=sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c + # via ipykernel asttokens==2.4.1 \ --hash=sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24 \ --hash=sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0 @@ -32,9 +36,9 @@ beautifulsoup4==4.12.3 \ --hash=sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051 \ --hash=sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed # via pydata-sphinx-theme -certifi==2024.7.4 \ - --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ - --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 # via # -c requirements/main.txt # requests @@ -270,9 +274,9 @@ docutils==0.21.2 \ # sphinx-jinja # sphinx-prompt # sphinxcontrib-bibtex -executing==2.0.1 \ - --hash=sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147 \ - 
--hash=sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc +executing==2.1.0 \ + --hash=sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf \ + --hash=sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab # via stack-data fastjsonschema==2.20.0 \ --hash=sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23 \ @@ -353,12 +357,10 @@ greenlet==3.0.3 \ --hash=sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf \ --hash=sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da \ --hash=sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33 - # via - # -r requirements/dev.in - # sqlalchemy -idna==3.7 \ - --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ - --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 + # via -r requirements/dev.in +idna==3.8 \ + --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ + --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 # via # -c requirements/main.txt # requests @@ -367,9 +369,9 @@ imagesize==1.4.1 \ --hash=sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b \ --hash=sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a # via sphinx -importlib-metadata==8.2.0 \ - --hash=sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369 \ - --hash=sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d +importlib-metadata==8.4.0 \ + --hash=sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 \ + --hash=sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5 # via # jupyter-cache # myst-nb @@ -381,9 +383,9 @@ ipykernel==6.29.5 \ --hash=sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5 \ --hash=sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215 # via myst-nb 
-ipython==8.26.0 \ - --hash=sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c \ - --hash=sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff +ipython==8.27.0 \ + --hash=sha256:0b99a2dc9f15fd68692e898e5568725c6d49c527d36a9fb5960ffbdeaa82ff7e \ + --hash=sha256:f68b3cb8bde357a5d7adc9598d57e22a45dfbea19eb6b98286fa3b288c9cd55c # via # ipykernel # myst-nb @@ -522,34 +524,34 @@ mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -mypy==1.11.1 \ - --hash=sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54 \ - --hash=sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a \ - --hash=sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72 \ - --hash=sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69 \ - --hash=sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b \ - --hash=sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe \ - --hash=sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4 \ - --hash=sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd \ - --hash=sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0 \ - --hash=sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525 \ - --hash=sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2 \ - --hash=sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c \ - --hash=sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5 \ - --hash=sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de \ - --hash=sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74 \ - --hash=sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c \ - 
--hash=sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e \ - --hash=sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58 \ - --hash=sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b \ - --hash=sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417 \ - --hash=sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411 \ - --hash=sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb \ - --hash=sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03 \ - --hash=sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca \ - --hash=sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8 \ - --hash=sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08 \ - --hash=sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809 +mypy==1.11.2 \ + --hash=sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36 \ + --hash=sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce \ + --hash=sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6 \ + --hash=sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b \ + --hash=sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca \ + --hash=sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24 \ + --hash=sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383 \ + --hash=sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7 \ + --hash=sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86 \ + --hash=sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d \ + --hash=sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4 \ + --hash=sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8 \ + 
--hash=sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987 \ + --hash=sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385 \ + --hash=sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79 \ + --hash=sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef \ + --hash=sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6 \ + --hash=sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70 \ + --hash=sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca \ + --hash=sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70 \ + --hash=sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12 \ + --hash=sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104 \ + --hash=sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a \ + --hash=sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318 \ + --hash=sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1 \ + --hash=sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b \ + --hash=sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d # via -r requirements/dev.in mypy-extensions==1.0.0 \ --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ @@ -855,116 +857,116 @@ pyyaml==6.0.2 \ # myst-parser # pybtex # sphinxcontrib-redoc -pyzmq==26.1.0 \ - --hash=sha256:038ae4ffb63e3991f386e7fda85a9baab7d6617fe85b74a8f9cab190d73adb2b \ - --hash=sha256:05bacc4f94af468cc82808ae3293390278d5f3375bb20fef21e2034bb9a505b6 \ - --hash=sha256:0614aed6f87d550b5cecb03d795f4ddbb1544b78d02a4bd5eecf644ec98a39f6 \ - --hash=sha256:08f74904cb066e1178c1ec706dfdb5c6c680cd7a8ed9efebeac923d84c1f13b1 \ - --hash=sha256:093a1a3cae2496233f14b57f4b485da01b4ff764582c854c0f42c6dd2be37f3d \ - --hash=sha256:0a1f6ea5b1d6cdbb8cfa0536f0d470f12b4b41ad83625012e575f0e3ecfe97f0 \ - 
--hash=sha256:0e6cea102ffa16b737d11932c426f1dc14b5938cf7bc12e17269559c458ac334 \ - --hash=sha256:263cf1e36862310bf5becfbc488e18d5d698941858860c5a8c079d1511b3b18e \ - --hash=sha256:28a8b2abb76042f5fd7bd720f7fea48c0fd3e82e9de0a1bf2c0de3812ce44a42 \ - --hash=sha256:2ae7c57e22ad881af78075e0cea10a4c778e67234adc65c404391b417a4dda83 \ - --hash=sha256:2cd0f4d314f4a2518e8970b6f299ae18cff7c44d4a1fc06fc713f791c3a9e3ea \ - --hash=sha256:2fa76ebcebe555cce90f16246edc3ad83ab65bb7b3d4ce408cf6bc67740c4f88 \ - --hash=sha256:314d11564c00b77f6224d12eb3ddebe926c301e86b648a1835c5b28176c83eab \ - --hash=sha256:347e84fc88cc4cb646597f6d3a7ea0998f887ee8dc31c08587e9c3fd7b5ccef3 \ - --hash=sha256:359c533bedc62c56415a1f5fcfd8279bc93453afdb0803307375ecf81c962402 \ - --hash=sha256:393daac1bcf81b2a23e696b7b638eedc965e9e3d2112961a072b6cd8179ad2eb \ - --hash=sha256:3b3b8e36fd4c32c0825b4461372949ecd1585d326802b1321f8b6dc1d7e9318c \ - --hash=sha256:3c397b1b450f749a7e974d74c06d69bd22dd362142f370ef2bd32a684d6b480c \ - --hash=sha256:3d3146b1c3dcc8a1539e7cc094700b2be1e605a76f7c8f0979b6d3bde5ad4072 \ - --hash=sha256:3ee647d84b83509b7271457bb428cc347037f437ead4b0b6e43b5eba35fec0aa \ - --hash=sha256:416ac51cabd54f587995c2b05421324700b22e98d3d0aa2cfaec985524d16f1d \ - --hash=sha256:451e16ae8bea3d95649317b463c9f95cd9022641ec884e3d63fc67841ae86dfe \ - --hash=sha256:45cb1a70eb00405ce3893041099655265fabcd9c4e1e50c330026e82257892c1 \ - --hash=sha256:46d6800b45015f96b9d92ece229d92f2aef137d82906577d55fadeb9cf5fcb71 \ - --hash=sha256:471312a7375571857a089342beccc1a63584315188560c7c0da7e0a23afd8a5c \ - --hash=sha256:471880c4c14e5a056a96cd224f5e71211997d40b4bf5e9fdded55dafab1f98f2 \ - --hash=sha256:5384c527a9a004445c5074f1e20db83086c8ff1682a626676229aafd9cf9f7d1 \ - --hash=sha256:57bb2acba798dc3740e913ffadd56b1fcef96f111e66f09e2a8db3050f1f12c8 \ - --hash=sha256:58c33dc0e185dd97a9ac0288b3188d1be12b756eda67490e6ed6a75cf9491d79 \ - --hash=sha256:59d0acd2976e1064f1b398a00e2c3e77ed0a157529779e23087d4c2fb8aaa416 \ - 
--hash=sha256:5a6ed52f0b9bf8dcc64cc82cce0607a3dfed1dbb7e8c6f282adfccc7be9781de \ - --hash=sha256:5bc2431167adc50ba42ea3e5e5f5cd70d93e18ab7b2f95e724dd8e1bd2c38120 \ - --hash=sha256:5cca7b4adb86d7470e0fc96037771981d740f0b4cb99776d5cb59cd0e6684a73 \ - --hash=sha256:61dfa5ee9d7df297c859ac82b1226d8fefaf9c5113dc25c2c00ecad6feeeb04f \ - --hash=sha256:63c1d3a65acb2f9c92dce03c4e1758cc552f1ae5c78d79a44e3bb88d2fa71f3a \ - --hash=sha256:65c6e03cc0222eaf6aad57ff4ecc0a070451e23232bb48db4322cc45602cede0 \ - --hash=sha256:67976d12ebfd61a3bc7d77b71a9589b4d61d0422282596cf58c62c3866916544 \ - --hash=sha256:68a0a1d83d33d8367ddddb3e6bb4afbb0f92bd1dac2c72cd5e5ddc86bdafd3eb \ - --hash=sha256:6c5aeea71f018ebd3b9115c7cb13863dd850e98ca6b9258509de1246461a7e7f \ - --hash=sha256:754c99a9840839375ee251b38ac5964c0f369306eddb56804a073b6efdc0cd88 \ - --hash=sha256:75a95c2358fcfdef3374cb8baf57f1064d73246d55e41683aaffb6cfe6862917 \ - --hash=sha256:7688653574392d2eaeef75ddcd0b2de5b232d8730af29af56c5adf1df9ef8d6f \ - --hash=sha256:77ce6a332c7e362cb59b63f5edf730e83590d0ab4e59c2aa5bd79419a42e3449 \ - --hash=sha256:7907419d150b19962138ecec81a17d4892ea440c184949dc29b358bc730caf69 \ - --hash=sha256:79e45a4096ec8388cdeb04a9fa5e9371583bcb826964d55b8b66cbffe7b33c86 \ - --hash=sha256:7bcbfbab4e1895d58ab7da1b5ce9a327764f0366911ba5b95406c9104bceacb0 \ - --hash=sha256:80b0c9942430d731c786545da6be96d824a41a51742e3e374fedd9018ea43106 \ - --hash=sha256:8b88641384e84a258b740801cd4dbc45c75f148ee674bec3149999adda4a8598 \ - --hash=sha256:8d4dac7d97f15c653a5fedcafa82626bd6cee1450ccdaf84ffed7ea14f2b07a4 \ - --hash=sha256:8d906d43e1592be4b25a587b7d96527cb67277542a5611e8ea9e996182fae410 \ - --hash=sha256:8efb782f5a6c450589dbab4cb0f66f3a9026286333fe8f3a084399149af52f29 \ - --hash=sha256:906e532c814e1d579138177a00ae835cd6becbf104d45ed9093a3aaf658f6a6a \ - --hash=sha256:90d4feb2e83dfe9ace6374a847e98ee9d1246ebadcc0cb765482e272c34e5820 \ - --hash=sha256:911c43a4117915203c4cc8755e0f888e16c4676a82f61caee2f21b0c00e5b894 \ - 
--hash=sha256:91d1a20bdaf3b25f3173ff44e54b1cfbc05f94c9e8133314eb2962a89e05d6e3 \ - --hash=sha256:94c4262626424683feea0f3c34951d39d49d354722db2745c42aa6bb50ecd93b \ - --hash=sha256:96d7c1d35ee4a495df56c50c83df7af1c9688cce2e9e0edffdbf50889c167595 \ - --hash=sha256:9869fa984c8670c8ab899a719eb7b516860a29bc26300a84d24d8c1b71eae3ec \ - --hash=sha256:98c03bd7f3339ff47de7ea9ac94a2b34580a8d4df69b50128bb6669e1191a895 \ - --hash=sha256:995301f6740a421afc863a713fe62c0aaf564708d4aa057dfdf0f0f56525294b \ - --hash=sha256:998444debc8816b5d8d15f966e42751032d0f4c55300c48cc337f2b3e4f17d03 \ - --hash=sha256:9a6847c92d9851b59b9f33f968c68e9e441f9a0f8fc972c5580c5cd7cbc6ee24 \ - --hash=sha256:9bdfcb74b469b592972ed881bad57d22e2c0acc89f5e8c146782d0d90fb9f4bf \ - --hash=sha256:9f136a6e964830230912f75b5a116a21fe8e34128dcfd82285aa0ef07cb2c7bd \ - --hash=sha256:a0f0ab9df66eb34d58205913f4540e2ad17a175b05d81b0b7197bc57d000e829 \ - --hash=sha256:a4b7a989c8f5a72ab1b2bbfa58105578753ae77b71ba33e7383a31ff75a504c4 \ - --hash=sha256:a7b8aab50e5a288c9724d260feae25eda69582be84e97c012c80e1a5e7e03fb2 \ - --hash=sha256:ad875277844cfaeca7fe299ddf8c8d8bfe271c3dc1caf14d454faa5cdbf2fa7a \ - --hash=sha256:add52c78a12196bc0fda2de087ba6c876ea677cbda2e3eba63546b26e8bf177b \ - --hash=sha256:b10163e586cc609f5f85c9b233195554d77b1e9a0801388907441aaeb22841c5 \ - --hash=sha256:b24079a14c9596846bf7516fe75d1e2188d4a528364494859106a33d8b48be38 \ - --hash=sha256:b281b5ff5fcc9dcbfe941ac5c7fcd4b6c065adad12d850f95c9d6f23c2652384 \ - --hash=sha256:b3bb34bebaa1b78e562931a1687ff663d298013f78f972a534f36c523311a84d \ - --hash=sha256:b45e6445ac95ecb7d728604bae6538f40ccf4449b132b5428c09918523abc96d \ - --hash=sha256:ba0a31d00e8616149a5ab440d058ec2da621e05d744914774c4dde6837e1f545 \ - --hash=sha256:baba2fd199b098c5544ef2536b2499d2e2155392973ad32687024bd8572a7d1c \ - --hash=sha256:bd13f0231f4788db619347b971ca5f319c5b7ebee151afc7c14632068c6261d3 \ - --hash=sha256:bd3f6329340cef1c7ba9611bd038f2d523cea79f09f9c8f6b0553caba59ec562 \ - 
--hash=sha256:bdeb2c61611293f64ac1073f4bf6723b67d291905308a7de9bb2ca87464e3273 \ - --hash=sha256:bef24d3e4ae2c985034439f449e3f9e06bf579974ce0e53d8a507a1577d5b2ab \ - --hash=sha256:c0665d85535192098420428c779361b8823d3d7ec4848c6af3abb93bc5c915bf \ - --hash=sha256:c5668dac86a869349828db5fc928ee3f58d450dce2c85607067d581f745e4fb1 \ - --hash=sha256:c9b9305004d7e4e6a824f4f19b6d8f32b3578aad6f19fc1122aaf320cbe3dc83 \ - --hash=sha256:ccb42ca0a4a46232d716779421bbebbcad23c08d37c980f02cc3a6bd115ad277 \ - --hash=sha256:ce6f2b66799971cbae5d6547acefa7231458289e0ad481d0be0740535da38d8b \ - --hash=sha256:d36b8fffe8b248a1b961c86fbdfa0129dfce878731d169ede7fa2631447331be \ - --hash=sha256:d3dd5523ed258ad58fed7e364c92a9360d1af8a9371e0822bd0146bdf017ef4c \ - --hash=sha256:d416f2088ac8f12daacffbc2e8918ef4d6be8568e9d7155c83b7cebed49d2322 \ - --hash=sha256:d4fafc2eb5d83f4647331267808c7e0c5722c25a729a614dc2b90479cafa78bd \ - --hash=sha256:d5c8b17f6e8f29138678834cf8518049e740385eb2dbf736e8f07fc6587ec682 \ - --hash=sha256:d9270fbf038bf34ffca4855bcda6e082e2c7f906b9eb8d9a8ce82691166060f7 \ - --hash=sha256:dcc37d9d708784726fafc9c5e1232de655a009dbf97946f117aefa38d5985a0f \ - --hash=sha256:ddbb2b386128d8eca92bd9ca74e80f73fe263bcca7aa419f5b4cbc1661e19741 \ - --hash=sha256:e1e5d0a25aea8b691a00d6b54b28ac514c8cc0d8646d05f7ca6cb64b97358250 \ - --hash=sha256:e5c88b2f13bcf55fee78ea83567b9fe079ba1a4bef8b35c376043440040f7edb \ - --hash=sha256:e7eca8b89e56fb8c6c26dd3e09bd41b24789022acf1cf13358e96f1cafd8cae3 \ - --hash=sha256:e8746ce968be22a8a1801bf4a23e565f9687088580c3ed07af5846580dd97f76 \ - --hash=sha256:ec7248673ffc7104b54e4957cee38b2f3075a13442348c8d651777bf41aa45ee \ - --hash=sha256:ecb6c88d7946166d783a635efc89f9a1ff11c33d680a20df9657b6902a1d133b \ - --hash=sha256:ef3b048822dca6d231d8a8ba21069844ae38f5d83889b9b690bf17d2acc7d099 \ - --hash=sha256:f133d05aaf623519f45e16ab77526e1e70d4e1308e084c2fb4cedb1a0c764bbb \ - --hash=sha256:f3292d384537b9918010769b82ab3e79fca8b23d74f56fc69a679106a3e2c2cf \ - 
--hash=sha256:f774841bb0e8588505002962c02da420bcfb4c5056e87a139c6e45e745c0e2e2 \ - --hash=sha256:f9499c70c19ff0fbe1007043acb5ad15c1dec7d8e84ab429bca8c87138e8f85c \ - --hash=sha256:f99de52b8fbdb2a8f5301ae5fc0f9e6b3ba30d1d5fc0421956967edcc6914242 \ - --hash=sha256:fa25a620eed2a419acc2cf10135b995f8f0ce78ad00534d729aa761e4adcef8a \ - --hash=sha256:fbf558551cf415586e91160d69ca6416f3fce0b86175b64e4293644a7416b81b \ - --hash=sha256:fc82269d24860cfa859b676d18850cbb8e312dcd7eada09e7d5b007e2f3d9eb1 \ - --hash=sha256:ff832cce719edd11266ca32bc74a626b814fff236824aa1aeaad399b69fe6eae +pyzmq==26.2.0 \ + --hash=sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6 \ + --hash=sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a \ + --hash=sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9 \ + --hash=sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f \ + --hash=sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37 \ + --hash=sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc \ + --hash=sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed \ + --hash=sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097 \ + --hash=sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d \ + --hash=sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52 \ + --hash=sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6 \ + --hash=sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6 \ + --hash=sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2 \ + --hash=sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282 \ + --hash=sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3 \ + --hash=sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732 \ + 
--hash=sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5 \ + --hash=sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18 \ + --hash=sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306 \ + --hash=sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f \ + --hash=sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3 \ + --hash=sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b \ + --hash=sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277 \ + --hash=sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a \ + --hash=sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797 \ + --hash=sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca \ + --hash=sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c \ + --hash=sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f \ + --hash=sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5 \ + --hash=sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a \ + --hash=sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44 \ + --hash=sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20 \ + --hash=sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4 \ + --hash=sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8 \ + --hash=sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780 \ + --hash=sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386 \ + --hash=sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5 \ + --hash=sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2 \ + --hash=sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0 \ + --hash=sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971 \ + 
--hash=sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b \ + --hash=sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50 \ + --hash=sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c \ + --hash=sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f \ + --hash=sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231 \ + --hash=sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c \ + --hash=sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08 \ + --hash=sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5 \ + --hash=sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6 \ + --hash=sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073 \ + --hash=sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e \ + --hash=sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4 \ + --hash=sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317 \ + --hash=sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3 \ + --hash=sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072 \ + --hash=sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad \ + --hash=sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a \ + --hash=sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb \ + --hash=sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd \ + --hash=sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f \ + --hash=sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef \ + --hash=sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5 \ + --hash=sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187 \ + --hash=sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711 \ + 
--hash=sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988 \ + --hash=sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640 \ + --hash=sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c \ + --hash=sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764 \ + --hash=sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1 \ + --hash=sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1 \ + --hash=sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289 \ + --hash=sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb \ + --hash=sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a \ + --hash=sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218 \ + --hash=sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c \ + --hash=sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf \ + --hash=sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7 \ + --hash=sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8 \ + --hash=sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726 \ + --hash=sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9 \ + --hash=sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93 \ + --hash=sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88 \ + --hash=sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115 \ + --hash=sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6 \ + --hash=sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672 \ + --hash=sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2 \ + --hash=sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea \ + --hash=sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc \ + 
--hash=sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b \ + --hash=sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa \ + --hash=sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003 \ + --hash=sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797 \ + --hash=sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940 \ + --hash=sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db \ + --hash=sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc \ + --hash=sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27 \ + --hash=sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3 \ + --hash=sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e \ + --hash=sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98 \ + --hash=sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b \ + --hash=sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629 \ + --hash=sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9 \ + --hash=sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6 \ + --hash=sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec \ + --hash=sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951 \ + --hash=sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae \ + --hash=sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4 \ + --hash=sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6 \ + --hash=sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919 # via # ipykernel # jupyter-client @@ -1089,9 +1091,9 @@ rpds-py==0.20.0 \ # via # jsonschema # referencing -setuptools==72.2.0 \ - --hash=sha256:80aacbf633704e9c8bfa1d99fa5dd4dc59573efcf9e4042c13d3bcef91ac2ef9 \ - 
--hash=sha256:f11dd94b7bae3a156a95ec151f24e4637fb4fa19c878e4d191bfb8b2d82728c4 +setuptools==74.1.1 \ + --hash=sha256:2353af060c06388be1cecbf5953dcdb1f38362f87a2356c480b6b4d5fcfc8847 \ + --hash=sha256:fc91b5f89e392ef5b77fe143b17e32f65d3024744fba66dc3afe07201684d766 # via documenteer six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ @@ -1138,9 +1140,9 @@ sphinx==8.0.2 \ # sphinxcontrib-youtube # sphinxext-opengraph # sphinxext-rediraffe -sphinx-autodoc-typehints==2.2.3 \ - --hash=sha256:b7058e8c5831e5598afca1a78fda0695d3291388d954464a6e480c36198680c0 \ - --hash=sha256:fde3d888949bd0a91207cf1e54afda58121dbb4bf1f183d0cc78a0826654c974 +sphinx-autodoc-typehints==2.3.0 \ + --hash=sha256:3098e2c6d0ba99eacd013eb06861acc9b51c6e595be86ab05c08ee5506ac0c67 \ + --hash=sha256:535c78ed2d6a1bad393ba9f3dfa2602cf424e2631ee207263e07874c38fde084 # via documenteer sphinx-automodapi==0.17.0 \ --hash=sha256:4d029cb79eef29413e94ab01bb0177ebd2d5ba86e9789b73575afe9c06ae1501 \ @@ -1223,56 +1225,56 @@ sphinxext-rediraffe==0.2.7 \ --hash=sha256:651dcbfae5ffda9ffd534dfb8025f36120e5efb6ea1a33f5420023862b9f725d \ --hash=sha256:9e430a52d4403847f4ffb3a8dd6dfc34a9fe43525305131f52ed899743a5fd8c # via documenteer -sqlalchemy==2.0.32 \ - --hash=sha256:01438ebcdc566d58c93af0171c74ec28efe6a29184b773e378a385e6215389da \ - --hash=sha256:0c1c9b673d21477cec17ab10bc4decb1322843ba35b481585facd88203754fc5 \ - --hash=sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619 \ - --hash=sha256:0d322cc9c9b2154ba7e82f7bf25ecc7c36fbe2d82e2933b3642fc095a52cfc78 \ - --hash=sha256:0ef18a84e5116340e38eca3e7f9eeaaef62738891422e7c2a0b80feab165905f \ - --hash=sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389 \ - --hash=sha256:14e09e083a5796d513918a66f3d6aedbc131e39e80875afe81d98a03312889e6 \ - --hash=sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533 \ - 
--hash=sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9 \ - --hash=sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f \ - --hash=sha256:251f0d1108aab8ea7b9aadbd07fb47fb8e3a5838dde34aa95a3349876b5a1f1d \ - --hash=sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0 \ - --hash=sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c \ - --hash=sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d \ - --hash=sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d \ - --hash=sha256:328429aecaba2aee3d71e11f2477c14eec5990fb6d0e884107935f7fb6001632 \ - --hash=sha256:3bd1cae7519283ff525e64645ebd7a3e0283f3c038f461ecc1c7b040a0c932a1 \ - --hash=sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8 \ - --hash=sha256:3eb6a97a1d39976f360b10ff208c73afb6a4de86dd2a6212ddf65c4a6a2347d5 \ - --hash=sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb \ - --hash=sha256:4488120becf9b71b3ac718f4138269a6be99a42fe023ec457896ba4f80749525 \ - --hash=sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2 \ - --hash=sha256:4979dc80fbbc9d2ef569e71e0896990bc94df2b9fdbd878290bd129b65ab579c \ - --hash=sha256:52fec964fba2ef46476312a03ec8c425956b05c20220a1a03703537824b5e8e1 \ - --hash=sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b \ - --hash=sha256:62e23d0ac103bcf1c5555b6c88c114089587bc64d048fef5bbdb58dfd26f96da \ - --hash=sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92 \ - --hash=sha256:6c742be912f57586ac43af38b3848f7688863a403dfb220193a882ea60e1ec3a \ - --hash=sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d \ - --hash=sha256:78c03d0f8a5ab4f3034c0e8482cfcc415a3ec6193491cfa1c643ed707d476f16 \ - --hash=sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec \ - --hash=sha256:7dd8583df2f98dea28b5cd53a1beac963f4f9d087888d75f22fcc93a07cf8d84 \ - 
--hash=sha256:85a01b5599e790e76ac3fe3aa2f26e1feba56270023d6afd5550ed63c68552b3 \ - --hash=sha256:8a37e4d265033c897892279e8adf505c8b6b4075f2b40d77afb31f7185cd6ecd \ - --hash=sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924 \ - --hash=sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb \ - --hash=sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28 \ - --hash=sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22 \ - --hash=sha256:aaf04784797dcdf4c0aa952c8d234fa01974c4729db55c45732520ce12dd95b4 \ - --hash=sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961 \ - --hash=sha256:ada0102afff4890f651ed91120c1120065663506b760da4e7823913ebd3258be \ - --hash=sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5 \ - --hash=sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0 \ - --hash=sha256:b8afd5b26570bf41c35c0121801479958b4446751a3971fb9a480c1afd85558e \ - --hash=sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8 \ - --hash=sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8 \ - --hash=sha256:c41a2b9ca80ee555decc605bd3c4520cc6fef9abde8fd66b1cf65126a6922d65 \ - --hash=sha256:c750987fc876813f27b60d619b987b057eb4896b81117f73bb8d9918c14f1cad \ - --hash=sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202 +sqlalchemy==2.0.33 \ + --hash=sha256:06504d9625e3ef114b39803ebca6f379133acad58a87c33117ddc5df66079915 \ + --hash=sha256:06b30bbc43c6dd8b7cdc509cd2e58f4f1dce867565642e1d1a65e32459c89bd0 \ + --hash=sha256:0ea64443a86c3b5a0fd7c93363ad2f9465cb3af61f9920b7c75d1a7bebbeef8a \ + --hash=sha256:1109cc6dc5c9d1223c42186391e6a5509e6d4ab2c30fa629573c10184f742f2e \ + --hash=sha256:17d0c69f66392ad2db1609373a74d1f834b2e632f3f52d446747b8ec220aea53 \ + --hash=sha256:1d81e3aeab456fe24c3f0dcfd4f952a3a5ee45e9c14fc66d34c1d7a60cf7b698 \ + 
--hash=sha256:2415824ec658891ac38d13a2f36b4ceb2033f034dee1c226f83917589a65f072 \ + --hash=sha256:28c0800c851955f5bd11c0b904638c1343002650d0c071c6fbf0d157cc78627d \ + --hash=sha256:2b1e98507ec2aa200af980d592e936e9dac1c1ec50acc94330ae4b13c55d6fea \ + --hash=sha256:30a3f55be76364b64c83788728faaba782ab282a24909e1994404c2146d39982 \ + --hash=sha256:31e56020832be602201fbf8189f379569cf5c3604cdc4ce79f10dbbfcbf8a0eb \ + --hash=sha256:32a4f38d2efca066ec793451ef6852cb0d9086dc3d5479d88a5a25529d1d1861 \ + --hash=sha256:3ad94634338d8c576b1d47a96c798be186650aa5282072053ce2d12c6f309f82 \ + --hash=sha256:3c64d58e83a68e228b1ae6ebac8721241e9d8cc5e0c0dd11ed5d89155477b243 \ + --hash=sha256:454e9b4355f0051063daebc4060140251c19f33fc5d02151c347431860fd104b \ + --hash=sha256:459099ab8dd43a5edbb99f58ba4730baec457df9c06ebc71434c6b4b78cc8cf9 \ + --hash=sha256:49541a43828e273325c520fbacf786615bd974dad63ff60b8ea1e1216e914d1a \ + --hash=sha256:4f1c44c8d66101e6f627f330d8b5b3de5ad25eedb6df3ce39a2e6f92debbcf15 \ + --hash=sha256:523ae689c023cbf0fe1613101254824515193f85f806ba04611dee83302660b5 \ + --hash=sha256:570ec43e8c3c020abac4f0720baa5fe5187334e3f1e8e1777183c041962b61cc \ + --hash=sha256:60c54b677d4f0a0b2df3b79e89e84d601fb931c720176641742efd66b50601f9 \ + --hash=sha256:61e9a2d68a5a8ca6a84cbc79aa7f2e430ae854d3351d6e9ceb3edf6798797b63 \ + --hash=sha256:63b7d9890f7958dabd95cf98a3f48740fbe2bb0493523aef590e82164fa68194 \ + --hash=sha256:67eb8e0ffbebd3d82ec5079ca5f807a661c574b482785483717857c2acab833a \ + --hash=sha256:684aee5fd811091b2f48006fb3fe6c7f2de4a716ef8d294a2aab762099753133 \ + --hash=sha256:751eaafa907a66dd9a328a9d15c3dcfdcba3ef8dd8f7f4a9771cdacdec45d9bf \ + --hash=sha256:77eaf8fdf305266b806a91ae4633edbf86ad37e13bd92ac85e305e7f654c19a5 \ + --hash=sha256:7fd0a28bc24a75326f13735a58272247f65c9e8ee16205eacb2431d6ee94f44a \ + --hash=sha256:816c927dd51e4951d6e79870c945340057a5d8e63543419dee0d247bd67a88f8 \ + --hash=sha256:81759e77a4985abdbac068762a0eaf0f11860fe041ad6da170aae7615ea72531 \ + 
--hash=sha256:82c72da5be489c8d150deba70d5732398695418df5232bceb52ee323ddd9753b \ + --hash=sha256:8bef11d31a1c48f5943e577d1ef81085ec1550c37552bfc9bf8e5d184ce47142 \ + --hash=sha256:91c93333c2b37ff721dc83b37e28c29de4c502b5612f2d093468037b86aa2be0 \ + --hash=sha256:92249ac94279b8e5f0c0c8420e09b804d0a49d2269f52f549d4cb536c8382434 \ + --hash=sha256:93efa4b72f7cb70555b0f66ee5e113ae40073c57054a72887e50b05bfd97baa4 \ + --hash=sha256:9d035a672d5b3e4793a4a8865c3274a7bbbac7fac67a47b415023b5539105087 \ + --hash=sha256:9e5819822050e6e36e2aa41260d05074c026a1bbb9baa6869170b5ce64db7a4d \ + --hash=sha256:a3926e4ed4a3e956c8b2b0f1140493378c8cd17cad123b4fc1e0f6ecd3e05b19 \ + --hash=sha256:a3da2371628e28ef279f3f756f5e58858fad7820de08508138c9f5f9e4d8f4ac \ + --hash=sha256:ac252bafe8cbadfac7b1e8a74748ffd775e27325186d12b82600b652d9adcb86 \ + --hash=sha256:ae294808afde1b14a1a69aa86a69cadfe391848bbb233a5332a8065e4081cabc \ + --hash=sha256:c40e0213beaf410a151e4329e30c73687838c251c998ba1b312975dbbcb2d05d \ + --hash=sha256:c5d5a733c6af7f392435e673d1b136f6bdf2366033abb35eed680400dc730840 \ + --hash=sha256:c633e2d2f8a7b88c06e276bbe16cb7e62fed815fcbeb69cd9752cea166ecb8e8 \ + --hash=sha256:c9f4f92eee7d06531cc6a5b814e603a0c7639876aab03638dcc70c420a3974f6 \ + --hash=sha256:ca8788dc1baee100f09110f33a01d928cf9df4483d2bfb25a37be31a659d46bb \ + --hash=sha256:d004a623ad4aa8d2eb31b37e65b5e020c9f65a1852b8b9e6301f0e411aca5b9a \ + --hash=sha256:ee2b82b170591ccd19d463c9798a9caeea0cad967a8d2f3264de459f582696d5 \ + --hash=sha256:f7c82a7930126bb5ccfbb73fc1562d52942fbffb2fda2791fab49de249fc202a # via jupyter-cache stack-data==0.6.3 \ --hash=sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9 \ @@ -1286,9 +1288,9 @@ termcolor==2.4.0 \ --hash=sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63 \ --hash=sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a # via pytest-sugar -tomlkit==0.13.0 \ - 
--hash=sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72 \ - --hash=sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264 +tomlkit==0.13.2 \ + --hash=sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde \ + --hash=sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79 # via documenteer tornado==6.4.1 \ --hash=sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8 \ @@ -1390,7 +1392,7 @@ wcwidth==0.2.13 \ --hash=sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859 \ --hash=sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5 # via prompt-toolkit -zipp==3.20.0 \ - --hash=sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31 \ - --hash=sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d +zipp==3.20.1 \ + --hash=sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064 \ + --hash=sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b # via importlib-metadata diff --git a/requirements/main.txt b/requirements/main.txt index e4b5e19f13..67830cad11 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -39,9 +39,9 @@ bcrypt==4.2.0 \ --hash=sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221 \ --hash=sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db # via -r requirements/main.in -certifi==2024.7.4 \ - --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ - --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 # via # httpcore # httpx @@ -213,41 +213,41 @@ click==8.1.7 \ # via # -r requirements/main.in # safir -cryptography==43.0.0 \ - 
--hash=sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709 \ - --hash=sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069 \ - --hash=sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2 \ - --hash=sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b \ - --hash=sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e \ - --hash=sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70 \ - --hash=sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778 \ - --hash=sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22 \ - --hash=sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895 \ - --hash=sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf \ - --hash=sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431 \ - --hash=sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f \ - --hash=sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947 \ - --hash=sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74 \ - --hash=sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc \ - --hash=sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66 \ - --hash=sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66 \ - --hash=sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf \ - --hash=sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f \ - --hash=sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5 \ - --hash=sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e \ - --hash=sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f \ - --hash=sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55 \ - --hash=sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1 \ - 
--hash=sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47 \ - --hash=sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5 \ - --hash=sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0 +cryptography==43.0.1 \ + --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ + --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ + --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ + --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ + --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ + --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ + --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ + --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ + --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ + --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ + --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ + --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ + --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ + --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ + --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ + --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ + --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ + --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ + --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ + --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ + 
--hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ + --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ + --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ + --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ + --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ + --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ + --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 # via # -r requirements/main.in # pyjwt # safir -fastapi==0.112.0 \ - --hash=sha256:3487ded9778006a45834b8c816ec4a48d522e2631ca9e75ec5a774f1b052f821 \ - --hash=sha256:d262bc56b7d101d1f4e8fc0ad2ac75bb9935fec504d2b7117686cec50710cf05 +fastapi==0.112.2 \ + --hash=sha256:3d4729c038414d5193840706907a41839d839523da6ed0c2811f1168cac1798c \ + --hash=sha256:db84b470bd0e2b1075942231e90e3577e12a903c4dc8696f0d206a7904a7af1c # via safir gidgethub==5.3.0 \ --hash=sha256:4dd92f2252d12756b13f9dd15cde322bfb0d625b6fb5d680da1567ec74b462c0 \ @@ -279,9 +279,9 @@ hvac==2.3.0 \ --hash=sha256:1b85e3320e8642dd82f234db63253cda169a817589e823713dc5fca83119b1e2 \ --hash=sha256:a3afc5710760b6ee9b3571769df87a0333da45da05a5f9f963e1d3925a84be7d # via -r requirements/main.in -idna==3.7 \ - --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ - --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 +idna==3.8 \ + --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ + --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 # via # anyio # requests @@ -531,10 +531,14 @@ rfc3986==1.5.0 \ --hash=sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835 \ --hash=sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97 # via httpx -safir==6.2.0 \ - 
--hash=sha256:335219abba8ed663395bcf6cf86a60ec8de8412ea212dc0dbe8425e9faa7bc97 \ - --hash=sha256:61cf6fd3839c0945bcc7c01469dc8fcd19351eba33b6022c596684d87763e50e +safir==6.3.0 \ + --hash=sha256:2fcd64bf37dd42eacedd6378341b2487cd06dbaf1f28403301b8d80f60a4fb56 \ + --hash=sha256:6ad7dad520d87d853628849ef95a348c55dbd0180ad3f15c1cf2f7f8fe32f915 # via -r requirements/main.in +safir-logging==6.3.0 \ + --hash=sha256:491dfe85de89a3f2daa29c491a22a0551f0961444490418d91ec50c040ae16eb \ + --hash=sha256:e14754ab0bba6cfa248c3fc4cb5ca28410d97ff3965e831eab6581ed37485e79 + # via safir six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 @@ -550,16 +554,18 @@ sniffio==1.3.1 \ # anyio # httpcore # httpx -starlette==0.37.2 \ - --hash=sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee \ - --hash=sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823 +starlette==0.38.4 \ + --hash=sha256:526f53a77f0e43b85f583438aee1a940fd84f8fd610353e8b0c1a77ad8a87e76 \ + --hash=sha256:53a7439060304a208fea17ed407e998f46da5e5d9b1addfea3040094512a6379 # via # fastapi # safir structlog==24.4.0 \ --hash=sha256:597f61e80a91cc0749a9fd2a098ed76715a1c8a01f73e336b746504d1aad7610 \ --hash=sha256:b27bfecede327a6d2da5fbc96bd859f114ecc398a6389d664f62085ee7ae6fc4 - # via safir + # via + # safir + # safir-logging typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 diff --git a/requirements/tox.txt b/requirements/tox.txt index 2c83eef26d..5acb5ec80e 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -1,8 +1,8 @@ # This file was autogenerated by uv via the following command: # uv pip compile --generate-hashes --output-file requirements/tox.txt requirements/tox.in -cachetools==5.4.0 \ - 
--hash=sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474 \ - --hash=sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827 +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a # via tox chardet==5.2.0 \ --hash=sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7 \ @@ -57,25 +57,25 @@ tox-uv==1.11.2 \ --hash=sha256:7f8f1737b3277e1cddcb5b89fcc5931d04923562c940ae60f29e140908566df2 \ --hash=sha256:a7aded5c3fb69f055b523357988c1055bb573e91bfd7ecfb9b5233ebcab5d10b # via -r requirements/tox.in -uv==0.2.36 \ - --hash=sha256:083e56a18fc33395aeed4f56a47003e08f2ad9d5039af63ad5b107a241d0e7a3 \ - --hash=sha256:139889680c2475afbab61c725df951c4dfa030c42c4eaa8f27d05286c96e8aab \ - --hash=sha256:3f18322decfb0da577e40675620f6e6b9ffe1d8ee1de88a448bfe67fe7884626 \ - --hash=sha256:463a45a34277b9334e500fce463f59408a6bab0c1b5cb3023f25185a4805a562 \ - --hash=sha256:5a3800d2484b989041139ef96b395cec0e4e0a13132584b0147c739063a2494f \ - --hash=sha256:5c8d624975f8355e00ad5f802ed27fcfc7b86d0bd50b57efe24bd665fd3f9a9b \ - --hash=sha256:8753851cc10b0a67e5c5dd29a6f35a072341290cf27a7bb3193ddd92bda19f51 \ - --hash=sha256:8820dd5b77ffcda07dde09712a43d969d39b0aace112d8074c540f19a4911cc2 \ - --hash=sha256:89d3fb3d7a66fa4a4f7c938be0277457fe71179ec4e72758cfe16faec1daa362 \ - --hash=sha256:8e5e2e8e218eb672a3bb57af0ab2d2d3de79119b5dc6b6edb03d349739e474ff \ - --hash=sha256:a08d485cd8eae0352b4765005a4499ad5db073c3534866d68617bbb831ee219a \ - --hash=sha256:a4fddaf0a6a995591042a57ac48557b9b2c1e2c7f09e0f7880f40c34e61f53f8 \ - --hash=sha256:a7961f4d88100fc48129c918545cbb17b9a0d8e3d163c65985e1d1682e056321 \ - --hash=sha256:a837b799e3af1535473b8ab14e414e50f595d547d295879db0d6b0943b7476df \ - --hash=sha256:d093fd10aaf29e85128beaa514f8d37d7374cf1d1a95da966e15788a6fe7d55d \ - 
--hash=sha256:e36cd4e9c1187d155139b98bcd2cfbfb275f9f601c550fcc38a283983c74f93d \ - --hash=sha256:e79a4cdb3b89b011fafcaa853ebbb9254115f3f7cadbd9141492c48ceeac1b2d \ - --hash=sha256:f1d711629dd8610933687ceea4ad82156ef7b2102c4e9da72afe6c01981f8a1a +uv==0.4.4 \ + --hash=sha256:051589ab42bfdb2997ea61a56a78a2bab0b6ae7d014f96a578dcc5f9d8766757 \ + --hash=sha256:0c9ada2fbfe3ca29c50914acd714fe35100ab56fdb83510d1aadd00d55191d1b \ + --hash=sha256:0d0af47198dc4ca635540b72c933219c6c967885788fd1f651112f168fcade0a \ + --hash=sha256:0d51db6bf89b7b0a4aae229f7efee00fc52a1d7391605f3b789996f9c7986653 \ + --hash=sha256:14f06ed0e0f163c9ec8b26f4fc2df14530080e405d7348ad0c59f9c296c55918 \ + --hash=sha256:3e9139f171329b6fa40a064f9e7923848d44e60bc31da138758695ec34d8200d \ + --hash=sha256:433c69a6e7f35c865172d69e51bf78521a9d87eac6f8772af04667f5d25cc9a9 \ + --hash=sha256:718bbdf0675eab8d15f52041725b60743a9496fde3dc493d34913aa4a15f0a81 \ + --hash=sha256:8ba084d6d5baf92a3cfe41a20fd912dea4e2ea3eca8401f1892394c5c2b79c92 \ + --hash=sha256:918d4da22f9ff4403dc72dfb4c58c994400a64797a3a17f00f5c0d3717d1cb8c \ + --hash=sha256:9ba6abad0a531181bcb90b9af818e2490211f2d4b3eb83eb2a27df1f07f299fb \ + --hash=sha256:c1b7db1db176e46184c974ed30687671ec5d67cfcce34c7ed4a63141ecb6c70e \ + --hash=sha256:d2e2c9db83efd81b0b8dcaa45533b461b058d5aec49a6ed6cc98832e56e45856 \ + --hash=sha256:d66242bba1bbec847b77fcdc7d3191eab733189213a5d2717dbda1ff04e24b46 \ + --hash=sha256:da3a77ad858be5239ae33509ddfeaf097d7bda77fc0b2a42994cbec32cef4769 \ + --hash=sha256:dc881ea11dcb443940bbac5d7601cd7c74f80e7086c2e310e86ebf10d1c8816b \ + --hash=sha256:dd94e5be00a0a06ab5cbba7014720440a12bae73150d8146bc3535f3a22ff069 \ + --hash=sha256:f866f9a44982ef8041a982c3197a17e18d4a8ac7717b4462477ea0ca6a088a52 # via tox-uv virtualenv==20.26.3 \ --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ From b7fc857f575871ca5c48763054d8d4e1306ecaab Mon Sep 17 00:00:00 2001 From: MAINETTI Gabriele Date: Wed, 4 Sep 2024 13:18:02 +0200 
Subject: [PATCH 024/567] UK qserv as tap backend --- applications/tap/values-ccin2p3.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/applications/tap/values-ccin2p3.yaml b/applications/tap/values-ccin2p3.yaml index cf30c0a947..64e5530e06 100644 --- a/applications/tap/values-ccin2p3.yaml +++ b/applications/tap/values-ccin2p3.yaml @@ -1,8 +1,7 @@ cadc-tap: tapSchema: image: - repository: "gabrimaine/tap-schema-ccin2p3" - tag: 2.4.1 + repository: "stvoutsin/tap-schema-roe" config: gcsBucket: "lsstrsp:async-results.lsst.codes" @@ -11,4 +10,4 @@ cadc-tap: datalinkPayloadUrl: "https://github.com/gabrimaine/sdm_schemas/releases/download/2.4.1/datalink-snippets.zip" qserv: - host: "ccqserv201.in2p3.fr:30040" + host: "192.41.122.85:30040" From aeced08b97428686e769ff67b11a1115cf76b84a Mon Sep 17 00:00:00 2001 From: MAINETTI Gabriele Date: Wed, 4 Sep 2024 13:19:45 +0200 Subject: [PATCH 025/567] UK qserv: add image tag --- applications/tap/values-ccin2p3.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/tap/values-ccin2p3.yaml b/applications/tap/values-ccin2p3.yaml index 64e5530e06..ac85822b0e 100644 --- a/applications/tap/values-ccin2p3.yaml +++ b/applications/tap/values-ccin2p3.yaml @@ -2,6 +2,7 @@ cadc-tap: tapSchema: image: repository: "stvoutsin/tap-schema-roe" + tag: 2.3.0 config: gcsBucket: "lsstrsp:async-results.lsst.codes" From 6ad393017c8fb717ec3d5f880e81d3b8ad9b031d Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 3 Sep 2024 18:46:51 -0700 Subject: [PATCH 026/567] Add InfluxDB Enterprise - Add configuration for a single data node InfluxDB Enterprise deployment and three meta nodes --- applications/sasquatch/values-summit.yaml | 49 +++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 8c62d1c356..9f1f21a29f 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml 
@@ -67,6 +67,55 @@ influxdb: memory: 128Gi cpu: 16 +influxdb-enterprise: + enabled: true + license: + secret: + name: sasquatch + key: influxdb-enterprise-license + meta: + ingress: + enabled: true + hostname: summit-lsp.lsst.codes + persistence: + enabled: true + accessMode: ReadWriteOnce + size: 16Gi + resources: + requests: + memory: 2Gi + cpu: 2 + limits: + memory: 4Gi + cpu: 4 + data: + replicas: 1 + ingress: + enabled: true + hostname: summit-lsp.lsst.codes + persistence: + enabled: true + accessMode: ReadWriteOnce + storageClass: localdrive + size: 15Ti + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: kubernetes.io/hostname + operator: In + values: + - yagan20 + # -- InfluxDB Enterprise data pod resources, 16 cores single node license + resources: + requests: + memory: 256Gi + cpu: 16 + limits: + memory: 256Gi + cpu: 16 + kafka-connect-manager: influxdbSink: # Based on the kafka producers configuration for the Summit From 45440f10b0cc10193f7507f27eee88d2577be181 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 3 Sep 2024 18:49:08 -0700 Subject: [PATCH 027/567] Add secret for InfluxDB Enterprise license --- applications/sasquatch/secrets.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/applications/sasquatch/secrets.yaml b/applications/sasquatch/secrets.yaml index 8634cb3df1..b6648a6eb1 100644 --- a/applications/sasquatch/secrets.yaml +++ b/applications/sasquatch/secrets.yaml @@ -81,3 +81,7 @@ camera-password: description: >- camera KafkaUser password. if: strimzi-kafka.users.camera.enabled +influxdb-enterprise-license: + description: >- + InfluxDB Enterprise license. 
+ if: influxdb-enterprise.enabled From 07d81881b22c2bd317b404e0b8aab025dd6b38d4 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 4 Sep 2024 05:51:51 -0700 Subject: [PATCH 028/567] Configure key for InfluxDB Enterprise meta shared secret --- applications/sasquatch/README.md | 4 +++- .../sasquatch/charts/influxdb-enterprise/README.md | 4 +++- .../influxdb-enterprise/templates/meta-statefulset.yaml | 4 ++-- .../sasquatch/charts/influxdb-enterprise/values.yaml | 7 ++++++- 4 files changed, 14 insertions(+), 5 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 3e53b27e34..2ae35a9f58 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -163,7 +163,9 @@ Rubin Observatory's telemetry service | influxdb-enterprise.meta.service.loadBalancerIP | string | Do not allocate a load balancer IP | Load balancer IP for the meta service | | influxdb-enterprise.meta.service.nodePort | int | Do not allocate a node port | Node port for the meta service | | influxdb-enterprise.meta.service.type | string | `"ClusterIP"` | Service type for the meta service | -| influxdb-enterprise.meta.sharedSecret.secretName | string | `"influxdb-enterprise-shared-secret"` | Shared secret used by the internal API for JWT authentication between InfluxDB nodes. Must have a key named `secret` that should be a long, random string See [documentation for shared-internal-secret](https://docs.influxdata.com/enterprise_influxdb/v1/administration/configure/config-data-nodes/#meta-internal-shared-secret). | +| influxdb-enterprise.meta.sharedSecret.secret | object | `{"key":"secret","name":"influxdb-enterprise-shared-secret"}` | Shared secret used by the internal API for JWT authentication between InfluxDB nodes. 
Must have a key named `secret` that should be a long, random string See [documentation for shared-internal-secret](https://docs.influxdata.com/enterprise_influxdb/v1/administration/configure/config-data-nodes/#meta-internal-shared-secret). | +| influxdb-enterprise.meta.sharedSecret.secret.key | string | `"secret"` | Key within that secret that contains the shared secret | +| influxdb-enterprise.meta.sharedSecret.secret.name | string | `"influxdb-enterprise-shared-secret"` | Name of the secret containing the shared secret | | influxdb-enterprise.meta.tolerations | list | `[]` | Tolerations for meta pods | | influxdb-enterprise.nameOverride | string | `""` | Override the base name for resources | | influxdb-enterprise.serviceAccount.annotations | object | `{}` | Annotations to add to the service account | diff --git a/applications/sasquatch/charts/influxdb-enterprise/README.md b/applications/sasquatch/charts/influxdb-enterprise/README.md index aba97b90d0..12233edf75 100644 --- a/applications/sasquatch/charts/influxdb-enterprise/README.md +++ b/applications/sasquatch/charts/influxdb-enterprise/README.md @@ -92,7 +92,9 @@ Run InfluxDB Enterprise on Kubernetes | meta.service.loadBalancerIP | string | Do not allocate a load balancer IP | Load balancer IP for the meta service | | meta.service.nodePort | int | Do not allocate a node port | Node port for the meta service | | meta.service.type | string | `"ClusterIP"` | Service type for the meta service | -| meta.sharedSecret.secretName | string | `"influxdb-enterprise-shared-secret"` | Shared secret used by the internal API for JWT authentication between InfluxDB nodes. Must have a key named `secret` that should be a long, random string See [documentation for shared-internal-secret](https://docs.influxdata.com/enterprise_influxdb/v1/administration/configure/config-data-nodes/#meta-internal-shared-secret). 
| +| meta.sharedSecret.secret | object | `{"key":"secret","name":"influxdb-enterprise-shared-secret"}` | Shared secret used by the internal API for JWT authentication between InfluxDB nodes. Must have a key named `secret` that should be a long, random string See [documentation for shared-internal-secret](https://docs.influxdata.com/enterprise_influxdb/v1/administration/configure/config-data-nodes/#meta-internal-shared-secret). | +| meta.sharedSecret.secret.key | string | `"secret"` | Key within that secret that contains the shared secret | +| meta.sharedSecret.secret.name | string | `"influxdb-enterprise-shared-secret"` | Name of the secret containing the shared secret | | meta.tolerations | list | `[]` | Tolerations for meta pods | | nameOverride | string | `""` | Override the base name for resources | | serviceAccount.annotations | object | `{}` | Annotations to add to the service account | diff --git a/applications/sasquatch/charts/influxdb-enterprise/templates/meta-statefulset.yaml b/applications/sasquatch/charts/influxdb-enterprise/templates/meta-statefulset.yaml index beff940f34..cf543c32a4 100644 --- a/applications/sasquatch/charts/influxdb-enterprise/templates/meta-statefulset.yaml +++ b/applications/sasquatch/charts/influxdb-enterprise/templates/meta-statefulset.yaml @@ -59,8 +59,8 @@ spec: - name: INFLUXDB_META_INTERNAL_SHARED_SECRET valueFrom: secretKeyRef: - name: {{ .Values.meta.sharedSecret.secretName }} - key: secret + name: {{ .Values.meta.sharedSecret.secret.name }} + key: {{ .Values.meta.sharedSecret.secret.key }} {{- if .Values.meta.env }} {{ toYaml .Values.meta.env | indent 12 }} {{- end}} diff --git a/applications/sasquatch/charts/influxdb-enterprise/values.yaml b/applications/sasquatch/charts/influxdb-enterprise/values.yaml index 412b131f72..0709b449c6 100644 --- a/applications/sasquatch/charts/influxdb-enterprise/values.yaml +++ b/applications/sasquatch/charts/influxdb-enterprise/values.yaml @@ -137,7 +137,12 @@ meta: # InfluxDB nodes. 
Must have a key named `secret` that should be a long, # random string See [documentation for # shared-internal-secret](https://docs.influxdata.com/enterprise_influxdb/v1/administration/configure/config-data-nodes/#meta-internal-shared-secret). - secretName: influxdb-enterprise-shared-secret + secret: + # -- Name of the secret containing the shared secret + name: influxdb-enterprise-shared-secret + + # -- Key within that secret that contains the shared secret + key: secret service: # -- Service type for the meta service From 0cd6fe6251bee74474cde6a25b4daf6b4b1976ac Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 4 Sep 2024 05:52:33 -0700 Subject: [PATCH 029/567] Add InfluxDB Enterprise shared secret --- applications/sasquatch/secrets.yaml | 4 ++++ applications/sasquatch/values-summit.yaml | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/applications/sasquatch/secrets.yaml b/applications/sasquatch/secrets.yaml index b6648a6eb1..7f84437a65 100644 --- a/applications/sasquatch/secrets.yaml +++ b/applications/sasquatch/secrets.yaml @@ -85,3 +85,7 @@ influxdb-enterprise-license: description: >- InfluxDB Enterprise license. if: influxdb-enterprise.enabled +influxdb-enterprise-shared-secret: + description: >- + InfluxDB Enterprise shared secret. 
+ if: influxdb-enterprise.enabled diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 9f1f21a29f..972f8f4dd5 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -81,6 +81,10 @@ influxdb-enterprise: enabled: true accessMode: ReadWriteOnce size: 16Gi + sharedSecret: + secret: + name: sasquatch + key: influxdb-enterprise-shared-secret resources: requests: memory: 2Gi From 6bdc24fdc0e2da683eb6a07de65c543d2ace79a9 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 4 Sep 2024 06:29:39 -0700 Subject: [PATCH 030/567] Configure connectors for InfluxDB enterprise - InfluxDB OSS and Enterprise will run simultaneously at the Summit for a while - Create a second instance of connectors to write to InfluxDB Enterprise --- applications/sasquatch/Chart.yaml | 4 + applications/sasquatch/README.md | 36 +++++ applications/sasquatch/values-summit.yaml | 173 +++++++++++++++++++++- 3 files changed, 210 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/Chart.yaml b/applications/sasquatch/Chart.yaml index 93bb8ee863..600032104e 100644 --- a/applications/sasquatch/Chart.yaml +++ b/applications/sasquatch/Chart.yaml @@ -46,6 +46,10 @@ dependencies: - name: telegraf-kafka-consumer condition: telegraf-kafka-consumer.enabled version: 1.0.0 + - name: telegraf-kafka-consumer + alias: telegraf-kafka-consumer-oss + condition: telegraf-kafka-consumer-oss.enabled + version: 1.0.0 - name: rest-proxy condition: rest-proxy.enabled version: 1.0.0 diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 2ae35a9f58..2e8e8fca70 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -434,3 +434,39 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer.podLabels | object | `{}` | Labels for telegraf-kafka-consumer pods | | telegraf-kafka-consumer.resources | object | See `values.yaml` | Kubernetes resources 
requests and limits | | telegraf-kafka-consumer.tolerations | list | `[]` | Tolerations for pod assignment | +| telegraf-kafka-consumer-oss.affinity | object | `{}` | Affinity for pod assignment | +| telegraf-kafka-consumer-oss.args | list | `[]` | Arguments passed to the Telegraf agent containers | +| telegraf-kafka-consumer-oss.enabled | bool | `false` | Wether the Telegraf Kafka Consumer is enabled | +| telegraf-kafka-consumer-oss.env | list | See `values.yaml` | Telegraf agent enviroment variables | +| telegraf-kafka-consumer-oss.envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | +| telegraf-kafka-consumer-oss.image.pullPolicy | string | `"Always"` | Image pull policy | +| telegraf-kafka-consumer-oss.image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | +| telegraf-kafka-consumer-oss.image.tag | string | `"1.30.2-alpine"` | Telegraf image tag | +| telegraf-kafka-consumer-oss.imagePullSecrets | list | `[]` | Secret names to use for Docker pulls | +| telegraf-kafka-consumer-oss.influxdb.database | string | `"telegraf-kafka-consumer-v1"` | Name of the InfluxDB v1 database to write to | +| telegraf-kafka-consumer-oss.influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.collection_jitter | string | "0s" | Data collection jitter. This is used to jitter the collection by a random amount. Each plugin will sleep for a random time within jitter before collecting. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.consumer_fetch_default | string | "20MB" | Maximum amount of data the server should return for a fetch request. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.debug | bool | false | Run Telegraf in debug mode. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.enabled | bool | `false` | Enable the Telegraf Kafka consumer. 
| +| telegraf-kafka-consumer-oss.kafkaConsumers.test.fields | list | `[]` | List of Avro fields to be recorded as InfluxDB fields. If not specified, any Avro field that is not marked as a tag will become an InfluxDB field. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.flush_interval | string | "1s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.interval | string | "1s" | Data collection interval for the Kafka consumer. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.metric_batch_size | int | 1000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.metric_buffer_limit | int | 10000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.offset | string | `"oldest"` | Kafka consumer offset. Possible values are `oldest` and `newest`. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.precision | string | "1us" | Data precision. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.replicaCount | int | `1` | Number of Telegraf Kafka consumer replicas. Increase this value to increase the consumer throughput. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.tags | list | `[]` | List of Avro fields to be recorded as InfluxDB tags. 
The Avro fields specified as tags will be converted to strings before ingestion into InfluxDB. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.timestamp_field | string | `"private_efdStamp"` | Avro field to be used as the InfluxDB timestamp (optional). If unspecified or set to the empty string, Telegraf will use the time it received the measurement. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.timestamp_format | string | `"unix"` | Timestamp format. Possible values are `unix` (the default if unset) a timestamp in seconds since the Unix epoch, `unix_ms` (milliseconds), `unix_us` (microsseconds), or `unix_ns` (nanoseconds). | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.topicRegexps | string | `"[ \".*Test\" ]\n"` | List of regular expressions to specify the Kafka topics consumed by this agent. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.union_field_separator | string | `""` | Union field separator: if a single Avro field is flattened into more than one InfluxDB field (e.g. an array `a`, with four members, would yield `a0`, `a1`, `a2`, `a3`; if the field separator were `_`, these would be `a_0`...`a_3`. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.union_mode | string | `"nullable"` | Union mode: this can be one of `flatten`, `nullable`, or `any`. See `values.yaml` for extensive discussion. 
| +| telegraf-kafka-consumer-oss.nodeSelector | object | `{}` | Node labels for pod assignment | +| telegraf-kafka-consumer-oss.podAnnotations | object | `{}` | Annotations for telegraf-kafka-consumers pods | +| telegraf-kafka-consumer-oss.podLabels | object | `{}` | Labels for telegraf-kafka-consumer pods | +| telegraf-kafka-consumer-oss.resources | object | See `values.yaml` | Kubernetes resources requests and limits | +| telegraf-kafka-consumer-oss.tolerations | list | `[]` | Tolerations for pod assignment | diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 972f8f4dd5..5c82488f2a 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -203,8 +203,52 @@ kafka-connect-manager: repairerConnector: false topicsRegex: "lsst.sal.MTCamera|lsst.sal.MTHeaderService|lsst.sal.MTOODS" +telegraf-kafka-consumer-oss: + enabled: true + kafkaConsumers: + oss-backpack: + enabled: true + replicaCount: 1 + database: "lsst.backpack" + timestamp_format: "unix" + timestamp_field: "timestamp" + topicRegexps: | + [ "lsst.backpack" ] + oss-atcamera: + enabled: true + replicaCount: 1 + database: "lsst.ATCamera" + timestamp_format: "unix_ms" + timestamp_field: "timestamp" + tags: | + [ "Agent", "Aspic", "Location", "Raft", "Reb", "Sensor", "Source" ] + topicRegexps: | + [ "lsst.ATCamera" ] + oss-cccamera: + enabled: true + replicaCount: 1 + database: "lsst.CCCamera" + timestamp_format: "unix_ms" + timestamp_field: "timestamp" + tags: | + [ "Agent", "Aspic", "Cold", "Cryo", "Hardware", "Location", "Ps", "RTD", "Raft", "Reb", "Segment", "Sensor", "Source" ] + topicRegexps: | + [ "lsst.CCCamera" ] + oss-mtcamera: + enabled: true + replicaCount: 1 + database: "lsst.MTCamera" + timestamp_format: "unix_ms" + timestamp_field: "timestamp" + tags: | + [ "Agent", "Aspic", "Axis", "Canbus", "Cip", "Clamp", "Cold", "Controller", "Cryo", "Gateway", "Hardware", "Hip", "Hook", "Latch", "Location", 
"Ps", "RTD", "Raft", "Reb", "Segment", "Sensor", "Socket", "Source", "Truck" ] + topicRegexps: | + [ "lsst.MTCamera" ] + telegraf-kafka-consumer: enabled: true + influxdb: + url: "http://sasquatch-influxdb-enterprise-data.sasquatch:8086" kafkaConsumers: backpack: enabled: true @@ -214,9 +258,134 @@ telegraf-kafka-consumer: timestamp_field: "timestamp" topicRegexps: | [ "lsst.backpack" ] + # CSC connectors + maintel: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.MTAOS", "lsst.sal.MTDome", "lsst.sal.MTDomeTrajectory", "lsst.sal.MTPtg" ] + offset: "newest" + mtmount: + enabled: true + database: "efd" + replicaCount: 8 + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.MTMount" ] + offset: "newest" + comcam: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.CCCamera", "lsst.sal.CCHeaderService", "lsst.sal.CCOODS" ] + offset: "newest" + eas: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + offset: "newest" + m1m3: + enabled: true + database: "efd" + replicaCount: 8 + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.MTM1M3" ] + offset: "newest" + m2: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.MTHexapod", "lsst.sal.MTM2", "lsst.sal.MTRotator" ] + offset: "newest" + obssys: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.Scheduler", "lsst.sal.Script", "lsst.sal.ScriptQueue", "lsst.sal.Watcher" ] + offset: "newest" + ocps: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.OCPS" ] + offset: "newest" + pmd: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ 
"lsst.sal.PMD" ] + offset: "newest" + calsys: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.ATMonochromator", "lsst.sal.ATWhiteLight", "lsst.sal.CBP", "lsst.sal.Electrometer", "lsst.sal.FiberSpectrograph", "lsst.sal.LinearStage", "lsst.sal.TunableLaser" ] + offset: "newest" + mtaircompressor: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.MTAirCompressor" ] + offset: "newest" + genericcamera: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.GCHeaderService", "lsst.sal.GenericCamera" ] + offset: "newest" + gis: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.GIS" ] + offset: "newest" + lsstcam: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.MTCamera", "lsst.sal.MTHeaderService", "lsst.sal.MTOODS" ] + offset: "newest" + auxtel: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] + latiss: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.ATCamera", "lsst.sal.ATHeaderService", "lsst.sal.ATOODS", "lsst.sal.ATSpectrograph" ] + test: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.Test" ] + lasertracker: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.LaserTracker" ] + # CCS connectors (experimental) data is being written on separate databases for now atcamera: enabled: true - replicaCount: 1 database: "lsst.ATCamera" timestamp_format: "unix_ms" timestamp_field: "timestamp" @@ -226,7 +395,6 @@ telegraf-kafka-consumer: [ "lsst.ATCamera" ] 
cccamera: enabled: true - replicaCount: 1 database: "lsst.CCCamera" timestamp_format: "unix_ms" timestamp_field: "timestamp" @@ -236,7 +404,6 @@ telegraf-kafka-consumer: [ "lsst.CCCamera" ] mtcamera: enabled: true - replicaCount: 1 database: "lsst.MTCamera" timestamp_format: "unix_ms" timestamp_field: "timestamp" From d1bde581396ac96e903a61f7c9a714e8c502e254 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Mon, 19 Aug 2024 12:44:03 -0700 Subject: [PATCH 031/567] Split Prompt Processing pipelines into preload and prompt subsets. The two subsets will now be run separately. --- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfprod-prompt-processing.yaml | 4 ++-- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfprod-prompt-processing.yaml | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml index 6ecda4e3cb..fe819556fd 100644 --- a/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml @@ -13,7 +13,7 @@ prompt-proto-service: instrument: pipelines: main: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/ApPipe.yaml] - preprocessing: (survey="SURVEY")=[] + preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/Preprocessing.yaml] calibRepo: s3://rubin-pp-dev-users/central_repo/ s3: diff --git a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml index 987d3b4e9f..f507024096 100644 --- a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml +++ 
b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: instrument: pipelines: main: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/ApPipe.yaml] - preprocessing: (survey="SURVEY")=[] + preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/Preprocessing.yaml] calibRepo: s3://rubin-pp-dev-users/central_repo/ s3: diff --git a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml index c9f5d5677e..3c34271230 100644 --- a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: main: >- (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/ApPipe.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr.yaml] - preprocessing: (survey="SURVEY")=[] + preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Preprocessing.yaml] calibRepo: s3://rubin-pp-dev-users/central_repo/ s3: diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index b244df2ebc..5115052b25 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -34,8 +34,8 @@ prompt-proto-service: (survey="BLOCK-295")=[] (survey="")=[] preprocessing: >- - (survey="AUXTEL_PHOTO_IMAGING")=[] - (survey="AUXTEL_DRP_IMAGING")=[] + (survey="AUXTEL_PHOTO_IMAGING")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Preprocessing.yaml] + (survey="AUXTEL_DRP_IMAGING")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Preprocessing.yaml] (survey="BLOCK-T17")=[] (survey="cwfs")=[] (survey="cwfs-focus-sweep")=[] diff --git 
a/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml index df319a9054..86f51c8ce7 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml @@ -15,7 +15,7 @@ prompt-proto-service: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/ApPipe.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/SingleFrame.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Isr.yaml] - preprocessing: (survey="SURVEY")=[] + preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Preprocessing.yaml] calibRepo: s3://rubin-pp-dev-users/central_repo/ s3: diff --git a/applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml index ce3809fb7c..2195736e75 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml @@ -25,7 +25,7 @@ prompt-proto-service: ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Isr.yaml] (survey="")=[] preprocessing: >- - (survey="BLOCK-297")=[] + (survey="BLOCK-297")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Preprocessing.yaml] (survey="")=[] calibRepo: s3://rubin-summit-users From 74e846aed8d1b34ef1b3be19ecaa96ce5fe9aa5f Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 3 Sep 2024 17:47:01 -0400 Subject: [PATCH 032/567] love: refactor configurations to split managers producers by categories on BTS --- applications/love/README.md | 105 ++++---- .../love/charts/love-manager/README.md | 103 ++++---- .../love-manager/templates/_helpers.tpl | 12 +- .../manager-producers-deployment.yaml | 49 ++-- 
.../templates/manager-producers-hpa.yaml | 34 +-- .../templates/manager-producers-service.yaml | 12 +- .../love/charts/love-manager/values.yaml | 240 ++++++++++-------- .../love/charts/love-producer/README.md | 2 +- .../love-producer/templates/deployment.yaml | 2 + .../love/charts/love-producer/values.yaml | 3 +- applications/love/values-base.yaml | 234 ++++++++++++++++- 11 files changed, 537 insertions(+), 259 deletions(-) diff --git a/applications/love/README.md b/applications/love/README.md index ac03b1cff6..fae75a25ca 100644 --- a/applications/love/README.md +++ b/applications/love/README.md @@ -17,6 +17,7 @@ Deployment for the LSST Operators Visualization Environment | global.controlSystem.topicName | string | Set by ArgoCD | Topic name tag for the control system deployment | | global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | +| love-manager.manager | object | `{"frontend":{"affinity":{},"autoscaling":{"enabled":true,"maxReplicas":100,"minReplicas":1,"scaleDownPolicy":{},"scaleUpPolicy":{},"targetCPUUtilizationPercentage":80,"targetMemoryUtilizationPercentage":""},"env":{"AUTH_LDAP_1_SERVER_URI":"ldap://ipa1.lsst.local","AUTH_LDAP_2_SERVER_URI":"ldap://ipa2.lsst.local","AUTH_LDAP_3_SERVER_URI":"ldap://ipa3.lsst.local","COMMANDER_HOSTNAME":"love-commander-service","COMMANDER_PORT":5000,"DB_ENGINE":"postgresql","DB_HOST":"love-manager-database-service","DB_NAME":"love","DB_PORT":5432,"DB_USER":"love","JIRA_API_HOSTNAME":"rubinobs.atlassian.net","JIRA_PROJECT_ID":10063,"LOVE_PRODUCER_WEBSOCKET_HOST":"love-service/manager/ws/subscription","LOVE_SITE":"local","OLE_API_HOSTNAME":"site.lsst.local","REDIS_CONFIG_CAPACITY":5000,"REDIS_CONFIG_EXPIRY":5,"REDIS_HOST":"love-manager-redis-service","REMOTE_STORAGE":true,"SERVER_URL":"love.lsst.local","URL_SUBPATH":"/love"},"envSecrets":{"ADMIN_USER_PASS":"admin-user-pass","AUTHLIST_USER_PASS":"authlist-user-pass","AUTH_LDAP_BIND
_PASSWORD":"auth-ldap-bind-password","CMD_USER_PASS":"cmd-user-pass","DB_PASS":"db-pass","JIRA_API_TOKEN":"jira-api-token","PROCESS_CONNECTION_PASS":"process-connection-pass","REDIS_PASS":"redis-pass","SECRET_KEY":"manager-secret-key","USER_USER_PASS":"user-user-pass"},"image":{"nexus3":"","pullPolicy":"IfNotPresent","repository":"lsstts/love-manager"},"nodeSelector":{},"ports":{"container":8000,"node":30000},"readinessProbe":{},"replicas":1,"resources":{},"tolerations":[]},"producers":[{"affinity":{},"autoscaling":{"enabled":true,"maxReplicas":100,"minReplicas":1,"scaleDownPolicy":{},"scaleUpPolicy":{},"targetCPUUtilizationPercentage":80,"targetMemoryUtilizationPercentage":""},"env":{"AUTH_LDAP_1_SERVER_URI":"ldap://ipa1.lsst.local","AUTH_LDAP_2_SERVER_URI":"ldap://ipa2.lsst.local","AUTH_LDAP_3_SERVER_URI":"ldap://ipa3.lsst.local","COMMANDER_HOSTNAME":"love-commander-service","COMMANDER_PORT":5000,"DB_ENGINE":"postgresql","DB_HOST":"love-manager-database-service","DB_NAME":"love","DB_PORT":5432,"DB_USER":"love","HEARTBEAT_QUERY_COMMANDER":false,"JIRA_API_HOSTNAME":"rubinobs.atlassian.net","JIRA_PROJECT_ID":10063,"LOVE_SITE":"local","OLE_API_HOSTNAME":"site.lsst.local","REDIS_CONFIG_CAPACITY":5000,"REDIS_CONFIG_EXPIRY":5,"REDIS_HOST":"love-manager-redis-service","REMOTE_STORAGE":true,"SERVER_URL":"love.lsst.local","URL_SUBPATH":"/love"},"envSecrets":{"ADMIN_USER_PASS":"admin-user-pass","AUTHLIST_USER_PASS":"authlist-user-pass","AUTH_LDAP_BIND_PASSWORD":"auth-ldap-bind-password","CMD_USER_PASS":"cmd-user-pass","DB_PASS":"db-pass","JIRA_API_TOKEN":"jira-api-token","PROCESS_CONNECTION_PASS":"process-connection-pass","REDIS_PASS":"redis-pass","SECRET_KEY":"manager-secret-key","USER_USER_PASS":"user-user-pass"},"image":{"nexus3":"","pullPolicy":"IfNotPresent","repository":"lsstts/love-manager"},"name":"example-producer","nodeSelector":{},"ports":{"container":8000,"node":30000},"readinessProbe":{},"replicas":1,"resources":{},"tolerations":[]}],"producers_ports":{"containe
r":8000,"node":30000}}` | Configuration for the different manager instances. This is divided into two sessions; frontend and producers. _frontend_ Configuration for the manager frontend. The frontend session defines the configuration for the so-called frontend managers. These serves the frontend artifacts as well as handles the data piping from the system to the frontend. Every time a user opens a view in LOVE the page will connect to the frontend manager and will receive the telemetry data from the system. Once a connection is established between a frontend and the manager it is kept alive. As more connections come in, the autoscaler will scale up the number of frontend managers and new connections should be redirected to them. The redirect is handled by the manager-frontend-service ClusterIP. _producers_ Configurations for the manger producers. This is basically a list of managers (with the same structure as the frontend, but in a list). These defines services that the LOVE-producers connect to, to feed data from the control system. 
| | love-manager.manager.frontend.affinity | object | `{}` | Affinity rules for the LOVE manager frontend pods | | love-manager.manager.frontend.autoscaling.enabled | bool | `true` | Whether automatic horizontal scaling is active | | love-manager.manager.frontend.autoscaling.maxReplicas | int | `100` | The allowed maximum number of replicas | @@ -66,55 +67,59 @@ Deployment for the LSST Operators Visualization Environment | love-manager.manager.frontend.replicas | int | `1` | Set the default number of LOVE manager frontend pod replicas | | love-manager.manager.frontend.resources | object | `{}` | Resource specifications for the LOVE manager frontend pods | | love-manager.manager.frontend.tolerations | list | `[]` | Toleration specifications for the LOVE manager frontend pods | -| love-manager.manager.producers.affinity | object | `{}` | Affinity rules for the LOVE manager producers pods | -| love-manager.manager.producers.autoscaling.enabled | bool | `true` | Whether automatic horizontal scaling is active | -| love-manager.manager.producers.autoscaling.maxReplicas | int | `100` | The allowed maximum number of replicas | -| love-manager.manager.producers.autoscaling.minReplicas | int | `1` | The allowed minimum number of replicas | -| love-manager.manager.producers.autoscaling.scaleDownPolicy | object | `{}` | Policy for scaling down manager pods | -| love-manager.manager.producers.autoscaling.scaleUpPolicy | object | `{}` | Policy for scaling up manager pods | -| love-manager.manager.producers.autoscaling.targetCPUUtilizationPercentage | int | `80` | The percentage of CPU utilization that will trigger the scaling | -| love-manager.manager.producers.autoscaling.targetMemoryUtilizationPercentage | int | `""` | The percentage of memory utilization that will trigger the scaling | -| love-manager.manager.producers.env.AUTH_LDAP_1_SERVER_URI | string | `"ldap://ipa1.lsst.local"` | Set the URI for the 1st LDAP server | -| 
love-manager.manager.producers.env.AUTH_LDAP_2_SERVER_URI | string | `"ldap://ipa2.lsst.local"` | Set the URI for the 2nd LDAP server | -| love-manager.manager.producers.env.AUTH_LDAP_3_SERVER_URI | string | `"ldap://ipa3.lsst.local"` | Set the URI for the 3rd LDAP server | -| love-manager.manager.producers.env.COMMANDER_HOSTNAME | string | `"love-commander-service"` | Label for the LOVE commander service. Must match the one spcified in the LOVE commander chart | -| love-manager.manager.producers.env.COMMANDER_PORT | int | `5000` | Port number for the LOVE commander service. Must match the one spcified in the LOVE commander chart | -| love-manager.manager.producers.env.DB_ENGINE | string | `"postgresql"` | The type of database engine being used for the LOVE manager producers | -| love-manager.manager.producers.env.DB_HOST | string | `"love-manager-database-service"` | The name of the database service | -| love-manager.manager.producers.env.DB_NAME | string | `"love"` | The name of the database being used for the LOVE manager producers | -| love-manager.manager.producers.env.DB_PORT | int | `5432` | The port for the database service | -| love-manager.manager.producers.env.DB_USER | string | `"love"` | The database user needed for access from the LOVE manager producers | -| love-manager.manager.producers.env.HEARTBEAT_QUERY_COMMANDER | bool | `false` | Have the LOVE producer managers not query commander | -| love-manager.manager.producers.env.JIRA_API_HOSTNAME | string | `"rubinobs.atlassian.net"` | Set the hostname for the Jira instance | -| love-manager.manager.producers.env.JIRA_PROJECT_ID | int | `10063` | Set the Jira project ID | -| love-manager.manager.producers.env.LOVE_SITE | string | `"local"` | The site tag where LOVE is being run | -| love-manager.manager.producers.env.OLE_API_HOSTNAME | string | `"site.lsst.local"` | Set the URL for the OLE instance | -| love-manager.manager.producers.env.REDIS_CONFIG_CAPACITY | int | `5000` | The connection capacity for 
the redis service | -| love-manager.manager.producers.env.REDIS_CONFIG_EXPIRY | int | `5` | The expiration time for the redis service | -| love-manager.manager.producers.env.REDIS_HOST | string | `"love-manager-redis-service"` | The name of the redis service | -| love-manager.manager.producers.env.REMOTE_STORAGE | bool | `true` | Set the manager to use LFA storage | -| love-manager.manager.producers.env.SERVER_URL | string | `"love.lsst.local"` | The external URL from the NGINX server for LOVE | -| love-manager.manager.producers.env.URL_SUBPATH | string | `"/love"` | The Kubernetes sub-path for LOVE | -| love-manager.manager.producers.envSecrets.ADMIN_USER_PASS | string | `"admin-user-pass"` | The LOVE manager producers admin user password secret key name | -| love-manager.manager.producers.envSecrets.AUTHLIST_USER_PASS | string | `"authlist-user-pass"` | The LOVE manager producers authlist_user password secret key name | -| love-manager.manager.producers.envSecrets.AUTH_LDAP_BIND_PASSWORD | string | `"auth-ldap-bind-password"` | The LOVE manager producers LDAP binding password secret key name | -| love-manager.manager.producers.envSecrets.CMD_USER_PASS | string | `"cmd-user-pass"` | The LOVE manager producers cmd_user user password secret key name | -| love-manager.manager.producers.envSecrets.DB_PASS | string | `"db-pass"` | The database password secret key name. Must match `database.envSecrets.POSTGRES_PASSWORD` | -| love-manager.manager.producers.envSecrets.JIRA_API_TOKEN | string | `"jira-api-token"` | The LOVE manager jira API token secret key name | -| love-manager.manager.producers.envSecrets.PROCESS_CONNECTION_PASS | string | `"process-connection-pass"` | The LOVE manager producers process connection password secret key name | -| love-manager.manager.producers.envSecrets.REDIS_PASS | string | `"redis-pass"` | The redis password secret key name. 
Must match `redis.envSecrets.REDIS_PASS` | -| love-manager.manager.producers.envSecrets.SECRET_KEY | string | `"manager-secret-key"` | The LOVE manager producers secret secret key name | -| love-manager.manager.producers.envSecrets.USER_USER_PASS | string | `"user-user-pass"` | The LOVE manager producers user user password secret key name | -| love-manager.manager.producers.image.nexus3 | string | `""` | The tag name for the Nexus3 Docker repository secrets if private images need to be pulled | -| love-manager.manager.producers.image.pullPolicy | string | `"IfNotPresent"` | The pull policy on the LOVE manager producers image | -| love-manager.manager.producers.image.repository | string | `"lsstts/love-manager"` | The LOVE manager producers image to use | -| love-manager.manager.producers.nodeSelector | object | `{}` | Node selection rules for the LOVE manager producers pods | -| love-manager.manager.producers.ports.container | int | `8000` | The port on the container for normal communications | -| love-manager.manager.producers.ports.node | int | `30000` | The port on the node for normal communcations | -| love-manager.manager.producers.readinessProbe | object | `{}` | Configuration for the LOVE manager producers pods readiness probe | -| love-manager.manager.producers.replicas | int | `1` | Set the default number of LOVE manager producers pod replicas | -| love-manager.manager.producers.resources | object | `{}` | Resource specifications for the LOVE manager producers pods | -| love-manager.manager.producers.tolerations | list | `[]` | Toleration specifications for the LOVE manager producers pods | +| love-manager.manager.producers[0] | object | 
`{"affinity":{},"autoscaling":{"enabled":true,"maxReplicas":100,"minReplicas":1,"scaleDownPolicy":{},"scaleUpPolicy":{},"targetCPUUtilizationPercentage":80,"targetMemoryUtilizationPercentage":""},"env":{"AUTH_LDAP_1_SERVER_URI":"ldap://ipa1.lsst.local","AUTH_LDAP_2_SERVER_URI":"ldap://ipa2.lsst.local","AUTH_LDAP_3_SERVER_URI":"ldap://ipa3.lsst.local","COMMANDER_HOSTNAME":"love-commander-service","COMMANDER_PORT":5000,"DB_ENGINE":"postgresql","DB_HOST":"love-manager-database-service","DB_NAME":"love","DB_PORT":5432,"DB_USER":"love","HEARTBEAT_QUERY_COMMANDER":false,"JIRA_API_HOSTNAME":"rubinobs.atlassian.net","JIRA_PROJECT_ID":10063,"LOVE_SITE":"local","OLE_API_HOSTNAME":"site.lsst.local","REDIS_CONFIG_CAPACITY":5000,"REDIS_CONFIG_EXPIRY":5,"REDIS_HOST":"love-manager-redis-service","REMOTE_STORAGE":true,"SERVER_URL":"love.lsst.local","URL_SUBPATH":"/love"},"envSecrets":{"ADMIN_USER_PASS":"admin-user-pass","AUTHLIST_USER_PASS":"authlist-user-pass","AUTH_LDAP_BIND_PASSWORD":"auth-ldap-bind-password","CMD_USER_PASS":"cmd-user-pass","DB_PASS":"db-pass","JIRA_API_TOKEN":"jira-api-token","PROCESS_CONNECTION_PASS":"process-connection-pass","REDIS_PASS":"redis-pass","SECRET_KEY":"manager-secret-key","USER_USER_PASS":"user-user-pass"},"image":{"nexus3":"","pullPolicy":"IfNotPresent","repository":"lsstts/love-manager"},"name":"example-producer","nodeSelector":{},"ports":{"container":8000,"node":30000},"readinessProbe":{},"replicas":1,"resources":{},"tolerations":[]}` | Example producer configuration. Each producer should follow the same structure as frontend with the added name field. 
| +| love-manager.manager.producers[0].affinity | object | `{}` | Affinity rules for the LOVE manager producers pods | +| love-manager.manager.producers[0].autoscaling.enabled | bool | `true` | Whether automatic horizontal scaling is active | +| love-manager.manager.producers[0].autoscaling.maxReplicas | int | `100` | The allowed maximum number of replicas | +| love-manager.manager.producers[0].autoscaling.minReplicas | int | `1` | The allowed minimum number of replicas | +| love-manager.manager.producers[0].autoscaling.scaleDownPolicy | object | `{}` | Policy for scaling down manager pods | +| love-manager.manager.producers[0].autoscaling.scaleUpPolicy | object | `{}` | Policy for scaling up manager pods | +| love-manager.manager.producers[0].autoscaling.targetCPUUtilizationPercentage | int | `80` | The percentage of CPU utilization that will trigger the scaling | +| love-manager.manager.producers[0].autoscaling.targetMemoryUtilizationPercentage | int | `""` | The percentage of memory utilization that will trigger the scaling | +| love-manager.manager.producers[0].env.AUTH_LDAP_1_SERVER_URI | string | `"ldap://ipa1.lsst.local"` | Set the URI for the 1st LDAP server | +| love-manager.manager.producers[0].env.AUTH_LDAP_2_SERVER_URI | string | `"ldap://ipa2.lsst.local"` | Set the URI for the 2nd LDAP server | +| love-manager.manager.producers[0].env.AUTH_LDAP_3_SERVER_URI | string | `"ldap://ipa3.lsst.local"` | Set the URI for the 3rd LDAP server | +| love-manager.manager.producers[0].env.COMMANDER_HOSTNAME | string | `"love-commander-service"` | Label for the LOVE commander service. Must match the one spcified in the LOVE commander chart | +| love-manager.manager.producers[0].env.COMMANDER_PORT | int | `5000` | Port number for the LOVE commander service. 
Must match the one spcified in the LOVE commander chart | +| love-manager.manager.producers[0].env.DB_ENGINE | string | `"postgresql"` | The type of database engine being used for the LOVE manager producers | +| love-manager.manager.producers[0].env.DB_HOST | string | `"love-manager-database-service"` | The name of the database service | +| love-manager.manager.producers[0].env.DB_NAME | string | `"love"` | The name of the database being used for the LOVE manager producers | +| love-manager.manager.producers[0].env.DB_PORT | int | `5432` | The port for the database service | +| love-manager.manager.producers[0].env.DB_USER | string | `"love"` | The database user needed for access from the LOVE manager producers | +| love-manager.manager.producers[0].env.HEARTBEAT_QUERY_COMMANDER | bool | `false` | Have the LOVE producer managers not query commander | +| love-manager.manager.producers[0].env.JIRA_API_HOSTNAME | string | `"rubinobs.atlassian.net"` | Set the hostname for the Jira instance | +| love-manager.manager.producers[0].env.JIRA_PROJECT_ID | int | `10063` | Set the Jira project ID | +| love-manager.manager.producers[0].env.LOVE_SITE | string | `"local"` | The site tag where LOVE is being run | +| love-manager.manager.producers[0].env.OLE_API_HOSTNAME | string | `"site.lsst.local"` | Set the URL for the OLE instance | +| love-manager.manager.producers[0].env.REDIS_CONFIG_CAPACITY | int | `5000` | The connection capacity for the redis service | +| love-manager.manager.producers[0].env.REDIS_CONFIG_EXPIRY | int | `5` | The expiration time for the redis service | +| love-manager.manager.producers[0].env.REDIS_HOST | string | `"love-manager-redis-service"` | The name of the redis service | +| love-manager.manager.producers[0].env.REMOTE_STORAGE | bool | `true` | Set the manager to use LFA storage | +| love-manager.manager.producers[0].env.SERVER_URL | string | `"love.lsst.local"` | The external URL from the NGINX server for LOVE | +| 
love-manager.manager.producers[0].env.URL_SUBPATH | string | `"/love"` | The Kubernetes sub-path for LOVE | +| love-manager.manager.producers[0].envSecrets.ADMIN_USER_PASS | string | `"admin-user-pass"` | The LOVE manager producers admin user password secret key name | +| love-manager.manager.producers[0].envSecrets.AUTHLIST_USER_PASS | string | `"authlist-user-pass"` | The LOVE manager producers authlist_user password secret key name | +| love-manager.manager.producers[0].envSecrets.AUTH_LDAP_BIND_PASSWORD | string | `"auth-ldap-bind-password"` | The LOVE manager producers LDAP binding password secret key name | +| love-manager.manager.producers[0].envSecrets.CMD_USER_PASS | string | `"cmd-user-pass"` | The LOVE manager producers cmd_user user password secret key name | +| love-manager.manager.producers[0].envSecrets.DB_PASS | string | `"db-pass"` | The database password secret key name. Must match `database.envSecrets.POSTGRES_PASSWORD` | +| love-manager.manager.producers[0].envSecrets.JIRA_API_TOKEN | string | `"jira-api-token"` | The LOVE manager jira API token secret key name | +| love-manager.manager.producers[0].envSecrets.PROCESS_CONNECTION_PASS | string | `"process-connection-pass"` | The LOVE manager producers process connection password secret key name | +| love-manager.manager.producers[0].envSecrets.REDIS_PASS | string | `"redis-pass"` | The redis password secret key name. 
Must match `redis.envSecrets.REDIS_PASS` | +| love-manager.manager.producers[0].envSecrets.SECRET_KEY | string | `"manager-secret-key"` | The LOVE manager producers secret secret key name | +| love-manager.manager.producers[0].envSecrets.USER_USER_PASS | string | `"user-user-pass"` | The LOVE manager producers user user password secret key name | +| love-manager.manager.producers[0].image.nexus3 | string | `""` | The tag name for the Nexus3 Docker repository secrets if private images need to be pulled | +| love-manager.manager.producers[0].image.pullPolicy | string | `"IfNotPresent"` | The pull policy on the LOVE manager producers image | +| love-manager.manager.producers[0].image.repository | string | `"lsstts/love-manager"` | The LOVE manager producers image to use | +| love-manager.manager.producers[0].nodeSelector | object | `{}` | Node selection rules for the LOVE manager producers pods | +| love-manager.manager.producers[0].ports.container | int | `8000` | The port on the container for normal communications | +| love-manager.manager.producers[0].ports.node | int | `30000` | The port on the node for normal communcations | +| love-manager.manager.producers[0].readinessProbe | object | `{}` | Configuration for the LOVE manager producers pods readiness probe | +| love-manager.manager.producers[0].replicas | int | `1` | Set the default number of LOVE manager producers pod replicas | +| love-manager.manager.producers[0].resources | object | `{}` | Resource specifications for the LOVE manager producers pods | +| love-manager.manager.producers[0].tolerations | list | `[]` | Toleration specifications for the LOVE manager producers pods | +| love-manager.manager.producers_ports | object | `{"container":8000,"node":30000}` | Configuration for the producers ports. this is a single configuration for all the producers. 
| +| love-manager.manager.producers_ports.container | int | `8000` | The port on the container for normal communications | +| love-manager.manager.producers_ports.node | int | `30000` | The port on the node for normal communcations | | love-manager.namespace | string | `"love"` | The overall namespace for the application | | love-manager.redis.affinity | object | `{}` | Affinity rules for the LOVE redis pods | | love-manager.redis.config | string | `"timeout 60\n"` | Configuration specification for the redis service | @@ -170,7 +175,7 @@ Deployment for the LSST Operators Visualization Environment | love-nginx.tolerations | list | `[]` | Toleration specifications for the NGINX pod | | love-producer.affinity | object | `{}` | Affinity rules applied to all LOVE producer pods | | love-producer.annotations | object | `{}` | This allows for the specification of pod annotations. | -| love-producer.env | object | `{"WEBSOCKET_HOST":"love-nginx/manager/ws/subscription"}` | This section holds a set of key, value pairs for environmental variables | +| love-producer.env | object | `{}` | This section holds a set of key, value pairs for environmental variables | | love-producer.envSecrets | object | `{"PROCESS_CONNECTION_PASS":"process-connection-pass"}` | This section holds a set of key, value pairs for secrets | | love-producer.image.pullPolicy | string | `"IfNotPresent"` | The pull policy on the LOVE producer image | | love-producer.image.repository | string | `"lsstts/love-producer"` | The LOVE producer image to use | diff --git a/applications/love/charts/love-manager/README.md b/applications/love/charts/love-manager/README.md index 8db2596b51..47a93da5c5 100644 --- a/applications/love/charts/love-manager/README.md +++ b/applications/love/charts/love-manager/README.md @@ -6,6 +6,7 @@ Helm chart for the LOVE manager service. 
| Key | Type | Default | Description | |-----|------|---------|-------------| +| manager | object | `{"frontend":{"affinity":{},"autoscaling":{"enabled":true,"maxReplicas":100,"minReplicas":1,"scaleDownPolicy":{},"scaleUpPolicy":{},"targetCPUUtilizationPercentage":80,"targetMemoryUtilizationPercentage":""},"env":{"AUTH_LDAP_1_SERVER_URI":"ldap://ipa1.lsst.local","AUTH_LDAP_2_SERVER_URI":"ldap://ipa2.lsst.local","AUTH_LDAP_3_SERVER_URI":"ldap://ipa3.lsst.local","COMMANDER_HOSTNAME":"love-commander-service","COMMANDER_PORT":5000,"DB_ENGINE":"postgresql","DB_HOST":"love-manager-database-service","DB_NAME":"love","DB_PORT":5432,"DB_USER":"love","JIRA_API_HOSTNAME":"rubinobs.atlassian.net","JIRA_PROJECT_ID":10063,"LOVE_PRODUCER_WEBSOCKET_HOST":"love-service/manager/ws/subscription","LOVE_SITE":"local","OLE_API_HOSTNAME":"site.lsst.local","REDIS_CONFIG_CAPACITY":5000,"REDIS_CONFIG_EXPIRY":5,"REDIS_HOST":"love-manager-redis-service","REMOTE_STORAGE":true,"SERVER_URL":"love.lsst.local","URL_SUBPATH":"/love"},"envSecrets":{"ADMIN_USER_PASS":"admin-user-pass","AUTHLIST_USER_PASS":"authlist-user-pass","AUTH_LDAP_BIND_PASSWORD":"auth-ldap-bind-password","CMD_USER_PASS":"cmd-user-pass","DB_PASS":"db-pass","JIRA_API_TOKEN":"jira-api-token","PROCESS_CONNECTION_PASS":"process-connection-pass","REDIS_PASS":"redis-pass","SECRET_KEY":"manager-secret-key","USER_USER_PASS":"user-user-pass"},"image":{"nexus3":"","pullPolicy":"IfNotPresent","repository":"lsstts/love-manager"},"nodeSelector":{},"ports":{"container":8000,"node":30000},"readinessProbe":{},"replicas":1,"resources":{},"tolerations":[]},"producers":[{"affinity":{},"autoscaling":{"enabled":true,"maxReplicas":100,"minReplicas":1,"scaleDownPolicy":{},"scaleUpPolicy":{},"targetCPUUtilizationPercentage":80,"targetMemoryUtilizationPercentage":""},"env":{"AUTH_LDAP_1_SERVER_URI":"ldap://ipa1.lsst.local","AUTH_LDAP_2_SERVER_URI":"ldap://ipa2.lsst.local","AUTH_LDAP_3_SERVER_URI":"ldap://ipa3.lsst.local","COMMANDER_HOSTNAME":"love-comman
der-service","COMMANDER_PORT":5000,"DB_ENGINE":"postgresql","DB_HOST":"love-manager-database-service","DB_NAME":"love","DB_PORT":5432,"DB_USER":"love","HEARTBEAT_QUERY_COMMANDER":false,"JIRA_API_HOSTNAME":"rubinobs.atlassian.net","JIRA_PROJECT_ID":10063,"LOVE_SITE":"local","OLE_API_HOSTNAME":"site.lsst.local","REDIS_CONFIG_CAPACITY":5000,"REDIS_CONFIG_EXPIRY":5,"REDIS_HOST":"love-manager-redis-service","REMOTE_STORAGE":true,"SERVER_URL":"love.lsst.local","URL_SUBPATH":"/love"},"envSecrets":{"ADMIN_USER_PASS":"admin-user-pass","AUTHLIST_USER_PASS":"authlist-user-pass","AUTH_LDAP_BIND_PASSWORD":"auth-ldap-bind-password","CMD_USER_PASS":"cmd-user-pass","DB_PASS":"db-pass","JIRA_API_TOKEN":"jira-api-token","PROCESS_CONNECTION_PASS":"process-connection-pass","REDIS_PASS":"redis-pass","SECRET_KEY":"manager-secret-key","USER_USER_PASS":"user-user-pass"},"image":{"nexus3":"","pullPolicy":"IfNotPresent","repository":"lsstts/love-manager"},"name":"example-producer","nodeSelector":{},"ports":{"container":8000,"node":30000},"readinessProbe":{},"replicas":1,"resources":{},"tolerations":[]}],"producers_ports":{"container":8000,"node":30000}}` | Configuration for the different manager instances. This is divided into two sessions; frontend and producers. _frontend_ Configuration for the manager frontend. The frontend session defines the configuration for the so-called frontend managers. These serves the frontend artifacts as well as handles the data piping from the system to the frontend. Every time a user opens a view in LOVE the page will connect to the frontend manager and will receive the telemetry data from the system. Once a connection is established between a frontend and the manager it is kept alive. As more connections come in, the autoscaler will scale up the number of frontend managers and new connections should be redirected to them. The redirect is handled by the manager-frontend-service ClusterIP. _producers_ Configurations for the manger producers. 
This is basically a list of managers (with the same structure as the frontend, but in a list). These defines services that the LOVE-producers connect to, to feed data from the control system. | | manager.frontend.affinity | object | `{}` | Affinity rules for the LOVE manager frontend pods | | manager.frontend.autoscaling.enabled | bool | `true` | Whether automatic horizontal scaling is active | | manager.frontend.autoscaling.maxReplicas | int | `100` | The allowed maximum number of replicas | @@ -55,55 +56,59 @@ Helm chart for the LOVE manager service. | manager.frontend.replicas | int | `1` | Set the default number of LOVE manager frontend pod replicas | | manager.frontend.resources | object | `{}` | Resource specifications for the LOVE manager frontend pods | | manager.frontend.tolerations | list | `[]` | Toleration specifications for the LOVE manager frontend pods | -| manager.producers.affinity | object | `{}` | Affinity rules for the LOVE manager producers pods | -| manager.producers.autoscaling.enabled | bool | `true` | Whether automatic horizontal scaling is active | -| manager.producers.autoscaling.maxReplicas | int | `100` | The allowed maximum number of replicas | -| manager.producers.autoscaling.minReplicas | int | `1` | The allowed minimum number of replicas | -| manager.producers.autoscaling.scaleDownPolicy | object | `{}` | Policy for scaling down manager pods | -| manager.producers.autoscaling.scaleUpPolicy | object | `{}` | Policy for scaling up manager pods | -| manager.producers.autoscaling.targetCPUUtilizationPercentage | int | `80` | The percentage of CPU utilization that will trigger the scaling | -| manager.producers.autoscaling.targetMemoryUtilizationPercentage | int | `""` | The percentage of memory utilization that will trigger the scaling | -| manager.producers.env.AUTH_LDAP_1_SERVER_URI | string | `"ldap://ipa1.lsst.local"` | Set the URI for the 1st LDAP server | -| manager.producers.env.AUTH_LDAP_2_SERVER_URI | string | 
`"ldap://ipa2.lsst.local"` | Set the URI for the 2nd LDAP server | -| manager.producers.env.AUTH_LDAP_3_SERVER_URI | string | `"ldap://ipa3.lsst.local"` | Set the URI for the 3rd LDAP server | -| manager.producers.env.COMMANDER_HOSTNAME | string | `"love-commander-service"` | Label for the LOVE commander service. Must match the one spcified in the LOVE commander chart | -| manager.producers.env.COMMANDER_PORT | int | `5000` | Port number for the LOVE commander service. Must match the one spcified in the LOVE commander chart | -| manager.producers.env.DB_ENGINE | string | `"postgresql"` | The type of database engine being used for the LOVE manager producers | -| manager.producers.env.DB_HOST | string | `"love-manager-database-service"` | The name of the database service | -| manager.producers.env.DB_NAME | string | `"love"` | The name of the database being used for the LOVE manager producers | -| manager.producers.env.DB_PORT | int | `5432` | The port for the database service | -| manager.producers.env.DB_USER | string | `"love"` | The database user needed for access from the LOVE manager producers | -| manager.producers.env.HEARTBEAT_QUERY_COMMANDER | bool | `false` | Have the LOVE producer managers not query commander | -| manager.producers.env.JIRA_API_HOSTNAME | string | `"rubinobs.atlassian.net"` | Set the hostname for the Jira instance | -| manager.producers.env.JIRA_PROJECT_ID | int | `10063` | Set the Jira project ID | -| manager.producers.env.LOVE_SITE | string | `"local"` | The site tag where LOVE is being run | -| manager.producers.env.OLE_API_HOSTNAME | string | `"site.lsst.local"` | Set the URL for the OLE instance | -| manager.producers.env.REDIS_CONFIG_CAPACITY | int | `5000` | The connection capacity for the redis service | -| manager.producers.env.REDIS_CONFIG_EXPIRY | int | `5` | The expiration time for the redis service | -| manager.producers.env.REDIS_HOST | string | `"love-manager-redis-service"` | The name of the redis service | -| 
manager.producers.env.REMOTE_STORAGE | bool | `true` | Set the manager to use LFA storage | -| manager.producers.env.SERVER_URL | string | `"love.lsst.local"` | The external URL from the NGINX server for LOVE | -| manager.producers.env.URL_SUBPATH | string | `"/love"` | The Kubernetes sub-path for LOVE | -| manager.producers.envSecrets.ADMIN_USER_PASS | string | `"admin-user-pass"` | The LOVE manager producers admin user password secret key name | -| manager.producers.envSecrets.AUTHLIST_USER_PASS | string | `"authlist-user-pass"` | The LOVE manager producers authlist_user password secret key name | -| manager.producers.envSecrets.AUTH_LDAP_BIND_PASSWORD | string | `"auth-ldap-bind-password"` | The LOVE manager producers LDAP binding password secret key name | -| manager.producers.envSecrets.CMD_USER_PASS | string | `"cmd-user-pass"` | The LOVE manager producers cmd_user user password secret key name | -| manager.producers.envSecrets.DB_PASS | string | `"db-pass"` | The database password secret key name. Must match `database.envSecrets.POSTGRES_PASSWORD` | -| manager.producers.envSecrets.JIRA_API_TOKEN | string | `"jira-api-token"` | The LOVE manager jira API token secret key name | -| manager.producers.envSecrets.PROCESS_CONNECTION_PASS | string | `"process-connection-pass"` | The LOVE manager producers process connection password secret key name | -| manager.producers.envSecrets.REDIS_PASS | string | `"redis-pass"` | The redis password secret key name. 
Must match `redis.envSecrets.REDIS_PASS` | -| manager.producers.envSecrets.SECRET_KEY | string | `"manager-secret-key"` | The LOVE manager producers secret secret key name | -| manager.producers.envSecrets.USER_USER_PASS | string | `"user-user-pass"` | The LOVE manager producers user user password secret key name | -| manager.producers.image.nexus3 | string | `""` | The tag name for the Nexus3 Docker repository secrets if private images need to be pulled | -| manager.producers.image.pullPolicy | string | `"IfNotPresent"` | The pull policy on the LOVE manager producers image | -| manager.producers.image.repository | string | `"lsstts/love-manager"` | The LOVE manager producers image to use | -| manager.producers.nodeSelector | object | `{}` | Node selection rules for the LOVE manager producers pods | -| manager.producers.ports.container | int | `8000` | The port on the container for normal communications | -| manager.producers.ports.node | int | `30000` | The port on the node for normal communcations | -| manager.producers.readinessProbe | object | `{}` | Configuration for the LOVE manager producers pods readiness probe | -| manager.producers.replicas | int | `1` | Set the default number of LOVE manager producers pod replicas | -| manager.producers.resources | object | `{}` | Resource specifications for the LOVE manager producers pods | -| manager.producers.tolerations | list | `[]` | Toleration specifications for the LOVE manager producers pods | +| manager.producers[0] | object | 
`{"affinity":{},"autoscaling":{"enabled":true,"maxReplicas":100,"minReplicas":1,"scaleDownPolicy":{},"scaleUpPolicy":{},"targetCPUUtilizationPercentage":80,"targetMemoryUtilizationPercentage":""},"env":{"AUTH_LDAP_1_SERVER_URI":"ldap://ipa1.lsst.local","AUTH_LDAP_2_SERVER_URI":"ldap://ipa2.lsst.local","AUTH_LDAP_3_SERVER_URI":"ldap://ipa3.lsst.local","COMMANDER_HOSTNAME":"love-commander-service","COMMANDER_PORT":5000,"DB_ENGINE":"postgresql","DB_HOST":"love-manager-database-service","DB_NAME":"love","DB_PORT":5432,"DB_USER":"love","HEARTBEAT_QUERY_COMMANDER":false,"JIRA_API_HOSTNAME":"rubinobs.atlassian.net","JIRA_PROJECT_ID":10063,"LOVE_SITE":"local","OLE_API_HOSTNAME":"site.lsst.local","REDIS_CONFIG_CAPACITY":5000,"REDIS_CONFIG_EXPIRY":5,"REDIS_HOST":"love-manager-redis-service","REMOTE_STORAGE":true,"SERVER_URL":"love.lsst.local","URL_SUBPATH":"/love"},"envSecrets":{"ADMIN_USER_PASS":"admin-user-pass","AUTHLIST_USER_PASS":"authlist-user-pass","AUTH_LDAP_BIND_PASSWORD":"auth-ldap-bind-password","CMD_USER_PASS":"cmd-user-pass","DB_PASS":"db-pass","JIRA_API_TOKEN":"jira-api-token","PROCESS_CONNECTION_PASS":"process-connection-pass","REDIS_PASS":"redis-pass","SECRET_KEY":"manager-secret-key","USER_USER_PASS":"user-user-pass"},"image":{"nexus3":"","pullPolicy":"IfNotPresent","repository":"lsstts/love-manager"},"name":"example-producer","nodeSelector":{},"ports":{"container":8000,"node":30000},"readinessProbe":{},"replicas":1,"resources":{},"tolerations":[]}` | Example producer configuration. Each producer should follow the same structure as frontend with the added name field. 
| +| manager.producers[0].affinity | object | `{}` | Affinity rules for the LOVE manager producers pods | +| manager.producers[0].autoscaling.enabled | bool | `true` | Whether automatic horizontal scaling is active | +| manager.producers[0].autoscaling.maxReplicas | int | `100` | The allowed maximum number of replicas | +| manager.producers[0].autoscaling.minReplicas | int | `1` | The allowed minimum number of replicas | +| manager.producers[0].autoscaling.scaleDownPolicy | object | `{}` | Policy for scaling down manager pods | +| manager.producers[0].autoscaling.scaleUpPolicy | object | `{}` | Policy for scaling up manager pods | +| manager.producers[0].autoscaling.targetCPUUtilizationPercentage | int | `80` | The percentage of CPU utilization that will trigger the scaling | +| manager.producers[0].autoscaling.targetMemoryUtilizationPercentage | int | `""` | The percentage of memory utilization that will trigger the scaling | +| manager.producers[0].env.AUTH_LDAP_1_SERVER_URI | string | `"ldap://ipa1.lsst.local"` | Set the URI for the 1st LDAP server | +| manager.producers[0].env.AUTH_LDAP_2_SERVER_URI | string | `"ldap://ipa2.lsst.local"` | Set the URI for the 2nd LDAP server | +| manager.producers[0].env.AUTH_LDAP_3_SERVER_URI | string | `"ldap://ipa3.lsst.local"` | Set the URI for the 3rd LDAP server | +| manager.producers[0].env.COMMANDER_HOSTNAME | string | `"love-commander-service"` | Label for the LOVE commander service. Must match the one spcified in the LOVE commander chart | +| manager.producers[0].env.COMMANDER_PORT | int | `5000` | Port number for the LOVE commander service. 
Must match the one spcified in the LOVE commander chart | +| manager.producers[0].env.DB_ENGINE | string | `"postgresql"` | The type of database engine being used for the LOVE manager producers | +| manager.producers[0].env.DB_HOST | string | `"love-manager-database-service"` | The name of the database service | +| manager.producers[0].env.DB_NAME | string | `"love"` | The name of the database being used for the LOVE manager producers | +| manager.producers[0].env.DB_PORT | int | `5432` | The port for the database service | +| manager.producers[0].env.DB_USER | string | `"love"` | The database user needed for access from the LOVE manager producers | +| manager.producers[0].env.HEARTBEAT_QUERY_COMMANDER | bool | `false` | Have the LOVE producer managers not query commander | +| manager.producers[0].env.JIRA_API_HOSTNAME | string | `"rubinobs.atlassian.net"` | Set the hostname for the Jira instance | +| manager.producers[0].env.JIRA_PROJECT_ID | int | `10063` | Set the Jira project ID | +| manager.producers[0].env.LOVE_SITE | string | `"local"` | The site tag where LOVE is being run | +| manager.producers[0].env.OLE_API_HOSTNAME | string | `"site.lsst.local"` | Set the URL for the OLE instance | +| manager.producers[0].env.REDIS_CONFIG_CAPACITY | int | `5000` | The connection capacity for the redis service | +| manager.producers[0].env.REDIS_CONFIG_EXPIRY | int | `5` | The expiration time for the redis service | +| manager.producers[0].env.REDIS_HOST | string | `"love-manager-redis-service"` | The name of the redis service | +| manager.producers[0].env.REMOTE_STORAGE | bool | `true` | Set the manager to use LFA storage | +| manager.producers[0].env.SERVER_URL | string | `"love.lsst.local"` | The external URL from the NGINX server for LOVE | +| manager.producers[0].env.URL_SUBPATH | string | `"/love"` | The Kubernetes sub-path for LOVE | +| manager.producers[0].envSecrets.ADMIN_USER_PASS | string | `"admin-user-pass"` | The LOVE manager producers admin user password 
secret key name | +| manager.producers[0].envSecrets.AUTHLIST_USER_PASS | string | `"authlist-user-pass"` | The LOVE manager producers authlist_user password secret key name | +| manager.producers[0].envSecrets.AUTH_LDAP_BIND_PASSWORD | string | `"auth-ldap-bind-password"` | The LOVE manager producers LDAP binding password secret key name | +| manager.producers[0].envSecrets.CMD_USER_PASS | string | `"cmd-user-pass"` | The LOVE manager producers cmd_user user password secret key name | +| manager.producers[0].envSecrets.DB_PASS | string | `"db-pass"` | The database password secret key name. Must match `database.envSecrets.POSTGRES_PASSWORD` | +| manager.producers[0].envSecrets.JIRA_API_TOKEN | string | `"jira-api-token"` | The LOVE manager jira API token secret key name | +| manager.producers[0].envSecrets.PROCESS_CONNECTION_PASS | string | `"process-connection-pass"` | The LOVE manager producers process connection password secret key name | +| manager.producers[0].envSecrets.REDIS_PASS | string | `"redis-pass"` | The redis password secret key name. 
Must match `redis.envSecrets.REDIS_PASS` | +| manager.producers[0].envSecrets.SECRET_KEY | string | `"manager-secret-key"` | The LOVE manager producers secret secret key name | +| manager.producers[0].envSecrets.USER_USER_PASS | string | `"user-user-pass"` | The LOVE manager producers user user password secret key name | +| manager.producers[0].image.nexus3 | string | `""` | The tag name for the Nexus3 Docker repository secrets if private images need to be pulled | +| manager.producers[0].image.pullPolicy | string | `"IfNotPresent"` | The pull policy on the LOVE manager producers image | +| manager.producers[0].image.repository | string | `"lsstts/love-manager"` | The LOVE manager producers image to use | +| manager.producers[0].nodeSelector | object | `{}` | Node selection rules for the LOVE manager producers pods | +| manager.producers[0].ports.container | int | `8000` | The port on the container for normal communications | +| manager.producers[0].ports.node | int | `30000` | The port on the node for normal communcations | +| manager.producers[0].readinessProbe | object | `{}` | Configuration for the LOVE manager producers pods readiness probe | +| manager.producers[0].replicas | int | `1` | Set the default number of LOVE manager producers pod replicas | +| manager.producers[0].resources | object | `{}` | Resource specifications for the LOVE manager producers pods | +| manager.producers[0].tolerations | list | `[]` | Toleration specifications for the LOVE manager producers pods | +| manager.producers_ports | object | `{"container":8000,"node":30000}` | Configuration for the producers ports. this is a single configuration for all the producers. 
| +| manager.producers_ports.container | int | `8000` | The port on the container for normal communications | +| manager.producers_ports.node | int | `30000` | The port on the node for normal communcations | | namespace | string | `"love"` | The overall namespace for the application | | redis.affinity | object | `{}` | Affinity rules for the LOVE redis pods | | redis.config | string | `"timeout 60\n"` | Configuration specification for the redis service | diff --git a/applications/love/charts/love-manager/templates/_helpers.tpl b/applications/love/charts/love-manager/templates/_helpers.tpl index 13e1c5bcec..f95f771b7b 100644 --- a/applications/love/charts/love-manager/templates/_helpers.tpl +++ b/applications/love/charts/love-manager/templates/_helpers.tpl @@ -33,8 +33,8 @@ Manager frontend fullname {{/* Manager producers fullname */}} -{{- define "love-manager-producers.fullname" -}} -{{ include "love-manager.fullname" . }}-producers +{{- define "love-manager-producer.fullname" -}} +{{ include "love-manager.fullname" . }}-producer {{- end }} {{/* @@ -63,9 +63,9 @@ helm.sh/chart: {{ include "love-manager.chart" . }} {{/* Manager Producers Common labels */}} -{{- define "love-manager-producers.labels" -}} +{{- define "love-manager-producer.labels" -}} helm.sh/chart: {{ include "love-manager.chart" . }} -{{ include "love-manager-producers.selectorLabels" . }} +{{ include "love-manager-producer.selectorLabels" . }} {{- end }} {{/* @@ -87,9 +87,9 @@ app.kubernetes.io/instance: {{ include "love-manager.name" . }}-frontend {{/* Manager Producers Selector labels */}} -{{- define "love-manager-producers.selectorLabels" -}} +{{- define "love-manager-producer.selectorLabels" -}} app.kubernetes.io/name: {{ include "love-manager.name" . }} -app.kubernetes.io/instance: {{ include "love-manager.name" . }}-producers +app.kubernetes.io/instance: {{ include "love-manager.name" . 
}}-producer {{- end }} {{/* diff --git a/applications/love/charts/love-manager/templates/manager-producers-deployment.yaml b/applications/love/charts/love-manager/templates/manager-producers-deployment.yaml index 308f2eb69b..855fe7d4d9 100644 --- a/applications/love/charts/love-manager/templates/manager-producers-deployment.yaml +++ b/applications/love/charts/love-manager/templates/manager-producers-deployment.yaml @@ -1,55 +1,62 @@ +{{ range $manager_producer:= .Values.manager.producers }} +{{ $_ := set $.Values "manager_producer" $manager_producer }} +--- apiVersion: apps/v1 kind: Deployment metadata: - name: {{ include "love-manager-producers.fullname" . }} + name: {{ include "love-manager-producer.fullname" $ }}-{{ $manager_producer.name }} namespace: {{ $.Values.global.controlSystem.appNamespace }} labels: - {{- include "love-manager-producers.labels" . | nindent 4 }} + app.kubernetes.io/instance: {{ include "love-manager-producer.fullname" $ }}-{{ $manager_producer.name }} + app.kubernetes.io/name: {{ include "love-manager-producer.fullname" $ }} spec: selector: matchLabels: - {{- include "love-manager-producers.selectorLabels" . | nindent 6 }} - {{- if not .Values.manager.producers.autoscaling.enabled }} - replicas: {{ .Values.manager.producers.replicas }} + app.kubernetes.io/instance: {{ include "love-manager-producer.fullname" $ }}-{{ $manager_producer.name }} + app.kubernetes.io/name: {{ include "love-manager-producer.fullname" $ }} + {{- if not $manager_producer.autoscaling.enabled }} + replicas: {{ $manager_producer.replicas }} {{- end }} template: metadata: labels: - {{- include "love-manager-producers.selectorLabels" . | nindent 8 }} + app.kubernetes.io/instance: {{ include "love-manager-producer.fullname" $ }}-{{ $manager_producer.name }} + app.kubernetes.io/name: {{ include "love-manager-producer.fullname" $ }} spec: containers: - - name: {{ include "love-manager-producers.fullname" . 
}} - {{- $imageTag := .Values.manager.producers.image.tag | default $.Values.global.controlSystem.imageTag }} - image: "{{ .Values.manager.producers.image.repository }}:{{ $imageTag }}" - imagePullPolicy: {{ .Values.manager.producers.image.pullPolicy }} + - name: {{ include "love-manager-producer.fullname" $ }}-{{ $manager_producer.name }} + {{- $imageTag := $manager_producer.image.tag | default $.Values.global.controlSystem.imageTag }} + image: "{{ $manager_producer.image.repository }}:{{ $imageTag }}" + imagePullPolicy: {{ $manager_producer.image.pullPolicy }} ports: - - containerPort: {{ .Values.manager.producers.ports.container }} + - containerPort: {{ $.Values.manager.producers_ports.container }} env: - {{- $data := dict "env" .Values.manager.producers.env "secret" false }} + {{- $data := dict "env" $manager_producer.env "secret" false }} {{- include "helpers.envFromList" $data | indent 10 }} - {{- if .Values.manager.producers.envSecrets }} - {{- $data := dict "secret" true "env" .Values.manager.producers.envSecrets }} + {{- if $manager_producer.envSecrets }} + {{- $data := dict "secret" true "env" $manager_producer.envSecrets }} {{- include "helpers.envFromList" $data | indent 10 }} {{- end }} - {{- with $.Values.manager.producers.resources }} + {{- with $manager_producer.resources }} resources: - {{- toYaml $.Values.manager.producers.resources | nindent 10 }} + {{- toYaml $manager_producer.resources | nindent 10 }} {{- end }} - {{- with $.Values.manager.producers.readinessProbe }} + {{- with $manager_producer.readinessProbe }} readinessProbe: - {{- toYaml $.Values.manager.producers.readinessProbe | nindent 10 }} + {{- toYaml $manager_producer.readinessProbe | nindent 10 }} {{- end }} imagePullSecrets: - name: pull-secret - {{- with $.Values.manager.producers.nodeSelector }} + {{- with $manager_producer.nodeSelector }} nodeSelector: {{- toYaml $ | nindent 8 }} {{- end }} - {{- with $.Values.manager.producers.affinity }} + {{- with $manager_producer.affinity 
}} affinity: {{- toYaml $ | nindent 8 }} {{- end }} - {{- with $.Values.manager.producers.tolerations }} + {{- with $manager_producer.tolerations }} tolerations: {{- toYaml $ | nindent 8 }} {{- end }} +{{- end }} \ No newline at end of file diff --git a/applications/love/charts/love-manager/templates/manager-producers-hpa.yaml b/applications/love/charts/love-manager/templates/manager-producers-hpa.yaml index a44422835b..238c66f21c 100644 --- a/applications/love/charts/love-manager/templates/manager-producers-hpa.yaml +++ b/applications/love/charts/love-manager/templates/manager-producers-hpa.yaml @@ -1,47 +1,51 @@ -{{- if .Values.manager.producers.autoscaling.enabled }} +{{ range $manager_producer:= .Values.manager.producers }} +{{ $_ := set $.Values "manager_producer" $manager_producer }} +--- +{{- if $manager_producer.autoscaling.enabled }} apiVersion: autoscaling/v2 kind: HorizontalPodAutoscaler metadata: - name: {{ include "love-manager-producers.fullname" . }} + name: {{ include "love-manager-producer.fullname" $ }}-{{ $manager_producer.name }} labels: - {{- include "love-manager-producers.labels" . | nindent 4 }} + {{- include "love-manager-producer.labels" $ | nindent 4 }} spec: scaleTargetRef: apiVersion: apps/v1 kind: Deployment - name: {{ include "love-manager-producers.fullname" . 
}} - minReplicas: {{ .Values.manager.producers.autoscaling.minReplicas }} - maxReplicas: {{ .Values.manager.producers.autoscaling.maxReplicas }} + name: {{ include "love-manager-producer.fullname" $ }}-{{ $manager_producer.name }} + minReplicas: {{ $manager_producer.autoscaling.minReplicas }} + maxReplicas: {{ $manager_producer.autoscaling.maxReplicas }} metrics: - {{- if .Values.manager.producers.autoscaling.targetCPUUtilizationPercentage }} + {{- if $manager_producer.autoscaling.targetCPUUtilizationPercentage }} - type: Resource resource: name: cpu target: type: Utilization - averageUtilization: {{ .Values.manager.producers.autoscaling.targetCPUUtilizationPercentage }} + averageUtilization: {{ $manager_producer.autoscaling.targetCPUUtilizationPercentage }} {{- end }} - {{- if .Values.manager.producers.autoscaling.targetMemoryUtilizationPercentage }} + {{- if $manager_producer.autoscaling.targetMemoryUtilizationPercentage }} - type: Resource resource: name: memory target: type: Utilization - averageUtilization: {{ .Values.manager.producers.autoscaling.targetMemoryUtilizationPercentage }} + averageUtilization: {{ $manager_producer.autoscaling.targetMemoryUtilizationPercentage }} {{- end }} - {{- if or .Values.manager.producers.autoscaling.scaleUpPolicy .Values.manager.producers.autoscaling.scaleDownPolicy }} + {{- if or $manager_producer.autoscaling.scaleUpPolicy $manager_producer.autoscaling.scaleDownPolicy }} behavior: - {{- if .Values.manager.producers.autoscaling.scaleUpPolicy }} + {{- if $manager_producer.autoscaling.scaleUpPolicy }} scaleUp: - {{- with .Values.manager.producers.autoscaling.scaleUpPolicy }} + {{- with $manager_producer.autoscaling.scaleUpPolicy }} {{- toYaml . 
| nindent 6 }} {{- end }} {{- end }} - {{- if .Values.manager.producers.autoscaling.scaleDownPolicy }} + {{- if $manager_producer.autoscaling.scaleDownPolicy }} scaleDown: - {{- with .Values.manager.producers.autoscaling.scaleDownPolicy }} + {{- with $manager_producer.autoscaling.scaleDownPolicy }} {{- toYaml . | nindent 6 }} {{- end }} {{- end }} {{- end }} {{- end }} +{{- end }} \ No newline at end of file diff --git a/applications/love/charts/love-manager/templates/manager-producers-service.yaml b/applications/love/charts/love-manager/templates/manager-producers-service.yaml index bf90a53f9b..1195507e30 100644 --- a/applications/love/charts/love-manager/templates/manager-producers-service.yaml +++ b/applications/love/charts/love-manager/templates/manager-producers-service.yaml @@ -1,10 +1,14 @@ +{{ range $manager_producer:= .Values.manager.producers }} +{{ $_ := set $.Values "manager_producer" $manager_producer }} +--- apiVersion: v1 kind: Service metadata: - name: {{ include "love-manager-producers.fullname" . }}-service - namespace: {{ .Values.namespace }} + name: {{ include "love-manager-producer.fullname" $ }}-{{ $manager_producer.name }}-service + namespace: {{ $.Values.namespace }} spec: selector: - app.kubernetes.io/instance: {{ include "love-manager-producers.fullname" . }} + app.kubernetes.io/instance: {{ include "love-manager-producer.fullname" $ }}-{{ $manager_producer.name }} ports: - - port: {{ .Values.manager.producers.ports.container }} + - port: {{ $.Values.manager.producers_ports.container }} +{{- end }} \ No newline at end of file diff --git a/applications/love/charts/love-manager/values.yaml b/applications/love/charts/love-manager/values.yaml index 391b5c51e7..d5534ee77c 100644 --- a/applications/love/charts/love-manager/values.yaml +++ b/applications/love/charts/love-manager/values.yaml @@ -1,5 +1,21 @@ # -- The overall namespace for the application namespace: love +# -- Configuration for the different manager instances. 
+# This is divided into two sessions; frontend and producers. +# _frontend_ Configuration for the manager frontend. +# The frontend session defines the configuration for the +# so-called frontend managers. These serves the frontend artifacts +# as well as handles the data piping from the system to the frontend. +# Every time a user opens a view in LOVE the page will connect to the +# frontend manager and will receive the telemetry data from the system. +# Once a connection is established between a frontend and the manager it +# is kept alive. As more connections come in, the autoscaler will scale +# up the number of frontend managers and new connections should be redirected +# to them. The redirect is handled by the manager-frontend-service ClusterIP. +# _producers_ Configurations for the manger producers. +# This is basically a list of managers (with the same structure as the +# frontend, but in a list). These defines services that the LOVE-producers +# connect to, to feed data from the control system. 
manager: frontend: image: @@ -110,113 +126,123 @@ manager: # -- Configuration for the LOVE manager frontend pods readiness probe readinessProbe: {} producers: - image: - # -- The LOVE manager producers image to use - repository: lsstts/love-manager - # -- The pull policy on the LOVE manager producers image - pullPolicy: IfNotPresent - # -- The tag name for the Nexus3 Docker repository secrets if private images need to be pulled - nexus3: "" - ports: - # -- The port on the container for normal communications - container: 8000 - # -- The port on the node for normal communcations - node: 30000 - env: - # -- The site tag where LOVE is being run - LOVE_SITE: local - # -- The external URL from the NGINX server for LOVE - SERVER_URL: love.lsst.local - # -- The Kubernetes sub-path for LOVE - URL_SUBPATH: /love - # -- Set the manager to use LFA storage - REMOTE_STORAGE: true - # -- Set the hostname for the Jira instance - JIRA_API_HOSTNAME: rubinobs.atlassian.net - # -- Set the Jira project ID - JIRA_PROJECT_ID: 10063 - # -- Set the URL for the OLE instance - OLE_API_HOSTNAME: site.lsst.local - # -- Set the URI for the 1st LDAP server - AUTH_LDAP_1_SERVER_URI: ldap://ipa1.lsst.local - # -- Set the URI for the 2nd LDAP server - AUTH_LDAP_2_SERVER_URI: ldap://ipa2.lsst.local - # -- Set the URI for the 3rd LDAP server - AUTH_LDAP_3_SERVER_URI: ldap://ipa3.lsst.local - # -- Have the LOVE producer managers not query commander - HEARTBEAT_QUERY_COMMANDER: false - # -- Label for the LOVE commander service. - # Must match the one spcified in the LOVE commander chart - COMMANDER_HOSTNAME: love-commander-service - # -- Port number for the LOVE commander service. 
- # Must match the one spcified in the LOVE commander chart - COMMANDER_PORT: 5000 - # -- The type of database engine being used for the LOVE manager producers - DB_ENGINE: postgresql - # -- The name of the database being used for the LOVE manager producers - DB_NAME: love - # -- The database user needed for access from the LOVE manager producers - DB_USER: love - # -- The name of the database service - DB_HOST: love-manager-database-service - # -- The port for the database service - DB_PORT: 5432 - # -- The name of the redis service - REDIS_HOST: love-manager-redis-service - # -- The expiration time for the redis service - REDIS_CONFIG_EXPIRY: 5 - # -- The connection capacity for the redis service - REDIS_CONFIG_CAPACITY: 5000 - envSecrets: - # -- The LOVE manager producers secret secret key name - SECRET_KEY: manager-secret-key - # -- The LOVE manager producers process connection password secret key name - PROCESS_CONNECTION_PASS: process-connection-pass - # -- The LOVE manager producers admin user password secret key name - ADMIN_USER_PASS: admin-user-pass - # -- The LOVE manager producers user user password secret key name - USER_USER_PASS: user-user-pass - # -- The LOVE manager producers cmd_user user password secret key name - CMD_USER_PASS: cmd-user-pass - # -- The LOVE manager producers authlist_user password secret key name - AUTHLIST_USER_PASS: authlist-user-pass - # -- The LOVE manager producers LDAP binding password secret key name - AUTH_LDAP_BIND_PASSWORD: auth-ldap-bind-password - # -- The database password secret key name. - # Must match `database.envSecrets.POSTGRES_PASSWORD` - DB_PASS: db-pass - # -- The redis password secret key name. 
- # Must match `redis.envSecrets.REDIS_PASS` - REDIS_PASS: redis-pass - # -- The LOVE manager jira API token secret key name - JIRA_API_TOKEN: jira-api-token - # -- Set the default number of LOVE manager producers pod replicas - replicas: 1 - autoscaling: - # -- Whether automatic horizontal scaling is active - enabled: true - # -- The allowed minimum number of replicas - minReplicas: 1 - # -- The allowed maximum number of replicas - maxReplicas: 100 - # -- The percentage of CPU utilization that will trigger the scaling - targetCPUUtilizationPercentage: 80 - # -- (int) The percentage of memory utilization that will trigger the scaling - targetMemoryUtilizationPercentage: "" - # -- Policy for scaling up manager pods - scaleUpPolicy: {} - # -- Policy for scaling down manager pods - scaleDownPolicy: {} - # -- Resource specifications for the LOVE manager producers pods - resources: {} - # -- Node selection rules for the LOVE manager producers pods - nodeSelector: {} - # -- Toleration specifications for the LOVE manager producers pods - tolerations: [] - # -- Affinity rules for the LOVE manager producers pods - affinity: {} - # -- Configuration for the LOVE manager producers pods readiness probe - readinessProbe: {} + # -- Example producer configuration. Each producer should follow the + # same structure as frontend with the added name field. 
+ - name: example-producer + image: + # -- The LOVE manager producers image to use + repository: lsstts/love-manager + # -- The pull policy on the LOVE manager producers image + pullPolicy: IfNotPresent + # -- The tag name for the Nexus3 Docker repository secrets if private images need to be pulled + nexus3: "" + ports: + # -- The port on the container for normal communications + container: 8000 + # -- The port on the node for normal communcations + node: 30000 + env: + # -- The site tag where LOVE is being run + LOVE_SITE: local + # -- The external URL from the NGINX server for LOVE + SERVER_URL: love.lsst.local + # -- The Kubernetes sub-path for LOVE + URL_SUBPATH: /love + # -- Set the manager to use LFA storage + REMOTE_STORAGE: true + # -- Set the hostname for the Jira instance + JIRA_API_HOSTNAME: rubinobs.atlassian.net + # -- Set the Jira project ID + JIRA_PROJECT_ID: 10063 + # -- Set the URL for the OLE instance + OLE_API_HOSTNAME: site.lsst.local + # -- Set the URI for the 1st LDAP server + AUTH_LDAP_1_SERVER_URI: ldap://ipa1.lsst.local + # -- Set the URI for the 2nd LDAP server + AUTH_LDAP_2_SERVER_URI: ldap://ipa2.lsst.local + # -- Set the URI for the 3rd LDAP server + AUTH_LDAP_3_SERVER_URI: ldap://ipa3.lsst.local + # -- Have the LOVE producer managers not query commander + HEARTBEAT_QUERY_COMMANDER: false + # -- Label for the LOVE commander service. + # Must match the one spcified in the LOVE commander chart + COMMANDER_HOSTNAME: love-commander-service + # -- Port number for the LOVE commander service. 
+ # Must match the one spcified in the LOVE commander chart + COMMANDER_PORT: 5000 + # -- The type of database engine being used for the LOVE manager producers + DB_ENGINE: postgresql + # -- The name of the database being used for the LOVE manager producers + DB_NAME: love + # -- The database user needed for access from the LOVE manager producers + DB_USER: love + # -- The name of the database service + DB_HOST: love-manager-database-service + # -- The port for the database service + DB_PORT: 5432 + # -- The name of the redis service + REDIS_HOST: love-manager-redis-service + # -- The expiration time for the redis service + REDIS_CONFIG_EXPIRY: 5 + # -- The connection capacity for the redis service + REDIS_CONFIG_CAPACITY: 5000 + envSecrets: + # -- The LOVE manager producers secret secret key name + SECRET_KEY: manager-secret-key + # -- The LOVE manager producers process connection password secret key name + PROCESS_CONNECTION_PASS: process-connection-pass + # -- The LOVE manager producers admin user password secret key name + ADMIN_USER_PASS: admin-user-pass + # -- The LOVE manager producers user user password secret key name + USER_USER_PASS: user-user-pass + # -- The LOVE manager producers cmd_user user password secret key name + CMD_USER_PASS: cmd-user-pass + # -- The LOVE manager producers authlist_user password secret key name + AUTHLIST_USER_PASS: authlist-user-pass + # -- The LOVE manager producers LDAP binding password secret key name + AUTH_LDAP_BIND_PASSWORD: auth-ldap-bind-password + # -- The database password secret key name. + # Must match `database.envSecrets.POSTGRES_PASSWORD` + DB_PASS: db-pass + # -- The redis password secret key name. 
+ # Must match `redis.envSecrets.REDIS_PASS` + REDIS_PASS: redis-pass + # -- The LOVE manager jira API token secret key name + JIRA_API_TOKEN: jira-api-token + # -- Set the default number of LOVE manager producers pod replicas + replicas: 1 + autoscaling: + # -- Whether automatic horizontal scaling is active + enabled: true + # -- The allowed minimum number of replicas + minReplicas: 1 + # -- The allowed maximum number of replicas + maxReplicas: 100 + # -- The percentage of CPU utilization that will trigger the scaling + targetCPUUtilizationPercentage: 80 + # -- (int) The percentage of memory utilization that will trigger the scaling + targetMemoryUtilizationPercentage: "" + # -- Policy for scaling up manager pods + scaleUpPolicy: {} + # -- Policy for scaling down manager pods + scaleDownPolicy: {} + # -- Resource specifications for the LOVE manager producers pods + resources: {} + # -- Node selection rules for the LOVE manager producers pods + nodeSelector: {} + # -- Toleration specifications for the LOVE manager producers pods + tolerations: [] + # -- Affinity rules for the LOVE manager producers pods + affinity: {} + # -- Configuration for the LOVE manager producers pods readiness probe + readinessProbe: {} + # -- Configuration for the producers ports. + # this is a single configuration for all the producers. + producers_ports: + # -- The port on the container for normal communications + container: 8000 + # -- The port on the node for normal communcations + node: 30000 redis: image: # -- The redis image to use diff --git a/applications/love/charts/love-producer/README.md b/applications/love/charts/love-producer/README.md index 7857e17d30..5420c2e03f 100644 --- a/applications/love/charts/love-producer/README.md +++ b/applications/love/charts/love-producer/README.md @@ -8,7 +8,7 @@ Helm chart for the LOVE producers. 
|-----|------|---------|-------------| | affinity | object | `{}` | Affinity rules applied to all LOVE producer pods | | annotations | object | `{}` | This allows for the specification of pod annotations. | -| env | object | `{"WEBSOCKET_HOST":"love-nginx/manager/ws/subscription"}` | This section holds a set of key, value pairs for environmental variables | +| env | object | `{}` | This section holds a set of key, value pairs for environmental variables | | envSecrets | object | `{"PROCESS_CONNECTION_PASS":"process-connection-pass"}` | This section holds a set of key, value pairs for secrets | | image.pullPolicy | string | `"IfNotPresent"` | The pull policy on the LOVE producer image | | image.repository | string | `"lsstts/love-producer"` | The LOVE producer image to use | diff --git a/applications/love/charts/love-producer/templates/deployment.yaml b/applications/love/charts/love-producer/templates/deployment.yaml index 77209f1579..5221670b21 100644 --- a/applications/love/charts/love-producer/templates/deployment.yaml +++ b/applications/love/charts/love-producer/templates/deployment.yaml @@ -34,6 +34,8 @@ spec: env: - name: LOVE_CSC_PRODUCER value: {{ $producer.csc | quote }} + - name: WEBSOCKET_HOST + value: {{ $producer.WEBSOCKET_HOST | quote }} - name: LSST_KAFKA_SECURITY_PASSWORD valueFrom: secretKeyRef: diff --git a/applications/love/charts/love-producer/values.yaml b/applications/love/charts/love-producer/values.yaml index ca39d63d95..49d6de9594 100644 --- a/applications/love/charts/love-producer/values.yaml +++ b/applications/love/charts/love-producer/values.yaml @@ -8,8 +8,7 @@ image: # -- The pull policy on the LOVE producer image pullPolicy: IfNotPresent # -- This section holds a set of key, value pairs for environmental variables -env: - WEBSOCKET_HOST: love-nginx/manager/ws/subscription +env: {} # -- This section holds a set of key, value pairs for secrets envSecrets: PROCESS_CONNECTION_PASS: process-connection-pass diff --git 
a/applications/love/values-base.yaml b/applications/love/values-base.yaml index fecc6d4326..090346b111 100644 --- a/applications/love/values-base.yaml +++ b/applications/love/values-base.yaml @@ -23,6 +23,7 @@ love-manager: frontend: image: repository: ts-dockerhub.lsst.org/love-manager + tag: k0002 pullPolicy: Always env: SERVER_URL: base-lsp.lsst.codes @@ -59,19 +60,174 @@ love-manager: initialDelaySeconds: 20 periodSeconds: 10 producers: + - name: general image: repository: ts-dockerhub.lsst.org/love-manager + tag: k0002 pullPolicy: Always env: + LOVE_SITE: base SERVER_URL: base-lsp.lsst.codes OLE_API_HOSTNAME: base-lsp.lsst.codes AUTH_LDAP_1_SERVER_URI: ldap://ipa1.ls.lsst.org AUTH_LDAP_2_SERVER_URI: ldap://ipa2.ls.lsst.org AUTH_LDAP_3_SERVER_URI: ldap://ipa3.ls.lsst.org + COMMANDER_HOSTNAME: love-commander-service + COMMANDER_PORT: 5000 DB_HOST: postgresdb01.ls.lsst.org + DB_ENGINE: postgresql + DB_NAME: love + DB_PORT: 5432 + DB_USER: love + HEARTBEAT_QUERY_COMMANDER: false + JIRA_API_HOSTNAME: rubinobs.atlassian.net + JIRA_PROJECT_ID: 10063 + REDIS_CONFIG_CAPACITY: 5000 + REDIS_CONFIG_EXPIRY: 5 + REDIS_HOST: love-manager-redis-service + REMOTE_STORAGE: true + URL_SUBPATH: /love + envSecrets: + SECRET_KEY: manager-secret-key + PROCESS_CONNECTION_PASS: process-connection-pass + ADMIN_USER_PASS: admin-user-pass + USER_USER_PASS: user-user-pass + CMD_USER_PASS: cmd-user-pass + AUTHLIST_USER_PASS: authlist-user-pass + AUTH_LDAP_BIND_PASSWORD: auth-ldap-bind-password + DB_PASS: db-pass + REDIS_PASS: redis-pass + replicas: 10 + autoscaling: + enabled: false + minReplicas: 2 + maxReplicas: 25 + targetCPUUtilizationPercentage: 50 + scaleDownPolicy: + policies: + - type: Pods + value: 2 + periodSeconds: 120 + - type: Percent + value: 10 + periodSeconds: 120 + selectPolicy: Min + resources: + requests: + cpu: 150m + memory: 200Mi + limits: + cpu: 1000m + memory: 1500Mi + readinessProbe: + tcpSocket: + port: 8000 + initialDelaySeconds: 20 + periodSeconds: 10 + - name: 
queue + image: + repository: ts-dockerhub.lsst.org/love-manager + tag: k0002 + pullPolicy: Always + env: LOVE_SITE: base + SERVER_URL: base-lsp.lsst.codes + OLE_API_HOSTNAME: base-lsp.lsst.codes + AUTH_LDAP_1_SERVER_URI: ldap://ipa1.ls.lsst.org + AUTH_LDAP_2_SERVER_URI: ldap://ipa2.ls.lsst.org + AUTH_LDAP_3_SERVER_URI: ldap://ipa3.ls.lsst.org + COMMANDER_HOSTNAME: love-commander-service + COMMANDER_PORT: 5000 + DB_HOST: postgresdb01.ls.lsst.org + DB_ENGINE: postgresql + DB_NAME: love + DB_PORT: 5432 + DB_USER: love + HEARTBEAT_QUERY_COMMANDER: false + JIRA_API_HOSTNAME: rubinobs.atlassian.net + JIRA_PROJECT_ID: 10063 + REDIS_CONFIG_CAPACITY: 5000 + REDIS_CONFIG_EXPIRY: 5 + REDIS_HOST: love-manager-redis-service + REMOTE_STORAGE: true + URL_SUBPATH: /love + envSecrets: + SECRET_KEY: manager-secret-key + PROCESS_CONNECTION_PASS: process-connection-pass + ADMIN_USER_PASS: admin-user-pass + USER_USER_PASS: user-user-pass + CMD_USER_PASS: cmd-user-pass + AUTHLIST_USER_PASS: authlist-user-pass + AUTH_LDAP_BIND_PASSWORD: auth-ldap-bind-password + DB_PASS: db-pass + REDIS_PASS: redis-pass + replicas: 3 autoscaling: - enabled: true + enabled: false + minReplicas: 2 + maxReplicas: 25 + targetCPUUtilizationPercentage: 50 + scaleDownPolicy: + policies: + - type: Pods + value: 2 + periodSeconds: 120 + - type: Percent + value: 10 + periodSeconds: 120 + selectPolicy: Min + resources: + requests: + cpu: 150m + memory: 200Mi + limits: + cpu: 1000m + memory: 1500Mi + readinessProbe: + tcpSocket: + port: 8000 + initialDelaySeconds: 20 + periodSeconds: 10 + - name: m1m3 + image: + repository: ts-dockerhub.lsst.org/love-manager + tag: k0002 + pullPolicy: Always + env: + LOVE_SITE: base + SERVER_URL: base-lsp.lsst.codes + OLE_API_HOSTNAME: base-lsp.lsst.codes + AUTH_LDAP_1_SERVER_URI: ldap://ipa1.ls.lsst.org + AUTH_LDAP_2_SERVER_URI: ldap://ipa2.ls.lsst.org + AUTH_LDAP_3_SERVER_URI: ldap://ipa3.ls.lsst.org + COMMANDER_HOSTNAME: love-commander-service + COMMANDER_PORT: 5000 + DB_HOST: 
postgresdb01.ls.lsst.org + DB_ENGINE: postgresql + DB_NAME: love + DB_PORT: 5432 + DB_USER: love + HEARTBEAT_QUERY_COMMANDER: false + JIRA_API_HOSTNAME: rubinobs.atlassian.net + JIRA_PROJECT_ID: 10063 + REDIS_CONFIG_CAPACITY: 5000 + REDIS_CONFIG_EXPIRY: 5 + REDIS_HOST: love-manager-redis-service + REMOTE_STORAGE: true + URL_SUBPATH: /love + envSecrets: + SECRET_KEY: manager-secret-key + PROCESS_CONNECTION_PASS: process-connection-pass + ADMIN_USER_PASS: admin-user-pass + USER_USER_PASS: user-user-pass + CMD_USER_PASS: cmd-user-pass + AUTHLIST_USER_PASS: authlist-user-pass + AUTH_LDAP_BIND_PASSWORD: auth-ldap-bind-password + DB_PASS: db-pass + REDIS_PASS: redis-pass + replicas: 1 + autoscaling: + enabled: false minReplicas: 2 maxReplicas: 25 targetCPUUtilizationPercentage: 50 @@ -156,7 +312,23 @@ love-nginx: proxy_redirect off; } location /love/manager/producers { - proxy_pass http://love-manager-producers-service:8000; + proxy_pass http://love-manager-producer-general-service:8000; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + proxy_redirect off; + } + location /love/manager/m1m3 { + proxy_pass http://love-manager-producer-m1m3-service:8000; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + proxy_redirect off; + } + location /love/manager/queue { + proxy_pass http://love-manager-producer-queue-service:8000; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection "upgrade"; @@ -200,8 +372,6 @@ love-producer: image: repository: ts-dockerhub.lsst.org/love-producer pullPolicy: Always - env: - WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription resources: requests: cpu: 10m @@ -212,90 +382,133 @@ love-producer: producers: - name: ataos csc: ATAOS:0 --log-level 10 + WEBSOCKET_HOST: 
love-nginx-service/love/manager/producers/ws/subscription - name: atcamera csc: ATCamera:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atdome csc: ATDome:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atdometrajectory csc: ATDomeTrajectory:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atheaderservice csc: ATHeaderService:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: athexapod csc: ATHexapod:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atmcs csc: ATMCS:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atocps csc: OCPS:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atoods csc: ATOODS:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atpneumatics csc: ATPneumatics:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atptg csc: ATPtg:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atscheduler csc: Scheduler:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atscriptqueue csc: ScriptQueue:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/queue/ws/subscription - name: atspectrograph csc: ATSpectrograph:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: auxteless201 csc: ESS:201 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: auxteless202 csc: ESS:202 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: auxteless203 csc: ESS:203 --log-level 
10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: auxteless204 csc: ESS:204 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: auxteless205 csc: ESS:205 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: calibhilless301 csc: ESS:301 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: camerahexapod csc: MTHexapod:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: dimm1 csc: DIMM:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: dimm2 csc: DIMM:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: dsm1 csc: DSM:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: dsm2 csc: DSM:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: epm1 csc: EPM:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: gcheaderservice1 csc: GCHeaderService:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: genericcamera1 csc: GenericCamera:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: lasertracker1 csc: LaserTracker:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: love csc: LOVE:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: m2ess106 csc: ESS:106 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: m2hexapod csc: MTHexapod:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtaircompressor1 csc: MTAirCompressor:1 --log-level 10 
+ WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtaircompressor2 csc: MTAirCompressor:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtaos csc: MTAOS:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdome csc: MTDome:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdomeess101 csc: ESS:101 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdomeess102 csc: ESS:102 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdomeess103 csc: ESS:103 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdomeess107 csc: ESS:107 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdomeess108 csc: ESS:108 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdometrajectory csc: MTDomeTrajectory:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtm1m3 csc: MTM1M3:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/m1m3/ws/subscription resources: requests: cpu: 10m @@ -305,27 +518,40 @@ love-producer: memory: 600Mi - name: mtm2 csc: MTM2:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtmount csc: MTMount:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtptg csc: MTPtg:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtrotator csc: MTRotator:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtscheduler csc: Scheduler:1 --log-level 10 + WEBSOCKET_HOST: 
love-nginx-service/love/manager/producers/ws/subscription - name: mtscriptqueue csc: ScriptQueue:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/queue/ws/subscription - name: ocsscheduler csc: Scheduler:3 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: ocsscriptqueue csc: ScriptQueue:3 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/queue/ws/subscription - name: tmaess001 csc: ESS:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: tmaess104 csc: ESS:104 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: tmaess105 csc: ESS:105 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: watcher csc: Watcher:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: weatherforecast csc: WeatherForecast:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription From c57bdefeb07ae126a4c6b767e61d41ddf150787a Mon Sep 17 00:00:00 2001 From: Hsin-Fang Chiang Date: Wed, 4 Sep 2024 11:24:44 -0700 Subject: [PATCH 033/567] Use new survey name BLOCK-306 for LATISS prompt processing "BLOCK-306" is the new "AUXTEL_PHOTO_IMAGING". The survey name change happened on 2024-09-04. 
--- .../values-usdfprod-prompt-processing.yaml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index 5115052b25..da86def651 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -20,10 +20,7 @@ prompt-proto-service: pipelines: # BLOCK-295 is the daily calibration sequence as of May 27, 2024 main: >- - (survey="AUXTEL_PHOTO_IMAGING")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/ApPipe.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/SingleFrame.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr.yaml] - (survey="AUXTEL_DRP_IMAGING")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/ApPipe.yaml, + (survey="BLOCK-306")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/ApPipe.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/SingleFrame.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr.yaml] (survey="BLOCK-T17")=[] @@ -34,8 +31,7 @@ prompt-proto-service: (survey="BLOCK-295")=[] (survey="")=[] preprocessing: >- - (survey="AUXTEL_PHOTO_IMAGING")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Preprocessing.yaml] - (survey="AUXTEL_DRP_IMAGING")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Preprocessing.yaml] + (survey="BLOCK-306")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Preprocessing.yaml] (survey="BLOCK-T17")=[] (survey="cwfs")=[] (survey="cwfs-focus-sweep")=[] From 3b6cbe81f03d9241da8f4b6c81d45037ec057a3d Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Wed, 4 Sep 2024 10:54:49 -0400 Subject: [PATCH 034/567] Deploy Times Square 0.12.0 https://github.com/lsst-sqre/times-square/pull/80 https://github.com/lsst-sqre/times-square/releases/tag/0.12.0 --- applications/times-square/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/applications/times-square/Chart.yaml b/applications/times-square/Chart.yaml index fc984d18bb..daa934699b 100644 --- a/applications/times-square/Chart.yaml +++ b/applications/times-square/Chart.yaml @@ -8,7 +8,7 @@ sources: type: application # The default version tag of the times-square docker image -appVersion: "0.11.0" +appVersion: "0.12.0" dependencies: - name: redis From 513321109c514e24b1a5273d4395bcbc4d8a8184 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 6 Sep 2024 10:05:50 -0700 Subject: [PATCH 035/567] Summit: Add LSSTCam butler directories to nublado. --- applications/nublado/values-summit.yaml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/applications/nublado/values-summit.yaml b/applications/nublado/values-summit.yaml index 289eaea4e6..0ccb69cac4 100644 --- a/applications/nublado/values-summit.yaml +++ b/applications/nublado/values-summit.yaml @@ -60,6 +60,11 @@ controller: type: "nfs" serverPath: "/repo/LSSTComCam" server: "comcam-archiver.cp.lsst.org" + - name: "lsstcam" + source: + type: "nfs" + serverPath: "/lsstcam/repo/LSSTCam" + server: "nfs3.cp.lsst.org" - name: "obs-env" source: type: "nfs" @@ -80,6 +85,11 @@ controller: type: "nfs" serverPath: "/auxtel/lsstdata" server: "nfs-auxtel.cp.lsst.org" + - name: "lsstdata-lsstcam" + source: + type: "nfs" + serverPath: "/lsstcam/lsstdata" + server: "nfs3.cp.lsst.org" - name: "lsstdata-base-comcam" source: type: "nfs" @@ -90,6 +100,11 @@ controller: type: "nfs" serverPath: "/auxtel/lsstdata/base/auxtel" server: "nfs-auxtel.cp.lsst.org" + - name: "lsstdata-base-lsstcam" + source: + type: "nfs" + serverPath: "/lsstcam/lsstdata/base/lsstcam" + server: "nfs3.cp.lsst.org" volumeMounts: - containerPath: "/home" volumeName: "home" @@ -101,6 +116,8 @@ controller: volumeName: "latiss" - containerPath: "/repo/LSSTComCam" volumeName: "lsstcomcam" + - containerPath: "/repo/LSSTCam" + volumeName: "lsstcam" - containerPath: "/net/obs-env" volumeName: "obs-env" - containerPath: 
"/readonly/lsstdata/other" @@ -109,10 +126,14 @@ controller: volumeName: "lsstdata-comcam" - containerPath: "/readonly/lsstdata/auxtel" volumeName: "lsstdata-auxtel" + - containerPath: "/readonly/lsstdata/lsstcam" + volumeName: "lsstdata-lsstcam" - containerPath: "/data/lsstdata/base/comcam" volumeName: "lsstdata-base-comcam" - containerPath: "/data/lsstdata/base/auxtel" volumeName: "lsstdata-base-auxtel" + - containerPath: "/data/lsstdata/base/lsstcam" + volumeName: "lsstdata-base-lsstcam" hub: internalDatabase: false From 5563beaa4973bf057af92ced6abd42254c0588a6 Mon Sep 17 00:00:00 2001 From: Erin Howard Date: Fri, 6 Sep 2024 11:16:28 -0700 Subject: [PATCH 036/567] Update LATISS Prompt Processing to 4.4.0. --- .../values-usdfprod-prompt-processing.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index da86def651..d6ac56dc6c 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.3.0 + tag: 4.4.0 instrument: pipelines: From ba1f9bcde694f7c9bc94f9fcd7c0a0eedce8edeb Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 6 Sep 2024 12:54:44 -0700 Subject: [PATCH 037/567] BTS: Update to Kafka Cycle 2. 
--- applications/nublado/values-base.yaml | 2 +- environments/values-base.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/nublado/values-base.yaml b/applications/nublado/values-base.yaml index 269181582f..6fb6f5ad9c 100644 --- a/applications/nublado/values-base.yaml +++ b/applications/nublado/values-base.yaml @@ -10,7 +10,7 @@ controller: numWeeklies: 3 numDailies: 2 cycle: null - recommendedTag: "recommended_k0001" + recommendedTag: "recommended_k0002" lab: extraAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/dds" diff --git a/environments/values-base.yaml b/environments/values-base.yaml index b5d2a8c5ac..e0a262f932 100644 --- a/environments/values-base.yaml +++ b/environments/values-base.yaml @@ -34,6 +34,6 @@ applications: uws: true controlSystem: - imageTag: "k0001" + imageTag: "k0002" siteTag: "base" s3EndpointUrl: "https://s3.ls.lsst.org" From 3bfe48eb409fd59eecca880776f6f6b3ce944dc8 Mon Sep 17 00:00:00 2001 From: Tiago Ribeiro Date: Fri, 6 Sep 2024 20:56:52 -0700 Subject: [PATCH 038/567] Add new IMAGE_SERVER_URL environment variable for all the ScriptQueue deployments at BTS. 
--- applications/obssys/values-base.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/applications/obssys/values-base.yaml b/applications/obssys/values-base.yaml index 399aab63ef..c221197c1b 100644 --- a/applications/obssys/values-base.yaml +++ b/applications/obssys/values-base.yaml @@ -7,6 +7,7 @@ atqueue: DAF_BUTLER_REPOSITORY_INDEX: /project/data-repos.yaml RUN_ARG: 2 --state enabled USER_USERNAME: user + IMAGE_SERVER_URL: http://lsstcam-mcm.ls.lsst.org butlerSecret: containerPath: &bS-cP /home/saluser/.lsst dbUser: &bS-dbU oods @@ -84,6 +85,7 @@ mtqueue: DAF_BUTLER_REPOSITORY_INDEX: /project/data-repos.yaml RUN_ARG: 1 --state enabled USER_USERNAME: user + IMAGE_SERVER_URL: http://lsstcam-mcm.ls.lsst.org butlerSecret: containerPath: *bS-cP dbUser: *bS-dbU @@ -161,6 +163,7 @@ ocsqueue: DAF_BUTLER_REPOSITORY_INDEX: /project/data-repos.yaml RUN_ARG: 3 --state enabled USER_USERNAME: user + IMAGE_SERVER_URL: http://lsstcam-mcm.ls.lsst.org butlerSecret: containerPath: *bS-cP dbUser: *bS-dbU From c1200cf81ccc0123b74d8fb780dfd3771d9f524b Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 9 Sep 2024 08:12:00 -0700 Subject: [PATCH 039/567] Summit: Fix nublado LSSTCam mount. 
--- applications/nublado/values-summit.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/nublado/values-summit.yaml b/applications/nublado/values-summit.yaml index 0ccb69cac4..3fddbdcecc 100644 --- a/applications/nublado/values-summit.yaml +++ b/applications/nublado/values-summit.yaml @@ -103,7 +103,7 @@ controller: - name: "lsstdata-base-lsstcam" source: type: "nfs" - serverPath: "/lsstcam/lsstdata/base/lsstcam" + serverPath: "/lsstcam/lsstdata/base/maintel" server: "nfs3.cp.lsst.org" volumeMounts: - containerPath: "/home" @@ -132,7 +132,7 @@ controller: volumeName: "lsstdata-base-comcam" - containerPath: "/data/lsstdata/base/auxtel" volumeName: "lsstdata-base-auxtel" - - containerPath: "/data/lsstdata/base/lsstcam" + - containerPath: "/data/lsstdata/base/maintel" volumeName: "lsstdata-base-lsstcam" hub: From 2ccfbfa6d4aacbd15ef324d0027a0c290fa28e10 Mon Sep 17 00:00:00 2001 From: Hsin-Fang Chiang Date: Mon, 12 Aug 2024 16:06:57 -0700 Subject: [PATCH 040/567] Enable prompt processing for BLOCK-T17 BLOCK-T17 is the LATISS daytime checkout. Let the second exposure in the daytime checkout trigger ISR processinga as a system test and potentially expose problems if any, before the telescope goes on sky. 
--- .../values-usdfprod-prompt-processing.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index d6ac56dc6c..a379a6d328 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -23,7 +23,7 @@ prompt-proto-service: (survey="BLOCK-306")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/ApPipe.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/SingleFrame.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr.yaml] - (survey="BLOCK-T17")=[] + (survey="BLOCK-T17")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr-cal.yaml] (survey="cwfs")=[] (survey="cwfs-focus-sweep")=[] (survey="spec-survey")=[] From d194f563376351ab54f6855ae3d0a79b9d50ad4c Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 6 Sep 2024 19:34:26 -0700 Subject: [PATCH 041/567] TTS: Update nublado to Cycle 39. --- applications/nublado/values-tucson-teststand.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/nublado/values-tucson-teststand.yaml b/applications/nublado/values-tucson-teststand.yaml index abe987c409..bafd20a7a2 100644 --- a/applications/nublado/values-tucson-teststand.yaml +++ b/applications/nublado/values-tucson-teststand.yaml @@ -8,8 +8,8 @@ controller: numReleases: 0 numWeeklies: 3 numDailies: 2 - cycle: 38 - recommendedTag: "recommended_c0038" + cycle: 39 + recommendedTag: "recommended_c0039" lab: extraAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/dds" From fc22c27ae27a80126af441d3031e553c9f2e768f Mon Sep 17 00:00:00 2001 From: Colin Slater Date: Thu, 15 Aug 2024 14:57:22 -0700 Subject: [PATCH 042/567] Update USDF TAP authentication to qserv. 
--- applications/tap/secrets-usdfdev.yaml | 4 ++++ applications/tap/secrets-usdfint.yaml | 4 ++++ applications/tap/secrets-usdfprod.yaml | 4 ++++ applications/tap/values-usdfdev.yaml | 3 ++- applications/tap/values-usdfint.yaml | 3 ++- applications/tap/values-usdfprod.yaml | 3 ++- 6 files changed, 18 insertions(+), 3 deletions(-) create mode 100644 applications/tap/secrets-usdfdev.yaml create mode 100644 applications/tap/secrets-usdfint.yaml create mode 100644 applications/tap/secrets-usdfprod.yaml diff --git a/applications/tap/secrets-usdfdev.yaml b/applications/tap/secrets-usdfdev.yaml new file mode 100644 index 0000000000..f6a85b9f26 --- /dev/null +++ b/applications/tap/secrets-usdfdev.yaml @@ -0,0 +1,4 @@ +qserv-password: + description: >- + Password for the QServ database server + if: cadc-tap.config.qserv.passwordEnabled diff --git a/applications/tap/secrets-usdfint.yaml b/applications/tap/secrets-usdfint.yaml new file mode 100644 index 0000000000..f6a85b9f26 --- /dev/null +++ b/applications/tap/secrets-usdfint.yaml @@ -0,0 +1,4 @@ +qserv-password: + description: >- + Password for the QServ database server + if: cadc-tap.config.qserv.passwordEnabled diff --git a/applications/tap/secrets-usdfprod.yaml b/applications/tap/secrets-usdfprod.yaml new file mode 100644 index 0000000000..f6a85b9f26 --- /dev/null +++ b/applications/tap/secrets-usdfprod.yaml @@ -0,0 +1,4 @@ +qserv-password: + description: >- + Password for the QServ database server + if: cadc-tap.config.qserv.passwordEnabled diff --git a/applications/tap/values-usdfdev.yaml b/applications/tap/values-usdfdev.yaml index e82393ad97..5cd01dbb7e 100644 --- a/applications/tap/values-usdfdev.yaml +++ b/applications/tap/values-usdfdev.yaml @@ -6,7 +6,8 @@ cadc-tap: config: qserv: host: "172.24.49.51:4040" - jdbcParams: "?enabledTLSProtocols=TLSv1.2" + jdbcParams: "?enabledTLSProtocols=TLSv1.3" + passwordEnabled: true gcsBucket: "rubin:rubin-qserv" gcsBucketUrl: "https://s3dfrgw.slac.stanford.edu" diff --git 
a/applications/tap/values-usdfint.yaml b/applications/tap/values-usdfint.yaml index b8f8ab9404..06b5e08204 100644 --- a/applications/tap/values-usdfint.yaml +++ b/applications/tap/values-usdfint.yaml @@ -6,7 +6,8 @@ cadc-tap: config: qserv: host: "172.24.49.51:4040" - jdbcParams: "?enabledTLSProtocols=TLSv1.2" + jdbcParams: "?enabledTLSProtocols=TLSv1.3" + passwordEnabled: true gcsBucket: "rubin:rubin-qserv" gcsBucketUrl: "https://s3dfrgw.slac.stanford.edu" diff --git a/applications/tap/values-usdfprod.yaml b/applications/tap/values-usdfprod.yaml index b8f8ab9404..06b5e08204 100644 --- a/applications/tap/values-usdfprod.yaml +++ b/applications/tap/values-usdfprod.yaml @@ -6,7 +6,8 @@ cadc-tap: config: qserv: host: "172.24.49.51:4040" - jdbcParams: "?enabledTLSProtocols=TLSv1.2" + jdbcParams: "?enabledTLSProtocols=TLSv1.3" + passwordEnabled: true gcsBucket: "rubin:rubin-qserv" gcsBucketUrl: "https://s3dfrgw.slac.stanford.edu" From 598a5d24d0c46c5895ac77ac51ef88baea2f48a7 Mon Sep 17 00:00:00 2001 From: Colin Slater Date: Thu, 15 Aug 2024 15:19:35 -0700 Subject: [PATCH 043/567] Point USDF TAP int/dev to USDF qserv int. Use FQDNs. 
--- applications/tap/values-usdfdev.yaml | 2 +- applications/tap/values-usdfint.yaml | 2 +- applications/tap/values-usdfprod.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/tap/values-usdfdev.yaml b/applications/tap/values-usdfdev.yaml index 5cd01dbb7e..69109d427f 100644 --- a/applications/tap/values-usdfdev.yaml +++ b/applications/tap/values-usdfdev.yaml @@ -5,7 +5,7 @@ cadc-tap: config: qserv: - host: "172.24.49.51:4040" + host: "sdfqserv001.sdf.slac.stanford.edu:4090" jdbcParams: "?enabledTLSProtocols=TLSv1.3" passwordEnabled: true diff --git a/applications/tap/values-usdfint.yaml b/applications/tap/values-usdfint.yaml index 06b5e08204..ca53594d8f 100644 --- a/applications/tap/values-usdfint.yaml +++ b/applications/tap/values-usdfint.yaml @@ -5,7 +5,7 @@ cadc-tap: config: qserv: - host: "172.24.49.51:4040" + host: "sdfqserv001.sdf.slac.stanford.edu:4090" jdbcParams: "?enabledTLSProtocols=TLSv1.3" passwordEnabled: true diff --git a/applications/tap/values-usdfprod.yaml b/applications/tap/values-usdfprod.yaml index 06b5e08204..9021a9e3fa 100644 --- a/applications/tap/values-usdfprod.yaml +++ b/applications/tap/values-usdfprod.yaml @@ -5,7 +5,7 @@ cadc-tap: config: qserv: - host: "172.24.49.51:4040" + host: "sdfqserv001.sdf.slac.stanford.edu:4040" jdbcParams: "?enabledTLSProtocols=TLSv1.3" passwordEnabled: true From d3143f4f08d938071773d3614f9d148e446de9d2 Mon Sep 17 00:00:00 2001 From: pav511 <38131208+pav511@users.noreply.github.com> Date: Tue, 10 Sep 2024 11:15:23 -0700 Subject: [PATCH 044/567] vbecker argocd --- applications/argocd/values-usdfdev.yaml | 1 + applications/argocd/values-usdfint.yaml | 1 + applications/argocd/values-usdfprod.yaml | 1 + 3 files changed, 3 insertions(+) diff --git a/applications/argocd/values-usdfdev.yaml b/applications/argocd/values-usdfdev.yaml index c5343fe22b..a58a82f77e 100644 --- a/applications/argocd/values-usdfdev.yaml +++ b/applications/argocd/values-usdfdev.yaml @@ -58,6 +58,7 @@ 
argo-cd: g, smart@slac.stanford.edu, role:developer g, spothi@slac.stanford.edu, role:developer g, bbrond@slac.stanford.edu, role:developer + g, vbecker@slac.stanford.edu, role:developer scopes: "[email]" server: diff --git a/applications/argocd/values-usdfint.yaml b/applications/argocd/values-usdfint.yaml index 5eaeafb2bd..12ba88dd96 100644 --- a/applications/argocd/values-usdfint.yaml +++ b/applications/argocd/values-usdfint.yaml @@ -57,6 +57,7 @@ argo-cd: g, smart@slac.stanford.edu, role:developer g, spothi@slac.stanford.edu, role:developer g, bbrond@slac.stanford.edu, role:developer + g, vbecker@slac.stanford.edu, role:developer scopes: "[email]" server: diff --git a/applications/argocd/values-usdfprod.yaml b/applications/argocd/values-usdfprod.yaml index 59a611f653..9c5fdf1734 100644 --- a/applications/argocd/values-usdfprod.yaml +++ b/applications/argocd/values-usdfprod.yaml @@ -53,6 +53,7 @@ argo-cd: g, smart@slac.stanford.edu, role:developer g, spothi@slac.stanford.edu, role:developer g, bbrond@slac.stanford.edu, role:developer + g, vbecker@slac.stanford.edu, role:developer scopes: "[email]" server: From f1ff416adffb12345f1fe078e25582e964a03a85 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 6 Sep 2024 14:00:31 -0700 Subject: [PATCH 045/567] Enable collection of memory statistics --- .../charts/telegraf-kafka-consumer/templates/configmap.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml index 8e5e0aa29d..fa0e7e4cf1 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml @@ -58,5 +58,6 @@ data: [[inputs.internal]] collect_memstats = false + collect_memstats = true {{- end }} {{- end }} From 9288ad4de455df038e469c1edb29ae05cb0371d2 Mon Sep 17 00:00:00 2001 From: Angelo 
Fausti Date: Fri, 6 Sep 2024 14:01:00 -0700 Subject: [PATCH 046/567] Add an extra tag to identify the Telegraf instance --- .../charts/telegraf-kafka-consumer/templates/configmap.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml index fa0e7e4cf1..ad6fe31dbd 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml @@ -57,7 +57,8 @@ data: consumer_fetch_default = {{ default "20MB" $value.consumer_fetch_default | quote }} [[inputs.internal]] - collect_memstats = false collect_memstats = true + tags = { instance = "{{ $key }}" } + {{- end }} {{- end }} From 872d85b0cb9c18ecf3d601a02f1591f7ce001b97 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 6 Sep 2024 14:20:28 -0700 Subject: [PATCH 047/567] Route Telegraf internal metrics to its own database - Now that we are tagging the Telegraf internal metrics send them to a separate database. This allows for setting a different retention period for the telegraf database. 
--- .../telegraf-kafka-consumer/templates/configmap.yaml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml index ad6fe31dbd..c2419057fc 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml @@ -30,6 +30,15 @@ data: username = "${INFLUXDB_USER}" password = "${INFLUXDB_PASSWORD}" + [[outputs.influxdb]] + namepass = ["telegraf_*"] + urls = [ + {{ $.Values.influxdb.url | quote }} + ] + database = "telegraf" + username = "${INFLUXDB_USER}" + password = "${INFLUXDB_PASSWORD}" + [[inputs.kafka_consumer]] brokers = [ "sasquatch-kafka-brokers.sasquatch:9092" @@ -57,6 +66,7 @@ data: consumer_fetch_default = {{ default "20MB" $value.consumer_fetch_default | quote }} [[inputs.internal]] + name_prefix = "telegraf_" collect_memstats = true tags = { instance = "{{ $key }}" } From 3b1c8f55bfdb4f08d461038be8de19d0be0ba932 Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Wed, 11 Sep 2024 09:39:47 -0700 Subject: [PATCH 048/567] Upgrade Butler server Upgrade to a new version of Butler server with support for a `query_collection_info` endpoint that is needed by the new Butler Collections API that will be part of the 2024_37 pipelines stack. 
--- applications/butler/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/butler/Chart.yaml b/applications/butler/Chart.yaml index 20d3063518..9d3b40a094 100644 --- a/applications/butler/Chart.yaml +++ b/applications/butler/Chart.yaml @@ -4,4 +4,4 @@ version: 1.0.0 description: Server for Butler data abstraction service sources: - https://github.com/lsst/daf_butler -appVersion: server-2.0.0 +appVersion: server-2.1.0 From 9224f18a8514129bb14943e7a57bffff7a8637bd Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Wed, 11 Sep 2024 14:40:48 -0700 Subject: [PATCH 049/567] Update dependencies Switch to universal dependencies and remove the explicit dependency on greenlet. Move the runtime dependencies into pyproject.toml so that normal Python package installation will pick them up, but keep freezing dependencies as well. --- .pre-commit-config.yaml | 2 +- Makefile | 16 +- pyproject.toml | 12 + requirements/dev.in | 5 - requirements/dev.txt | 566 ++++++++++++++++++++++++---------------- requirements/main.in | 22 -- requirements/main.txt | 362 ++++++++++++------------- requirements/tox.txt | 75 +++--- 8 files changed, 581 insertions(+), 479 deletions(-) delete mode 100644 requirements/main.in diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3c7869cff9..99ad10ee7c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,7 +46,7 @@ repos: - --template-files=../helm-docs.md.gotmpl - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.3 + rev: v0.6.4 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/Makefile b/Makefile index 7916bf066c..48c10041bb 100644 --- a/Makefile +++ b/Makefile @@ -41,20 +41,20 @@ update-deps: pip install --upgrade pip uv uv pip install --upgrade pre-commit pre-commit autoupdate - uv pip compile --upgrade --generate-hashes \ - --output-file requirements/main.txt requirements/main.in - uv pip compile --upgrade --generate-hashes \ + uv pip compile 
--upgrade --universal --generate-hashes \ + --output-file requirements/main.txt pyproject.toml + uv pip compile --upgrade --universal --generate-hashes \ --output-file requirements/dev.txt requirements/dev.in - uv pip compile --upgrade --generate-hashes \ + uv pip compile --upgrade --universal --generate-hashes \ --output-file requirements/tox.txt requirements/tox.in # Useful for testing against a Git version of Safir. .PHONY: update-deps-no-hashes update-deps-no-hashes: pip install --upgrade uv - uv pip compile --upgrade \ - --output-file requirements/main.txt requirements/main.in - uv pip compile --upgrade \ + uv pip compile --upgrade --universal \ + --output-file requirements/main.txt pyproject.toml + uv pip compile --upgrade --universal \ --output-file requirements/dev.txt requirements/dev.in - uv pip compile --upgrade \ + uv pip compile --upgrade --universal \ --output-file requirements/tox.txt requirements/tox.in diff --git a/pyproject.toml b/pyproject.toml index 3b5c56bdb4..2911638a09 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,18 @@ classifiers = [ "Operating System :: POSIX", ] requires-python = ">=3.11" +dependencies = [ + "bcrypt", + "click", + "cryptography", + "GitPython", + "hvac", + "jinja2", + "onepasswordconnectsdk", + "pydantic>2", + "PyYAML", + "safir>5", +] [project.scripts] phalanx = "phalanx.cli:main" diff --git a/requirements/dev.in b/requirements/dev.in index de327ec14d..00f15c1027 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -21,8 +21,3 @@ documenteer[guide]>1 sphinx-click sphinx-diagrams sphinx-jinja - -# Greenlet is a SQLAlchemy dependency on x86_64 but not on macOS, so we need -# to explicitly include it. Otherwise, if dependencies are rebuilt on macOS, -# dependency installation will fail on all other platforms. 
-greenlet diff --git a/requirements/dev.txt b/requirements/dev.txt index 8658c4de3d..515080cba5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -1,5 +1,5 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --generate-hashes --output-file requirements/dev.txt requirements/dev.in +# uv pip compile --universal --generate-hashes --output-file requirements/dev.txt requirements/dev.in alabaster==1.0.0 \ --hash=sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e \ --hash=sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b @@ -10,7 +10,7 @@ annotated-types==0.7.0 \ # via # -c requirements/main.txt # pydantic -appnope==0.1.4 \ +appnope==0.1.4 ; platform_system == 'Darwin' \ --hash=sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee \ --hash=sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c # via ipykernel @@ -43,6 +43,77 @@ certifi==2024.8.30 \ # -c requirements/main.txt # requests # sphinx-prompt +cffi==1.17.1 ; implementation_name == 'pypy' \ + --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ + --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ + --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ + --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ + --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ + --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ + --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ + --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ + --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ + --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ + --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ + 
--hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ + --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ + --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ + --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ + --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ + --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ + --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ + --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ + --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ + --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ + --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ + --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ + --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ + --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ + --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ + --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ + --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ + --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ + --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ + --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ + --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ + --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ + --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ + --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ + 
--hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ + --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ + --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ + --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ + --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ + --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ + --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ + --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ + --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ + --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ + --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ + --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ + --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ + --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ + --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ + --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ + --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ + --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ + --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ + --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ + --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ + --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ + --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ + --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ + 
--hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ + --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ + --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ + --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ + --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ + --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ + --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ + --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b + # via + # -c requirements/main.txt + # pyzmq charset-normalizer==3.3.2 \ --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ @@ -145,6 +216,15 @@ click==8.1.7 \ # documenteer # jupyter-cache # sphinx-click +colorama==0.4.6 ; sys_platform == 'win32' or platform_system == 'Windows' \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via + # -c requirements/main.txt + # click + # ipython + # pytest + # sphinx comm==0.2.2 \ --hash=sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e \ --hash=sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3 @@ -298,66 +378,74 @@ graphviz==0.20.3 \ --hash=sha256:09d6bc81e6a9fa392e7ba52135a9d49f1ed62526f96499325930e87ca1b5925d \ --hash=sha256:81f848f2904515d8cd359cc611faba817598d2feaac4027b266aa3eda7b3dde5 # via diagrams -greenlet==3.0.3 \ - --hash=sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67 \ - --hash=sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6 \ - --hash=sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257 \ - 
--hash=sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4 \ - --hash=sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676 \ - --hash=sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61 \ - --hash=sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc \ - --hash=sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca \ - --hash=sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7 \ - --hash=sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728 \ - --hash=sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305 \ - --hash=sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6 \ - --hash=sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379 \ - --hash=sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414 \ - --hash=sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04 \ - --hash=sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a \ - --hash=sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf \ - --hash=sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491 \ - --hash=sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559 \ - --hash=sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e \ - --hash=sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274 \ - --hash=sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb \ - --hash=sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b \ - --hash=sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9 \ - --hash=sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b \ - --hash=sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be \ - --hash=sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506 \ - 
--hash=sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405 \ - --hash=sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113 \ - --hash=sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f \ - --hash=sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5 \ - --hash=sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230 \ - --hash=sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d \ - --hash=sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f \ - --hash=sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a \ - --hash=sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e \ - --hash=sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61 \ - --hash=sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6 \ - --hash=sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d \ - --hash=sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71 \ - --hash=sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22 \ - --hash=sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2 \ - --hash=sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3 \ - --hash=sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067 \ - --hash=sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc \ - --hash=sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881 \ - --hash=sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3 \ - --hash=sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e \ - --hash=sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac \ - --hash=sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53 \ - --hash=sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0 \ - 
--hash=sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b \ - --hash=sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83 \ - --hash=sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41 \ - --hash=sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c \ - --hash=sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf \ - --hash=sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da \ - --hash=sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33 - # via -r requirements/dev.in +greenlet==3.1.0 ; (python_full_version < '3.13' and platform_machine == 'AMD64') or (python_full_version < '3.13' and platform_machine == 'WIN32') or (python_full_version < '3.13' and platform_machine == 'aarch64') or (python_full_version < '3.13' and platform_machine == 'amd64') or (python_full_version < '3.13' and platform_machine == 'ppc64le') or (python_full_version < '3.13' and platform_machine == 'win32') or (python_full_version < '3.13' and platform_machine == 'x86_64') \ + --hash=sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9 \ + --hash=sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17 \ + --hash=sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc \ + --hash=sha256:13ff8c8e54a10472ce3b2a2da007f915175192f18e6495bad50486e87c7f6637 \ + --hash=sha256:1544b8dd090b494c55e60c4ff46e238be44fdc472d2589e943c241e0169bcea2 \ + --hash=sha256:184258372ae9e1e9bddce6f187967f2e08ecd16906557c4320e3ba88a93438c3 \ + --hash=sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6 \ + --hash=sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b \ + --hash=sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf \ + --hash=sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27 \ + 
--hash=sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1 \ + --hash=sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc \ + --hash=sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a \ + --hash=sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b \ + --hash=sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d \ + --hash=sha256:2d004db911ed7b6218ec5c5bfe4cf70ae8aa2223dffbb5b3c69e342bb253cb28 \ + --hash=sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303 \ + --hash=sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99 \ + --hash=sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f \ + --hash=sha256:44cd313629ded43bb3b98737bba2f3e2c2c8679b55ea29ed73daea6b755fe8e7 \ + --hash=sha256:4a3dae7492d16e85ea6045fd11cb8e782b63eac8c8d520c3a92c02ac4573b0a6 \ + --hash=sha256:4b5ea3664eed571779403858d7cd0a9b0ebf50d57d2cdeafc7748e09ef8cd81a \ + --hash=sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc \ + --hash=sha256:5415b9494ff6240b09af06b91a375731febe0090218e2898d2b85f9b92abcda0 \ + --hash=sha256:5fd6e94593f6f9714dbad1aaba734b5ec04593374fa6638df61592055868f8b8 \ + --hash=sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a \ + --hash=sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca \ + --hash=sha256:665b21e95bc0fce5cab03b2e1d90ba9c66c510f1bb5fdc864f3a377d0f553f6b \ + --hash=sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989 \ + --hash=sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19 \ + --hash=sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6 \ + --hash=sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484 \ + --hash=sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd \ + --hash=sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25 \ + 
--hash=sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b \ + --hash=sha256:7f346d24d74c00b6730440f5eb8ec3fe5774ca8d1c9574e8e57c8671bb51b910 \ + --hash=sha256:81eeec4403a7d7684b5812a8aaa626fa23b7d0848edb3a28d2eb3220daddcbd0 \ + --hash=sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5 \ + --hash=sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345 \ + --hash=sha256:9eb4a1d7399b9f3c7ac68ae6baa6be5f9195d1d08c9ddc45ad559aa6b556bce6 \ + --hash=sha256:a0409bc18a9f85321399c29baf93545152d74a49d92f2f55302f122007cfda00 \ + --hash=sha256:a22f4e26400f7f48faef2d69c20dc055a1f3043d330923f9abe08ea0aecc44df \ + --hash=sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811 \ + --hash=sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca \ + --hash=sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8 \ + --hash=sha256:a8870983af660798dc1b529e1fd6f1cefd94e45135a32e58bd70edd694540f33 \ + --hash=sha256:ac0adfdb3a21dc2a24ed728b61e72440d297d0fd3a577389df566651fcd08f97 \ + --hash=sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0 \ + --hash=sha256:b9505a0c8579899057cbefd4ec34d865ab99852baf1ff33a9481eb3924e2da0b \ + --hash=sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682 \ + --hash=sha256:c3967dcc1cd2ea61b08b0b276659242cbce5caca39e7cbc02408222fb9e6ff39 \ + --hash=sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64 \ + --hash=sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f \ + --hash=sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665 \ + --hash=sha256:cfcfb73aed40f550a57ea904629bdaf2e562c68fa1164fa4588e752af6efdc3f \ + --hash=sha256:d0dd943282231480aad5f50f89bdf26690c995e8ff555f26d8a5b9887b559bcc \ + --hash=sha256:d3c59a06c2c28a81a026ff11fbf012081ea34fb9b7052f2ed0366e14896f0a1d \ + --hash=sha256:d45b75b0f3fd8d99f62eb7908cfa6d727b7ed190737dec7fe46d993da550b81a \ + 
--hash=sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0 \ + --hash=sha256:d58ec349e0c2c0bc6669bf2cd4982d2f93bf067860d23a0ea1fe677b0f0b1e09 \ + --hash=sha256:db1b3ccb93488328c74e97ff888604a8b95ae4f35f4f56677ca57a4fc3a4220b \ + --hash=sha256:dd65695a8df1233309b701dec2539cc4b11e97d4fcc0f4185b4a12ce54db0491 \ + --hash=sha256:f9482c2ed414781c0af0b35d9d575226da6b728bd1a720668fa05837184965b7 \ + --hash=sha256:f9671e7282d8c6fcabc32c0fb8d7c0ea8894ae85cee89c9aadc2d7129e1a9954 \ + --hash=sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501 \ + --hash=sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54 + # via sqlalchemy idna==3.8 \ --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 @@ -369,9 +457,9 @@ imagesize==1.4.1 \ --hash=sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b \ --hash=sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a # via sphinx -importlib-metadata==8.4.0 \ - --hash=sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 \ - --hash=sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5 +importlib-metadata==8.5.0 \ + --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \ + --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7 # via # jupyter-cache # myst-nb @@ -516,9 +604,9 @@ matplotlib-inline==0.1.7 \ # via # ipykernel # ipython -mdit-py-plugins==0.4.1 \ - --hash=sha256:1020dfe4e6bfc2c79fb49ae4e3f5b297f5ccd20f010187acc52af2921e27dc6a \ - --hash=sha256:834b8ac23d1cd60cec703646ffd22ae97b7955a6d596eb1d304be1e251ae499c +mdit-py-plugins==0.4.2 \ + --hash=sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636 \ + --hash=sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5 # via myst-parser mdurl==0.1.2 \ 
--hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ @@ -597,13 +685,13 @@ parso==0.8.4 \ --hash=sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18 \ --hash=sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d # via jedi -pexpect==4.9.0 \ +pexpect==4.9.0 ; sys_platform != 'emscripten' and sys_platform != 'win32' \ --hash=sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523 \ --hash=sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f # via ipython -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 +platformdirs==4.3.2 \ + --hash=sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c \ + --hash=sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617 # via jupyter-core pluggy==1.5.0 \ --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ @@ -632,7 +720,7 @@ psutil==6.0.0 \ --hash=sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14 \ --hash=sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0 # via ipykernel -ptyprocess==0.7.0 \ +ptyprocess==0.7.0 ; sys_platform != 'emscripten' and sys_platform != 'win32' \ --hash=sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35 \ --hash=sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220 # via pexpect @@ -650,110 +738,116 @@ pybtex-docutils==1.0.3 \ --hash=sha256:3a7ebdf92b593e00e8c1c538aa9a20bca5d92d84231124715acc964d51d93c6b \ --hash=sha256:8fd290d2ae48e32fcb54d86b0efb8d573198653c7e2447d5bec5847095f430b9 # via sphinxcontrib-bibtex -pydantic==2.8.2 \ - --hash=sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a \ - --hash=sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8 +pycparser==2.22 ; implementation_name == 
'pypy' \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc + # via + # -c requirements/main.txt + # cffi +pydantic==2.9.1 \ + --hash=sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2 \ + --hash=sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612 # via # -c requirements/main.txt # autodoc-pydantic # documenteer # pydantic-settings -pydantic-core==2.20.1 \ - --hash=sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d \ - --hash=sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f \ - --hash=sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686 \ - --hash=sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482 \ - --hash=sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006 \ - --hash=sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83 \ - --hash=sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6 \ - --hash=sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88 \ - --hash=sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86 \ - --hash=sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a \ - --hash=sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6 \ - --hash=sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a \ - --hash=sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6 \ - --hash=sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6 \ - --hash=sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43 \ - --hash=sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c \ - --hash=sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4 \ - 
--hash=sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e \ - --hash=sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203 \ - --hash=sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd \ - --hash=sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1 \ - --hash=sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24 \ - --hash=sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc \ - --hash=sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc \ - --hash=sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3 \ - --hash=sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598 \ - --hash=sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98 \ - --hash=sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331 \ - --hash=sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2 \ - --hash=sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a \ - --hash=sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6 \ - --hash=sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688 \ - --hash=sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91 \ - --hash=sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa \ - --hash=sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b \ - --hash=sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0 \ - --hash=sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840 \ - --hash=sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c \ - --hash=sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd \ - --hash=sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3 \ - --hash=sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231 \ - 
--hash=sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1 \ - --hash=sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953 \ - --hash=sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250 \ - --hash=sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a \ - --hash=sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2 \ - --hash=sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20 \ - --hash=sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434 \ - --hash=sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab \ - --hash=sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703 \ - --hash=sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a \ - --hash=sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2 \ - --hash=sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac \ - --hash=sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611 \ - --hash=sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121 \ - --hash=sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e \ - --hash=sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b \ - --hash=sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09 \ - --hash=sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906 \ - --hash=sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9 \ - --hash=sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7 \ - --hash=sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b \ - --hash=sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987 \ - --hash=sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c \ - --hash=sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b \ - 
--hash=sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e \ - --hash=sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237 \ - --hash=sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1 \ - --hash=sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19 \ - --hash=sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b \ - --hash=sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad \ - --hash=sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0 \ - --hash=sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94 \ - --hash=sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312 \ - --hash=sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f \ - --hash=sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669 \ - --hash=sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1 \ - --hash=sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe \ - --hash=sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99 \ - --hash=sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a \ - --hash=sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a \ - --hash=sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52 \ - --hash=sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c \ - --hash=sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad \ - --hash=sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1 \ - --hash=sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a \ - --hash=sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f \ - --hash=sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a \ - --hash=sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27 
+pydantic-core==2.23.3 \ + --hash=sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801 \ + --hash=sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec \ + --hash=sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295 \ + --hash=sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba \ + --hash=sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e \ + --hash=sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e \ + --hash=sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4 \ + --hash=sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211 \ + --hash=sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea \ + --hash=sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c \ + --hash=sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835 \ + --hash=sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d \ + --hash=sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c \ + --hash=sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c \ + --hash=sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61 \ + --hash=sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83 \ + --hash=sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb \ + --hash=sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1 \ + --hash=sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5 \ + --hash=sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690 \ + --hash=sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b \ + --hash=sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7 \ + --hash=sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70 \ + 
--hash=sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a \ + --hash=sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8 \ + --hash=sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd \ + --hash=sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee \ + --hash=sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1 \ + --hash=sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab \ + --hash=sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958 \ + --hash=sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5 \ + --hash=sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b \ + --hash=sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961 \ + --hash=sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c \ + --hash=sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25 \ + --hash=sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4 \ + --hash=sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4 \ + --hash=sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f \ + --hash=sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326 \ + --hash=sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab \ + --hash=sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8 \ + --hash=sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b \ + --hash=sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6 \ + --hash=sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8 \ + --hash=sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01 \ + --hash=sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc \ + --hash=sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d \ + 
--hash=sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e \ + --hash=sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b \ + --hash=sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855 \ + --hash=sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700 \ + --hash=sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a \ + --hash=sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa \ + --hash=sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541 \ + --hash=sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791 \ + --hash=sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162 \ + --hash=sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611 \ + --hash=sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef \ + --hash=sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe \ + --hash=sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5 \ + --hash=sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba \ + --hash=sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28 \ + --hash=sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa \ + --hash=sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27 \ + --hash=sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4 \ + --hash=sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b \ + --hash=sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2 \ + --hash=sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c \ + --hash=sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8 \ + --hash=sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb \ + --hash=sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c \ + 
--hash=sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e \ + --hash=sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305 \ + --hash=sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8 \ + --hash=sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4 \ + --hash=sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433 \ + --hash=sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45 \ + --hash=sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16 \ + --hash=sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed \ + --hash=sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0 \ + --hash=sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d \ + --hash=sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710 \ + --hash=sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48 \ + --hash=sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423 \ + --hash=sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf \ + --hash=sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9 \ + --hash=sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63 \ + --hash=sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5 \ + --hash=sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb # via # -c requirements/main.txt # pydantic -pydantic-settings==2.4.0 \ - --hash=sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315 \ - --hash=sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88 +pydantic-settings==2.5.2 \ + --hash=sha256:2c912e55fd5794a59bf8c832b9de832dcfdf4778d79ff79b708744eed499a907 \ + --hash=sha256:f90b139682bee4d2065273d5185d71d37ea46cfe57e1b5ae184fc6a0b2484ca0 # via autodoc-pydantic pydata-sphinx-theme==0.12.0 \ 
--hash=sha256:7a07c3ac1fb1cfbb5f7d1e147a9500fb120e329d610e0fa2caac4a645141bdd9 \ @@ -770,9 +864,9 @@ pygments==2.18.0 \ pylatexenc==2.10 \ --hash=sha256:3dd8fd84eb46dc30bee1e23eaab8d8fb5a7f507347b23e5f38ad9675c84f40d3 # via documenteer -pytest==8.3.2 \ - --hash=sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5 \ - --hash=sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce +pytest==8.3.3 \ + --hash=sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181 \ + --hash=sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2 # via # -r requirements/dev.in # pytest-cov @@ -795,6 +889,22 @@ python-dotenv==1.0.1 \ --hash=sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca \ --hash=sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a # via pydantic-settings +pywin32==306 ; platform_python_implementation != 'PyPy' and sys_platform == 'win32' \ + --hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \ + --hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \ + --hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \ + --hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \ + --hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \ + --hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \ + --hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \ + --hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \ + --hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \ + --hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \ + --hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \ + --hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \ + 
--hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \ + --hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0 + # via jupyter-core pyyaml==6.0.2 \ --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ --hash=sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48 \ @@ -1091,9 +1201,9 @@ rpds-py==0.20.0 \ # via # jsonschema # referencing -setuptools==74.1.1 \ - --hash=sha256:2353af060c06388be1cecbf5953dcdb1f38362f87a2356c480b6b4d5fcfc8847 \ - --hash=sha256:fc91b5f89e392ef5b77fe143b17e32f65d3024744fba66dc3afe07201684d766 +setuptools==74.1.2 \ + --hash=sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308 \ + --hash=sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6 # via documenteer six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ @@ -1140,9 +1250,9 @@ sphinx==8.0.2 \ # sphinxcontrib-youtube # sphinxext-opengraph # sphinxext-rediraffe -sphinx-autodoc-typehints==2.3.0 \ - --hash=sha256:3098e2c6d0ba99eacd013eb06861acc9b51c6e595be86ab05c08ee5506ac0c67 \ - --hash=sha256:535c78ed2d6a1bad393ba9f3dfa2602cf424e2631ee207263e07874c38fde084 +sphinx-autodoc-typehints==2.4.0 \ + --hash=sha256:8f8281654ddf5709875429b7120d367f4edee39a131e13d5806e4f779a81bf0f \ + --hash=sha256:c9774d47e7d304cf975e073df49ebf19763dca94ac0295e7013b522b26cb18de # via documenteer sphinx-automodapi==0.17.0 \ --hash=sha256:4d029cb79eef29413e94ab01bb0177ebd2d5ba86e9789b73575afe9c06ae1501 \ @@ -1225,56 +1335,56 @@ sphinxext-rediraffe==0.2.7 \ --hash=sha256:651dcbfae5ffda9ffd534dfb8025f36120e5efb6ea1a33f5420023862b9f725d \ --hash=sha256:9e430a52d4403847f4ffb3a8dd6dfc34a9fe43525305131f52ed899743a5fd8c # via documenteer -sqlalchemy==2.0.33 \ - --hash=sha256:06504d9625e3ef114b39803ebca6f379133acad58a87c33117ddc5df66079915 \ - --hash=sha256:06b30bbc43c6dd8b7cdc509cd2e58f4f1dce867565642e1d1a65e32459c89bd0 \ - 
--hash=sha256:0ea64443a86c3b5a0fd7c93363ad2f9465cb3af61f9920b7c75d1a7bebbeef8a \ - --hash=sha256:1109cc6dc5c9d1223c42186391e6a5509e6d4ab2c30fa629573c10184f742f2e \ - --hash=sha256:17d0c69f66392ad2db1609373a74d1f834b2e632f3f52d446747b8ec220aea53 \ - --hash=sha256:1d81e3aeab456fe24c3f0dcfd4f952a3a5ee45e9c14fc66d34c1d7a60cf7b698 \ - --hash=sha256:2415824ec658891ac38d13a2f36b4ceb2033f034dee1c226f83917589a65f072 \ - --hash=sha256:28c0800c851955f5bd11c0b904638c1343002650d0c071c6fbf0d157cc78627d \ - --hash=sha256:2b1e98507ec2aa200af980d592e936e9dac1c1ec50acc94330ae4b13c55d6fea \ - --hash=sha256:30a3f55be76364b64c83788728faaba782ab282a24909e1994404c2146d39982 \ - --hash=sha256:31e56020832be602201fbf8189f379569cf5c3604cdc4ce79f10dbbfcbf8a0eb \ - --hash=sha256:32a4f38d2efca066ec793451ef6852cb0d9086dc3d5479d88a5a25529d1d1861 \ - --hash=sha256:3ad94634338d8c576b1d47a96c798be186650aa5282072053ce2d12c6f309f82 \ - --hash=sha256:3c64d58e83a68e228b1ae6ebac8721241e9d8cc5e0c0dd11ed5d89155477b243 \ - --hash=sha256:454e9b4355f0051063daebc4060140251c19f33fc5d02151c347431860fd104b \ - --hash=sha256:459099ab8dd43a5edbb99f58ba4730baec457df9c06ebc71434c6b4b78cc8cf9 \ - --hash=sha256:49541a43828e273325c520fbacf786615bd974dad63ff60b8ea1e1216e914d1a \ - --hash=sha256:4f1c44c8d66101e6f627f330d8b5b3de5ad25eedb6df3ce39a2e6f92debbcf15 \ - --hash=sha256:523ae689c023cbf0fe1613101254824515193f85f806ba04611dee83302660b5 \ - --hash=sha256:570ec43e8c3c020abac4f0720baa5fe5187334e3f1e8e1777183c041962b61cc \ - --hash=sha256:60c54b677d4f0a0b2df3b79e89e84d601fb931c720176641742efd66b50601f9 \ - --hash=sha256:61e9a2d68a5a8ca6a84cbc79aa7f2e430ae854d3351d6e9ceb3edf6798797b63 \ - --hash=sha256:63b7d9890f7958dabd95cf98a3f48740fbe2bb0493523aef590e82164fa68194 \ - --hash=sha256:67eb8e0ffbebd3d82ec5079ca5f807a661c574b482785483717857c2acab833a \ - --hash=sha256:684aee5fd811091b2f48006fb3fe6c7f2de4a716ef8d294a2aab762099753133 \ - --hash=sha256:751eaafa907a66dd9a328a9d15c3dcfdcba3ef8dd8f7f4a9771cdacdec45d9bf \ - 
--hash=sha256:77eaf8fdf305266b806a91ae4633edbf86ad37e13bd92ac85e305e7f654c19a5 \ - --hash=sha256:7fd0a28bc24a75326f13735a58272247f65c9e8ee16205eacb2431d6ee94f44a \ - --hash=sha256:816c927dd51e4951d6e79870c945340057a5d8e63543419dee0d247bd67a88f8 \ - --hash=sha256:81759e77a4985abdbac068762a0eaf0f11860fe041ad6da170aae7615ea72531 \ - --hash=sha256:82c72da5be489c8d150deba70d5732398695418df5232bceb52ee323ddd9753b \ - --hash=sha256:8bef11d31a1c48f5943e577d1ef81085ec1550c37552bfc9bf8e5d184ce47142 \ - --hash=sha256:91c93333c2b37ff721dc83b37e28c29de4c502b5612f2d093468037b86aa2be0 \ - --hash=sha256:92249ac94279b8e5f0c0c8420e09b804d0a49d2269f52f549d4cb536c8382434 \ - --hash=sha256:93efa4b72f7cb70555b0f66ee5e113ae40073c57054a72887e50b05bfd97baa4 \ - --hash=sha256:9d035a672d5b3e4793a4a8865c3274a7bbbac7fac67a47b415023b5539105087 \ - --hash=sha256:9e5819822050e6e36e2aa41260d05074c026a1bbb9baa6869170b5ce64db7a4d \ - --hash=sha256:a3926e4ed4a3e956c8b2b0f1140493378c8cd17cad123b4fc1e0f6ecd3e05b19 \ - --hash=sha256:a3da2371628e28ef279f3f756f5e58858fad7820de08508138c9f5f9e4d8f4ac \ - --hash=sha256:ac252bafe8cbadfac7b1e8a74748ffd775e27325186d12b82600b652d9adcb86 \ - --hash=sha256:ae294808afde1b14a1a69aa86a69cadfe391848bbb233a5332a8065e4081cabc \ - --hash=sha256:c40e0213beaf410a151e4329e30c73687838c251c998ba1b312975dbbcb2d05d \ - --hash=sha256:c5d5a733c6af7f392435e673d1b136f6bdf2366033abb35eed680400dc730840 \ - --hash=sha256:c633e2d2f8a7b88c06e276bbe16cb7e62fed815fcbeb69cd9752cea166ecb8e8 \ - --hash=sha256:c9f4f92eee7d06531cc6a5b814e603a0c7639876aab03638dcc70c420a3974f6 \ - --hash=sha256:ca8788dc1baee100f09110f33a01d928cf9df4483d2bfb25a37be31a659d46bb \ - --hash=sha256:d004a623ad4aa8d2eb31b37e65b5e020c9f65a1852b8b9e6301f0e411aca5b9a \ - --hash=sha256:ee2b82b170591ccd19d463c9798a9caeea0cad967a8d2f3264de459f582696d5 \ - --hash=sha256:f7c82a7930126bb5ccfbb73fc1562d52942fbffb2fda2791fab49de249fc202a +sqlalchemy==2.0.34 \ + 
--hash=sha256:10d8f36990dd929690666679b0f42235c159a7051534adb135728ee52828dd22 \ + --hash=sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3 \ + --hash=sha256:165bbe0b376541092bf49542bd9827b048357f4623486096fc9aaa6d4e7c59a2 \ + --hash=sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd \ + --hash=sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d \ + --hash=sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c \ + --hash=sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278 \ + --hash=sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b \ + --hash=sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021 \ + --hash=sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba \ + --hash=sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8 \ + --hash=sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b \ + --hash=sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e \ + --hash=sha256:430093fce0efc7941d911d34f75a70084f12f6ca5c15d19595c18753edb7c33b \ + --hash=sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a \ + --hash=sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82 \ + --hash=sha256:53e68b091492c8ed2bd0141e00ad3089bcc6bf0e6ec4142ad6505b4afe64163e \ + --hash=sha256:5bc08e75ed11693ecb648b7a0a4ed80da6d10845e44be0c98c03f2f880b68ff4 \ + --hash=sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3 \ + --hash=sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b \ + --hash=sha256:6daeb8382d0df526372abd9cb795c992e18eed25ef2c43afe518c73f8cccb721 \ + --hash=sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d \ + --hash=sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a \ + --hash=sha256:7286c353ee6475613d8beff83167374006c6b3e3f0e6491bfe8ca610eb1dec0f \ + 
--hash=sha256:79cb400c360c7c210097b147c16a9e4c14688a6402445ac848f296ade6283bbc \ + --hash=sha256:7cee4c6917857fd6121ed84f56d1dc78eb1d0e87f845ab5a568aba73e78adf83 \ + --hash=sha256:80bd73ea335203b125cf1d8e50fef06be709619eb6ab9e7b891ea34b5baa2287 \ + --hash=sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434 \ + --hash=sha256:8fddde2368e777ea2a4891a3fb4341e910a056be0bb15303bf1b92f073b80c02 \ + --hash=sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db \ + --hash=sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8 \ + --hash=sha256:97b850f73f8abbffb66ccbab6e55a195a0eb655e5dc74624d15cff4bfb35bd74 \ + --hash=sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24 \ + --hash=sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7 \ + --hash=sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0 \ + --hash=sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5 \ + --hash=sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f \ + --hash=sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c \ + --hash=sha256:bcd18441a49499bf5528deaa9dee1f5c01ca491fc2791b13604e8f972877f812 \ + --hash=sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768 \ + --hash=sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8 \ + --hash=sha256:c3330415cd387d2b88600e8e26b510d0370db9b7eaf984354a43e19c40df2e2b \ + --hash=sha256:c7db3db284a0edaebe87f8f6642c2b2c27ed85c3e70064b84d1c9e4ec06d5d84 \ + --hash=sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796 \ + --hash=sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b \ + --hash=sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2 \ + --hash=sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06 \ + --hash=sha256:fb1b30f31a36c7f3fee848391ff77eebdd3af5750bf95fbf9b8b5323edfdb4ec \ + 
--hash=sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580 # via jupyter-cache stack-data==0.6.3 \ --hash=sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9 \ diff --git a/requirements/main.in b/requirements/main.in deleted file mode 100644 index 8847952ab1..0000000000 --- a/requirements/main.in +++ /dev/null @@ -1,22 +0,0 @@ -# Editable runtime dependencies (equivalent to project.dependencies). -# Add direct runtime dependencies here, as well as implicit dependencies -# with constrained versions. These should be sufficient to run the phalanx -# command-line tool. -# -# After editing, update requirements/main.txt by running: -# make update-deps - -bcrypt -click -cryptography -GitPython -hvac -jinja2 -onepasswordconnectsdk -pydantic>2 -PyYAML -safir>5 - -# Uncomment this, change the branch, comment out safir above, and run make -# update-deps-no-hashes to test against an unreleased version of Safir. -# safir @ git+https://github.com/lsst-sqre/safir@main diff --git a/requirements/main.txt b/requirements/main.txt index 67830cad11..2bbd0d4a85 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -1,5 +1,5 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --generate-hashes --output-file requirements/main.txt requirements/main.in +# uv pip compile --universal --generate-hashes --output-file requirements/main.txt pyproject.toml annotated-types==0.7.0 \ --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \ --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89 @@ -38,7 +38,7 @@ bcrypt==4.2.0 \ --hash=sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8 \ --hash=sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221 \ --hash=sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db - # via -r requirements/main.in + # via phalanx (pyproject.toml) certifi==2024.8.30 \ 
--hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 @@ -46,74 +46,74 @@ certifi==2024.8.30 \ # httpcore # httpx # requests -cffi==1.17.0 \ - --hash=sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f \ - --hash=sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab \ - --hash=sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499 \ - --hash=sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058 \ - --hash=sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693 \ - --hash=sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb \ - --hash=sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377 \ - --hash=sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885 \ - --hash=sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2 \ - --hash=sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401 \ - --hash=sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4 \ - --hash=sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b \ - --hash=sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59 \ - --hash=sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f \ - --hash=sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c \ - --hash=sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555 \ - --hash=sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa \ - --hash=sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424 \ - --hash=sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb \ - --hash=sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2 \ - --hash=sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8 \ - 
--hash=sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e \ - --hash=sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9 \ - --hash=sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82 \ - --hash=sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828 \ - --hash=sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759 \ - --hash=sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc \ - --hash=sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118 \ - --hash=sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf \ - --hash=sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932 \ - --hash=sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a \ - --hash=sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29 \ - --hash=sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206 \ - --hash=sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2 \ - --hash=sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c \ - --hash=sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c \ - --hash=sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0 \ - --hash=sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a \ - --hash=sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195 \ - --hash=sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6 \ - --hash=sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9 \ - --hash=sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc \ - --hash=sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb \ - --hash=sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0 \ - --hash=sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7 \ - 
--hash=sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb \ - --hash=sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a \ - --hash=sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492 \ - --hash=sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720 \ - --hash=sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42 \ - --hash=sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7 \ - --hash=sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d \ - --hash=sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d \ - --hash=sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb \ - --hash=sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4 \ - --hash=sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2 \ - --hash=sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b \ - --hash=sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8 \ - --hash=sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e \ - --hash=sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204 \ - --hash=sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3 \ - --hash=sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150 \ - --hash=sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4 \ - --hash=sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76 \ - --hash=sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e \ - --hash=sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb \ - --hash=sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91 +cffi==1.17.1 ; platform_python_implementation != 'PyPy' \ + --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ + 
--hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ + --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ + --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ + --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ + --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ + --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ + --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ + --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ + --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ + --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ + --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ + --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ + --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ + --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ + --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ + --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ + --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ + --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ + --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ + --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ + --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ + --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ + --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ + --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ + 
--hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ + --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ + --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ + --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ + --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ + --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ + --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ + --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ + --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ + --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ + --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ + --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ + --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ + --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ + --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ + --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ + --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ + --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ + --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ + --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ + --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ + --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ + --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ + --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ + 
--hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ + --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ + --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ + --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ + --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ + --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ + --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ + --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ + --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ + --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ + --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ + --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ + --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ + --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ + --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ + --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ + --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ + --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b # via cryptography charset-normalizer==3.3.2 \ --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ @@ -211,8 +211,12 @@ click==8.1.7 \ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de # via - # -r requirements/main.in + # phalanx (pyproject.toml) # safir +colorama==0.4.6 ; platform_system == 'Windows' \ + 
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via click cryptography==43.0.1 \ --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ @@ -242,12 +246,12 @@ cryptography==43.0.1 \ --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 # via - # -r requirements/main.in + # phalanx (pyproject.toml) # pyjwt # safir -fastapi==0.112.2 \ - --hash=sha256:3d4729c038414d5193840706907a41839d839523da6ed0c2811f1168cac1798c \ - --hash=sha256:db84b470bd0e2b1075942231e90e3577e12a903c4dc8696f0d206a7904a7af1c +fastapi==0.114.1 \ + --hash=sha256:1d7bbbeabbaae0acb0c22f0ab0b040f642d3093ca3645f8c876b6f91391861d8 \ + --hash=sha256:5d4746f6e4b7dff0b4f6b6c6d5445645285f662fe75886e99af7ee2d6b58bb3e # via safir gidgethub==5.3.0 \ --hash=sha256:4dd92f2252d12756b13f9dd15cde322bfb0d625b6fb5d680da1567ec74b462c0 \ @@ -260,7 +264,7 @@ gitdb==4.0.11 \ gitpython==3.1.43 \ --hash=sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c \ --hash=sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff - # via -r requirements/main.in + # via phalanx (pyproject.toml) h11==0.14.0 \ --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 @@ -278,7 +282,7 @@ httpx==0.23.3 \ hvac==2.3.0 \ --hash=sha256:1b85e3320e8642dd82f234db63253cda169a817589e823713dc5fca83119b1e2 \ --hash=sha256:a3afc5710760b6ee9b3571769df87a0333da45da05a5f9f963e1d3925a84be7d - # via -r requirements/main.in + # via phalanx (pyproject.toml) idna==3.8 \ --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ 
--hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 @@ -289,7 +293,7 @@ idna==3.8 \ jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d - # via -r requirements/main.in + # via phalanx (pyproject.toml) markupsafe==2.1.5 \ --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ @@ -355,108 +359,108 @@ markupsafe==2.1.5 \ onepasswordconnectsdk==1.5.1 \ --hash=sha256:8924c614ffed98f29faada03dba940dc0bc47851b1f5f4ef7e312e43c10ec25b \ --hash=sha256:f8e033dbb5dcc5ff08fbdbbfe329655adce6ec44cfe54652474d7e31175de48e - # via -r requirements/main.in -pycparser==2.22 \ + # via phalanx (pyproject.toml) +pycparser==2.22 ; platform_python_implementation != 'PyPy' \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pydantic==2.8.2 \ - --hash=sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a \ - --hash=sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8 +pydantic==2.9.1 \ + --hash=sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2 \ + --hash=sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612 # via - # -r requirements/main.in + # phalanx (pyproject.toml) # fastapi # safir -pydantic-core==2.20.1 \ - --hash=sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d \ - --hash=sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f \ - --hash=sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686 \ - --hash=sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482 \ - --hash=sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006 \ - 
--hash=sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83 \ - --hash=sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6 \ - --hash=sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88 \ - --hash=sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86 \ - --hash=sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a \ - --hash=sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6 \ - --hash=sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a \ - --hash=sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6 \ - --hash=sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6 \ - --hash=sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43 \ - --hash=sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c \ - --hash=sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4 \ - --hash=sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e \ - --hash=sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203 \ - --hash=sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd \ - --hash=sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1 \ - --hash=sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24 \ - --hash=sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc \ - --hash=sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc \ - --hash=sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3 \ - --hash=sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598 \ - --hash=sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98 \ - --hash=sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331 \ - --hash=sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2 \ - 
--hash=sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a \ - --hash=sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6 \ - --hash=sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688 \ - --hash=sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91 \ - --hash=sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa \ - --hash=sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b \ - --hash=sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0 \ - --hash=sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840 \ - --hash=sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c \ - --hash=sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd \ - --hash=sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3 \ - --hash=sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231 \ - --hash=sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1 \ - --hash=sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953 \ - --hash=sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250 \ - --hash=sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a \ - --hash=sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2 \ - --hash=sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20 \ - --hash=sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434 \ - --hash=sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab \ - --hash=sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703 \ - --hash=sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a \ - --hash=sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2 \ - --hash=sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac \ - 
--hash=sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611 \ - --hash=sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121 \ - --hash=sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e \ - --hash=sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b \ - --hash=sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09 \ - --hash=sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906 \ - --hash=sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9 \ - --hash=sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7 \ - --hash=sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b \ - --hash=sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987 \ - --hash=sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c \ - --hash=sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b \ - --hash=sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e \ - --hash=sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237 \ - --hash=sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1 \ - --hash=sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19 \ - --hash=sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b \ - --hash=sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad \ - --hash=sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0 \ - --hash=sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94 \ - --hash=sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312 \ - --hash=sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f \ - --hash=sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669 \ - --hash=sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1 \ - 
--hash=sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe \ - --hash=sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99 \ - --hash=sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a \ - --hash=sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a \ - --hash=sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52 \ - --hash=sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c \ - --hash=sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad \ - --hash=sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1 \ - --hash=sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a \ - --hash=sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f \ - --hash=sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a \ - --hash=sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27 +pydantic-core==2.23.3 \ + --hash=sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801 \ + --hash=sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec \ + --hash=sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295 \ + --hash=sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba \ + --hash=sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e \ + --hash=sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e \ + --hash=sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4 \ + --hash=sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211 \ + --hash=sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea \ + --hash=sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c \ + --hash=sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835 \ + 
--hash=sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d \ + --hash=sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c \ + --hash=sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c \ + --hash=sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61 \ + --hash=sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83 \ + --hash=sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb \ + --hash=sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1 \ + --hash=sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5 \ + --hash=sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690 \ + --hash=sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b \ + --hash=sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7 \ + --hash=sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70 \ + --hash=sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a \ + --hash=sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8 \ + --hash=sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd \ + --hash=sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee \ + --hash=sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1 \ + --hash=sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab \ + --hash=sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958 \ + --hash=sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5 \ + --hash=sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b \ + --hash=sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961 \ + --hash=sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c \ + --hash=sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25 \ + 
--hash=sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4 \ + --hash=sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4 \ + --hash=sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f \ + --hash=sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326 \ + --hash=sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab \ + --hash=sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8 \ + --hash=sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b \ + --hash=sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6 \ + --hash=sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8 \ + --hash=sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01 \ + --hash=sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc \ + --hash=sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d \ + --hash=sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e \ + --hash=sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b \ + --hash=sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855 \ + --hash=sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700 \ + --hash=sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a \ + --hash=sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa \ + --hash=sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541 \ + --hash=sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791 \ + --hash=sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162 \ + --hash=sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611 \ + --hash=sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef \ + --hash=sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe \ + 
--hash=sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5 \ + --hash=sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba \ + --hash=sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28 \ + --hash=sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa \ + --hash=sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27 \ + --hash=sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4 \ + --hash=sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b \ + --hash=sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2 \ + --hash=sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c \ + --hash=sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8 \ + --hash=sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb \ + --hash=sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c \ + --hash=sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e \ + --hash=sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305 \ + --hash=sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8 \ + --hash=sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4 \ + --hash=sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433 \ + --hash=sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45 \ + --hash=sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16 \ + --hash=sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed \ + --hash=sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0 \ + --hash=sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d \ + --hash=sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710 \ + --hash=sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48 \ + 
--hash=sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423 \ + --hash=sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf \ + --hash=sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9 \ + --hash=sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63 \ + --hash=sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5 \ + --hash=sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb # via # pydantic # safir @@ -522,7 +526,7 @@ pyyaml==6.0.2 \ --hash=sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba \ --hash=sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12 \ --hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4 - # via -r requirements/main.in + # via phalanx (pyproject.toml) requests==2.32.3 \ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 @@ -534,7 +538,7 @@ rfc3986==1.5.0 \ safir==6.3.0 \ --hash=sha256:2fcd64bf37dd42eacedd6378341b2487cd06dbaf1f28403301b8d80f60a4fb56 \ --hash=sha256:6ad7dad520d87d853628849ef95a348c55dbd0180ad3f15c1cf2f7f8fe32f915 - # via -r requirements/main.in + # via phalanx (pyproject.toml) safir-logging==6.3.0 \ --hash=sha256:491dfe85de89a3f2daa29c491a22a0551f0961444490418d91ec50c040ae16eb \ --hash=sha256:e14754ab0bba6cfa248c3fc4cb5ca28410d97ff3965e831eab6581ed37485e79 @@ -554,9 +558,9 @@ sniffio==1.3.1 \ # anyio # httpcore # httpx -starlette==0.38.4 \ - --hash=sha256:526f53a77f0e43b85f583438aee1a940fd84f8fd610353e8b0c1a77ad8a87e76 \ - --hash=sha256:53a7439060304a208fea17ed407e998f46da5e5d9b1addfea3040094512a6379 +starlette==0.38.5 \ + --hash=sha256:04a92830a9b6eb1442c766199d62260c3d4dc9c4f9188360626b1e0273cb7077 \ + --hash=sha256:632f420a9d13e3ee2a6f18f437b0a9f1faecb0bc42e1942aa2ea0e379a4c4206 # via # fastapi # safir diff --git a/requirements/tox.txt 
b/requirements/tox.txt index 5acb5ec80e..5a2eb7afd2 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -1,5 +1,5 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --generate-hashes --output-file requirements/tox.txt requirements/tox.in +# uv pip compile --universal --generate-hashes --output-file requirements/tox.txt requirements/tox.in cachetools==5.5.0 \ --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a @@ -11,14 +11,17 @@ chardet==5.2.0 \ colorama==0.4.6 \ --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 - # via tox + # via + # -c requirements/dev.txt + # -c requirements/main.txt + # tox distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock==3.16.0 \ + --hash=sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec \ + --hash=sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609 # via # tox # virtualenv @@ -30,9 +33,9 @@ packaging==24.1 \ # pyproject-api # tox # tox-uv -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 +platformdirs==4.3.2 \ + --hash=sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c \ + --hash=sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617 # via # -c requirements/dev.txt # tox @@ -47,37 +50,37 @@ pyproject-api==1.7.1 \ 
--hash=sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb \ --hash=sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827 # via tox -tox==4.18.0 \ - --hash=sha256:0a457400cf70615dc0627eb70d293e80cd95d8ce174bb40ac011011f0c03a249 \ - --hash=sha256:5dfa1cab9f146becd6e351333a82f9e0ade374451630ba65ee54584624c27b58 +tox==4.18.1 \ + --hash=sha256:35d472032ee1f73fe20c3e0e73d7073a4e85075c86ff02c576f9fc7c6a15a578 \ + --hash=sha256:3c0c96bc3a568a5c7e66387a4cfcf8c875b52e09f4d47c9f7a277ec82f1a0b11 # via # -r requirements/tox.in # tox-uv -tox-uv==1.11.2 \ - --hash=sha256:7f8f1737b3277e1cddcb5b89fcc5931d04923562c940ae60f29e140908566df2 \ - --hash=sha256:a7aded5c3fb69f055b523357988c1055bb573e91bfd7ecfb9b5233ebcab5d10b +tox-uv==1.11.3 \ + --hash=sha256:316f559ae5525edec12791d9e1f393e405ded5b7e7d50fbaee4726676951f49a \ + --hash=sha256:d434787406ff2854600c1ceaa555519080026208cf7f65bb5d4b2d7c9c4776de # via -r requirements/tox.in -uv==0.4.4 \ - --hash=sha256:051589ab42bfdb2997ea61a56a78a2bab0b6ae7d014f96a578dcc5f9d8766757 \ - --hash=sha256:0c9ada2fbfe3ca29c50914acd714fe35100ab56fdb83510d1aadd00d55191d1b \ - --hash=sha256:0d0af47198dc4ca635540b72c933219c6c967885788fd1f651112f168fcade0a \ - --hash=sha256:0d51db6bf89b7b0a4aae229f7efee00fc52a1d7391605f3b789996f9c7986653 \ - --hash=sha256:14f06ed0e0f163c9ec8b26f4fc2df14530080e405d7348ad0c59f9c296c55918 \ - --hash=sha256:3e9139f171329b6fa40a064f9e7923848d44e60bc31da138758695ec34d8200d \ - --hash=sha256:433c69a6e7f35c865172d69e51bf78521a9d87eac6f8772af04667f5d25cc9a9 \ - --hash=sha256:718bbdf0675eab8d15f52041725b60743a9496fde3dc493d34913aa4a15f0a81 \ - --hash=sha256:8ba084d6d5baf92a3cfe41a20fd912dea4e2ea3eca8401f1892394c5c2b79c92 \ - --hash=sha256:918d4da22f9ff4403dc72dfb4c58c994400a64797a3a17f00f5c0d3717d1cb8c \ - --hash=sha256:9ba6abad0a531181bcb90b9af818e2490211f2d4b3eb83eb2a27df1f07f299fb \ - --hash=sha256:c1b7db1db176e46184c974ed30687671ec5d67cfcce34c7ed4a63141ecb6c70e \ - 
--hash=sha256:d2e2c9db83efd81b0b8dcaa45533b461b058d5aec49a6ed6cc98832e56e45856 \ - --hash=sha256:d66242bba1bbec847b77fcdc7d3191eab733189213a5d2717dbda1ff04e24b46 \ - --hash=sha256:da3a77ad858be5239ae33509ddfeaf097d7bda77fc0b2a42994cbec32cef4769 \ - --hash=sha256:dc881ea11dcb443940bbac5d7601cd7c74f80e7086c2e310e86ebf10d1c8816b \ - --hash=sha256:dd94e5be00a0a06ab5cbba7014720440a12bae73150d8146bc3535f3a22ff069 \ - --hash=sha256:f866f9a44982ef8041a982c3197a17e18d4a8ac7717b4462477ea0ca6a088a52 +uv==0.4.9 \ + --hash=sha256:0340d2c7bf9afe0098e3301c1885de10e317232cfa346f0ac16374cee284a4cb \ + --hash=sha256:060af185481ef46ab97008cad330f3cd7a7aa1ce3d219b67d27c5a2a551ac2ea \ + --hash=sha256:1a8acc7abb2174bd3c8f5fc98345f2bb602f31b7558e37f3d23bef99ddd58dec \ + --hash=sha256:34bce9f4892130b01a7605d27bbeb71395e9b031d793123c250b79187ee307ca \ + --hash=sha256:45bf0cead2436b1977f71669e945db19990ca70a7765111fb951545815467bb6 \ + --hash=sha256:52101bc8652b4284b78fac52ed7878f3bae414bc4076c377735962666b309dde \ + --hash=sha256:5422680436f4cebef945bb2e562e01c02a4fa0a95f85d1b8010f2ee868a0b8c1 \ + --hash=sha256:55cf2522262ef663114bda5d80375ddc7f7af0d054df89426372a0d494380875 \ + --hash=sha256:566d4d7a475aacd21dbb4aba053cd4f4f52d65acdef2c83c59bcdff08756701e \ + --hash=sha256:5b66a52cb60a2882a882bc5f13afa6daf3172a54fe9fb998529d19418d5aed18 \ + --hash=sha256:630a6fe215829f734278e618c1633c2bb88ee03dc6a92ae9890fabd98ee810a9 \ + --hash=sha256:69529b6bf5de6ec8fbe8e022f5bcbaef778e76136fc37fae6ec7a8b18b3f9024 \ + --hash=sha256:71e87038fcc9f61b2d6f66c4a92354c6d0abe4baae21bb90241693f161ddeaa1 \ + --hash=sha256:8869637ea6231f66fe643be22f9334874db3496844b3d8bfd8efd4227ded3d44 \ + --hash=sha256:9c9b70f016f28cc05633b564d8690cfdb7ebac4d2210d9158819947841e00347 \ + --hash=sha256:b54a9022e9e1fdbf3ae15ef340a0d1d1847dd739df5023896aa8d97d88af1efe \ + --hash=sha256:bf834f7f360a192372d879eda86f6a1dd94195faf68154dcf7c90247098d2bb2 \ + --hash=sha256:f50cbdfbc8399e1211c580e47f42650a184541ee398af95ad29bf9a2e977baba 
# via tox-uv -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 +virtualenv==20.26.4 \ + --hash=sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55 \ + --hash=sha256:c17f4e0f3e6036e9f26700446f85c76ab11df65ff6d8a9cbfad9f71aabfcf23c # via tox From d9e60d1312a554a2b10766eac971e5f99f861d80 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Wed, 11 Sep 2024 14:48:25 -0700 Subject: [PATCH 050/567] Update secrets.yaml schema The new version of Pydantic generates a slightly different schema for secrets.yaml. --- docs/extras/schemas/secrets.json | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/docs/extras/schemas/secrets.json b/docs/extras/schemas/secrets.json index 415c78af8a..638d86b804 100644 --- a/docs/extras/schemas/secrets.json +++ b/docs/extras/schemas/secrets.json @@ -60,11 +60,7 @@ "title": "Condition" }, "onepassword": { - "allOf": [ - { - "$ref": "#/$defs/SecretOnepasswordConfig" - } - ], + "$ref": "#/$defs/SecretOnepasswordConfig", "description": "Configuration for how the secret is stored in 1Password", "title": "1Password configuration" }, From 72ad6b0700b9389fee94ef2a92082bd4a24b35c2 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 10 Sep 2024 17:58:22 -0300 Subject: [PATCH 051/567] exposurelog: add butler secrets --- applications/exposurelog/secrets-usdfdev.yaml | 12 ++++++++++++ applications/exposurelog/templates/deployment.yaml | 6 ++++++ 2 files changed, 18 insertions(+) create mode 100644 applications/exposurelog/secrets-usdfdev.yaml diff --git a/applications/exposurelog/secrets-usdfdev.yaml b/applications/exposurelog/secrets-usdfdev.yaml new file mode 100644 index 0000000000..317e9c5aab --- /dev/null +++ b/applications/exposurelog/secrets-usdfdev.yaml @@ -0,0 +1,12 @@ +"aws-credentials.ini": + description: >- + S3 Butler credentials in AWS format. 
+ copy: + application: nublado + key: "aws-credentials.ini" +"postgres-credentials.txt": + description: >- + PostgreSQL credentials in its pgpass format for the Butler database. + copy: + application: nublado + key: "postgres-credentials.txt" diff --git a/applications/exposurelog/templates/deployment.yaml b/applications/exposurelog/templates/deployment.yaml index 775d9a7635..8f7e1f0ca0 100644 --- a/applications/exposurelog/templates/deployment.yaml +++ b/applications/exposurelog/templates/deployment.yaml @@ -86,6 +86,8 @@ spec: - name: volume2 mountPath: /volume_2 {{- end }} + - name: aws-secrets + mountPath: /var/secrets/aws - name: tmp mountPath: /tmp volumes: @@ -110,6 +112,10 @@ spec: readOnly: true server: {{ .Values.config.nfs_server_3 }} {{- end }} + - name: aws-secrets + secret: + defaultMode: 420 + secretName: exposurelog - name: tmp emptyDir: {} {{- with .Values.nodeSelector }} From f39451012fc55066316743458f6e09cc9ee12369 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Wed, 11 Sep 2024 16:06:41 -0300 Subject: [PATCH 052/567] exposurelog: add init container for secrets permission fix --- .../exposurelog/templates/deployment.yaml | 23 ++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/applications/exposurelog/templates/deployment.yaml b/applications/exposurelog/templates/deployment.yaml index 8f7e1f0ca0..312033545d 100644 --- a/applications/exposurelog/templates/deployment.yaml +++ b/applications/exposurelog/templates/deployment.yaml @@ -86,10 +86,25 @@ spec: - name: volume2 mountPath: /volume_2 {{- end }} - - name: aws-secrets - mountPath: /var/secrets/aws + - name: user-secrets + mountPath: /var/secrets/butler - name: tmp mountPath: /tmp + initContainers: + - name: secret-perm-fixer + image: busybox + command: + - "/bin/sh" + - "-c" + - | + cp /secrets/* /etc/secrets && \ + chown 1000:1000 /etc/secrets/* && \ + chmod 0400 /etc/secrets/* + volumeMounts: + - name: butler-secrets + mountPath: /secrets + - name: 
user-secrets + mountPath: /etc/secrets volumes: {{- if .Values.config.nfs_path_1 }} - name: volume1 @@ -112,10 +127,12 @@ spec: readOnly: true server: {{ .Values.config.nfs_server_3 }} {{- end }} - - name: aws-secrets + - name: butler-secrets secret: defaultMode: 420 secretName: exposurelog + - name: user-secrets + emptyDir: {} - name: tmp emptyDir: {} {{- with .Values.nodeSelector }} From 137807b3b9e04ad487d99682dc30886798285097 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Wed, 11 Sep 2024 16:14:22 -0300 Subject: [PATCH 053/567] exposurelog: add environment variable to point to `aws-credentials.ini` file --- applications/exposurelog/templates/deployment.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/applications/exposurelog/templates/deployment.yaml b/applications/exposurelog/templates/deployment.yaml index 312033545d..714ccb5136 100644 --- a/applications/exposurelog/templates/deployment.yaml +++ b/applications/exposurelog/templates/deployment.yaml @@ -77,6 +77,8 @@ spec: value: {{ .Values.db.database | quote }} - name: SITE_ID value: {{ .Values.config.site_id | quote }} + - name: AWS_SHARED_CREDENTIALS_FILE + value: /var/secrets/butler/aws-credentials.ini volumeMounts: {{- if .Values.config.nfs_path_1 }} - name: volume1 From 7d3a98f0297e3dc3c8c9b253fab2a92c0b449587 Mon Sep 17 00:00:00 2001 From: Stelios Voutsinas Date: Thu, 12 Sep 2024 11:50:26 -0700 Subject: [PATCH 054/567] Upgrade version of ssotap to 1.18.6 --- charts/cadc-tap/README.md | 2 +- charts/cadc-tap/values.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/charts/cadc-tap/README.md b/charts/cadc-tap/README.md index 3e13b7fc98..f0ab35a1aa 100644 --- a/charts/cadc-tap/README.md +++ b/charts/cadc-tap/README.md @@ -31,7 +31,7 @@ IVOA TAP service | config.pg.host | string | None, must be set if backend is `pg` | Host to connect to | | config.pg.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the TAP image | | config.pg.image.repository | 
string | `"ghcr.io/lsst-sqre/tap-postgres-service"` | TAP image to use | -| config.pg.image.tag | string | `"1.18.5"` | Tag of TAP image to use | +| config.pg.image.tag | string | `"1.18.6"` | Tag of TAP image to use | | config.pg.username | string | None, must be set if backend is `pg` | Username to connect with | | config.qserv.host | string | `"mock-db:3306"` (the mock QServ) | QServ hostname:port to connect to | | config.qserv.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the TAP image | diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index 317a89e879..d61fdf37af 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -71,7 +71,7 @@ config: pullPolicy: "IfNotPresent" # -- Tag of TAP image to use - tag: "1.18.5" + tag: "1.18.6" qserv: # -- QServ hostname:port to connect to From 35fd94bb00e14cc8f09d46cc08db933d0949505e Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Fri, 6 Sep 2024 13:46:36 -0400 Subject: [PATCH 055/567] Deploy Times Square 0.13.0 See https://github.com/lsst-sqre/times-square/pull/81 https://github.com/lsst-sqre/times-square/releases/tag/0.13.0 --- applications/times-square/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/times-square/Chart.yaml b/applications/times-square/Chart.yaml index daa934699b..5a48fbb4f9 100644 --- a/applications/times-square/Chart.yaml +++ b/applications/times-square/Chart.yaml @@ -8,7 +8,7 @@ sources: type: application # The default version tag of the times-square docker image -appVersion: "0.12.0" +appVersion: "0.13.0" dependencies: - name: redis From ce778b0ea064abc47f7117bee0e9f8e181547e7d Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Mon, 9 Sep 2024 16:13:51 -0400 Subject: [PATCH 056/567] Deploy Noteburst 0.13.0 https://github.com/lsst-sqre/noteburst/releases/tag/0.13.0 --- applications/noteburst/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/noteburst/Chart.yaml 
b/applications/noteburst/Chart.yaml index dbb9d3de95..bcac46aef7 100644 --- a/applications/noteburst/Chart.yaml +++ b/applications/noteburst/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v2 name: noteburst version: 1.0.0 -appVersion: "0.12.1" +appVersion: "0.13.0" description: Noteburst is a notebook execution service for the Rubin Science Platform. type: application home: https://noteburst.lsst.io/ From b333b95d9645093e963b7914fcf97cee36db34ed Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Wed, 11 Sep 2024 18:24:14 -0400 Subject: [PATCH 057/567] Add timeout configurations for Times Square This adds TS_CHECK_RUN_TIMEOUT and TS_DEFAULT_EXECUTION_TIMEOUT environment variables. --- applications/times-square/README.md | 2 ++ applications/times-square/templates/configmap.yaml | 2 ++ applications/times-square/values.yaml | 6 ++++++ 3 files changed, 10 insertions(+) diff --git a/applications/times-square/README.md b/applications/times-square/README.md index c690816d26..e1c49b6ed0 100644 --- a/applications/times-square/README.md +++ b/applications/times-square/README.md @@ -23,8 +23,10 @@ An API service for managing and rendering parameterized Jupyter notebooks. 
| cloudsql.instanceConnectionName | string | `""` | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.serviceAccount | string | `""` | The Google service account that has an IAM binding to the `times-square` Kubernetes service accounts and has the `cloudsql.client` role | | config.databaseUrl | string | None, must be set | URL for the PostgreSQL database | +| config.defaultExecutionTimeout | string | `"60"` | Default execution timeout for notebooks in seconds | | config.enableGitHubApp | string | `"False"` | Toggle to enable the GitHub App functionality | | config.githubAppId | string | `""` | GitHub application ID | +| config.githubCheckRunTimeout | string | `"600"` | Timeout for GitHub check runs in seconds | | config.githubOrgs | string | `"lsst,lsst-sqre,lsst-dm,lsst-ts,lsst-sitcom,lsst-pst"` | GitHub organizations that can sync repos to Times Square (comma-separated). | | config.logLevel | string | `"INFO"` | Logging level: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL" | | config.name | string | `"times-square"` | Name of the service. 
| diff --git a/applications/times-square/templates/configmap.yaml b/applications/times-square/templates/configmap.yaml index 739914e85b..d11584634f 100644 --- a/applications/times-square/templates/configmap.yaml +++ b/applications/times-square/templates/configmap.yaml @@ -16,3 +16,5 @@ data: TS_ENABLE_GITHUB_APP: {{ .Values.config.enableGitHubApp | quote }} TS_GITHUB_APP_ID: {{ .Values.config.githubAppId | quote }} TS_GITHUB_ORGS: {{ .Values.config.githubOrgs | quote }} + TS_CHECK_RUN_TIMEOUT: {{ .Values.config.githubCheckRunTimeout | quote }} + TS_DEFAULT_EXECUTION_TIMEOUT: {{ .Values.config.defaultExecutionTimeout | quote }} diff --git a/applications/times-square/values.yaml b/applications/times-square/values.yaml index ac482b06df..595bb9677b 100644 --- a/applications/times-square/values.yaml +++ b/applications/times-square/values.yaml @@ -136,6 +136,12 @@ config: # -- GitHub organizations that can sync repos to Times Square (comma-separated). githubOrgs: "lsst,lsst-sqre,lsst-dm,lsst-ts,lsst-sitcom,lsst-pst" + # -- Timeout for GitHub check runs in seconds + githubCheckRunTimeout: "600" # 10 minutes + + # -- Default execution timeout for notebooks in seconds + defaultExecutionTimeout: "60" # 1 minute + worker: # -- Enable liveness checks for the arq queue enableLivenessCheck: true From ada629f17fdd345701b3e3fba2d2dc791fa06c8d Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Thu, 12 Sep 2024 12:55:34 -0400 Subject: [PATCH 058/567] Increase default Times Square timeouts Make the defaults large enough to handle some of the heavier-weight Times Square notebooks like https://usdf-rsp-dev.slac.stanford.edu/times-square/github/lsst/schedview_notebooks/nightly/scheduler-nightsum that currently take 200 seconds to execute. On data-dev I've kept the shorter timeout for testing. 
--- applications/times-square/README.md | 4 ++-- applications/times-square/values-idfdev.yaml | 2 ++ applications/times-square/values.yaml | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/applications/times-square/README.md b/applications/times-square/README.md index e1c49b6ed0..7385571ed7 100644 --- a/applications/times-square/README.md +++ b/applications/times-square/README.md @@ -23,10 +23,10 @@ An API service for managing and rendering parameterized Jupyter notebooks. | cloudsql.instanceConnectionName | string | `""` | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.serviceAccount | string | `""` | The Google service account that has an IAM binding to the `times-square` Kubernetes service accounts and has the `cloudsql.client` role | | config.databaseUrl | string | None, must be set | URL for the PostgreSQL database | -| config.defaultExecutionTimeout | string | `"60"` | Default execution timeout for notebooks in seconds | +| config.defaultExecutionTimeout | string | `"300"` | Default execution timeout for notebooks in seconds | | config.enableGitHubApp | string | `"False"` | Toggle to enable the GitHub App functionality | | config.githubAppId | string | `""` | GitHub application ID | -| config.githubCheckRunTimeout | string | `"600"` | Timeout for GitHub check runs in seconds | +| config.githubCheckRunTimeout | string | `"900"` | Timeout for GitHub check runs in seconds | | config.githubOrgs | string | `"lsst,lsst-sqre,lsst-dm,lsst-ts,lsst-sitcom,lsst-pst"` | GitHub organizations that can sync repos to Times Square (comma-separated). | | config.logLevel | string | `"INFO"` | Logging level: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL" | | config.name | string | `"times-square"` | Name of the service. 
| diff --git a/applications/times-square/values-idfdev.yaml b/applications/times-square/values-idfdev.yaml index de7c4d6e60..9adb89ef9b 100644 --- a/applications/times-square/values-idfdev.yaml +++ b/applications/times-square/values-idfdev.yaml @@ -7,6 +7,8 @@ config: databaseUrl: "postgresql://times-square@localhost/times-square" githubAppId: "196798" enableGitHubApp: "True" + githubCheckRunTimeout: "600" # 10 minutes + defaultExecutionTimeout: "60" # 1 minute cloudsql: enabled: true instanceConnectionName: "science-platform-dev-7696:us-central1:science-platform-dev-e9e11de2" diff --git a/applications/times-square/values.yaml b/applications/times-square/values.yaml index 595bb9677b..1a26a01356 100644 --- a/applications/times-square/values.yaml +++ b/applications/times-square/values.yaml @@ -137,10 +137,10 @@ config: githubOrgs: "lsst,lsst-sqre,lsst-dm,lsst-ts,lsst-sitcom,lsst-pst" # -- Timeout for GitHub check runs in seconds - githubCheckRunTimeout: "600" # 10 minutes + githubCheckRunTimeout: "900" # 15 minutes # -- Default execution timeout for notebooks in seconds - defaultExecutionTimeout: "60" # 1 minute + defaultExecutionTimeout: "300" # 5 minutes worker: # -- Enable liveness checks for the arq queue From 3d2bd2520a881cfbde8bb9246f5ac98cf5c0e279 Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Thu, 12 Sep 2024 12:58:50 -0400 Subject: [PATCH 059/567] Change noteburst maxConcurrentJobs to 1 This seems better because the JupyterLab notebook execution endpoint is synchronous, so we may need to also run the noteburst jobs synchronously. --- applications/noteburst/README.md | 2 +- applications/noteburst/values.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/noteburst/README.md b/applications/noteburst/README.md index 5db1416c00..0f195c8d15 100644 --- a/applications/noteburst/README.md +++ b/applications/noteburst/README.md @@ -25,7 +25,7 @@ Noteburst is a notebook execution service for the Rubin Science Platform. 
| config.worker.imageSelector | string | `"recommended"` | Nublado image stream to select: "recommended", "weekly" or "reference" | | config.worker.jobTimeout | int | `300` | The default notebook execution timeout, in seconds. | | config.worker.keepAlive | string | `"normal"` | Worker keep alive mode: "normal", "fast", "disabled" | -| config.worker.maxConcurrentJobs | int | `3` | Max number of concurrent notebook executions per worker | +| config.worker.maxConcurrentJobs | int | `1` | Max number of concurrent notebook executions per worker | | config.worker.tokenLifetime | string | `"2419200"` | Worker token lifetime, in seconds. | | config.worker.tokenScopes | string | `"exec:notebook,read:image,read:tap,read:alertdb"` | Nublado2 worker account's token scopes as a comma-separated list. | | config.worker.workerCount | int | `1` | Number of workers to run | diff --git a/applications/noteburst/values.yaml b/applications/noteburst/values.yaml index d136cbe1f9..0ec9a6f705 100644 --- a/applications/noteburst/values.yaml +++ b/applications/noteburst/values.yaml @@ -123,7 +123,7 @@ config: jobTimeout: 300 # -- Max number of concurrent notebook executions per worker - maxConcurrentJobs: 3 + maxConcurrentJobs: 1 # -- Worker token lifetime, in seconds. 
tokenLifetime: "2419200" From 8246fdbb0a2f8d124d48dd5da295dde17b43e11a Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 3 Sep 2024 14:00:07 -0700 Subject: [PATCH 060/567] Enable Cruise Control --- applications/sasquatch/values-summit.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 5c82488f2a..e036c9e140 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -51,6 +51,8 @@ strimzi-kafka: nginx.ingress.kubernetes.io/rewrite-target: /$2 hostname: summit-lsp.lsst.codes path: /schema-registry(/|$)(.*) + cruiseControl: + enabled: true influxdb: persistence: From 7db8b42fbfe48892830e0780b9f3819c4d1d6a50 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 3 Sep 2024 14:16:14 -0700 Subject: [PATCH 061/567] Create new KafkaNodePool resource for local storage --- applications/sasquatch/values-summit.yaml | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index e036c9e140..13390710a1 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -53,7 +53,24 @@ strimzi-kafka: path: /schema-registry(/|$)(.*) cruiseControl: enabled: true - + brokerStorage: + enabled: false + storageClassName: localdrive + size: 15Ti + migration: + enabled: true + rebalance: false + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: kubernetes.io/hostname + operator: In + values: + - yagan17 + - yagan18 + - yagan19 influxdb: persistence: storageClass: rook-ceph-block From 7d49867c5804fb2fa98553557565fcddb4100ee6 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 3 Sep 2024 14:38:45 -0700 Subject: [PATCH 062/567] Add resources requests and limits configuration --- 
applications/sasquatch/values-summit.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 13390710a1..f58607ae3e 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -18,6 +18,13 @@ strimzi-kafka: - broker: 2 loadBalancerIP: "139.229.180.5" host: sasquatch-summit-kafka-2.lsst.codes + resources: + requests: + memory: 32Gi + cpu: 4 + limits: + memory: 32Gi + cpu: 4 kraft: enabled: true kafkaController: From c0e54c7840b728f86dde8a67641342122534f725 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 4 Sep 2024 09:14:02 -0700 Subject: [PATCH 063/567] Allocate IPs from the LHN pool - Use the metallb.universe.tf/address-pool: lhn annotation to allocate IPs from the LHN pool, that's required to replicate data to USDF - Pin IP addresses after assignment --- applications/sasquatch/values-summit.yaml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index f58607ae3e..11ec819265 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -18,6 +18,21 @@ strimzi-kafka: - broker: 2 loadBalancerIP: "139.229.180.5" host: sasquatch-summit-kafka-2.lsst.codes + - broker: 6 + loadBalancerIP: "139.229.180.8" + host: sasquatch-summit-kafka-6.lsst.codes + annotations: + metallb.universe.tf/address-pool: lhn + - broker: 7 + loadBalancerIP: "139.229.180.9" + host: sasquatch-summit-kafka-7.lsst.codes + annotations: + metallb.universe.tf/address-pool: lhn + - broker: 8 + loadBalancerIP: "139.229.180.10" + host: sasquatch-summit-kafka-8.lsst.codes + annotations: + metallb.universe.tf/address-pool: lhn resources: requests: memory: 32Gi From 6f6e85df3dcf4cdde7cea1375343e9cb223615c5 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 4 Sep 2024 09:19:11 -0700 Subject: [PATCH 064/567] 
Rollback to Kafka version 3.7.1 temporarily --- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/strimzi-kafka/README.md | 2 +- applications/sasquatch/charts/strimzi-kafka/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 2e8e8fca70..243ea90458 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -365,7 +365,7 @@ Rubin Observatory's telemetry service | strimzi-kafka.kafka.storage.size | string | `"500Gi"` | Size of the backing storage disk for each of the Kafka brokers | | strimzi-kafka.kafka.storage.storageClassName | string | `""` | Name of a StorageClass to use when requesting persistent volumes | | strimzi-kafka.kafka.tolerations | list | `[]` | Tolerations for Kafka broker pod assignment | -| strimzi-kafka.kafka.version | string | `"3.8.0"` | Version of Kafka to deploy | +| strimzi-kafka.kafka.version | string | `"3.7.1"` | Version of Kafka to deploy | | strimzi-kafka.kafkaController.enabled | bool | `false` | Enable Kafka Controller | | strimzi-kafka.kafkaController.resources | object | See `values.yaml` | Kubernetes requests and limits for the Kafka Controller | | strimzi-kafka.kafkaController.storage.size | string | `"20Gi"` | Size of the backing storage disk for each of the Kafka controllers | diff --git a/applications/sasquatch/charts/strimzi-kafka/README.md b/applications/sasquatch/charts/strimzi-kafka/README.md index fd425d5279..4e844c02a3 100644 --- a/applications/sasquatch/charts/strimzi-kafka/README.md +++ b/applications/sasquatch/charts/strimzi-kafka/README.md @@ -41,7 +41,7 @@ A subchart to deploy Strimzi Kafka components for Sasquatch. 
| kafka.storage.size | string | `"500Gi"` | Size of the backing storage disk for each of the Kafka brokers | | kafka.storage.storageClassName | string | `""` | Name of a StorageClass to use when requesting persistent volumes | | kafka.tolerations | list | `[]` | Tolerations for Kafka broker pod assignment | -| kafka.version | string | `"3.8.0"` | Version of Kafka to deploy | +| kafka.version | string | `"3.7.1"` | Version of Kafka to deploy | | kafkaController.enabled | bool | `false` | Enable Kafka Controller | | kafkaController.resources | object | See `values.yaml` | Kubernetes requests and limits for the Kafka Controller | | kafkaController.storage.size | string | `"20Gi"` | Size of the backing storage disk for each of the Kafka controllers | diff --git a/applications/sasquatch/charts/strimzi-kafka/values.yaml b/applications/sasquatch/charts/strimzi-kafka/values.yaml index fa0deaa57b..f43fd60e4c 100644 --- a/applications/sasquatch/charts/strimzi-kafka/values.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/values.yaml @@ -11,7 +11,7 @@ cluster: kafka: # -- Version of Kafka to deploy - version: "3.8.0" + version: "3.7.1" # -- Number of Kafka broker replicas to run replicas: 3 From 428c65365b9b7dd0b522773fa94fda7fad9c5b5a Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 11 Sep 2024 09:00:31 -0700 Subject: [PATCH 065/567] Start Kafka cluster rebalancing --- applications/sasquatch/values-summit.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 11ec819265..ca003f3c8a 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -81,7 +81,11 @@ strimzi-kafka: size: 15Ti migration: enabled: true - rebalance: false + rebalance: true + brokers: + - 0 + - 1 + - 2 affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: From 285ce6c87182c303cb57a4f247d756659084d6f2 Mon Sep 17 00:00:00 
2001 From: Angelo Fausti Date: Thu, 12 Sep 2024 17:01:22 -0700 Subject: [PATCH 066/567] Complete migration --- applications/sasquatch/values-summit.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index ca003f3c8a..7c4a45f4d6 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -76,12 +76,12 @@ strimzi-kafka: cruiseControl: enabled: true brokerStorage: - enabled: false + enabled: true storageClassName: localdrive size: 15Ti migration: - enabled: true - rebalance: true + enabled: false + rebalance: false brokers: - 0 - 1 From 7d3c94c40e9d76be6a28282af4af9dc4b3de7d72 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 12 Sep 2024 17:16:45 -0700 Subject: [PATCH 067/567] Remove old brokers - Remove reference to the old broker ids --- applications/sasquatch/values-summit.yaml | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 7c4a45f4d6..2c377045ff 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -9,15 +9,6 @@ strimzi-kafka: loadBalancerIP: "139.229.180.2" host: sasquatch-summit-kafka-bootstrap.lsst.codes brokers: - - broker: 0 - loadBalancerIP: "139.229.180.3" - host: sasquatch-summit-kafka-0.lsst.codes - - broker: 1 - loadBalancerIP: "139.229.180.4" - host: sasquatch-summit-kafka-1.lsst.codes - - broker: 2 - loadBalancerIP: "139.229.180.5" - host: sasquatch-summit-kafka-2.lsst.codes - broker: 6 loadBalancerIP: "139.229.180.8" host: sasquatch-summit-kafka-6.lsst.codes @@ -82,10 +73,6 @@ strimzi-kafka: migration: enabled: false rebalance: false - brokers: - - 0 - - 1 - - 2 affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: From c352d3fe346cb30d77ce2bb0680892518112ce0e Mon Sep 17 00:00:00 2001 From: Erin Howard Date: Thu, 
12 Sep 2024 17:16:56 -0700 Subject: [PATCH 068/567] Update LATISS Prompt Processing to 4.5.0. --- .../values-usdfprod-prompt-processing.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index a379a6d328..77d8ba6207 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.4.0 + tag: 4.5.0 instrument: pipelines: From 58fbf928f06d051eaa1d464d7b258c616fbb5939 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Fri, 13 Sep 2024 10:56:15 -0300 Subject: [PATCH 069/567] rubintv: update app version for summit and usdf production deployments --- applications/rubintv/values-summit.yaml | 2 +- applications/rubintv/values-usdfprod.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index 62ef88610c..aa74c4dffe 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -20,7 +20,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.2.0 + tag: v2.3.0 pullPolicy: Always workers: diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index 2499da043c..7349f935f4 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -16,7 +16,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.2.0 + tag: v2.3.0 pullPolicy: Always workers: From 0f24f951a46fb8cc1ef66947a4ed86d528a816ae Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: 
Fri, 13 Sep 2024 11:04:24 -0300 Subject: [PATCH 070/567] rubintv: add worker pods for summit production deployment --- applications/rubintv/values-summit.yaml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index aa74c4dffe..c1f2cb88ef 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -24,7 +24,7 @@ rubintv: pullPolicy: Always workers: - replicas: 0 + replicas: 1 image: repository: ts-dockerhub.lsst.org/rapid-analysis tag: c0037 @@ -32,7 +32,7 @@ rubintv: uid: 73006 gid: 73006 scriptsLocation: /repos/rubintv_analysis_service/scripts - script: rubintv_worker.py -a rubintv-dev -p 8080 -l summit + script: rubintv_worker.py -a rubintv -p 8080 -l summit env: - name: S3_ENDPOINT_URL value: *s3E @@ -43,9 +43,12 @@ rubintv: - name: DEPLOY_BRANCH value: *dbE resources: + requests: + cpu: 0.5 + memory: 1G limits: - cpu: 2.0 - memory: "8Gi" + cpu: 1.0 + memory: 2.5G global: tsVaultSecretsPath: "" From ba5ea835630f54087dee48e2d315b62027a2a8d1 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 13 Sep 2024 13:31:42 -0700 Subject: [PATCH 071/567] Summit: Update nublado to Cycle 39. 
--- applications/nublado/values-summit.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/nublado/values-summit.yaml b/applications/nublado/values-summit.yaml index 3fddbdcecc..6bede5f34d 100644 --- a/applications/nublado/values-summit.yaml +++ b/applications/nublado/values-summit.yaml @@ -8,8 +8,8 @@ controller: numReleases: 0 numWeeklies: 3 numDailies: 2 - cycle: 38 - recommendedTag: "recommended_c0038" + cycle: 39 + recommendedTag: "recommended_c0039" lab: extraAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/dds" From 9810bf8c08db0684900662879183bcde7fefe931 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 10 Sep 2024 14:11:50 -0700 Subject: [PATCH 072/567] Remove the interval setting - The kafka consumer input plugin is a service input plugin. Normal plugins gather metrics determined by the interval setting. Service plugins start a service waits for metrics or events to occur. --- applications/sasquatch/README.md | 2 -- .../sasquatch/charts/telegraf-kafka-consumer/README.md | 1 - .../charts/telegraf-kafka-consumer/templates/configmap.yaml | 2 -- .../sasquatch/charts/telegraf-kafka-consumer/values.yaml | 4 ---- 4 files changed, 9 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 243ea90458..8408c84e4e 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -416,7 +416,6 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer.kafkaConsumers.test.fields | list | `[]` | List of Avro fields to be recorded as InfluxDB fields. If not specified, any Avro field that is not marked as a tag will become an InfluxDB field. | | telegraf-kafka-consumer.kafkaConsumers.test.flush_interval | string | "1s" | Data flushing interval for all outputs. Don’t set this below interval. 
Maximum flush_interval is flush_interval + flush_jitter | | telegraf-kafka-consumer.kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | -| telegraf-kafka-consumer.kafkaConsumers.test.interval | string | "1s" | Data collection interval for the Kafka consumer. | | telegraf-kafka-consumer.kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | | telegraf-kafka-consumer.kafkaConsumers.test.metric_batch_size | int | 1000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | | telegraf-kafka-consumer.kafkaConsumers.test.metric_buffer_limit | int | 10000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | @@ -452,7 +451,6 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer-oss.kafkaConsumers.test.fields | list | `[]` | List of Avro fields to be recorded as InfluxDB fields. If not specified, any Avro field that is not marked as a tag will become an InfluxDB field. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.flush_interval | string | "1s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | | telegraf-kafka-consumer-oss.kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | -| telegraf-kafka-consumer-oss.kafkaConsumers.test.interval | string | "1s" | Data collection interval for the Kafka consumer. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. 
| | telegraf-kafka-consumer-oss.kafkaConsumers.test.metric_batch_size | int | 1000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.metric_buffer_limit | int | 10000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md index d9c8dbcb70..a523f08be7 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md @@ -24,7 +24,6 @@ Telegraf is an agent written in Go for collecting, processing, aggregating, and | kafkaConsumers.test.fields | list | `[]` | List of Avro fields to be recorded as InfluxDB fields. If not specified, any Avro field that is not marked as a tag will become an InfluxDB field. | | kafkaConsumers.test.flush_interval | string | "1s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | | kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | -| kafkaConsumers.test.interval | string | "1s" | Data collection interval for the Kafka consumer. | | kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | | kafkaConsumers.test.metric_batch_size | int | 1000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | | kafkaConsumers.test.metric_buffer_limit | int | 10000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. 
This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml index c2419057fc..25ee702d97 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml @@ -12,8 +12,6 @@ metadata: data: telegraf.conf: |+ [agent] - interval = {{ default "1s" $value.interval | quote }} - round_interval = true metric_batch_size = {{ default 1000 $value.metric_batch_size }} metric_buffer_limit = {{ default 10000 $value.metric_buffer_limit }} collection_jitter = {{ default "0s" $value.collection_jitter | quote }} diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml index 9b8e89ebb3..383d4097d7 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml @@ -60,10 +60,6 @@ kafkaConsumers: # increase the consumer throughput. replicaCount: 1 - # -- Data collection interval for the Kafka consumer. - # @default -- "1s" - interval: "1s" - # -- Sends metrics to the output in batches of at most metric_batch_size # metrics. 
# @default -- 1000 From 4c1e229787b4711495ba7989305e62bada680db2 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 10 Sep 2024 14:17:04 -0700 Subject: [PATCH 073/567] Tune configuration for high throughput - Increase metric_batch_size, metric_buffer_limit and max_undelivered_messages for high throughput --- applications/sasquatch/README.md | 10 ++++++---- .../charts/telegraf-kafka-consumer/README.md | 5 +++-- .../templates/configmap.yaml | 5 +++-- .../charts/telegraf-kafka-consumer/values.yaml | 14 ++++++++++---- 4 files changed, 22 insertions(+), 12 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 8408c84e4e..821a6d07fa 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -417,8 +417,9 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer.kafkaConsumers.test.flush_interval | string | "1s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | | telegraf-kafka-consumer.kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | | telegraf-kafka-consumer.kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | -| telegraf-kafka-consumer.kafkaConsumers.test.metric_batch_size | int | 1000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | -| telegraf-kafka-consumer.kafkaConsumers.test.metric_buffer_limit | int | 10000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | +| telegraf-kafka-consumer.kafkaConsumers.test.max_undelivered_messages | int | 10000 | Maximum number of undelivered messages. 
Should be a multiple of metric_batch_size, setting it too low may never flush the broker's messages. | +| telegraf-kafka-consumer.kafkaConsumers.test.metric_batch_size | int | 5000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | +| telegraf-kafka-consumer.kafkaConsumers.test.metric_buffer_limit | int | 100000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | | telegraf-kafka-consumer.kafkaConsumers.test.offset | string | `"oldest"` | Kafka consumer offset. Possible values are `oldest` and `newest`. | | telegraf-kafka-consumer.kafkaConsumers.test.precision | string | "1us" | Data precision. | | telegraf-kafka-consumer.kafkaConsumers.test.replicaCount | int | `1` | Number of Telegraf Kafka consumer replicas. Increase this value to increase the consumer throughput. | @@ -452,8 +453,9 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer-oss.kafkaConsumers.test.flush_interval | string | "1s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | | telegraf-kafka-consumer-oss.kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | -| telegraf-kafka-consumer-oss.kafkaConsumers.test.metric_batch_size | int | 1000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | -| telegraf-kafka-consumer-oss.kafkaConsumers.test.metric_buffer_limit | int | 10000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. 
This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.max_undelivered_messages | int | 10000 | Maximum number of undelivered messages. Should be a multiple of metric_batch_size, setting it too low may never flush the broker's messages. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.metric_batch_size | int | 5000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.metric_buffer_limit | int | 100000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.offset | string | `"oldest"` | Kafka consumer offset. Possible values are `oldest` and `newest`. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.precision | string | "1us" | Data precision. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.replicaCount | int | `1` | Number of Telegraf Kafka consumer replicas. Increase this value to increase the consumer throughput. | diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md index a523f08be7..83e69f6047 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md @@ -25,8 +25,9 @@ Telegraf is an agent written in Go for collecting, processing, aggregating, and | kafkaConsumers.test.flush_interval | string | "1s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | | kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. 
This is primarily to avoid large write spikes for users running a large number of telegraf instances. | | kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | -| kafkaConsumers.test.metric_batch_size | int | 1000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | -| kafkaConsumers.test.metric_buffer_limit | int | 10000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | +| kafkaConsumers.test.max_undelivered_messages | int | 10000 | Maximum number of undelivered messages. Should be a multiple of metric_batch_size, setting it too low may never flush the broker's messages. | +| kafkaConsumers.test.metric_batch_size | int | 5000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | +| kafkaConsumers.test.metric_buffer_limit | int | 100000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | | kafkaConsumers.test.offset | string | `"oldest"` | Kafka consumer offset. Possible values are `oldest` and `newest`. | | kafkaConsumers.test.precision | string | "1us" | Data precision. | | kafkaConsumers.test.replicaCount | int | `1` | Number of Telegraf Kafka consumer replicas. Increase this value to increase the consumer throughput. 
| diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml index 25ee702d97..6cc13266b5 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml @@ -12,8 +12,8 @@ metadata: data: telegraf.conf: |+ [agent] - metric_batch_size = {{ default 1000 $value.metric_batch_size }} - metric_buffer_limit = {{ default 10000 $value.metric_buffer_limit }} + metric_batch_size = {{ default 5000 $value.metric_batch_size }} + metric_buffer_limit = {{ default 100000 $value.metric_buffer_limit }} collection_jitter = {{ default "0s" $value.collection_jitter | quote }} flush_interval = {{ default "10s" $value.flush_interval | quote }} flush_jitter = {{ default "0s" $value.flush_jitter | quote }} @@ -62,6 +62,7 @@ data: precision = {{ default "1us" $value.precision | quote }} max_processing_time = {{ default "5s" $value.max_processing_time | quote }} consumer_fetch_default = {{ default "20MB" $value.consumer_fetch_default | quote }} + max_undelivered_messages = {{ default 10000 $value.max_undelivered_messages }} [[inputs.internal]] name_prefix = "telegraf_" diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml index 383d4097d7..683c0fadc6 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml @@ -62,14 +62,14 @@ kafkaConsumers: # -- Sends metrics to the output in batches of at most metric_batch_size # metrics. - # @default -- 1000 - metric_batch_size: 1000 + # @default -- 5000 + metric_batch_size: 5000 # -- Caches metric_buffer_limit metrics for each output, and flushes this # buffer on a successful write. 
This should be a multiple of metric_batch_size # and could not be less than 2 times metric_batch_size. - # @default -- 10000 - metric_buffer_limit: 10000 + # @default -- 100000 + metric_buffer_limit: 100000 # -- Data collection jitter. This is used to jitter the collection by a # random amount. Each plugin will sleep for a random time within jitter @@ -171,6 +171,12 @@ kafkaConsumers: # @default -- "20MB" consumer_fetch_default: "20MB" + # -- Maximum number of undelivered messages. + # Should be a multiple of metric_batch_size, setting it too low may never + # flush the broker's messages. + # @default -- 10000 + max_undelivered_messages: 10000 + influxdb: # -- URL of the InfluxDB v1 instance to write to url: "http://sasquatch-influxdb.sasquatch:8086" From 6df4c81aa7c7f413d3772e4e43c98d85461beb90 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 10 Sep 2024 14:18:10 -0700 Subject: [PATCH 074/567] Enable message compression - Use the LZ4 compression codec --- applications/sasquatch/README.md | 2 ++ .../sasquatch/charts/telegraf-kafka-consumer/README.md | 1 + .../charts/telegraf-kafka-consumer/templates/configmap.yaml | 1 + .../sasquatch/charts/telegraf-kafka-consumer/values.yaml | 4 ++++ 4 files changed, 8 insertions(+) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 821a6d07fa..70fd2715a2 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -410,6 +410,7 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer.influxdb.database | string | `"telegraf-kafka-consumer-v1"` | Name of the InfluxDB v1 database to write to | | telegraf-kafka-consumer.influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | | telegraf-kafka-consumer.kafkaConsumers.test.collection_jitter | string | "0s" | Data collection jitter. This is used to jitter the collection by a random amount. 
Each plugin will sleep for a random time within jitter before collecting. | +| telegraf-kafka-consumer.kafkaConsumers.test.compression_codec | int | 3 | Compression codec. 0 : None, 1 : Gzip, 2 : Snappy, 3 : LZ4, 4 : ZSTD | | telegraf-kafka-consumer.kafkaConsumers.test.consumer_fetch_default | string | "20MB" | Maximum amount of data the server should return for a fetch request. | | telegraf-kafka-consumer.kafkaConsumers.test.debug | bool | false | Run Telegraf in debug mode. | | telegraf-kafka-consumer.kafkaConsumers.test.enabled | bool | `false` | Enable the Telegraf Kafka consumer. | @@ -446,6 +447,7 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer-oss.influxdb.database | string | `"telegraf-kafka-consumer-v1"` | Name of the InfluxDB v1 database to write to | | telegraf-kafka-consumer-oss.influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | | telegraf-kafka-consumer-oss.kafkaConsumers.test.collection_jitter | string | "0s" | Data collection jitter. This is used to jitter the collection by a random amount. Each plugin will sleep for a random time within jitter before collecting. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.compression_codec | int | 3 | Compression codec. 0 : None, 1 : Gzip, 2 : Snappy, 3 : LZ4, 4 : ZSTD | | telegraf-kafka-consumer-oss.kafkaConsumers.test.consumer_fetch_default | string | "20MB" | Maximum amount of data the server should return for a fetch request. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.debug | bool | false | Run Telegraf in debug mode. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.enabled | bool | `false` | Enable the Telegraf Kafka consumer. 
| diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md index 83e69f6047..24364be802 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md @@ -18,6 +18,7 @@ Telegraf is an agent written in Go for collecting, processing, aggregating, and | influxdb.database | string | `"telegraf-kafka-consumer-v1"` | Name of the InfluxDB v1 database to write to | | influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | | kafkaConsumers.test.collection_jitter | string | "0s" | Data collection jitter. This is used to jitter the collection by a random amount. Each plugin will sleep for a random time within jitter before collecting. | +| kafkaConsumers.test.compression_codec | int | 3 | Compression codec. 0 : None, 1 : Gzip, 2 : Snappy, 3 : LZ4, 4 : ZSTD | | kafkaConsumers.test.consumer_fetch_default | string | "20MB" | Maximum amount of data the server should return for a fetch request. | | kafkaConsumers.test.debug | bool | false | Run Telegraf in debug mode. | | kafkaConsumers.test.enabled | bool | `false` | Enable the Telegraf Kafka consumer. 
| diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml index 6cc13266b5..5be588773d 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml @@ -63,6 +63,7 @@ data: max_processing_time = {{ default "5s" $value.max_processing_time | quote }} consumer_fetch_default = {{ default "20MB" $value.consumer_fetch_default | quote }} max_undelivered_messages = {{ default 10000 $value.max_undelivered_messages }} + compression_codec = {{ default 3 $value.compression_codec }} [[inputs.internal]] name_prefix = "telegraf_" diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml index 683c0fadc6..5307740270 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml @@ -177,6 +177,10 @@ kafkaConsumers: # @default -- 10000 max_undelivered_messages: 10000 + # -- Compression codec. 0 : None, 1 : Gzip, 2 : Snappy, 3 : LZ4, 4 : ZSTD + # @default -- 3 + compression_codec: 3 + influxdb: # -- URL of the InfluxDB v1 instance to write to url: "http://sasquatch-influxdb.sasquatch:8086" From d4ce565cb4f4238bdf6aac9553c9241069bc9cf8 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 10 Sep 2024 14:20:53 -0700 Subject: [PATCH 075/567] Incrase default flush interval MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - We noticed the error message [“outputs.influxdb”] did not complete within its flush interval in some instances of Telegraf, this means Telegraf is unable to write all the gathered data inside the defined interval. 
--- applications/sasquatch/README.md | 4 ++-- .../sasquatch/charts/telegraf-kafka-consumer/README.md | 2 +- .../sasquatch/charts/telegraf-kafka-consumer/values.yaml | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 70fd2715a2..459c2f9459 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -415,7 +415,7 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer.kafkaConsumers.test.debug | bool | false | Run Telegraf in debug mode. | | telegraf-kafka-consumer.kafkaConsumers.test.enabled | bool | `false` | Enable the Telegraf Kafka consumer. | | telegraf-kafka-consumer.kafkaConsumers.test.fields | list | `[]` | List of Avro fields to be recorded as InfluxDB fields. If not specified, any Avro field that is not marked as a tag will become an InfluxDB field. | -| telegraf-kafka-consumer.kafkaConsumers.test.flush_interval | string | "1s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | +| telegraf-kafka-consumer.kafkaConsumers.test.flush_interval | string | "10s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | | telegraf-kafka-consumer.kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | | telegraf-kafka-consumer.kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | | telegraf-kafka-consumer.kafkaConsumers.test.max_undelivered_messages | int | 10000 | Maximum number of undelivered messages. Should be a multiple of metric_batch_size, setting it too low may never flush the broker's messages. 
| @@ -452,7 +452,7 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer-oss.kafkaConsumers.test.debug | bool | false | Run Telegraf in debug mode. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.enabled | bool | `false` | Enable the Telegraf Kafka consumer. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.fields | list | `[]` | List of Avro fields to be recorded as InfluxDB fields. If not specified, any Avro field that is not marked as a tag will become an InfluxDB field. | -| telegraf-kafka-consumer-oss.kafkaConsumers.test.flush_interval | string | "1s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.flush_interval | string | "10s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | | telegraf-kafka-consumer-oss.kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.max_undelivered_messages | int | 10000 | Maximum number of undelivered messages. Should be a multiple of metric_batch_size, setting it too low may never flush the broker's messages. 
| diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md index 24364be802..e361988887 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md @@ -23,7 +23,7 @@ Telegraf is an agent written in Go for collecting, processing, aggregating, and | kafkaConsumers.test.debug | bool | false | Run Telegraf in debug mode. | | kafkaConsumers.test.enabled | bool | `false` | Enable the Telegraf Kafka consumer. | | kafkaConsumers.test.fields | list | `[]` | List of Avro fields to be recorded as InfluxDB fields. If not specified, any Avro field that is not marked as a tag will become an InfluxDB field. | -| kafkaConsumers.test.flush_interval | string | "1s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | +| kafkaConsumers.test.flush_interval | string | "10s" | Data flushing interval for all outputs. Don’t set this below interval. Maximum flush_interval is flush_interval + flush_jitter | | kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | | kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | | kafkaConsumers.test.max_undelivered_messages | int | 10000 | Maximum number of undelivered messages. Should be a multiple of metric_batch_size, setting it too low may never flush the broker's messages. 
| diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml index 5307740270..28b3081941 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml @@ -80,8 +80,8 @@ kafkaConsumers: # -- Data flushing interval for all outputs. # Don’t set this below interval. # Maximum flush_interval is flush_interval + flush_jitter - # @default -- "1s" - flush_interval: "1s" + # @default -- "10s" + flush_interval: "10s" # -- Jitter the flush interval by a random amount. This is primarily to # avoid large write spikes for users running a large number of telegraf From b41a6e63898884930d57ae7582a8f4f81eb582ff Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 10 Sep 2024 14:25:42 -0700 Subject: [PATCH 076/567] Enable debug mode - Enable debug mode and make sure we consume the oldest offsets for testing these changes --- applications/sasquatch/values-summit.yaml | 36 ++++++++++++++--------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 2c377045ff..fd39ff3389 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -290,6 +290,7 @@ telegraf-kafka-consumer: timestamp_field: "timestamp" topicRegexps: | [ "lsst.backpack" ] + debug: true # CSC connectors maintel: enabled: true @@ -297,7 +298,7 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTAOS", "lsst.sal.MTDome", "lsst.sal.MTDomeTrajectory", "lsst.sal.MTPtg" ] - offset: "newest" + debug: true mtmount: enabled: true database: "efd" @@ -305,21 +306,21 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTMount" ] - offset: "newest" + debug: true comcam: enabled: true database: "efd" timestamp_field: "private_efdStamp" 
topicRegexps: | [ "lsst.sal.CCCamera", "lsst.sal.CCHeaderService", "lsst.sal.CCOODS" ] - offset: "newest" + debug: true eas: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] - offset: "newest" + debug: true m1m3: enabled: true database: "efd" @@ -327,94 +328,98 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTM1M3" ] - offset: "newest" + debug: true m2: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTHexapod", "lsst.sal.MTM2", "lsst.sal.MTRotator" ] - offset: "newest" + debug: true obssys: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.Scheduler", "lsst.sal.Script", "lsst.sal.ScriptQueue", "lsst.sal.Watcher" ] - offset: "newest" + debug: true ocps: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.OCPS" ] - offset: "newest" + debug: true pmd: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.PMD" ] - offset: "newest" + debug: true calsys: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.ATMonochromator", "lsst.sal.ATWhiteLight", "lsst.sal.CBP", "lsst.sal.Electrometer", "lsst.sal.FiberSpectrograph", "lsst.sal.LinearStage", "lsst.sal.TunableLaser" ] - offset: "newest" + debug: true mtaircompressor: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTAirCompressor" ] - offset: "newest" + debug: true genericcamera: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.GCHeaderService", "lsst.sal.GenericCamera" ] - offset: "newest" + debug: true gis: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.GIS" ] - offset: "newest" + debug: 
true lsstcam: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTCamera", "lsst.sal.MTHeaderService", "lsst.sal.MTOODS" ] - offset: "newest" + debug: true auxtel: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] + debug: true latiss: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.ATCamera", "lsst.sal.ATHeaderService", "lsst.sal.ATOODS", "lsst.sal.ATSpectrograph" ] + debug: true test: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.Test" ] + debug: true lasertracker: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.LaserTracker" ] + debug: true # CCS connectors (experimental) data is being written on separate databases for now atcamera: enabled: true @@ -425,6 +430,7 @@ telegraf-kafka-consumer: [ "Agent", "Aspic", "Location", "Raft", "Reb", "Sensor", "Source" ] topicRegexps: | [ "lsst.ATCamera" ] + debug: true cccamera: enabled: true database: "lsst.CCCamera" @@ -434,6 +440,7 @@ telegraf-kafka-consumer: [ "Agent", "Aspic", "Cold", "Cryo", "Hardware", "Location", "Ps", "RTD", "Raft", "Reb", "Segment", "Sensor", "Source" ] topicRegexps: | [ "lsst.CCCamera" ] + debug: true mtcamera: enabled: true database: "lsst.MTCamera" @@ -443,6 +450,7 @@ telegraf-kafka-consumer: [ "Agent", "Aspic", "Axis", "Canbus", "Cip", "Clamp", "Cold", "Controller", "Cryo", "Gateway", "Hardware", "Hip", "Hook", "Latch", "Location", "Ps", "RTD", "Raft", "Reb", "Segment", "Sensor", "Socket", "Source", "Truck" ] topicRegexps: | [ "lsst.MTCamera" ] + debug: true kafdrop: ingress: From 11c574375701c7921e51efb9ede4cb298f2464fd Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 10 Sep 2024 17:30:00 -0700 Subject: [PATCH 077/567] Run 
one connector replica - Run one connector replica for MTMount and M1M3 --- applications/sasquatch/values-summit.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index fd39ff3389..30b1873a25 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -302,7 +302,6 @@ telegraf-kafka-consumer: mtmount: enabled: true database: "efd" - replicaCount: 8 timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTMount" ] @@ -324,7 +323,6 @@ telegraf-kafka-consumer: m1m3: enabled: true database: "efd" - replicaCount: 8 timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTM1M3" ] From ed049c33e92cc99ba40deb2f4b5cd55a3d28e6ea Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 12:47:03 +0000 Subject: [PATCH 078/567] chore(deps): update helm release argo-workflows to v0.42.2 --- applications/argo-workflows/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argo-workflows/Chart.yaml b/applications/argo-workflows/Chart.yaml index 285c51625d..fa244f7232 100644 --- a/applications/argo-workflows/Chart.yaml +++ b/applications/argo-workflows/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-workflows - version: 0.42.1 + version: 0.42.2 repository: https://argoproj.github.io/argo-helm From 022e1f6dbde6165e657af410c916ae86781a3cac Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 12:47:07 +0000 Subject: [PATCH 079/567] chore(deps): update helm release kubernetes-replicator to v2.10.2 --- applications/kubernetes-replicator/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/kubernetes-replicator/Chart.yaml b/applications/kubernetes-replicator/Chart.yaml index 
27c1677bfb..335507f312 100644 --- a/applications/kubernetes-replicator/Chart.yaml +++ b/applications/kubernetes-replicator/Chart.yaml @@ -7,5 +7,5 @@ sources: - https://github.com/mittwald/kubernetes-replicator dependencies: - name: kubernetes-replicator - version: 2.10.1 + version: 2.10.2 repository: https://helm.mittwald.de From 3a108e4489c17b4280ffe0e22aa01865bbbabac1 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Thu, 12 Sep 2024 16:43:51 -0700 Subject: [PATCH 080/567] Update vo-cutouts, add schema migration support Update vo-cutouts to 3.2.0 and add support for schema migrations. --- .../gafaelfawr/templates/serviceaccount.yaml | 1 - applications/vo-cutouts/Chart.yaml | 2 +- applications/vo-cutouts/README.md | 2 + .../vo-cutouts/templates/configmap.yaml | 6 + .../templates/job-schea-update.yaml | 130 ++++++++++++++++++ .../vo-cutouts/templates/serviceaccount.yaml | 5 + applications/vo-cutouts/values.yaml | 6 + 7 files changed, 150 insertions(+), 2 deletions(-) create mode 100644 applications/vo-cutouts/templates/job-schea-update.yaml diff --git a/applications/gafaelfawr/templates/serviceaccount.yaml b/applications/gafaelfawr/templates/serviceaccount.yaml index aa35285b29..acf07b2ed2 100644 --- a/applications/gafaelfawr/templates/serviceaccount.yaml +++ b/applications/gafaelfawr/templates/serviceaccount.yaml @@ -15,7 +15,6 @@ metadata: name: "gafaelfawr-schema-update" labels: {{- include "gafaelfawr.labels" . 
| nindent 4 }} - annotations: annotations: helm.sh/hook: "pre-install,pre-upgrade" helm.sh/hook-delete-policy: "hook-succeeded" diff --git a/applications/vo-cutouts/Chart.yaml b/applications/vo-cutouts/Chart.yaml index 873d9050c4..4aed5b2fe5 100644 --- a/applications/vo-cutouts/Chart.yaml +++ b/applications/vo-cutouts/Chart.yaml @@ -4,7 +4,7 @@ version: 1.0.0 description: "Image cutout service complying with IVOA SODA" sources: - "https://github.com/lsst-sqre/vo-cutouts" -appVersion: 3.1.0 +appVersion: 3.2.0 dependencies: - name: redis diff --git a/applications/vo-cutouts/README.md b/applications/vo-cutouts/README.md index 3c9f245a24..ad78a23274 100644 --- a/applications/vo-cutouts/README.md +++ b/applications/vo-cutouts/README.md @@ -13,6 +13,7 @@ Image cutout service complying with IVOA SODA | cloudsql.enabled | bool | `false` | Enable the Cloud SQL Auth Proxy sidecar, used with Cloud SQL databases on Google Cloud | | cloudsql.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for Cloud SQL Auth Proxy images | | cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Auth Proxy image to use | +| cloudsql.image.schemaUpdateTagSuffix | string | `"-alpine"` | Tag suffix to use for the proxy for schema updates | | cloudsql.image.tag | string | `"1.37.0"` | Cloud SQL Auth Proxy tag to use | | cloudsql.instanceConnectionName | string | None, must be set if Cloud SQL is used | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.resources | object | See `values.yaml` | Resource limits and requests for the Cloud SQL Proxy container | @@ -26,6 +27,7 @@ Image cutout service complying with IVOA SODA | config.storageBucketUrl | string | None, must be set | URL for the GCS bucket for results (must start with `gs`) | | config.syncTimeout | string | `"1m"` | Timeout for results from a sync cutout in Safir `parse_timedelta` format | | config.timeout | int | 600 (10 minutes) | Timeout for a single cutout job in seconds | 
+| config.updateSchema | bool | `false` | Whether to automatically update the vo-cutouts database schema | | cutoutWorker.affinity | object | `{}` | Affinity rules for the cutout worker pod | | cutoutWorker.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for cutout workers | | cutoutWorker.image.repository | string | `"ghcr.io/lsst-sqre/vo-cutouts-worker"` | Stack image to use for cutouts | diff --git a/applications/vo-cutouts/templates/configmap.yaml b/applications/vo-cutouts/templates/configmap.yaml index b933134f6b..8a0a3a4dc4 100644 --- a/applications/vo-cutouts/templates/configmap.yaml +++ b/applications/vo-cutouts/templates/configmap.yaml @@ -2,6 +2,12 @@ apiVersion: v1 kind: ConfigMap metadata: name: vo-cutouts + {{- if .Values.config.updateSchema }} + annotations: + helm.sh/hook: "pre-install,pre-upgrade" + helm.sh/hook-delete-policy: "before-hook-creation" + helm.sh/hook-weight: "0" + {{- end }} labels: {{- include "vo-cutouts.labels" . | nindent 4 }} data: diff --git a/applications/vo-cutouts/templates/job-schea-update.yaml b/applications/vo-cutouts/templates/job-schea-update.yaml new file mode 100644 index 0000000000..b59461e887 --- /dev/null +++ b/applications/vo-cutouts/templates/job-schea-update.yaml @@ -0,0 +1,130 @@ +{{- if .Values.config.updateSchema -}} +apiVersion: batch/v1 +kind: Job +metadata: + name: "vo-cutouts-schema-update" + annotations: + annotations: + helm.sh/hook: "pre-install,pre-upgrade" + helm.sh/hook-delete-policy: "hook-succeeded" + helm.sh/hook-weight: "1" + labels: + {{- include "vo-cutouts.labels" . | nindent 4 }} +spec: + template: + metadata: + {{- with .Values.podAnnotations }} + annotations: + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "vo-cutouts.selectorLabels" . | nindent 8 }} + app.kubernetes.io/component: "schema-update" + vo-cutouts-redis-client: "true" + spec: + {{- with .Values.affinity }} + affinity: + {{- toYaml . 
| nindent 8 }} + {{- end }} + {{- if .Values.cloudsql.enabled }} + serviceAccountName: "vo-cutouts" + {{- else }} + automountServiceAccountToken: false + {{- end }} + containers: + {{- if .Values.cloudsql.enabled }} + - name: "cloud-sql-proxy" + # Running the sidecar as normal causes it to keep running and thus + # the Pod never exits, the Job never finishes, and the hook blocks + # the sync. Have the main pod signal the sidecar by writing to a + # file on a shared emptyDir file system, and use a simple watcher + # loop in shell in the sidecar container to terminate the proxy when + # the main container finishes. + # + # Based on https://stackoverflow.com/questions/41679364/ + command: + - "/bin/sh" + - "-c" + args: + - | + /cloud_sql_proxy -ip_address_types=PRIVATE -log_debug_stdout=true -structured_logs=true -instances={{ required "cloudsql.instanceConnectionName must be specified" .Values.cloudsql.instanceConnectionName }}=tcp:5432 & + PID=$! + while true; do + if [[ -f "/lifecycle/main-terminated" ]]; then + kill $PID + exit 0 + fi + sleep 1 + done + image: "{{ .Values.cloudsql.image.repository }}:{{ .Values.cloudsql.image.tag }}{{ .Values.cloudsql.image.schemaUpdateTagSuffix }}" + imagePullPolicy: {{ .Values.cloudsql.image.pullPolicy | quote }} + {{- with .Values.cloudsql.resources }} + resources: + {{- toYaml . 
| nindent 12 }} + {{- end }} + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - "all" + readOnlyRootFilesystem: true + runAsNonRoot: true + runAsUser: 65532 + runAsGroup: 65532 + volumeMounts: + - name: "lifecycle" + mountPath: "/lifecycle" + {{- end }} + - name: "vo-cutouts" + command: + - "/bin/sh" + - "-c" + - | + vo-cutouts update-schema + touch /lifecycle/main-terminated + env: + - name: "CUTOUT_ARQ_QUEUE_PASSWORD" + valueFrom: + secretKeyRef: + name: "vo-cutouts" + key: "redis-password" + - name: "CUTOUT_DATABASE_PASSWORD" + valueFrom: + secretKeyRef: + name: "vo-cutouts" + key: "database-password" + envFrom: + - configMapRef: + name: "vo-cutouts" + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy | quote }} + {{- with .Values.resources }} + resources: + {{- toYaml . | nindent 12 }} + {{- end }} + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - "all" + readOnlyRootFilesystem: true + volumeMounts: + - name: "lifecycle" + mountPath: "/lifecycle" + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + restartPolicy: "Never" + securityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 1000 + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} + volumes: + - name: "lifecycle" + emptyDir: {} +{{- end }} diff --git a/applications/vo-cutouts/templates/serviceaccount.yaml b/applications/vo-cutouts/templates/serviceaccount.yaml index dfa2303153..c2c43cfbbd 100644 --- a/applications/vo-cutouts/templates/serviceaccount.yaml +++ b/applications/vo-cutouts/templates/serviceaccount.yaml @@ -6,5 +6,10 @@ metadata: labels: {{- include "vo-cutouts.labels" . 
| nindent 4 }} annotations: + {{- if .Values.config.updateSchema }} + helm.sh/hook: "pre-install,pre-upgrade" + helm.sh/hook-delete-policy: "before-hook-creation" + helm.sh/hook-weight: "0" + {{- end }} iam.gke.io/gcp-service-account: {{ required "config.serviceAccount must be set to a valid Google service account" .Values.config.serviceAccount | quote }} {{- end }} diff --git a/applications/vo-cutouts/values.yaml b/applications/vo-cutouts/values.yaml index f6852a7f6c..17e9ad9ba5 100644 --- a/applications/vo-cutouts/values.yaml +++ b/applications/vo-cutouts/values.yaml @@ -40,6 +40,9 @@ config: # @default -- 600 (10 minutes) timeout: 600 + # -- Whether to automatically update the vo-cutouts database schema + updateSchema: false + image: # -- vo-cutouts image to use for the frontend and database workers repository: "ghcr.io/lsst-sqre/vo-cutouts" @@ -93,6 +96,9 @@ cloudsql: # -- Cloud SQL Auth Proxy tag to use tag: "1.37.0" + # -- Tag suffix to use for the proxy for schema updates + schemaUpdateTagSuffix: "-alpine" + # -- Pull policy for Cloud SQL Auth Proxy images pullPolicy: "IfNotPresent" From fa7b66a115980a91711ae67a9ef2ca904966d0a7 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 16 Sep 2024 15:43:55 -0700 Subject: [PATCH 081/567] Enable schema updates for all environments --- applications/vo-cutouts/values-idfdev.yaml | 1 + applications/vo-cutouts/values-idfint.yaml | 1 + applications/vo-cutouts/values-idfprod.yaml | 1 + 3 files changed, 3 insertions(+) diff --git a/applications/vo-cutouts/values-idfdev.yaml b/applications/vo-cutouts/values-idfdev.yaml index d65f4f8bbe..1ca562074c 100644 --- a/applications/vo-cutouts/values-idfdev.yaml +++ b/applications/vo-cutouts/values-idfdev.yaml @@ -1,6 +1,7 @@ config: serviceAccount: "vo-cutouts@science-platform-dev-7696.iam.gserviceaccount.com" storageBucketUrl: "gs://rubin-cutouts-dev-us-central1-output/" + upgradeSchema: true cloudsql: enabled: true diff --git a/applications/vo-cutouts/values-idfint.yaml 
b/applications/vo-cutouts/values-idfint.yaml index b7e41291fd..faca2b18da 100644 --- a/applications/vo-cutouts/values-idfint.yaml +++ b/applications/vo-cutouts/values-idfint.yaml @@ -1,6 +1,7 @@ config: serviceAccount: "vo-cutouts@science-platform-int-dc5d.iam.gserviceaccount.com" storageBucketUrl: "gs://rubin-cutouts-int-us-central1-output/" + upgradeSchema: true cloudsql: enabled: true diff --git a/applications/vo-cutouts/values-idfprod.yaml b/applications/vo-cutouts/values-idfprod.yaml index 461cb96fe5..53657a6e3c 100644 --- a/applications/vo-cutouts/values-idfprod.yaml +++ b/applications/vo-cutouts/values-idfprod.yaml @@ -1,6 +1,7 @@ config: serviceAccount: "vo-cutouts@science-platform-stable-6994.iam.gserviceaccount.com" storageBucketUrl: "gs://rubin-cutouts-stable-us-central1-output/" + updateSchema: true cloudsql: enabled: true From 744f78121e2fa7a3039fb49c645d076fdf785ebc Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 16 Sep 2024 15:49:53 -0700 Subject: [PATCH 082/567] Drop neophile support We're abandoning neophile in favor of other approaches to dependency management. Drop its GitHub Actions and tox support, and update Python and pre-commit dependencies. 
--- .github/workflows/dependencies.yaml | 35 ------- .pre-commit-config.yaml | 2 +- requirements/dev.txt | 143 +++++++++++++--------------- requirements/main.txt | 30 +++--- requirements/tox.txt | 44 ++++----- tox.ini | 7 -- 6 files changed, 106 insertions(+), 155 deletions(-) delete mode 100644 .github/workflows/dependencies.yaml diff --git a/.github/workflows/dependencies.yaml b/.github/workflows/dependencies.yaml deleted file mode 100644 index 49b52fbb6d..0000000000 --- a/.github/workflows/dependencies.yaml +++ /dev/null @@ -1,35 +0,0 @@ -name: Dependency Update - -"on": - schedule: - - cron: "0 12 * * 1" - workflow_dispatch: {} - -jobs: - update: - runs-on: ubuntu-latest - timeout-minutes: 10 - - steps: - - uses: actions/checkout@v4 - - # Omit pre-commit updates for now until neophile looks only at releases - # so that it doesn't pick up an old helm-docs release. - - name: Run neophile - uses: lsst-sqre/run-neophile@v1 - with: - python-version: "3.12" - mode: pr - types: python - app-id: ${{ secrets.NEOPHILE_APP_ID }} - app-secret: ${{ secrets.NEOPHILE_PRIVATE_KEY }} - - - name: Report status - if: always() - uses: ravsamhq/notify-slack-action@v2 - with: - status: ${{ job.status }} - notify_when: "failure" - notification_title: "Periodic dependency update for {repo} failed" - env: - SLACK_WEBHOOK_URL: ${{ secrets.SLACK_ALERT_WEBHOOK }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 99ad10ee7c..32bdfa9de8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,7 +46,7 @@ repos: - --template-files=../helm-docs.md.gotmpl - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.4 + rev: v0.6.5 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/requirements/dev.txt b/requirements/dev.txt index 515080cba5..fd796c0977 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -446,9 +446,9 @@ greenlet==3.1.0 ; (python_full_version < '3.13' and platform_machine == 'AMD64') 
--hash=sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501 \ --hash=sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54 # via sqlalchemy -idna==3.8 \ - --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ - --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 # via # -c requirements/main.txt # requests @@ -689,9 +689,9 @@ pexpect==4.9.0 ; sys_platform != 'emscripten' and sys_platform != 'win32' \ --hash=sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523 \ --hash=sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f # via ipython -platformdirs==4.3.2 \ - --hash=sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c \ - --hash=sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617 +platformdirs==4.3.3 \ + --hash=sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5 \ + --hash=sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0 # via jupyter-core pluggy==1.5.0 \ --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ @@ -1201,10 +1201,12 @@ rpds-py==0.20.0 \ # via # jsonschema # referencing -setuptools==74.1.2 \ - --hash=sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308 \ - --hash=sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6 - # via documenteer +setuptools==75.1.0 \ + --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ + --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 + # via + # documenteer + # sphinxcontrib-bibtex six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ 
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 @@ -1250,13 +1252,13 @@ sphinx==8.0.2 \ # sphinxcontrib-youtube # sphinxext-opengraph # sphinxext-rediraffe -sphinx-autodoc-typehints==2.4.0 \ - --hash=sha256:8f8281654ddf5709875429b7120d367f4edee39a131e13d5806e4f779a81bf0f \ - --hash=sha256:c9774d47e7d304cf975e073df49ebf19763dca94ac0295e7013b522b26cb18de +sphinx-autodoc-typehints==2.4.1 \ + --hash=sha256:af37abb816ebd2cf56c7a8174fd2f34d0f2f84fbf58265f89429ae107212fe6f \ + --hash=sha256:cfe410920cecf08ade046bb387b0007edb83e992de59686c62d194c762f1e45c # via documenteer -sphinx-automodapi==0.17.0 \ - --hash=sha256:4d029cb79eef29413e94ab01bb0177ebd2d5ba86e9789b73575afe9c06ae1501 \ - --hash=sha256:7ccdadad57add4aa9149d9f2bb5cf28c8f8b590280b4735b1156ea8355c423a1 +sphinx-automodapi==0.18.0 \ + --hash=sha256:022860385590768f52d4f6e19abb83b2574772d2721fb4050ecdb6e593a1a440 \ + --hash=sha256:7bf9d9a2cb67a5389c51071cfd86674ca3892ca5d5943f95de4553d6f35dddae # via documenteer sphinx-click==6.0.0 \ --hash=sha256:1e0a3c83bcb7c55497751b19d07ebe56b5d7b85eb76dd399cf9061b497adc317 \ @@ -1288,9 +1290,9 @@ sphinxcontrib-applehelp==2.0.0 \ --hash=sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1 \ --hash=sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5 # via sphinx -sphinxcontrib-bibtex==2.6.2 \ - --hash=sha256:10d45ebbb19207c5665396c9446f8012a79b8a538cb729f895b5910ab2d0b2da \ - --hash=sha256:f487af694336f28bfb7d6a17070953a7d264bec43000a2379724274f5f8d70ae +sphinxcontrib-bibtex==2.6.3 \ + --hash=sha256:7c790347ef1cb0edf30de55fc324d9782d085e89c52c2b8faafa082e08e23946 \ + --hash=sha256:ff016b738fcc867df0f75c29e139b3b2158d26a2c802db27963cb128be3b75fb # via documenteer sphinxcontrib-devhelp==2.0.0 \ --hash=sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad \ @@ -1335,56 +1337,47 @@ sphinxext-rediraffe==0.2.7 \ --hash=sha256:651dcbfae5ffda9ffd534dfb8025f36120e5efb6ea1a33f5420023862b9f725d \ 
--hash=sha256:9e430a52d4403847f4ffb3a8dd6dfc34a9fe43525305131f52ed899743a5fd8c # via documenteer -sqlalchemy==2.0.34 \ - --hash=sha256:10d8f36990dd929690666679b0f42235c159a7051534adb135728ee52828dd22 \ - --hash=sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3 \ - --hash=sha256:165bbe0b376541092bf49542bd9827b048357f4623486096fc9aaa6d4e7c59a2 \ - --hash=sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd \ - --hash=sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d \ - --hash=sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c \ - --hash=sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278 \ - --hash=sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b \ - --hash=sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021 \ - --hash=sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba \ - --hash=sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8 \ - --hash=sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b \ - --hash=sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e \ - --hash=sha256:430093fce0efc7941d911d34f75a70084f12f6ca5c15d19595c18753edb7c33b \ - --hash=sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a \ - --hash=sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82 \ - --hash=sha256:53e68b091492c8ed2bd0141e00ad3089bcc6bf0e6ec4142ad6505b4afe64163e \ - --hash=sha256:5bc08e75ed11693ecb648b7a0a4ed80da6d10845e44be0c98c03f2f880b68ff4 \ - --hash=sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3 \ - --hash=sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b \ - --hash=sha256:6daeb8382d0df526372abd9cb795c992e18eed25ef2c43afe518c73f8cccb721 \ - --hash=sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d \ - 
--hash=sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a \ - --hash=sha256:7286c353ee6475613d8beff83167374006c6b3e3f0e6491bfe8ca610eb1dec0f \ - --hash=sha256:79cb400c360c7c210097b147c16a9e4c14688a6402445ac848f296ade6283bbc \ - --hash=sha256:7cee4c6917857fd6121ed84f56d1dc78eb1d0e87f845ab5a568aba73e78adf83 \ - --hash=sha256:80bd73ea335203b125cf1d8e50fef06be709619eb6ab9e7b891ea34b5baa2287 \ - --hash=sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434 \ - --hash=sha256:8fddde2368e777ea2a4891a3fb4341e910a056be0bb15303bf1b92f073b80c02 \ - --hash=sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db \ - --hash=sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8 \ - --hash=sha256:97b850f73f8abbffb66ccbab6e55a195a0eb655e5dc74624d15cff4bfb35bd74 \ - --hash=sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24 \ - --hash=sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7 \ - --hash=sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0 \ - --hash=sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5 \ - --hash=sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f \ - --hash=sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c \ - --hash=sha256:bcd18441a49499bf5528deaa9dee1f5c01ca491fc2791b13604e8f972877f812 \ - --hash=sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768 \ - --hash=sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8 \ - --hash=sha256:c3330415cd387d2b88600e8e26b510d0370db9b7eaf984354a43e19c40df2e2b \ - --hash=sha256:c7db3db284a0edaebe87f8f6642c2b2c27ed85c3e70064b84d1c9e4ec06d5d84 \ - --hash=sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796 \ - --hash=sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b \ - --hash=sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2 \ - 
--hash=sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06 \ - --hash=sha256:fb1b30f31a36c7f3fee848391ff77eebdd3af5750bf95fbf9b8b5323edfdb4ec \ - --hash=sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580 +sqlalchemy==2.0.35 \ + --hash=sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00 \ + --hash=sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee \ + --hash=sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6 \ + --hash=sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf \ + --hash=sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8 \ + --hash=sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b \ + --hash=sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc \ + --hash=sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c \ + --hash=sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5 \ + --hash=sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90 \ + --hash=sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec \ + --hash=sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71 \ + --hash=sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7 \ + --hash=sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b \ + --hash=sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468 \ + --hash=sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3 \ + --hash=sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e \ + --hash=sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff \ + --hash=sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11 \ + --hash=sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01 \ + 
--hash=sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62 \ + --hash=sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d \ + --hash=sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a \ + --hash=sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db \ + --hash=sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87 \ + --hash=sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e \ + --hash=sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1 \ + --hash=sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f \ + --hash=sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0 \ + --hash=sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936 \ + --hash=sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8 \ + --hash=sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f \ + --hash=sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4 \ + --hash=sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0 \ + --hash=sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c \ + --hash=sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f \ + --hash=sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60 \ + --hash=sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2 \ + --hash=sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9 \ + --hash=sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33 # via jupyter-cache stack-data==0.6.3 \ --hash=sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9 \ @@ -1490,9 +1483,9 @@ uc-micro-py==1.0.3 \ --hash=sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a \ --hash=sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5 # via linkify-it-py 
-urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 +urllib3==2.2.3 \ + --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ + --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via # -c requirements/main.txt # documenteer @@ -1502,7 +1495,7 @@ wcwidth==0.2.13 \ --hash=sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859 \ --hash=sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5 # via prompt-toolkit -zipp==3.20.1 \ - --hash=sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064 \ - --hash=sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b +zipp==3.20.2 \ + --hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ + --hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 # via importlib-metadata diff --git a/requirements/main.txt b/requirements/main.txt index 2bbd0d4a85..f8cb7df176 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -249,9 +249,9 @@ cryptography==43.0.1 \ # phalanx (pyproject.toml) # pyjwt # safir -fastapi==0.114.1 \ - --hash=sha256:1d7bbbeabbaae0acb0c22f0ab0b040f642d3093ca3645f8c876b6f91391861d8 \ - --hash=sha256:5d4746f6e4b7dff0b4f6b6c6d5445645285f662fe75886e99af7ee2d6b58bb3e +fastapi==0.114.2 \ + --hash=sha256:0adb148b62edb09e8c6eeefa3ea934e8f276dabc038c5a82989ea6346050c3da \ + --hash=sha256:44474a22913057b1acb973ab90f4b671ba5200482e7622816d79105dcece1ac5 # via safir gidgethub==5.3.0 \ --hash=sha256:4dd92f2252d12756b13f9dd15cde322bfb0d625b6fb5d680da1567ec74b462c0 \ @@ -283,9 +283,9 @@ hvac==2.3.0 \ --hash=sha256:1b85e3320e8642dd82f234db63253cda169a817589e823713dc5fca83119b1e2 \ --hash=sha256:a3afc5710760b6ee9b3571769df87a0333da45da05a5f9f963e1d3925a84be7d # via phalanx (pyproject.toml) -idna==3.8 \ - 
--hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ - --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 # via # anyio # requests @@ -535,13 +535,13 @@ rfc3986==1.5.0 \ --hash=sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835 \ --hash=sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97 # via httpx -safir==6.3.0 \ - --hash=sha256:2fcd64bf37dd42eacedd6378341b2487cd06dbaf1f28403301b8d80f60a4fb56 \ - --hash=sha256:6ad7dad520d87d853628849ef95a348c55dbd0180ad3f15c1cf2f7f8fe32f915 +safir==6.4.0 \ + --hash=sha256:ba7af071eab0d198e6e15a2117028566f3f4237e02e2278e8bfc2633a7c68228 \ + --hash=sha256:f38c3f1d7d76d304984b572288826510e5c7a0e1f965b2eabdd7f3bace07c48a # via phalanx (pyproject.toml) -safir-logging==6.3.0 \ - --hash=sha256:491dfe85de89a3f2daa29c491a22a0551f0961444490418d91ec50c040ae16eb \ - --hash=sha256:e14754ab0bba6cfa248c3fc4cb5ca28410d97ff3965e831eab6581ed37485e79 +safir-logging==6.4.0 \ + --hash=sha256:4031a430d738b8fe5bfd29125dce6cbf4e4949879307ba4146648afa3d24cd0a \ + --hash=sha256:e2dbf0b5d9dabecd70c27bff9bf01629bf0724b05b0f0087a1fe4f45c702215f # via safir six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ @@ -581,7 +581,7 @@ uritemplate==4.1.1 \ --hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \ --hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e # via gidgethub -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 +urllib3==2.2.3 \ + --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ + 
--hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via requests diff --git a/requirements/tox.txt b/requirements/tox.txt index 5a2eb7afd2..0db59a8534 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -33,9 +33,9 @@ packaging==24.1 \ # pyproject-api # tox # tox-uv -platformdirs==4.3.2 \ - --hash=sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c \ - --hash=sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617 +platformdirs==4.3.3 \ + --hash=sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5 \ + --hash=sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0 # via # -c requirements/dev.txt # tox @@ -60,25 +60,25 @@ tox-uv==1.11.3 \ --hash=sha256:316f559ae5525edec12791d9e1f393e405ded5b7e7d50fbaee4726676951f49a \ --hash=sha256:d434787406ff2854600c1ceaa555519080026208cf7f65bb5d4b2d7c9c4776de # via -r requirements/tox.in -uv==0.4.9 \ - --hash=sha256:0340d2c7bf9afe0098e3301c1885de10e317232cfa346f0ac16374cee284a4cb \ - --hash=sha256:060af185481ef46ab97008cad330f3cd7a7aa1ce3d219b67d27c5a2a551ac2ea \ - --hash=sha256:1a8acc7abb2174bd3c8f5fc98345f2bb602f31b7558e37f3d23bef99ddd58dec \ - --hash=sha256:34bce9f4892130b01a7605d27bbeb71395e9b031d793123c250b79187ee307ca \ - --hash=sha256:45bf0cead2436b1977f71669e945db19990ca70a7765111fb951545815467bb6 \ - --hash=sha256:52101bc8652b4284b78fac52ed7878f3bae414bc4076c377735962666b309dde \ - --hash=sha256:5422680436f4cebef945bb2e562e01c02a4fa0a95f85d1b8010f2ee868a0b8c1 \ - --hash=sha256:55cf2522262ef663114bda5d80375ddc7f7af0d054df89426372a0d494380875 \ - --hash=sha256:566d4d7a475aacd21dbb4aba053cd4f4f52d65acdef2c83c59bcdff08756701e \ - --hash=sha256:5b66a52cb60a2882a882bc5f13afa6daf3172a54fe9fb998529d19418d5aed18 \ - --hash=sha256:630a6fe215829f734278e618c1633c2bb88ee03dc6a92ae9890fabd98ee810a9 \ - --hash=sha256:69529b6bf5de6ec8fbe8e022f5bcbaef778e76136fc37fae6ec7a8b18b3f9024 \ - 
--hash=sha256:71e87038fcc9f61b2d6f66c4a92354c6d0abe4baae21bb90241693f161ddeaa1 \ - --hash=sha256:8869637ea6231f66fe643be22f9334874db3496844b3d8bfd8efd4227ded3d44 \ - --hash=sha256:9c9b70f016f28cc05633b564d8690cfdb7ebac4d2210d9158819947841e00347 \ - --hash=sha256:b54a9022e9e1fdbf3ae15ef340a0d1d1847dd739df5023896aa8d97d88af1efe \ - --hash=sha256:bf834f7f360a192372d879eda86f6a1dd94195faf68154dcf7c90247098d2bb2 \ - --hash=sha256:f50cbdfbc8399e1211c580e47f42650a184541ee398af95ad29bf9a2e977baba +uv==0.4.10 \ + --hash=sha256:0784f75093a75390d8d480cc8a444516e78f08849db9a13c21791a5f651df4a1 \ + --hash=sha256:0f8b9ba4ecfbea343a00e46d509669606e55fe233d800752c4c25650473df358 \ + --hash=sha256:1b6b6c6b8cc0c4e54ab25e3b46e49d1e583e26c194572eb42bfeebf71b39cca2 \ + --hash=sha256:1ff5130b6f3af79c4e47f63db03215aed15e78cb4f1f51682af6f9949c2bcf00 \ + --hash=sha256:2ff29a2f55a697e78d787a41ab41d4b26421d200728289b88b6241d3b486c436 \ + --hash=sha256:30d1f8348a2b18e21a35c97ce42528781f242d0303881fc92fbacdcb653c8bca \ + --hash=sha256:3be73788db9ceacb94a521cf67ca5cc08bac512aef71145b904ab62a3acabdae \ + --hash=sha256:444e1cdb36d7ef103e52185f918800527c255dc369c9f90eb1f198dfa3f4d5bc \ + --hash=sha256:6ba1cc3070e5c63ce0a1421fbed28bd1b3ff520671d7badda11a501504c78394 \ + --hash=sha256:8fa510dfbbde4f8ad5cd2769568c7b0c3e867b74deaf4beabcca79e74e7550cc \ + --hash=sha256:97a1187e11a9df70d55bc577721ad4a19441cda56e4d69fb2f38d88c7650d2a0 \ + --hash=sha256:99954a94dd6c4bff8a9a963c05bc3988214ea39e7511a52fda35112e1a478447 \ + --hash=sha256:a9dc1f8fca5c4a2f73054d9f56c7397e9fc6ba43baefc503d6f0128d72ea662f \ + --hash=sha256:b89dfd213359a23797155ff8175e5202ed6b84aadeb20df92132127608d46acf \ + --hash=sha256:bc87d6c581cfed0979e0f5ee93383d46006c6d4a5e4eb9f43ef13bce61b50cc2 \ + --hash=sha256:bc99e6b45303f0881a8dc199f0b7ea8261dd1779e576e8477a7721ceeeaafcc7 \ + --hash=sha256:e99e3f761875962942e0743b868bd666021d5e14c3df494e820ef8f45fb88578 \ + --hash=sha256:ff9046a8c5e836e892ac7741e672ee016e92e55c659fa8195595df65a1f3accf 
# via tox-uv virtualenv==20.26.4 \ --hash=sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55 \ diff --git a/tox.ini b/tox.ini index 87733c00bd..0a26029174 100644 --- a/tox.ini +++ b/tox.ini @@ -45,13 +45,6 @@ deps = pre-commit commands = pre-commit run --all-files -[testenv:neophile-update] -description = Run neophile to update dependencies -skip_install = true -deps = - neophile -commands = neophile update {posargs} - [testenv:phalanx-lint-change] description = Lint application chart changes determined by Git commands = From da531a97b17c3ea72234202d7d63ce4d691288b2 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 16 Sep 2024 17:17:58 -0700 Subject: [PATCH 083/567] Fix spelling of updateSchema vo-cutouts setting --- applications/vo-cutouts/values-idfdev.yaml | 1 - applications/vo-cutouts/values-idfint.yaml | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/applications/vo-cutouts/values-idfdev.yaml b/applications/vo-cutouts/values-idfdev.yaml index 1ca562074c..d65f4f8bbe 100644 --- a/applications/vo-cutouts/values-idfdev.yaml +++ b/applications/vo-cutouts/values-idfdev.yaml @@ -1,7 +1,6 @@ config: serviceAccount: "vo-cutouts@science-platform-dev-7696.iam.gserviceaccount.com" storageBucketUrl: "gs://rubin-cutouts-dev-us-central1-output/" - upgradeSchema: true cloudsql: enabled: true diff --git a/applications/vo-cutouts/values-idfint.yaml b/applications/vo-cutouts/values-idfint.yaml index faca2b18da..9239f30c7d 100644 --- a/applications/vo-cutouts/values-idfint.yaml +++ b/applications/vo-cutouts/values-idfint.yaml @@ -1,7 +1,7 @@ config: serviceAccount: "vo-cutouts@science-platform-int-dc5d.iam.gserviceaccount.com" storageBucketUrl: "gs://rubin-cutouts-int-us-central1-output/" - upgradeSchema: true + updateSchema: true cloudsql: enabled: true From c44a5aa41579edc4e84f85f7fc629710fd6d6250 Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Tue, 17 Sep 2024 09:56:12 -0400 Subject: [PATCH 084/567] Increase memory for Times 
Square redis We're experiencing OOMKilled with 2Gi memory limits for the Times Square redis on usdf-rsp-dev --- applications/times-square/values.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/times-square/values.yaml b/applications/times-square/values.yaml index 1a26a01356..e6cdc61f51 100644 --- a/applications/times-square/values.yaml +++ b/applications/times-square/values.yaml @@ -200,10 +200,10 @@ redis: resources: limits: cpu: "1" - memory: "2Gi" + memory: "4Gi" requests: cpu: "6m" - memory: "50Mi" + memory: "1Gi" # -- Pod annotations for the Redis pod podAnnotations: {} From 0690c25921e4115f150485885e4f557deef00fdd Mon Sep 17 00:00:00 2001 From: Dan Fuchs Date: Tue, 17 Sep 2024 12:13:41 -0500 Subject: [PATCH 085/567] DM-45522: Enable strimzi-access-operator in idfdev --- applications/strimzi-access-operator/values-idfdev.yaml | 0 environments/values-idfdev.yaml | 1 + 2 files changed, 1 insertion(+) create mode 100644 applications/strimzi-access-operator/values-idfdev.yaml diff --git a/applications/strimzi-access-operator/values-idfdev.yaml b/applications/strimzi-access-operator/values-idfdev.yaml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/environments/values-idfdev.yaml b/environments/values-idfdev.yaml index c9b76b5e85..b0a52056de 100644 --- a/environments/values-idfdev.yaml +++ b/environments/values-idfdev.yaml @@ -29,6 +29,7 @@ applications: squareone: true sqlproxy-cross-project: true strimzi: true + strimzi-access-operator: true tap: true telegraf: true telegraf-ds: true From 8342bd28af7ea7ff2c1b0c0499b985a0d72c6ff9 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Tue, 17 Sep 2024 17:09:03 -0700 Subject: [PATCH 086/567] Enable OpenID Connect on idfprod Enable the Gafaelfawr OpenID Connect server on idfprod and configure it with a data rights mapping. Add documentation for how to add new OpenID Connect clients. 
--- applications/gafaelfawr/values-idfprod.yaml | 8 ++ .../gafaelfawr/add-oidc-client.rst | 102 ++++++++++++++++++ docs/applications/gafaelfawr/index.rst | 3 +- 3 files changed, 112 insertions(+), 1 deletion(-) create mode 100644 docs/applications/gafaelfawr/add-oidc-client.rst diff --git a/applications/gafaelfawr/values-idfprod.yaml b/applications/gafaelfawr/values-idfprod.yaml index ef48fe0314..f9148ef05d 100644 --- a/applications/gafaelfawr/values-idfprod.yaml +++ b/applications/gafaelfawr/values-idfprod.yaml @@ -28,6 +28,14 @@ config: firestore: project: "rsp-firestore-stable-e8eb" + # This environment provides authentication services to IDACs. + oidcServer: + enabled: true + dataRightsMapping: + g_users: + - "dp0.2" + - "dp0.3" + # Support generating user metadata for CADC authentication code. cadcBaseUuid: "5f0eb655-0e72-4948-a6a5-a94c0be9019f" diff --git a/docs/applications/gafaelfawr/add-oidc-client.rst b/docs/applications/gafaelfawr/add-oidc-client.rst new file mode 100644 index 0000000000..013319b66c --- /dev/null +++ b/docs/applications/gafaelfawr/add-oidc-client.rst @@ -0,0 +1,102 @@ +############################# +Add new OpenID Connect client +############################# + +Gafaelfawr can also serve as an OpenID Connect server, allowing third-party applications running inside Phalanx and OpenID Connect clients outside of Phalanx environments to authenticate users in the same way that the Science Platform does. + +Each OpenID Connect client of Gafaelfawr must be pre-registered and assigned a ``client_id`` and password. +To complete an authentication, the client must authenticate with that ``client_id`` and password. +See `the Gafaelfawr documentation `__. + +This page describes how to register a new client of Gafaelfawr. +You will need the following information: + +* The Phalanx environment to which you'll be adding the new client. +* A short, human-readable name of the new client you're adding. 
+* The return URL to which the user will be sent after authentication. + +.. note:: + + The instructions here are specific to SQuaRE-managed Phalanx environments. + For other environments, you can update the ``oidc-server-secrets`` Gafaelfawr secret key however you maintain static secrets. + +Add secret +========== + +OpenID Connect clients are configured in the ``oidc-server-secrets`` key of the ``gafaelfawr`` secret. +The value of this key is, unfortunately, a JSON representation of all of the clients. +We currently maintain two parallel records of the clients, one in a structured 1Password secret that is not currently used, and separately in the ``gafaelfawr`` secret. +The goal is to eventually add automation to Phalanx to generate the latter from the former. + +#. Open 1Password. + Go to the 1Password vault for static secrets for the Phalanx environment where you want to add an OpenID Connect client. + +#. Create or edit an item named ``oidc-clients``. + If it doesn't already exist, create it as an item of type :menuselection:`Server`. + +#. Add a new section for the new client. + Set the section title to a short, human-readable name for the OpenID Connect client. + This name should be enough to tell someone looking at this secret what this client is used for. + +#. Add a text field to the new section. + Change the label to ``id``. + Change the contents to :samp:`{random-id}.clients.{fqdn}` where the random ID is the results of ``os.urandom(16).hex()`` in Python and the FQDN is the FQDN of the environment. + For example, ``de5dd2c1fbf648e11d50b6cf3aa72277.clients.data.lsst.cloud``. + +#. Add a password field to the new section, leaving the label as ``password``. + You can let 1Password generate a random 20-character password if you want, or generate one of equivalent entropy however you choose. + +#. Add a final text field to the new section. + Change the label to ``return_uri``. + Set the value to the return URL of the client. 
+ This should be provided by the OpenID Connect client and will be the URL to which the user is sent after authentication. + +#. Now, you will need to copy this data into the ``gafaelfawr`` secret under the ``oidc-server-secrets`` key, creating that key if it doesn't already exist. + Unfortunately, you currently have to construct the JSON by hand. + The value of this key should be a JSON-encoded list of objects, and each object should have keys ``id``, ``password``, and ``return_uri`` with the information above. + Be sure to include all the clients, not just the new one that you're adding. + +Share the secret with the client +================================ + +You now need to convey the ``client_id`` (the ``id`` value above) and the ``client_secret`` (the ``password`` value above) to the OpenID Connect client. +They will need to configure their client software to use that ``client_id`` and ``client_secret`` whenever performing an OpenID Connect authentication. + +The easiest way to do this is often to create a separate 1Password secret and share it with the client. + +.. warning:: + + **DO NOT SHARE THE SECRETS CREATED ABOVE.** + The client should not have access to the ``oidc-clients`` or ``gafaelfawr`` secrets. + +#. Go to the SQuaRE vault and create a new secret. + Use a name like ``Gafaelfawr OIDC``, replacing ```` with a *short* human-readable name for the client. + Use the :menuselection:`Server` item type. + +#. Add the information above. + It's best to call the fields ``client_id``, ``client_secret``, and ``return_uri``, since those are the field names in the OpenID Connect standard and therefore what is usually used in software documentation. + Enter the same information as above. + +When sharing with someone who is managing multiple related clients, feel free to put all of the secrets in the same 1Password item in separate sections. + +Now, you can create a one-time 1Password link for this secret and share it with the user in Slack or via email. 
+ +Configure Gafaelfawr +==================== + +If this is the first OpenID Connect client for Gafaelfawr, you will need to enable OpenID Connect server support. +Do this by setting ``config.oidcServer.enabled`` to true in the Gafaelfawr :file:`values-{environment}.yaml` file. +See `the Gafaelfawr documentation `__ for more details. + +If the purpose of this OpenID Connect client is to provide services to an IDAC or another external client that may need data rights information (see :dmtn:`253`), ensure the configuration of the Gafaelfawr OpenID Connect server is correct and has a ``dataRightsMapping`` setting. +See `the Gafaelfawr documentation `__ for more information. + +Then, whether or not you needed to make configuration changes, you will need to sync secrets for this environment. +Follow the normal process (:doc:`/admin/sync-secrets`) to do that. + +Finally, you will need to restart Gafaelfawr to pick up the new secret. +Do this by selecting :menuselection:`Restart` on the deployment in Argo CD (see :ref:`branch-deploy-restart`). + +.. note:: + + Since this requires a Gafaelfawr restart, and since you are changing a secret that contains manually-formatted JSON that is prone to syntax errors that will prevent Gafaelfawr from starting, you will normally want to do this during a maintenance window for a production environment. diff --git a/docs/applications/gafaelfawr/index.rst b/docs/applications/gafaelfawr/index.rst index 93546861eb..f921f7e1f2 100644 --- a/docs/applications/gafaelfawr/index.rst +++ b/docs/applications/gafaelfawr/index.rst @@ -8,7 +8,7 @@ Gafaelfawr provides authentication and identity management services for the Rubi It is primarily used as an NGINX ``auth_request`` handler configured via annotations on the ``Ingress`` resources of Science Platform services. 
In that role, it requires a user have the required access scope to use that service, rejects users who do not have that scope, and redirects users who are not authenticated to the authentication process. -Gafaelfawr supports authentication via either OpenID Connect (often through CILogon_ or GitHub). +Gafaelfawr supports authentication via either OpenID Connect (often through CILogon_) or GitHub. Gafaelfawr also provides a token management API and (currently) UI for users of the Science Platform. @@ -24,6 +24,7 @@ Guides bootstrap manage-schema recreate-token + add-oidc-client github-organizations troubleshoot values From 57cb3da173f9c27d52d7fec3e3e617d4c1bbb675 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Fri, 9 Aug 2024 11:42:53 -0700 Subject: [PATCH 087/567] Remove float support from Prompt Processing timeouts. Gunicorn and Knative both require that all timeouts be integers, so it does not make sense to force floating-point math. --- charts/prompt-proto-service/templates/prompt-proto-service.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charts/prompt-proto-service/templates/prompt-proto-service.yaml b/charts/prompt-proto-service/templates/prompt-proto-service.yaml index 690c5b34aa..326a3e523f 100644 --- a/charts/prompt-proto-service/templates/prompt-proto-service.yaml +++ b/charts/prompt-proto-service/templates/prompt-proto-service.yaml @@ -47,7 +47,7 @@ spec: - name: WORKER_GRACE_PERIOD value: {{ .Values.worker.grace_period | toString | quote }} {{- /* Knative not configured for timeouts longer than 1200 seconds, and shouldn't need to be. 
*/ -}} - {{- $knative_timeout := minf 1200 (addf (mulf 2 (coalesce .Values.worker.timeout 600)) .Values.knative.extraTimeout) }} + {{- $knative_timeout := min 1200 (add (mul 2 (coalesce .Values.worker.timeout 600)) .Values.knative.extraTimeout) }} - name: RUBIN_INSTRUMENT value: {{ .Values.instrument.name }} - name: PREPROCESSING_PIPELINES_CONFIG From c368d22f7c0e6b354f256b13871e0f2634760d25 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Fri, 9 Aug 2024 12:06:51 -0700 Subject: [PATCH 088/567] Support multiple workers per Prompt Processing container. The existing `containerConcurrency` flag is now exposed in the container as an environment variable, and can be used to configure workers. I've kept the existing resource requests as per-pod, not per-worker, because managing units like "8Gi" in the template would become messy. --- .../prompt-proto-service-hsc-gpu/README.md | 15 ++++++++------- .../prompt-proto-service-hsc-gpu/values.yaml | 15 ++++++++++----- applications/prompt-proto-service-hsc/README.md | 15 ++++++++------- applications/prompt-proto-service-hsc/values.yaml | 15 ++++++++++----- .../prompt-proto-service-latiss/README.md | 15 ++++++++------- .../prompt-proto-service-latiss/values.yaml | 15 ++++++++++----- .../prompt-proto-service-lsstcam/README.md | 15 ++++++++------- .../prompt-proto-service-lsstcam/values.yaml | 15 ++++++++++----- .../prompt-proto-service-lsstcomcam/README.md | 15 ++++++++------- .../prompt-proto-service-lsstcomcam/values.yaml | 15 ++++++++++----- .../prompt-proto-service-lsstcomcamsim/README.md | 15 ++++++++------- .../values.yaml | 15 ++++++++++----- charts/prompt-proto-service/README.md | 14 +++++++------- .../templates/prompt-proto-service.yaml | 2 ++ charts/prompt-proto-service/values.yaml | 12 +++++++----- 15 files changed, 124 insertions(+), 84 deletions(-) diff --git a/applications/prompt-proto-service-hsc-gpu/README.md b/applications/prompt-proto-service-hsc-gpu/README.md index b97ddaa42a..76ce7c399a 100644 --- 
a/applications/prompt-proto-service-hsc-gpu/README.md +++ b/applications/prompt-proto-service-hsc-gpu/README.md @@ -21,6 +21,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | | prompt-proto-service.cache.patchesPerImage | int | `4` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. | +| prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | | prompt-proto-service.image.pullPolicy | string | `IfNotPresent` in prod, `Always` in dev | Pull policy for the PP image | | prompt-proto-service.image.repository | string | `"ghcr.io/lsst-dm/prompt-service"` | Image to use in the PP deployment | | prompt-proto-service.image.tag | string | `"latest"` | Overrides the image tag whose default is the chart appVersion. | @@ -32,16 +33,16 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. 
| | prompt-proto-service.instrument.skymap | string | `"hsc_rings_v1"` | Skymap to use with the instrument | -| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores. | -| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested. | -| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). | -| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). | +| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | +| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | | prompt-proto-service.knative.extraTimeout | int | `10` | To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. This parameter adds extra time to that minimum (seconds). | | prompt-proto-service.knative.gpu | bool | `true` | GPUs enabled. | -| prompt-proto-service.knative.gpuRequest | int | `1` | The number of GPUs to request. | +| prompt-proto-service.knative.gpuRequest | int | `1` | The number of GPUs to request for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.idleTimeout | int | `0` | Maximum time that a container can send nothing to Knative (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. 
| -| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit. | -| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request. | +| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | diff --git a/applications/prompt-proto-service-hsc-gpu/values.yaml b/applications/prompt-proto-service-hsc-gpu/values.yaml index b8cc85249d..c838c1475a 100644 --- a/applications/prompt-proto-service-hsc-gpu/values.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values.yaml @@ -121,21 +121,23 @@ prompt-proto-service: auth_env: true knative: - # -- The cpu cores requested. + # -- The cpu cores requested for the full pod (see `containerConcurrency`). cpuRequest: 1 - # -- The maximum cpu cores. + # -- The maximum cpu cores for the full pod (see `containerConcurrency`). cpuLimit: 1 # -- The storage space reserved for each container (mostly local Butler). 
+ # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageRequest: "5Gi" # -- The maximum storage space allowed for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageLimit: "5Gi" # -- GPUs enabled. gpu: true - # -- The number of GPUs to request. + # -- The number of GPUs to request for the full pod (see `containerConcurrency`). gpuRequest: 1 - # -- The minimum memory to request. + # -- The minimum memory to request for the full pod (see `containerConcurrency`). memoryRequest: "2Gi" - # -- The maximum memory limit. + # -- The maximum memory limit for the full pod (see `containerConcurrency`). memoryLimit: "8Gi" # -- To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. # This parameter adds extra time to that minimum (seconds). @@ -149,6 +151,9 @@ prompt-proto-service: # If 0, idle timeout is ignored. responseStartTimeout: 0 + # -- The number of Knative requests that can be handled simultaneously by one container + containerConcurrency: 1 + # -- Kubernetes YAML configs for extra container volume(s). # Any volumes required by other config options are automatically handled by the Helm chart. additionalVolumeMounts: [] diff --git a/applications/prompt-proto-service-hsc/README.md b/applications/prompt-proto-service-hsc/README.md index 1d6c810a2c..a463a85160 100644 --- a/applications/prompt-proto-service-hsc/README.md +++ b/applications/prompt-proto-service-hsc/README.md @@ -21,6 +21,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. 
| | prompt-proto-service.cache.patchesPerImage | int | `4` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. | +| prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | | prompt-proto-service.image.pullPolicy | string | `IfNotPresent` in prod, `Always` in dev | Pull policy for the PP image | | prompt-proto-service.image.repository | string | `"ghcr.io/lsst-dm/prompt-service"` | Image to use in the PP deployment | | prompt-proto-service.image.tag | string | `"latest"` | Overrides the image tag whose default is the chart appVersion. | @@ -32,16 +33,16 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.skymap | string | `"hsc_rings_v1"` | Skymap to use with the instrument | -| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores. | -| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested. | -| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). | -| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). 
| +| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | +| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | | prompt-proto-service.knative.extraTimeout | int | `10` | To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. This parameter adds extra time to that minimum (seconds). | | prompt-proto-service.knative.gpu | bool | `false` | GPUs enabled. | -| prompt-proto-service.knative.gpuRequest | int | `0` | The number of GPUs to request. | +| prompt-proto-service.knative.gpuRequest | int | `0` | The number of GPUs to request for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.idleTimeout | int | `0` | Maximum time that a container can send nothing to Knative (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | -| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit. | -| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request. | +| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request for the full pod (see `containerConcurrency`). 
| | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | diff --git a/applications/prompt-proto-service-hsc/values.yaml b/applications/prompt-proto-service-hsc/values.yaml index 3f4b799c67..1361c25215 100644 --- a/applications/prompt-proto-service-hsc/values.yaml +++ b/applications/prompt-proto-service-hsc/values.yaml @@ -121,21 +121,23 @@ prompt-proto-service: auth_env: true knative: - # -- The cpu cores requested. + # -- The cpu cores requested for the full pod (see `containerConcurrency`). cpuRequest: 1 - # -- The maximum cpu cores. + # -- The maximum cpu cores for the full pod (see `containerConcurrency`). cpuLimit: 1 # -- The storage space reserved for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageRequest: "5Gi" # -- The maximum storage space allowed for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageLimit: "5Gi" # -- GPUs enabled. gpu: false - # -- The number of GPUs to request. + # -- The number of GPUs to request for the full pod (see `containerConcurrency`). gpuRequest: 0 - # -- The minimum memory to request. + # -- The minimum memory to request for the full pod (see `containerConcurrency`). memoryRequest: "2Gi" - # -- The maximum memory limit. 
+ # -- The maximum memory limit for the full pod (see `containerConcurrency`). memoryLimit: "8Gi" # -- To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. # This parameter adds extra time to that minimum (seconds). @@ -149,6 +151,9 @@ prompt-proto-service: # If 0, idle timeout is ignored. responseStartTimeout: 0 + # -- The number of Knative requests that can be handled simultaneously by one container + containerConcurrency: 1 + # -- Kubernetes YAML configs for extra container volume(s). # Any volumes required by other config options are automatically handled by the Helm chart. additionalVolumeMounts: [] diff --git a/applications/prompt-proto-service-latiss/README.md b/applications/prompt-proto-service-latiss/README.md index 17da7029ab..579207cc66 100644 --- a/applications/prompt-proto-service-latiss/README.md +++ b/applications/prompt-proto-service-latiss/README.md @@ -21,6 +21,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | | prompt-proto-service.cache.patchesPerImage | int | `6` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. 
| +| prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | | prompt-proto-service.image.pullPolicy | string | `IfNotPresent` in prod, `Always` in dev | Pull policy for the PP image | | prompt-proto-service.image.repository | string | `"ghcr.io/lsst-dm/prompt-service"` | Image to use in the PP deployment | | prompt-proto-service.image.tag | string | `"latest"` | Overrides the image tag whose default is the chart appVersion. | @@ -32,16 +33,16 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.skymap | string | `"latiss_v1"` | Skymap to use with the instrument | -| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores. | -| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested. | -| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). | -| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). | +| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). 
| +| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | +| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | | prompt-proto-service.knative.extraTimeout | int | `10` | To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. This parameter adds extra time to that minimum (seconds). | | prompt-proto-service.knative.gpu | bool | `false` | GPUs enabled. | -| prompt-proto-service.knative.gpuRequest | int | `0` | The number of GPUs to request. | +| prompt-proto-service.knative.gpuRequest | int | `0` | The number of GPUs to request for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.idleTimeout | int | `0` | Maximum time that a container can send nothing to Knative (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | -| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit. | -| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request. | +| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. 
| | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | diff --git a/applications/prompt-proto-service-latiss/values.yaml b/applications/prompt-proto-service-latiss/values.yaml index 9768a1c05d..5b82a11fed 100644 --- a/applications/prompt-proto-service-latiss/values.yaml +++ b/applications/prompt-proto-service-latiss/values.yaml @@ -121,21 +121,23 @@ prompt-proto-service: auth_env: true knative: - # -- The cpu cores requested. + # -- The cpu cores requested for the full pod (see `containerConcurrency`). cpuRequest: 1 - # -- The maximum cpu cores. + # -- The maximum cpu cores for the full pod (see `containerConcurrency`). cpuLimit: 1 # -- The storage space reserved for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageRequest: "5Gi" # -- The maximum storage space allowed for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageLimit: "5Gi" # -- GPUs enabled. gpu: false - # -- The number of GPUs to request. + # -- The number of GPUs to request for the full pod (see `containerConcurrency`). gpuRequest: 0 - # -- The minimum memory to request. + # -- The minimum memory to request for the full pod (see `containerConcurrency`). memoryRequest: "2Gi" - # -- The maximum memory limit. + # -- The maximum memory limit for the full pod (see `containerConcurrency`). memoryLimit: "8Gi" # -- To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. # This parameter adds extra time to that minimum (seconds). 
@@ -149,6 +151,9 @@ prompt-proto-service: # If 0, idle timeout is ignored. responseStartTimeout: 0 + # -- The number of Knative requests that can be handled simultaneously by one container + containerConcurrency: 1 + # -- Kubernetes YAML configs for extra container volume(s). # Any volumes required by other config options are automatically handled by the Helm chart. additionalVolumeMounts: [] diff --git a/applications/prompt-proto-service-lsstcam/README.md b/applications/prompt-proto-service-lsstcam/README.md index 20834485da..419a466c0d 100644 --- a/applications/prompt-proto-service-lsstcam/README.md +++ b/applications/prompt-proto-service-lsstcam/README.md @@ -21,6 +21,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | | prompt-proto-service.cache.patchesPerImage | int | `4` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. | +| prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | | prompt-proto-service.image.pullPolicy | string | `IfNotPresent` in prod, `Always` in dev | Pull policy for the PP image | | prompt-proto-service.image.repository | string | `"ghcr.io/lsst-dm/prompt-service"` | Image to use in the PP deployment | | prompt-proto-service.image.tag | string | `"latest"` | Overrides the image tag whose default is the chart appVersion. | @@ -32,16 +33,16 @@ Prompt Proto Service is an event driven service for processing camera images. 
Th | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.skymap | string | `""` | Skymap to use with the instrument | -| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores. | -| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested. | -| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). | -| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). | +| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | +| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | | prompt-proto-service.knative.extraTimeout | int | `10` | To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. This parameter adds extra time to that minimum (seconds). 
| | prompt-proto-service.knative.gpu | bool | `false` | GPUs enabled. | -| prompt-proto-service.knative.gpuRequest | int | `0` | The number of GPUs to request. | +| prompt-proto-service.knative.gpuRequest | int | `0` | The number of GPUs to request for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.idleTimeout | int | `0` | Maximum time that a container can send nothing to Knative (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | -| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit. | -| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request. | +| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. 
| Annotations for the prompt-proto-service pod | diff --git a/applications/prompt-proto-service-lsstcam/values.yaml b/applications/prompt-proto-service-lsstcam/values.yaml index 6221360a93..c0d79823c9 100644 --- a/applications/prompt-proto-service-lsstcam/values.yaml +++ b/applications/prompt-proto-service-lsstcam/values.yaml @@ -121,21 +121,23 @@ prompt-proto-service: auth_env: true knative: - # -- The cpu cores requested. + # -- The cpu cores requested for the full pod (see `containerConcurrency`). cpuRequest: 1 - # -- The maximum cpu cores. + # -- The maximum cpu cores for the full pod (see `containerConcurrency`). cpuLimit: 1 # -- The storage space reserved for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageRequest: "5Gi" # -- The maximum storage space allowed for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageLimit: "5Gi" # -- GPUs enabled. gpu: false - # -- The number of GPUs to request. + # -- The number of GPUs to request for the full pod (see `containerConcurrency`). gpuRequest: 0 - # -- The minimum memory to request. + # -- The minimum memory to request for the full pod (see `containerConcurrency`). memoryRequest: "2Gi" - # -- The maximum memory limit. + # -- The maximum memory limit for the full pod (see `containerConcurrency`). memoryLimit: "8Gi" # -- To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. # This parameter adds extra time to that minimum (seconds). @@ -149,6 +151,9 @@ prompt-proto-service: # If 0, idle timeout is ignored. responseStartTimeout: 0 + # -- The number of Knative requests that can be handled simultaneously by one container + containerConcurrency: 1 + # -- Kubernetes YAML configs for extra container volume(s). # Any volumes required by other config options are automatically handled by the Helm chart. 
additionalVolumeMounts: [] diff --git a/applications/prompt-proto-service-lsstcomcam/README.md b/applications/prompt-proto-service-lsstcomcam/README.md index ca625a5b66..71a9b5713d 100644 --- a/applications/prompt-proto-service-lsstcomcam/README.md +++ b/applications/prompt-proto-service-lsstcomcam/README.md @@ -21,6 +21,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | | prompt-proto-service.cache.patchesPerImage | int | `4` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. | +| prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | | prompt-proto-service.image.pullPolicy | string | `IfNotPresent` in prod, `Always` in dev | Pull policy for the PP image | | prompt-proto-service.image.repository | string | `"ghcr.io/lsst-dm/prompt-service"` | Image to use in the PP deployment | | prompt-proto-service.image.tag | string | `"latest"` | Overrides the image tag whose default is the chart appVersion. | @@ -32,16 +33,16 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. 
| | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.skymap | string | `""` | Skymap to use with the instrument | -| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores. | -| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested. | -| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). | -| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). | +| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | +| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | | prompt-proto-service.knative.extraTimeout | int | `10` | To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. This parameter adds extra time to that minimum (seconds). | | prompt-proto-service.knative.gpu | bool | `false` | GPUs enabled. | -| prompt-proto-service.knative.gpuRequest | int | `0` | The number of GPUs to request. | +| prompt-proto-service.knative.gpuRequest | int | `0` | The number of GPUs to request for the full pod (see `containerConcurrency`). 
| | prompt-proto-service.knative.idleTimeout | int | `0` | Maximum time that a container can send nothing to Knative (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | -| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit. | -| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request. | +| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | diff --git a/applications/prompt-proto-service-lsstcomcam/values.yaml b/applications/prompt-proto-service-lsstcomcam/values.yaml index 67fc0978a9..83d6a9616b 100644 --- a/applications/prompt-proto-service-lsstcomcam/values.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values.yaml @@ -121,21 +121,23 @@ prompt-proto-service: auth_env: true knative: - # -- The cpu cores requested. + # -- The cpu cores requested for the full pod (see `containerConcurrency`). cpuRequest: 1 - # -- The maximum cpu cores. + # -- The maximum cpu cores for the full pod (see `containerConcurrency`). 
cpuLimit: 1 # -- The storage space reserved for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageRequest: "5Gi" # -- The maximum storage space allowed for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageLimit: "5Gi" # -- GPUs enabled. gpu: false - # -- The number of GPUs to request. + # -- The number of GPUs to request for the full pod (see `containerConcurrency`). gpuRequest: 0 - # -- The minimum memory to request. + # -- The minimum memory to request for the full pod (see `containerConcurrency`). memoryRequest: "2Gi" - # -- The maximum memory limit. + # -- The maximum memory limit for the full pod (see `containerConcurrency`). memoryLimit: "8Gi" # -- To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. # This parameter adds extra time to that minimum (seconds). @@ -149,6 +151,9 @@ prompt-proto-service: # If 0, idle timeout is ignored. responseStartTimeout: 0 + # -- The number of Knative requests that can be handled simultaneously by one container + containerConcurrency: 1 + # -- Kubernetes YAML configs for extra container volume(s). # Any volumes required by other config options are automatically handled by the Helm chart. additionalVolumeMounts: [] diff --git a/applications/prompt-proto-service-lsstcomcamsim/README.md b/applications/prompt-proto-service-lsstcomcamsim/README.md index 55d6b814c6..0bf22395e9 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/README.md +++ b/applications/prompt-proto-service-lsstcomcamsim/README.md @@ -21,6 +21,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. 
| | prompt-proto-service.cache.patchesPerImage | int | `16` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `6` | A factor by which to multiply `baseSize` for refcat datasets. | +| prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | | prompt-proto-service.image.pullPolicy | string | `IfNotPresent` in prod, `Always` in dev | Pull policy for the PP image | | prompt-proto-service.image.repository | string | `"ghcr.io/lsst-dm/prompt-service"` | Image to use in the PP deployment | | prompt-proto-service.image.tag | string | `"latest"` | Overrides the image tag whose default is the chart appVersion. | @@ -32,16 +33,16 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.skymap | string | `"ops_rehersal_prep_2k_v1"` | Skymap to use with the instrument | -| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores. | -| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested. | -| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). | -| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). 
| +| prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | +| prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | | prompt-proto-service.knative.extraTimeout | int | `10` | To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. This parameter adds extra time to that minimum (seconds). | | prompt-proto-service.knative.gpu | bool | `false` | GPUs enabled. | -| prompt-proto-service.knative.gpuRequest | int | `0` | The number of GPUs to request. | +| prompt-proto-service.knative.gpuRequest | int | `0` | The number of GPUs to request for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.idleTimeout | int | `0` | Maximum time that a container can send nothing to Knative (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | -| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit. | -| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request. | +| prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit for the full pod (see `containerConcurrency`). | +| prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request for the full pod (see `containerConcurrency`). 
| | prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | diff --git a/applications/prompt-proto-service-lsstcomcamsim/values.yaml b/applications/prompt-proto-service-lsstcomcamsim/values.yaml index 47815b63cf..ae5879d20a 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values.yaml @@ -121,21 +121,23 @@ prompt-proto-service: auth_env: true knative: - # -- The cpu cores requested. + # -- The cpu cores requested for the full pod (see `containerConcurrency`). cpuRequest: 1 - # -- The maximum cpu cores. + # -- The maximum cpu cores for the full pod (see `containerConcurrency`). cpuLimit: 1 # -- The storage space reserved for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageRequest: "5Gi" # -- The maximum storage space allowed for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageLimit: "5Gi" # -- GPUs enabled. gpu: false - # -- The number of GPUs to request. + # -- The number of GPUs to request for the full pod (see `containerConcurrency`). gpuRequest: 0 - # -- The minimum memory to request. + # -- The minimum memory to request for the full pod (see `containerConcurrency`). 
memoryRequest: "2Gi" - # -- The maximum memory limit. + # -- The maximum memory limit for the full pod (see `containerConcurrency`). memoryLimit: "8Gi" # -- To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. # This parameter adds extra time to that minimum (seconds). @@ -149,6 +151,9 @@ prompt-proto-service: # If 0, idle timeout is ignored. responseStartTimeout: 0 + # -- The number of Knative requests that can be handled simultaneously by one container + containerConcurrency: 1 + # -- Kubernetes YAML configs for extra container volume(s). # Any volumes required by other config options are automatically handled by the Helm chart. additionalVolumeMounts: [] diff --git a/charts/prompt-proto-service/README.md b/charts/prompt-proto-service/README.md index 5f3f2efadb..03390726d6 100644 --- a/charts/prompt-proto-service/README.md +++ b/charts/prompt-proto-service/README.md @@ -36,16 +36,16 @@ Event-driven processing of camera images | instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits' raws. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | | instrument.skymap | string | `""` | Skymap to use with the instrument | -| knative.cpuLimit | int | `1` | The maximum cpu cores. | -| knative.cpuRequest | int | `1` | The cpu cores requested. | -| knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). | -| knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). 
| +| knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | +| knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | +| knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | +| knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | | knative.extraTimeout | int | `10` | To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. This parameter adds extra time to that minimum (seconds). | | knative.gpu | bool | `false` | GPUs enabled. | -| knative.gpuRequest | int | `0` | The number of GPUs to request. | +| knative.gpuRequest | int | `0` | The number of GPUs to request for the full pod (see `containerConcurrency`). | | knative.idleTimeout | int | `0` | Maximum time that a container can send nothing to Knative (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | -| knative.memoryLimit | string | `"8Gi"` | The maximum memory limit. | -| knative.memoryRequest | string | `"2Gi"` | The minimum memory to request. | +| knative.memoryLimit | string | `"8Gi"` | The maximum memory limit for the full pod (see `containerConcurrency`). | +| knative.memoryRequest | string | `"2Gi"` | The minimum memory to request for the full pod (see `containerConcurrency`). | | knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, startup timeout is ignored. | | logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. 
| Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | nameOverride | string | `""` | Override the base name for resources | diff --git a/charts/prompt-proto-service/templates/prompt-proto-service.yaml b/charts/prompt-proto-service/templates/prompt-proto-service.yaml index 326a3e523f..841be47dcb 100644 --- a/charts/prompt-proto-service/templates/prompt-proto-service.yaml +++ b/charts/prompt-proto-service/templates/prompt-proto-service.yaml @@ -40,6 +40,8 @@ spec: imagePullPolicy: {{ .Values.image.pullPolicy | quote }} name: user-container env: + - name: WORKER_COUNT + value: {{ .Values.containerConcurrency | toString | quote }} - name: WORKER_RESTART_FREQ value: {{ .Values.worker.restart | toString | quote }} - name: WORKER_TIMEOUT diff --git a/charts/prompt-proto-service/values.yaml b/charts/prompt-proto-service/values.yaml index 7751ab89ea..954c50e7d1 100644 --- a/charts/prompt-proto-service/values.yaml +++ b/charts/prompt-proto-service/values.yaml @@ -124,21 +124,23 @@ sasquatch: auth_env: true knative: - # -- The cpu cores requested. + # -- The cpu cores requested for the full pod (see `containerConcurrency`). cpuRequest: 1 - # -- The maximum cpu cores. + # -- The maximum cpu cores for the full pod (see `containerConcurrency`). cpuLimit: 1 # -- The storage space reserved for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageRequest: "5Gi" # -- The maximum storage space allowed for each container (mostly local Butler). + # This allocation is for the full pod (see `containerConcurrency`) ephemeralStorageLimit: "5Gi" # -- GPUs enabled. gpu: false - # -- The number of GPUs to request. + # -- The number of GPUs to request for the full pod (see `containerConcurrency`). gpuRequest: 0 - # -- The minimum memory to request. 
+ # -- The minimum memory to request for the full pod (see `containerConcurrency`). memoryRequest: "2Gi" - # -- The maximum memory limit. + # -- The maximum memory limit for the full pod (see `containerConcurrency`). memoryLimit: "8Gi" # -- To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. # This parameter adds extra time to that minimum (seconds). From f77dbd317b4944bab76ef73f9a637f3fc1b25522 Mon Sep 17 00:00:00 2001 From: Amanda Ibsen Date: Wed, 18 Sep 2024 11:32:41 +0000 Subject: [PATCH 089/567] change lb ip, update to match test env --- applications/ingress-nginx/values-roe.yaml | 2 +- .../vault-secrets-operator/values-roe.yaml | 14 ++++++++++++++ environments/values-roe.yaml | 2 +- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/applications/ingress-nginx/values-roe.yaml b/applications/ingress-nginx/values-roe.yaml index e2e549ef68..f104956429 100644 --- a/applications/ingress-nginx/values-roe.yaml +++ b/applications/ingress-nginx/values-roe.yaml @@ -7,7 +7,7 @@ ingress-nginx: use-proxy-protocol: "false" enable-health-monitor: "false" service: - loadBalancerIP: "192.41.122.16" + loadBalancerIP: "192.41.122.52" annotations: kubernetes.io/ingress.class: "openstack" loadbalancer.openstack.org/enable-health-monitor: "false" diff --git a/applications/vault-secrets-operator/values-roe.yaml b/applications/vault-secrets-operator/values-roe.yaml index e69de29bb2..1e40e6f933 100644 --- a/applications/vault-secrets-operator/values-roe.yaml +++ b/applications/vault-secrets-operator/values-roe.yaml @@ -0,0 +1,14 @@ +vault-secrets-operator: + environmentVars: + - name: VAULT_ROLE_ID + valueFrom: + secretKeyRef: + name: vault-credentials + key: VAULT_ROLE_ID + - name: VAULT_SECRET_ID + valueFrom: + secretKeyRef: + name: vault-credentials + key: VAULT_SECRET_ID + vault: + authMethod: approle diff --git a/environments/values-roe.yaml b/environments/values-roe.yaml index 444f3bd295..4bdd211bbb 100644 --- 
a/environments/values-roe.yaml +++ b/environments/values-roe.yaml @@ -2,7 +2,7 @@ name: "roe" fqdn: "rsp.lsst.ac.uk" appOfAppsName: "science-platform" vaultUrl: "https://vault.lsst.ac.uk" -vaultPathPrefix: "secret/k8s_operator/roe" +vaultPathPrefix: "kv-v2/k8s_operator/roe" applications: mobu: true From 94a37d646d0d6153b04d6055f0577f2214853227 Mon Sep 17 00:00:00 2001 From: Amanda Ibsen Date: Wed, 18 Sep 2024 15:11:30 +0000 Subject: [PATCH 090/567] changed lb floating ip to something that is available --- applications/ingress-nginx/values-roe.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/ingress-nginx/values-roe.yaml b/applications/ingress-nginx/values-roe.yaml index f104956429..3fcae8f034 100644 --- a/applications/ingress-nginx/values-roe.yaml +++ b/applications/ingress-nginx/values-roe.yaml @@ -7,7 +7,7 @@ ingress-nginx: use-proxy-protocol: "false" enable-health-monitor: "false" service: - loadBalancerIP: "192.41.122.52" + loadBalancerIP: "192.41.122.130" annotations: kubernetes.io/ingress.class: "openstack" loadbalancer.openstack.org/enable-health-monitor: "false" From 519ae42b10d068d8f41b54aa079bc32641bbf11d Mon Sep 17 00:00:00 2001 From: Amanda Ibsen Date: Wed, 18 Sep 2024 15:33:07 +0000 Subject: [PATCH 091/567] added helm chart for ssotap app --- applications/ssotap/values-roe.yaml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 applications/ssotap/values-roe.yaml diff --git a/applications/ssotap/values-roe.yaml b/applications/ssotap/values-roe.yaml new file mode 100644 index 0000000000..e5b3eb69a4 --- /dev/null +++ b/applications/ssotap/values-roe.yaml @@ -0,0 +1,16 @@ +cadc-tap: + tapSchema: + image: + repository: "lsstuk/tap-schema-roe-sso" + + config: + gcsBucket: "async" + gcsBucketUrl: "https://somerville.ed.ac.uk:6780" + gcsBucketType: "S3" + + pg: + host: "192.41.122.118:5432" + database: "dp03_catalogs_10yr" + username: "dp03_user" + + vaultSecretName: "ssotap" From 
09a385284f4fe2595e276746876420d2d3258b62 Mon Sep 17 00:00:00 2001 From: A I Date: Wed, 18 Sep 2024 16:45:42 +0100 Subject: [PATCH 092/567] added necessary value to gafaelfawr config file --- applications/gafaelfawr/values-roe.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/gafaelfawr/values-roe.yaml b/applications/gafaelfawr/values-roe.yaml index f53b9e0ead..216835fcc3 100644 --- a/applications/gafaelfawr/values-roe.yaml +++ b/applications/gafaelfawr/values-roe.yaml @@ -7,7 +7,7 @@ config: github: clientId: "10172b4db1b67ee31620" - + cadcBaseUuid: "4cb5f948-aad9-466c-837b-5eae565b0a77" # Allow access by GitHub team. groupMapping: "exec:admin": From d506dc23ddb680fe17036d0f74753b19bedd1321 Mon Sep 17 00:00:00 2001 From: pav511 <38131208+pav511@users.noreply.github.com> Date: Wed, 18 Sep 2024 10:32:06 -0700 Subject: [PATCH 093/567] afausti dev alert-stream-broker rbac --- applications/argocd/values-usdfdev-alert-stream-broker.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/argocd/values-usdfdev-alert-stream-broker.yaml b/applications/argocd/values-usdfdev-alert-stream-broker.yaml index 482984f9b7..8298470022 100644 --- a/applications/argocd/values-usdfdev-alert-stream-broker.yaml +++ b/applications/argocd/values-usdfdev-alert-stream-broker.yaml @@ -33,6 +33,7 @@ argo-cd: g, smart@slac.stanford.edu, role:admin g, ebellm@slac.stanford.edu, role:admin g, hchiang2@slac.stanford.edu, role:admin + g, afausti@slac.stanford.edu, role:admin scopes: "[email]" server: From 9102bc2458431ec36846a1466b23b95c709b6bca Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Tue, 17 Sep 2024 17:33:16 -0400 Subject: [PATCH 094/567] Create KafkaUser for templatebot - Make consumer group a prefix rule because faststream seems to need each consumer to have a different group ID. 
- Drop access to unneeded topics --- .../templates/templatebot-user.yaml | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 applications/sasquatch/charts/square-events/templates/templatebot-user.yaml diff --git a/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml b/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml new file mode 100644 index 0000000000..fb46b65e2b --- /dev/null +++ b/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml @@ -0,0 +1,44 @@ +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaUser +metadata: + name: templatebot + labels: + strimzi.io/cluster: {{ .Values.cluster.name }} +spec: + template: + secret: + metadata: + annotations: + replicator.v1.mittwald.de/replication-allowed: "true" + replicator.v1.mittwald.de/replication-allowed-namespaces: "templatebot" + authentication: + type: tls + authorization: + type: simple + acls: + - resource: + type: group + name: "templatebot" + patternType: prefix + operations: + - "Read" + host: "*" + - resource: + type: topic + name: "lsst.square-events.squarebot.slack.app.mention" + patternType: literal + type: allow + host: "*" + operations: + - "Read" + - "Describe" + - resource: + type: topic + name: "lsst.square-events.squarebot.slack.message.im" + patternType: literal + type: allow + host: "*" + operations: + - "Read" + - "Describe" From f55be4988f72e58ec8c5318f466e3a92898a1cf9 Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Tue, 17 Sep 2024 17:34:01 -0400 Subject: [PATCH 095/567] Add Phalanx app for templatebot This adds [templatebot](https://github.com/lsst-sqre/templatebot) into Phalanx for deployment with the modern Roundtable clusters. Templatebot works with the Squarebot message bus, so it works similarly to apps like Unfurlbot. 
--- applications/templatebot/.helmignore | 23 ++++ applications/templatebot/Chart.yaml | 8 ++ applications/templatebot/README.md | 30 +++++ applications/templatebot/secrets.yaml | 26 ++++ .../templatebot/templates/_helpers.tpl | 26 ++++ .../templatebot/templates/configmap.yaml | 13 ++ .../templatebot/templates/deployment.yaml | 111 ++++++++++++++++++ .../templatebot/templates/kafkaaccess.yaml | 14 +++ .../templatebot/templates/networkpolicy.yaml | 21 ++++ .../templatebot/templates/service.yaml | 15 +++ .../templatebot/templates/vaultsecret.yaml | 9 ++ .../templatebot/values-roundtable-dev.yaml | 5 + .../templatebot/values-roundtable-prod.yaml | 0 applications/templatebot/values.yaml | 70 +++++++++++ docs/applications/roundtable.rst | 1 + docs/applications/templatebot/index.rst | 16 +++ docs/applications/templatebot/values.md | 12 ++ environments/README.md | 1 + .../applications/roundtable/templatebot.yaml | 34 ++++++ environments/values-roundtable-dev.yaml | 1 + environments/values.yaml | 3 + 21 files changed, 439 insertions(+) create mode 100644 applications/templatebot/.helmignore create mode 100644 applications/templatebot/Chart.yaml create mode 100644 applications/templatebot/README.md create mode 100644 applications/templatebot/secrets.yaml create mode 100644 applications/templatebot/templates/_helpers.tpl create mode 100644 applications/templatebot/templates/configmap.yaml create mode 100644 applications/templatebot/templates/deployment.yaml create mode 100644 applications/templatebot/templates/kafkaaccess.yaml create mode 100644 applications/templatebot/templates/networkpolicy.yaml create mode 100644 applications/templatebot/templates/service.yaml create mode 100644 applications/templatebot/templates/vaultsecret.yaml create mode 100644 applications/templatebot/values-roundtable-dev.yaml create mode 100644 applications/templatebot/values-roundtable-prod.yaml create mode 100644 applications/templatebot/values.yaml create mode 100644 
docs/applications/templatebot/index.rst create mode 100644 docs/applications/templatebot/values.md create mode 100644 environments/templates/applications/roundtable/templatebot.yaml diff --git a/applications/templatebot/.helmignore b/applications/templatebot/.helmignore new file mode 100644 index 0000000000..0e8a0eb36f --- /dev/null +++ b/applications/templatebot/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/applications/templatebot/Chart.yaml b/applications/templatebot/Chart.yaml new file mode 100644 index 0000000000..c8a3e6c9b1 --- /dev/null +++ b/applications/templatebot/Chart.yaml @@ -0,0 +1,8 @@ +apiVersion: v2 +appVersion: "tickets-DM-43699" +description: Create new projects +name: templatebot +sources: + - https://github.com/lsst-sqre/templatebot +type: application +version: 1.0.0 diff --git a/applications/templatebot/README.md b/applications/templatebot/README.md new file mode 100644 index 0000000000..c743d3c467 --- /dev/null +++ b/applications/templatebot/README.md @@ -0,0 +1,30 @@ +# templatebot + +Create new projects + +## Source Code + +* + +## Values + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| affinity | object | `{}` | Affinity rules for the templatebot deployment pod | +| config.logLevel | string | `"INFO"` | Logging level | +| config.logProfile | string | `"production"` | Logging profile (`production` for JSON, `development` for human-friendly) | +| config.pathPrefix | string | `"/templatebot"` | URL path prefix | +| config.topics.slackAppMention | string | `"lsst.square-events.squarebot.slack.app.mention"` | Kafka topic name for the Slack 
`app_mention` events | +| config.topics.slackMessageIm | string | `"lsst.square-events.squarebot.slack.message.im"` | Kafka topic name for the Slack `message.im` events (direct message channels) | +| global.baseUrl | string | Set by Argo CD | Base URL for the environment | +| global.host | string | Set by Argo CD | Host name for ingress | +| global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | +| image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the templatebot image | +| image.repository | string | `"ghcr.io/lsst-sqre/templatebot"` | Image to use in the templatebot deployment | +| image.tag | string | The appVersion of the chart | Tag of image to use | +| ingress.annotations | object | `{}` | Additional annotations for the ingress rule | +| nodeSelector | object | `{}` | Node selection rules for the templatebot deployment pod | +| podAnnotations | object | `{}` | Annotations for the templatebot deployment pod | +| replicaCount | int | `1` | Number of web deployment pods to start | +| resources | object | See `values.yaml` | Resource limits and requests for the templatebot deployment pod | +| tolerations | list | `[]` | Tolerations for the templatebot deployment pod | diff --git a/applications/templatebot/secrets.yaml b/applications/templatebot/secrets.yaml new file mode 100644 index 0000000000..7e672c9ecf --- /dev/null +++ b/applications/templatebot/secrets.yaml @@ -0,0 +1,26 @@ +TEMPLATEBOT_GITHUB_APP_ID: + description: >- + The ID of the GitHub App shared by all Squarebot services. + copy: + application: squarebot + key: SQUAREBOT_GITHUB_APP_ID +TEMPLATEBOT_GITHUB_APP_PRIVATE_KEY: + description: >- + The private key for the GitHub App shared by all Squarebot services. + onepassword: + encoded: true + copy: + application: squarebot + key: SQUAREBOT_GITHUB_APP_PRIVATE_KEY +TEMPLATEBOT_SLACK_APP_ID: + description: >- + The ID of the Slack App shared by all Squarebot services. 
+ copy: + application: squarebot + key: SQUAREBOT_SLACK_APP_ID +TEMPLATEBOT_SLACK_TOKEN: + description: >- + The Slack bot user oauth token for the Slack App shared by all Squarebot services. + copy: + application: squarebot + key: SQUAREBOT_SLACK_TOKEN diff --git a/applications/templatebot/templates/_helpers.tpl b/applications/templatebot/templates/_helpers.tpl new file mode 100644 index 0000000000..22ab8421e4 --- /dev/null +++ b/applications/templatebot/templates/_helpers.tpl @@ -0,0 +1,26 @@ +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "templatebot.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "templatebot.labels" -}} +helm.sh/chart: {{ include "templatebot.chart" . }} +{{ include "templatebot.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "templatebot.selectorLabels" -}} +app.kubernetes.io/name: "templatebot" +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} diff --git a/applications/templatebot/templates/configmap.yaml b/applications/templatebot/templates/configmap.yaml new file mode 100644 index 0000000000..81782fd7e0 --- /dev/null +++ b/applications/templatebot/templates/configmap.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: "templatebot" + labels: + {{- include "templatebot.labels" . 
| nindent 4 }} +data: + TEMPLATEBOT_LOG_LEVEL: {{ .Values.config.logLevel | quote }} + TEMPLATEBOT_ENVIRONMENT_URL: {{ .Values.global.baseUrl | quote }} + TEMPLATEBOT_PATH_PREFIX: {{ .Values.config.pathPrefix | quote }} + TEMPLATEBOT_PROFILE: {{ .Values.config.logProfile | quote }} + TEMPLATEBOT_APP_MENTION_TOPIC: {{ .Values.config.topics.slackAppMention | quote }} + TEMPLATEBOT_MESSAGE_IM_TOPIC: {{ .Values.config.topics.slackMessageIm | quote }} diff --git a/applications/templatebot/templates/deployment.yaml b/applications/templatebot/templates/deployment.yaml new file mode 100644 index 0000000000..79888b1aff --- /dev/null +++ b/applications/templatebot/templates/deployment.yaml @@ -0,0 +1,111 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: "templatebot" + labels: + {{- include "templatebot.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: + {{- include "templatebot.selectorLabels" . | nindent 6 }} + template: + metadata: + annotations: + checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} + {{- with .Values.podAnnotations }} + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "templatebot.selectorLabels" . | nindent 8 }} + spec: + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + automountServiceAccountToken: false + containers: + - name: {{ .Chart.Name }} + envFrom: + - configMapRef: + name: "templatebot" + env: + # Writeable directory for concatenating certs. See "tmp" volume. 
+ - name: "KAFKA_CERT_TEMP_DIR" + value: "/tmp/kafka_certs" + - name: "KAFKA_SECURITY_PROTOCOL" + value: "SSL" + # From KafkaAccess + - name: "KAFKA_BOOTSTRAP_SERVERS" + valueFrom: + secretKeyRef: + name: templatebot-kafka + key: "bootstrapServers" + - name: "KAFKA_CLUSTER_CA_PATH" + value: "/etc/kafkacluster/ca.crt" + - name: "KAFKA_CLIENT_CERT_PATH" + value: "/etc/kafkauser/user.crt" + - name: "KAFKA_CLIENT_KEY_PATH" + value: "/etc/kafkauser/user.key" + # From Vault secrets + - name: "TEMPLATEBOT_SLACK_APP_ID" + valueFrom: + secretKeyRef: + name: "templatebot" + key: "TEMPLATEBOT_SLACK_APP_ID" + - name: "TEMPLATEBOT_SLACK_TOKEN" + valueFrom: + secretKeyRef: + name: "templatebot" + key: "TEMPLATEBOT_SLACK_TOKEN" + volumeMounts: + - name: "kafka" + mountPath: "/etc/kafkacluster/ca.crt" + subPath: "ssl.truststore.crt" # CA cert from the Kafka cluster + - name: "kafka" + mountPath: "/etc/kafkauser/user.crt" + subPath: "ssl.keystore.crt" # User cert from the Kafka cluster signed by the clients' CA + - name: "kafka" + mountPath: "/etc/kafkauser/user.key" + subPath: "ssl.keystore.key" # private key for the consuming client + - name: "tmp" + mountPath: "/tmp/kafka_certs" + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + ports: + - name: "http" + containerPort: 8080 + protocol: "TCP" + readinessProbe: + httpGet: + path: "/" + port: "http" + resources: + {{- toYaml .Values.resources | nindent 12 }} + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - "all" + readOnlyRootFilesystem: true + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . 
| nindent 8 }} + {{- end }} + securityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 1000 + volumes: + - name: "kafka" + secret: + secretName: templatebot-kafka + - name: "templatebot" + secret: + secretName: "templatebot" + - name: "tmp" + emptyDir: {} diff --git a/applications/templatebot/templates/kafkaaccess.yaml b/applications/templatebot/templates/kafkaaccess.yaml new file mode 100644 index 0000000000..8ca9095ac8 --- /dev/null +++ b/applications/templatebot/templates/kafkaaccess.yaml @@ -0,0 +1,14 @@ +apiVersion: access.strimzi.io/v1alpha1 +kind: KafkaAccess +metadata: + name: templatebot-kafka +spec: + kafka: + name: sasquatch + namespace: sasquatch + listener: tls + user: + kind: KafkaUser + apiGroup: kafka.strimzi.io + name: templatebot + namespace: sasquatch diff --git a/applications/templatebot/templates/networkpolicy.yaml b/applications/templatebot/templates/networkpolicy.yaml new file mode 100644 index 0000000000..ca1c1e87a1 --- /dev/null +++ b/applications/templatebot/templates/networkpolicy.yaml @@ -0,0 +1,21 @@ +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: "templatebot" +spec: + podSelector: + matchLabels: + {{- include "templatebot.selectorLabels" . | nindent 6 }} + policyTypes: + - "Ingress" + ingress: + # Allow inbound access from pods (in any namespace) labeled + # gafaelfawr.lsst.io/ingress: true. + - from: + - namespaceSelector: {} + podSelector: + matchLabels: + gafaelfawr.lsst.io/ingress: "true" + ports: + - protocol: "TCP" + port: 8080 diff --git a/applications/templatebot/templates/service.yaml b/applications/templatebot/templates/service.yaml new file mode 100644 index 0000000000..2ad67bccf8 --- /dev/null +++ b/applications/templatebot/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: "templatebot" + labels: + {{- include "templatebot.labels" . 
| nindent 4 }} +spec: + type: "ClusterIP" + ports: + - port: 8080 + targetPort: "http" + protocol: "TCP" + name: "http" + selector: + {{- include "templatebot.selectorLabels" . | nindent 4 }} diff --git a/applications/templatebot/templates/vaultsecret.yaml b/applications/templatebot/templates/vaultsecret.yaml new file mode 100644 index 0000000000..defc7709fe --- /dev/null +++ b/applications/templatebot/templates/vaultsecret.yaml @@ -0,0 +1,9 @@ +apiVersion: ricoberger.de/v1alpha1 +kind: VaultSecret +metadata: + name: templatebot + labels: + {{- include "templatebot.labels" . | nindent 4 }} +spec: + path: "{{ .Values.global.vaultSecretsPath }}/templatebot" + type: Opaque diff --git a/applications/templatebot/values-roundtable-dev.yaml b/applications/templatebot/values-roundtable-dev.yaml new file mode 100644 index 0000000000..91a3f6a1c6 --- /dev/null +++ b/applications/templatebot/values-roundtable-dev.yaml @@ -0,0 +1,5 @@ +image: + pullPolicy: Always + +config: + logLevel: "DEBUG" diff --git a/applications/templatebot/values-roundtable-prod.yaml b/applications/templatebot/values-roundtable-prod.yaml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/applications/templatebot/values.yaml b/applications/templatebot/values.yaml new file mode 100644 index 0000000000..cf65f9bab7 --- /dev/null +++ b/applications/templatebot/values.yaml @@ -0,0 +1,70 @@ +# Default values for templatebot. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. 
+ +# -- Number of web deployment pods to start +replicaCount: 1 + +image: + # -- Image to use in the templatebot deployment + repository: "ghcr.io/lsst-sqre/templatebot" + + # -- Pull policy for the templatebot image + pullPolicy: "IfNotPresent" + + # -- Tag of image to use + # @default -- The appVersion of the chart + tag: null + +config: + # -- Logging level + logLevel: "INFO" + + # -- Logging profile (`production` for JSON, `development` for + # human-friendly) + logProfile: "production" + + # -- URL path prefix + pathPrefix: "/templatebot" + + topics: + # -- Kafka topic name for the Slack `app_mention` events + slackAppMention: "lsst.square-events.squarebot.slack.app.mention" + + # -- Kafka topic name for the Slack `message.im` events (direct message channels) + slackMessageIm: "lsst.square-events.squarebot.slack.message.im" + +ingress: + # -- Additional annotations for the ingress rule + annotations: {} + +# -- Affinity rules for the templatebot deployment pod +affinity: {} + +# -- Node selection rules for the templatebot deployment pod +nodeSelector: {} + +# -- Annotations for the templatebot deployment pod +podAnnotations: {} + +# -- Resource limits and requests for the templatebot deployment pod +# @default -- See `values.yaml` +resources: {} + +# -- Tolerations for the templatebot deployment pod +tolerations: [] + +# The following will be set by parameters injected by Argo CD and should not +# be set in the individual environment values files. 
+global: + # -- Base URL for the environment + # @default -- Set by Argo CD + baseUrl: null + + # -- Host name for ingress + # @default -- Set by Argo CD + host: null + + # -- Base path for Vault secrets + # @default -- Set by Argo CD + vaultSecretsPath: null diff --git a/docs/applications/roundtable.rst b/docs/applications/roundtable.rst index df4f559bbd..8d3ecce818 100644 --- a/docs/applications/roundtable.rst +++ b/docs/applications/roundtable.rst @@ -19,6 +19,7 @@ Argo CD project: ``roundtable`` ook/index sqrbot-sr/index squarebot/index + templatebot/index unfurlbot/index vault/index diff --git a/docs/applications/templatebot/index.rst b/docs/applications/templatebot/index.rst new file mode 100644 index 0000000000..9b2f2ce3a4 --- /dev/null +++ b/docs/applications/templatebot/index.rst @@ -0,0 +1,16 @@ +.. px-app:: templatebot + +################################# +templatebot — Create new projects +################################# + +.. jinja:: templatebot + :file: applications/_summary.rst.jinja + +Guides +====== + +.. toctree:: + :maxdepth: 1 + + values \ No newline at end of file diff --git a/docs/applications/templatebot/values.md b/docs/applications/templatebot/values.md new file mode 100644 index 0000000000..ad83245bf4 --- /dev/null +++ b/docs/applications/templatebot/values.md @@ -0,0 +1,12 @@ +```{px-app-values} templatebot +``` + +# templatebot Helm values reference + +Helm values reference table for the {px-app}`templatebot` application. 
+ +```{include} ../../../applications/templatebot/README.md +--- +start-after: "## Values" +--- +``` \ No newline at end of file diff --git a/environments/README.md b/environments/README.md index 100d733ea5..56c5604c22 100644 --- a/environments/README.md +++ b/environments/README.md @@ -67,6 +67,7 @@ | applications.tap | bool | `false` | Enable the tap application | | applications.telegraf | bool | `false` | Enable the telegraf application | | applications.telegraf-ds | bool | `false` | Enable the telegraf-ds application | +| applications.templatebot | bool | `false` | Enable the templatebot application | | applications.times-square | bool | `false` | Enable the times-square application | | applications.unfurlbot | bool | `false` | Enable the unfurlbot application | | applications.uws | bool | `false` | Enable the uws application. This includes the dmocps control system application. | diff --git a/environments/templates/applications/roundtable/templatebot.yaml b/environments/templates/applications/roundtable/templatebot.yaml new file mode 100644 index 0000000000..f0f34810ce --- /dev/null +++ b/environments/templates/applications/roundtable/templatebot.yaml @@ -0,0 +1,34 @@ +{{- if (index .Values "applications" "templatebot") -}} +apiVersion: v1 +kind: Namespace +metadata: + name: "templatebot" +--- +apiVersion: argoproj.io/v1alpha1 +kind: Application +metadata: + name: "templatebot" + namespace: "argocd" + finalizers: + - "resources-finalizer.argocd.argoproj.io" +spec: + destination: + namespace: "templatebot" + server: "https://kubernetes.default.svc" + project: "roundtable" + source: + path: "applications/templatebot" + repoURL: {{ .Values.repoUrl | quote }} + targetRevision: {{ .Values.targetRevision | quote }} + helm: + parameters: + - name: "global.host" + value: {{ .Values.fqdn | quote }} + - name: "global.baseUrl" + value: "https://{{ .Values.fqdn }}" + - name: "global.vaultSecretsPath" + value: {{ .Values.vaultPathPrefix | quote }} + valueFiles: + - 
"values.yaml" + - "values-{{ .Values.name }}.yaml" +{{- end -}} \ No newline at end of file diff --git a/environments/values-roundtable-dev.yaml b/environments/values-roundtable-dev.yaml index 1ff4738824..a11686b579 100644 --- a/environments/values-roundtable-dev.yaml +++ b/environments/values-roundtable-dev.yaml @@ -27,5 +27,6 @@ applications: strimzi-access-operator: true telegraf: true telegraf-ds: true + templatebot: true unfurlbot: true vault: true diff --git a/environments/values.yaml b/environments/values.yaml index e613ee06f5..fa0b156f5d 100644 --- a/environments/values.yaml +++ b/environments/values.yaml @@ -225,6 +225,9 @@ applications: # -- Enable the telegraf-ds application telegraf-ds: false + # -- Enable the templatebot application + templatebot: false + # -- Enable the times-square application times-square: false From df29eab47f50d0138a8282618dd2590ba4de0652 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Wed, 18 Sep 2024 16:10:55 -0700 Subject: [PATCH 096/567] Stop using fullname macro in docs The new starters no longer define a fullname macro since it's not needed for Phalanx Helm charts. Remove the last reference to it in the documentation. --- docs/developers/helm-chart/define-secrets.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/developers/helm-chart/define-secrets.rst b/docs/developers/helm-chart/define-secrets.rst index ac904f1f80..ad68a7f831 100644 --- a/docs/developers/helm-chart/define-secrets.rst +++ b/docs/developers/helm-chart/define-secrets.rst @@ -136,7 +136,7 @@ A typical ``VaultSecret`` Helm template for an application looks like this (repl apiVersion: ricoberger.de/v1alpha1 kind: VaultSecret metadata: - name: {{ include "myapp.fullname" . }} + name: "myapp" labels: {{- include "myapp.labels" . 
| nindent 4 }} spec: From d26aff31dc2fba33900e5859c1730a9434b823e8 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Wed, 18 Sep 2024 17:09:08 -0700 Subject: [PATCH 097/567] Add a COmanage link to environment pages If Gafaelfawr is configured with CILogon and there's an enrollment URL, assume that it points to COmanage and extract its hostname. Add a COmanage link to the environment page for this environment. --- docs/environments/_summary.rst.jinja | 2 +- src/phalanx/models/environments.py | 5 +++++ src/phalanx/storage/config.py | 6 ++++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/environments/_summary.rst.jinja b/docs/environments/_summary.rst.jinja index 72e40a18e8..d83a2d6051 100644 --- a/docs/environments/_summary.rst.jinja +++ b/docs/environments/_summary.rst.jinja @@ -5,7 +5,7 @@ * - Root domain - `{{ env.fqdn }} `__ * - Identity provider - - {{ env.gafaelfawr.provider.value }}{% if env.gafaelfawr.provider_hostname %} ({{ env.gafaelfawr.provider_hostname }}){% endif %} + - {{ env.gafaelfawr.provider.value }}{% if env.gafaelfawr.provider_hostname %} ({{ env.gafaelfawr.provider_hostname }}){% endif %}{% if env.gafaelfawr.comanage_hostname %} (COmanage: `{{ env.gafaelfawr.comanage_hostname }} `__){% endif %} {%- if env.argocd.url %} * - Argo CD - {{ env.argocd.url }} diff --git a/src/phalanx/models/environments.py b/src/phalanx/models/environments.py index ab1df52d87..bc15ff58c3 100644 --- a/src/phalanx/models/environments.py +++ b/src/phalanx/models/environments.py @@ -23,12 +23,14 @@ from .secrets import Secret __all__ = [ + "ArgoCDDetails", "ControlSystemConfig", "Environment", "EnvironmentBaseConfig", "EnvironmentConfig", "EnvironmentDetails", "GCPMetadata", + "GafaelfawrDetails", "GafaelfawrGitHubGroup", "GafaelfawrGitHubTeam", "GafaelfawrScope", @@ -467,6 +469,9 @@ class GafaelfawrDetails(BaseModel): provider_hostname: str | None = None """Hostname of upstream identity provider, if meaningful.""" + comanage_hostname: str | None = 
None + """Hostname of COmanage instance, if COmanage is in use.""" + scopes: list[GafaelfawrScope] = [] """Gafaelfawr scopes and their associated groups.""" diff --git a/src/phalanx/storage/config.py b/src/phalanx/storage/config.py index 99b4a62966..b66e01e49d 100644 --- a/src/phalanx/storage/config.py +++ b/src/phalanx/storage/config.py @@ -786,9 +786,14 @@ def _build_gafaelfawr_details( # Determine the upstream identity provider. provider_hostname = None + comanage_hostname = None if gafaelfawr: if gafaelfawr.values["config"]["cilogon"]["clientId"]: provider = IdentityProvider.CILOGON + cilogon_config = gafaelfawr.values["config"]["cilogon"] + if cilogon_config["enrollmentUrl"]: + url = cilogon_config["enrollmentUrl"] + comanage_hostname = urlparse(url).hostname elif gafaelfawr.values["config"]["github"]["clientId"]: provider = IdentityProvider.GITHUB elif gafaelfawr.values["config"]["oidc"]["clientId"]: @@ -828,6 +833,7 @@ def _build_gafaelfawr_details( return GafaelfawrDetails( provider=provider, provider_hostname=provider_hostname, + comanage_hostname=comanage_hostname, scopes=sorted(gafaelfawr_scopes, key=lambda s: s.scope), ) From bd394e746c95db9b2d8352b87db34f0f6e0422e8 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Wed, 18 Sep 2024 17:23:59 -0700 Subject: [PATCH 098/567] Document restoring user with same UID/GID Document that UID/GID assignment is only based on username, and a user can be deleted from COmanage and then recreated with the same username and they will retain the same UID and GID and thus the same file access. --- docs/admin/troubleshooting.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/docs/admin/troubleshooting.rst b/docs/admin/troubleshooting.rst index 5383f06074..cb50958d6d 100644 --- a/docs/admin/troubleshooting.rst +++ b/docs/admin/troubleshooting.rst @@ -81,3 +81,17 @@ Even when you want to be prompted. **Solution:** Have the user go to `https://cilogin.org/me `__ and choose "Delete ALL". 
This will clear their remembered selection. They can they retry whatever operation they were attempting. + +User deleted from COmanage and needs to be restored +=================================================== + +**Symptoms**: In a Phalanx environment that uses CILogon and COmanage, a user was deleted from COmanage, possibly because their identity record or authentication configuration was irrevocably broken. +The user needs to be reinstated with their previously existing files. + +**Solution**: The user should create their account again and choose the same username that they used previously. +This will assign them the same UID and GID that they had previously. +Currently, we don't delete files for deleted users, so all of their files should still be intact. + +UID and GID for users is tracked in Google Filestore and is assigned solely based on the user's username. +Any user in the environment with the same username will get the same UID and GID, and UIDs and GIDs are never reused. +Therefore, the same UID and GID can be retained by keeping the same username. 
From d88d93a2ae7d1587195197e29fdc3ebdd7b937be Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 19 Sep 2024 11:48:56 -0500 Subject: [PATCH 099/567] Update Kafka version to 3.8.0 --- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/strimzi-kafka/README.md | 2 +- applications/sasquatch/charts/strimzi-kafka/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 459c2f9459..daaf651fd7 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -365,7 +365,7 @@ Rubin Observatory's telemetry service | strimzi-kafka.kafka.storage.size | string | `"500Gi"` | Size of the backing storage disk for each of the Kafka brokers | | strimzi-kafka.kafka.storage.storageClassName | string | `""` | Name of a StorageClass to use when requesting persistent volumes | | strimzi-kafka.kafka.tolerations | list | `[]` | Tolerations for Kafka broker pod assignment | -| strimzi-kafka.kafka.version | string | `"3.7.1"` | Version of Kafka to deploy | +| strimzi-kafka.kafka.version | string | `"3.8.0"` | Version of Kafka to deploy | | strimzi-kafka.kafkaController.enabled | bool | `false` | Enable Kafka Controller | | strimzi-kafka.kafkaController.resources | object | See `values.yaml` | Kubernetes requests and limits for the Kafka Controller | | strimzi-kafka.kafkaController.storage.size | string | `"20Gi"` | Size of the backing storage disk for each of the Kafka controllers | diff --git a/applications/sasquatch/charts/strimzi-kafka/README.md b/applications/sasquatch/charts/strimzi-kafka/README.md index 4e844c02a3..fd425d5279 100644 --- a/applications/sasquatch/charts/strimzi-kafka/README.md +++ b/applications/sasquatch/charts/strimzi-kafka/README.md @@ -41,7 +41,7 @@ A subchart to deploy Strimzi Kafka components for Sasquatch. 
| kafka.storage.size | string | `"500Gi"` | Size of the backing storage disk for each of the Kafka brokers | | kafka.storage.storageClassName | string | `""` | Name of a StorageClass to use when requesting persistent volumes | | kafka.tolerations | list | `[]` | Tolerations for Kafka broker pod assignment | -| kafka.version | string | `"3.7.1"` | Version of Kafka to deploy | +| kafka.version | string | `"3.8.0"` | Version of Kafka to deploy | | kafkaController.enabled | bool | `false` | Enable Kafka Controller | | kafkaController.resources | object | See `values.yaml` | Kubernetes requests and limits for the Kafka Controller | | kafkaController.storage.size | string | `"20Gi"` | Size of the backing storage disk for each of the Kafka controllers | diff --git a/applications/sasquatch/charts/strimzi-kafka/values.yaml b/applications/sasquatch/charts/strimzi-kafka/values.yaml index f43fd60e4c..fa0deaa57b 100644 --- a/applications/sasquatch/charts/strimzi-kafka/values.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/values.yaml @@ -11,7 +11,7 @@ cluster: kafka: # -- Version of Kafka to deploy - version: "3.7.1" + version: "3.8.0" # -- Number of Kafka broker replicas to run replicas: 3 From 172f4b62bf52f1ce0e01c43513e4a7684db5edd5 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Mon, 9 Sep 2024 12:06:56 -0700 Subject: [PATCH 100/567] Make sure all connector offsets are set o oldest by default --- applications/sasquatch/values-usdfprod.yaml | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 4a1503f939..18ebd80615 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -146,7 +146,6 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTAOS", "lsst.sal.MTDome", "lsst.sal.MTDomeTrajectory", "lsst.sal.MTPtg" ] - offset: "newest" mtmount: enabled: true database: "efd" 
@@ -154,21 +153,18 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTMount" ] - offset: "newest" comcam: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.CCCamera", "lsst.sal.CCHeaderService", "lsst.sal.CCOODS" ] - offset: "newest" eas: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] - offset: "newest" m1m3: enabled: true database: "efd" @@ -176,70 +172,60 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTM1M3" ] - offset: "newest" m2: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTHexapod", "lsst.sal.MTM2", "lsst.sal.MTRotator" ] - offset: "newest" obssys: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.Scheduler", "lsst.sal.Script", "lsst.sal.ScriptQueue", "lsst.sal.Watcher" ] - offset: "newest" ocps: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.OCPS" ] - offset: "newest" pmd: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.PMD" ] - offset: "newest" calsys: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.ATMonochromator", "lsst.sal.ATWhiteLight", "lsst.sal.CBP", "lsst.sal.Electrometer", "lsst.sal.FiberSpectrograph", "lsst.sal.LinearStage", "lsst.sal.TunableLaser" ] - offset: "newest" mtaircompressor: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTAirCompressor" ] - offset: "newest" genericcamera: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.GCHeaderService", "lsst.sal.GenericCamera" ] - offset: "newest" gis: enabled: true database: "efd" timestamp_field: 
"private_efdStamp" topicRegexps: | [ "lsst.sal.GIS" ] - offset: "newest" lsstcam: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTCamera", "lsst.sal.MTHeaderService", "lsst.sal.MTOODS" ] - offset: "newest" auxtel: enabled: true database: "efd" From b6fefe55644231ee9376fef978ed6b49d57e2599 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Mon, 9 Sep 2024 13:06:43 -0700 Subject: [PATCH 101/567] Split auxtel connector There's a race condition that crashes the connector one way to alleviate this problem is splitting the topics among multiple connectors. --- applications/sasquatch/values-usdfprod.yaml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 18ebd80615..1c35d0f145 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -231,7 +231,15 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] + [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory" ] + debug: true + auxtel2: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] + debug: true latiss: enabled: true database: "efd" From 3a6e3ec13b224395c072016d31d0dcc71ded9f21 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Sat, 14 Sep 2024 13:41:02 -0700 Subject: [PATCH 102/567] Split eas connector There's a race condition that crashes the connector one way to alleviate this problem is splitting the topics among multiple connectors. 
--- applications/sasquatch/values-usdfprod.yaml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 1c35d0f145..38204d5e30 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -164,7 +164,13 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + [ "lsst.sal.ESS" ] + eas2: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] m1m3: enabled: true database: "efd" From b232082a6412705d8342eccec765c0f1a9d3d0b2 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 19 Sep 2024 11:45:44 -0500 Subject: [PATCH 103/567] Enable debug for all connectors --- applications/sasquatch/values-usdfprod.yaml | 22 +++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 38204d5e30..8fbe3b0b4a 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -139,6 +139,7 @@ telegraf-kafka-consumer: timestamp_field: "timestamp" topicRegexps: | [ "lsst.backpack" ] + debug: true # CSC connectors maintel: enabled: true @@ -146,6 +147,7 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTAOS", "lsst.sal.MTDome", "lsst.sal.MTDomeTrajectory", "lsst.sal.MTPtg" ] + debug: true mtmount: enabled: true database: "efd" @@ -153,24 +155,28 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTMount" ] + debug: true comcam: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ 
"lsst.sal.CCCamera", "lsst.sal.CCHeaderService", "lsst.sal.CCOODS" ] + debug: true eas: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.ESS" ] + debug: true eas2: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + debug: true m1m3: enabled: true database: "efd" @@ -178,60 +184,70 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTM1M3" ] + debug: true m2: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTHexapod", "lsst.sal.MTM2", "lsst.sal.MTRotator" ] + debug: true obssys: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.Scheduler", "lsst.sal.Script", "lsst.sal.ScriptQueue", "lsst.sal.Watcher" ] + debug: true ocps: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.OCPS" ] + debug: true pmd: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.PMD" ] + debug: true calsys: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.ATMonochromator", "lsst.sal.ATWhiteLight", "lsst.sal.CBP", "lsst.sal.Electrometer", "lsst.sal.FiberSpectrograph", "lsst.sal.LinearStage", "lsst.sal.TunableLaser" ] + debug: true mtaircompressor: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTAirCompressor" ] + debug: true genericcamera: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.GCHeaderService", "lsst.sal.GenericCamera" ] + debug: true gis: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.GIS" ] + debug: true lsstcam: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTCamera", 
"lsst.sal.MTHeaderService", "lsst.sal.MTOODS" ] + debug: true auxtel: enabled: true database: "efd" @@ -252,18 +268,21 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.ATCamera", "lsst.sal.ATHeaderService", "lsst.sal.ATOODS", "lsst.sal.ATSpectrograph" ] + debug: true test: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.Test" ] + debug: true lasertracker: enabled: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.LaserTracker" ] + debug: true # CCS connectors (experimental) data is being written on separate databases for now atcamera: enabled: true @@ -274,6 +293,7 @@ telegraf-kafka-consumer: [ "Agent", "Aspic", "Location", "Raft", "Reb", "Sensor", "Source" ] topicRegexps: | [ "lsst.ATCamera" ] + debug: true cccamera: enabled: true database: "lsst.CCCamera" @@ -283,6 +303,7 @@ telegraf-kafka-consumer: [ "Agent", "Aspic", "Cold", "Cryo", "Hardware", "Location", "Ps", "RTD", "Raft", "Reb", "Segment", "Sensor", "Source" ] topicRegexps: | [ "lsst.CCCamera" ] + debug: true mtcamera: enabled: true database: "lsst.MTCamera" @@ -292,6 +313,7 @@ telegraf-kafka-consumer: [ "Agent", "Aspic", "Axis", "Canbus", "Cip", "Clamp", "Cold", "Controller", "Cryo", "Gateway", "Hardware", "Hip", "Hook", "Latch", "Location", "Ps", "RTD", "Raft", "Reb", "Segment", "Sensor", "Socket", "Source", "Truck" ] topicRegexps: | [ "lsst.MTCamera" ] + debug: true kafdrop: ingress: From 991b4ba16422e4c656c903312c536054ca8c3e4f Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 19 Sep 2024 12:38:47 -0500 Subject: [PATCH 104/567] Run mtmount and m1m3 with one replica --- applications/sasquatch/values-usdfprod.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 8fbe3b0b4a..4dfe10f35f 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ 
b/applications/sasquatch/values-usdfprod.yaml @@ -151,7 +151,6 @@ telegraf-kafka-consumer: mtmount: enabled: true database: "efd" - replicaCount: 8 timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTMount" ] @@ -180,7 +179,6 @@ telegraf-kafka-consumer: m1m3: enabled: true database: "efd" - replicaCount: 8 timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTM1M3" ] From 509e3591dbf270b674e2be900423320ed866b8c0 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 19 Sep 2024 13:36:16 -0500 Subject: [PATCH 105/567] Increase readiness probe initial delay - InfluxDB Enterprise needs more time to read the shards from disk when restarting. The readiness probe was killing the data pods too early preventing it to restart. --- .../charts/influxdb-enterprise/templates/data-statefulset.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/sasquatch/charts/influxdb-enterprise/templates/data-statefulset.yaml b/applications/sasquatch/charts/influxdb-enterprise/templates/data-statefulset.yaml index fa28e08cf4..1cc01f575a 100644 --- a/applications/sasquatch/charts/influxdb-enterprise/templates/data-statefulset.yaml +++ b/applications/sasquatch/charts/influxdb-enterprise/templates/data-statefulset.yaml @@ -90,7 +90,7 @@ spec: path: /ping port: http readinessProbe: - initialDelaySeconds: 30 + initialDelaySeconds: 60 httpGet: path: /ping port: http From 99d61ef6e49df379a9f19c6d9ff86c2a39c8b48b Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Thu, 19 Sep 2024 17:35:35 -0700 Subject: [PATCH 106/567] Fix naming of OpenID Connect client secrets The instructions for setting up a new OpenID Connect client secret didn't match the expected contents of that secret. Fix them to match. 
--- docs/applications/gafaelfawr/add-oidc-client.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/applications/gafaelfawr/add-oidc-client.rst b/docs/applications/gafaelfawr/add-oidc-client.rst index 013319b66c..e981115f8d 100644 --- a/docs/applications/gafaelfawr/add-oidc-client.rst +++ b/docs/applications/gafaelfawr/add-oidc-client.rst @@ -43,7 +43,7 @@ The goal is to eventually add automation to Phalanx to generate the latter from Change the contents to :samp:`{random-id}.clients.{fqdn}` where the random ID is the results of ``os.urandom(16).hex()`` in Python and the FQDN is the FQDN of the environment. For example, ``de5dd2c1fbf648e11d50b6cf3aa72277.clients.data.lsst.cloud``. -#. Add a password field to the new section, leaving the label as ``password``. +#. Add a password field to the new section, changing the label as ``secret``. You can let 1Password generate a random 20-character password if you want, or generate one of equivalent entropy however you choose. #. Add a final text field to the new section. @@ -53,13 +53,13 @@ The goal is to eventually add automation to Phalanx to generate the latter from #. Now, you will need to copy this data into the ``gafaelfawr`` secret under the ``oidc-server-secrets`` key, creating that key if it doesn't already exist. Unfortunately, you currently have to construct the JSON by hand. - The value of this key should be a JSON-encoded list of objects, and each object should have keys ``id``, ``password``, and ``return_uri`` with the information above. + The value of this key should be a JSON-encoded list of objects, and each object should have keys ``id``, ``secret``, and ``return_uri`` with the information above. Be sure to include all the clients, not just the new one that you're adding. 
Share the secret with the client ================================ -You now need to convey the ``client_id`` (the ``id`` value above) and the ``client_secret`` (the ``password`` value above) to the OpenID Connect client. +You now need to convey the ``client_id`` (the ``id`` value above) and the ``client_secret`` (the ``secret`` value above) to the OpenID Connect client. They will need to configure their client software to use that ``client_id`` and ``client_secret`` whenever performing an OpenID Connect authentication. The easiest way to do this is often to create a separate 1Password secret and share it with the client. From a33ffaf4f109c2cde9fabd68f2d0c95138c04da4 Mon Sep 17 00:00:00 2001 From: Amanda Ibsen Date: Fri, 20 Sep 2024 11:01:46 +0000 Subject: [PATCH 107/567] set updateSchema true --- applications/gafaelfawr/values-roe.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/gafaelfawr/values-roe.yaml b/applications/gafaelfawr/values-roe.yaml index f53b9e0ead..2a6410845f 100644 --- a/applications/gafaelfawr/values-roe.yaml +++ b/applications/gafaelfawr/values-roe.yaml @@ -4,6 +4,7 @@ redis: config: internalDatabase: true + updateSchema: true github: clientId: "10172b4db1b67ee31620" From a85e8e26b83628d2fd6d51981fcef4e55c33bf49 Mon Sep 17 00:00:00 2001 From: Amanda Ibsen Date: Fri, 20 Sep 2024 12:00:06 +0000 Subject: [PATCH 108/567] set updateSchema false --- applications/gafaelfawr/values-roe.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/applications/gafaelfawr/values-roe.yaml b/applications/gafaelfawr/values-roe.yaml index 6251d202f7..78f34f3cd5 100644 --- a/applications/gafaelfawr/values-roe.yaml +++ b/applications/gafaelfawr/values-roe.yaml @@ -4,7 +4,7 @@ redis: config: internalDatabase: true - updateSchema: true + updateSchema: false github: clientId: "10172b4db1b67ee31620" @@ -37,4 +37,5 @@ config: team: "dev" initialAdmins: - - "stvoutsin" + - "gpfrancis" + - "aibsen" From 
604ed1eaa2727b2517e25742e7f09b83c48f08b7 Mon Sep 17 00:00:00 2001 From: A I Date: Fri, 20 Sep 2024 13:46:54 +0100 Subject: [PATCH 109/567] changed tap schema version to 2.3.0 The latest version is 3.0.2 but we need to update the schemas. Also atm the pluggin that manages the s3 buckets is not officially available for ssotap, we are using an image in Stelios' personal repo, but this should change soon --- charts/cadc-tap/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index 317a89e879..4aaaf85900 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -162,7 +162,7 @@ tapSchema: pullPolicy: "IfNotPresent" # -- Tag of TAP schema image - tag: "3.0.2" + tag: "2.3.0"" # -- Resource limits and requests for the TAP schema database pod # @default -- See `values.yaml` From 9bdd033691f7bcc6caa3e6fb44874713513912d3 Mon Sep 17 00:00:00 2001 From: A I Date: Fri, 20 Sep 2024 14:08:39 +0100 Subject: [PATCH 110/567] changed back to 3.0.2 to see if there's something else failing --- charts/cadc-tap/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index 4aaaf85900..6bc67ed03d 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -162,7 +162,7 @@ tapSchema: pullPolicy: "IfNotPresent" # -- Tag of TAP schema image - tag: "2.3.0"" + tag: "3.0.2"" # -- Resource limits and requests for the TAP schema database pod # @default -- See `values.yaml` From c247c5a6a44ca682309594ad73eedd61148ab51c Mon Sep 17 00:00:00 2001 From: A I Date: Fri, 20 Sep 2024 14:34:14 +0100 Subject: [PATCH 111/567] added line to enable ssotap in the portal under applications/portal/values-roe.yaml --- applications/portal/values-roe.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/portal/values-roe.yaml b/applications/portal/values-roe.yaml index 787f7a9638..f030ba5b8d 100644 --- 
a/applications/portal/values-roe.yaml +++ b/applications/portal/values-roe.yaml @@ -6,3 +6,4 @@ resources: config: hipsUrl: "http://alasky.cds.unistra.fr/DSS/DSSColor" + ssotap: "ssotap" From 1b557a65d87baa0cdd1a21cceec2b0e3b8d6035a Mon Sep 17 00:00:00 2001 From: A I Date: Fri, 20 Sep 2024 14:57:56 +0100 Subject: [PATCH 112/567] Update values-roe.yaml --- applications/tap/values-roe.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/applications/tap/values-roe.yaml b/applications/tap/values-roe.yaml index 34abee38bf..40432ed6cc 100644 --- a/applications/tap/values-roe.yaml +++ b/applications/tap/values-roe.yaml @@ -7,6 +7,7 @@ cadc-tap: gcsBucket: "async" gcsBucketUrl: "https://somerville.ed.ac.uk:6780" gcsBucketType: "S3" - + jvmMaxHeapSize: "31G" + qserv: host: "192.41.122.85:30040" From b51c296d9b647c44153c0f8d03af4992cc564f49 Mon Sep 17 00:00:00 2001 From: A I Date: Fri, 20 Sep 2024 14:59:28 +0100 Subject: [PATCH 113/567] Update values.yaml --- charts/cadc-tap/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index 6bc67ed03d..be56a7e25d 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -162,7 +162,7 @@ tapSchema: pullPolicy: "IfNotPresent" # -- Tag of TAP schema image - tag: "3.0.2"" + tag: "2.3.0" # -- Resource limits and requests for the TAP schema database pod # @default -- See `values.yaml` From c97bf75e33013973f8e567abbf3d890bdfddabc4 Mon Sep 17 00:00:00 2001 From: A I Date: Fri, 20 Sep 2024 15:02:38 +0100 Subject: [PATCH 114/567] Update values.yaml --- charts/cadc-tap/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index be56a7e25d..317a89e879 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -162,7 +162,7 @@ tapSchema: pullPolicy: "IfNotPresent" # -- Tag of TAP schema image - tag: "2.3.0" + tag: "3.0.2" # 
-- Resource limits and requests for the TAP schema database pod # @default -- See `values.yaml` From 8ed7d469f978f6994577643e60e3789a14a2ba97 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 20 Sep 2024 10:35:23 -0700 Subject: [PATCH 115/567] Run m1m3 and mtmount connectors with one replica - We optimized the connector configuration for throughput and we should run a single instance of Telegraf per connector. --- applications/sasquatch/values-base.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index f818146427..0f9e2e631c 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -147,7 +147,6 @@ telegraf-kafka-consumer: mtmount: enabled: true database: "efd" - replicaCount: 8 topicRegexps: | [ "lsst.sal.MTMount" ] eas: @@ -163,7 +162,6 @@ telegraf-kafka-consumer: m1m3: enabled: true database: "efd" - replicaCount: 8 topicRegexps: | [ "lsst.sal.MTM1M3" ] m2: From 207add1742b1cf5da929a8a861bad177253e62d4 Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Wed, 11 Sep 2024 15:51:06 -0700 Subject: [PATCH 116/567] Make Butler client/server default on IDF int/dev dp02 To facilitate testing by CST and start getting stability testing from Mobu, switch the default dp02 alias on IDF dev and int to point to client/server Butler instead of DirectButler. 
--- applications/butler/README.md | 1 + applications/butler/templates/configmap.yaml | 18 ++++++++++++------ applications/butler/values-idfdev.yaml | 1 + applications/butler/values-idfint.yaml | 1 + applications/butler/values.yaml | 4 ++++ 5 files changed, 19 insertions(+), 6 deletions(-) diff --git a/applications/butler/README.md b/applications/butler/README.md index 73d7c17812..a3d2d49811 100644 --- a/applications/butler/README.md +++ b/applications/butler/README.md @@ -16,6 +16,7 @@ Server for Butler data abstraction service | autoscaling.minReplicas | int | `1` | Minimum number of butler deployment pods | | autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization of butler deployment pods | | config.additionalS3ProfileName | string | No second S3 profile is available. | Profile name identifying a second S3 endpoint and set of credentials to use for accessing files in the datastore. | +| config.dp02ClientServerIsDefault | bool | `false` | True if the 'dp02' Butler repository alias should use client/server Butler. False if it should use DirectButler. | | config.dp02PostgresUri | string | No configuration file for DP02 will be generated. | Postgres connection string pointing to the registry database hosting Data Preview 0.2 data. | | config.pathPrefix | string | `"/api/butler"` | The prefix of the path portion of the URL where the Butler service will be exposed. For example, if the service should be exposed at `https://data.lsst.cloud/api/butler`, this should be set to `/api/butler` | | config.pguser | string | Use values specified in per-repository Butler config files. | Postgres username used to connect to the Butler DB | diff --git a/applications/butler/templates/configmap.yaml b/applications/butler/templates/configmap.yaml index 3a815fc6e6..8529fa2ba0 100644 --- a/applications/butler/templates/configmap.yaml +++ b/applications/butler/templates/configmap.yaml @@ -46,11 +46,17 @@ data: # connecting to the Butler server. 
# # We provide both DirectButler and RemoteButler versions of dp02 because some - # users rely on functionality not yet available via RemoteButler. The default is currently - # DirectButler because the Community Science team has not had the opportunity to test RemoteButler, - # and RemoteButler is not available in the current "recommended" RSP image. + # users rely on functionality not yet available via RemoteButler. The default in production is + # DirectButler because RemoteButler is not available in the current recommended RSP image. + # On dev and int it is RemoteButler -- the Community Science team is testing the new system. idf-repositories.yaml: | - dp02: {{ .Values.global.baseUrl }}{{ .Values.config.pathPrefix }}/configs/dp02.yaml - dp02-direct: {{ .Values.global.baseUrl }}{{ .Values.config.pathPrefix }}/configs/dp02.yaml - dp02-remote: {{ .Values.global.baseUrl }}{{ .Values.config.pathPrefix }}/repo/dp02/butler.yaml + {{- $dp02Direct := print .Values.global.baseUrl .Values.config.pathPrefix "/configs/dp02.yaml" -}} + {{- $dp02Remote := print .Values.global.baseUrl .Values.config.pathPrefix "/repo/dp02/butler.yaml" -}} + {{- if .Values.config.dp02ClientServerIsDefault }} + dp02: {{ $dp02Remote }} + {{- else }} + dp02: {{ $dp02Direct }} + {{- end }} + dp02-direct: {{ $dp02Direct }} + dp02-remote: {{ $dp02Remote }} {{- end }} diff --git a/applications/butler/values-idfdev.yaml b/applications/butler/values-idfdev.yaml index e70e31b433..92cc0e6897 100644 --- a/applications/butler/values-idfdev.yaml +++ b/applications/butler/values-idfdev.yaml @@ -2,6 +2,7 @@ image: pullPolicy: Always config: + dp02ClientServerIsDefault: true dp02PostgresUri: postgresql://postgres@sqlproxy-butler-int.sqlproxy-cross-project:5432/dp02 s3EndpointUrl: "https://storage.googleapis.com" additionalS3ProfileName: "ir2" diff --git a/applications/butler/values-idfint.yaml b/applications/butler/values-idfint.yaml index 5f16d776da..fc3fcb6a8f 100644 --- 
a/applications/butler/values-idfint.yaml +++ b/applications/butler/values-idfint.yaml @@ -1,4 +1,5 @@ config: + dp02ClientServerIsDefault: true dp02PostgresUri: postgresql://postgres@sqlproxy-butler-int.sqlproxy-cross-project:5432/dp02 s3EndpointUrl: "https://storage.googleapis.com" repositories: diff --git a/applications/butler/values.yaml b/applications/butler/values.yaml index 18086ea1c0..51ec757201 100644 --- a/applications/butler/values.yaml +++ b/applications/butler/values.yaml @@ -85,6 +85,10 @@ config: # @default -- No configuration file for DP02 will be generated. dp02PostgresUri: "" + # -- True if the 'dp02' Butler repository alias should use client/server + # Butler. False if it should use DirectButler. + dp02ClientServerIsDefault: false + # -- Postgres username used to connect to the Butler DB # @default -- Use values specified in per-repository Butler config files. pguser: "" From 2b8c4a9001b43ce261cbd44c9e9dafcbf90982c4 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 20 Sep 2024 10:36:28 -0700 Subject: [PATCH 117/567] Update Telegraf image - This version (not released yet) fixes a race condition bug we found in the Telegraf Avro parser. 
--- applications/sasquatch/values-base.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 0f9e2e631c..4cde0064d7 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -133,6 +133,9 @@ influxdb: telegraf-kafka-consumer: enabled: true + image: + repo: "docker.io/lsstsqre/telegraf" + tag: "avro-mutex" kafkaConsumers: auxtel: enabled: true From f0b4eb4451b2a223036dc2670134f591ac0ea82b Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 20 Sep 2024 10:36:49 -0700 Subject: [PATCH 118/567] Enable debug logs for all connectors --- applications/sasquatch/values-base.yaml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 4cde0064d7..576b753ae3 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -142,71 +142,85 @@ telegraf-kafka-consumer: database: "efd" topicRegexps: | [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] + debug: true maintel: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.MTAOS", "lsst.sal.MTDome", "lsst.sal.MTDomeTrajectory", "lsst.sal.MTPtg" ] + debug: true mtmount: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.MTMount" ] + debug: true eas: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + debug: true latiss: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.ATCamera", "lsst.sal.ATHeaderService", "lsst.sal.ATOODS", "lsst.sal.ATSpectrograph" ] + debug: true m1m3: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.MTM1M3" ] + debug: true m2: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.MTHexapod", 
"lsst.sal.MTM2", "lsst.sal.MTRotator" ] + debug: true obssys: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.Scheduler", "lsst.sal.Script", "lsst.sal.ScriptQueue", "lsst.sal.Watcher" ] + debug: true ocps: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.OCPS" ] + debug: true test: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.Test" ] + debug: true mtaircompressor: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.MTAirCompressor" ] + debug: true lasertracker: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.LaserTracker" ] + debug: true genericcamera: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.GCHeaderService", "lsst.sal.GenericCamera" ] + debug: true lsstcam: enabled: true database: "efd" topicRegexps: | [ "lsst.sal.MTCamera", "lsst.sal.MTHeaderService", "lsst.sal.MTOODS" ] + debug: true kafdrop: cmdArgs: "--message.format=AVRO --message.keyFormat=DEFAULT --topic.deleteEnabled=false --topic.createEnabled=false" From 84c4b4c229803ed3fd5fa022f2ab0330bd8e538b Mon Sep 17 00:00:00 2001 From: Fritz Mueller Date: Sun, 22 Sep 2024 21:13:16 -0700 Subject: [PATCH 119/567] cm-service: move initial deployment from prod to dev vcluster --- .../cm-service/values-usdf-cm-dev.yaml | 31 +++++++++++++++++++ environments/values-usdf-cm-dev.yaml | 2 ++ environments/values-usdf-cm.yaml | 2 -- 3 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 applications/cm-service/values-usdf-cm-dev.yaml diff --git a/applications/cm-service/values-usdf-cm-dev.yaml b/applications/cm-service/values-usdf-cm-dev.yaml new file mode 100644 index 0000000000..e7b42a3f33 --- /dev/null +++ b/applications/cm-service/values-usdf-cm-dev.yaml @@ -0,0 +1,31 @@ +config: + logLevel: "INFO" + logProfile: "development" + databaseEcho: true + outputVolume: + storageClassName: "sdf-data-rubin" + subPath: "shared/campaigns/users/usdf-cm-prod" +worker: + htcondor: + config: + mountPath: 
"/home/lsstsvc1/stack/conda/envs/lsst-scipipe-9.0.0/etc/condor/config.d" + contents: | + CONDOR_HOST = sdfiana012.sdf.slac.stanford.edu + COLLECTOR_HOST = sdfiana012.sdf.slac.stanford.edu + SEC_CLIENT_AUTHENTICATION_METHODS = FS, FS_REMOTE + use security:recommended_v9_0 + SEC_DEFAULT_AUTHENTICATION_METHODS = FS_REMOTE, IDTOKENS, FS + SEC_DAEMON_AUTHENTICATION_METHODS = FS_REMOTE, IDTOKENS, FS + SEC_READ_AUTHENTICATION_METHODS = FS_REMOTE, IDTOKENS, FS + FS_REMOTE_DIR = /sdf/group/rubin/services/htcondor/shared + SCHEDD_ADDRESS_FILE = /config/schedd-address + fsRemoteDir: + storageClassName: "sdf-group-rubin" + subPath: "services/htcondor/shared" + mountPath: "/sdf/group/rubin/services/htcondor/shared" + scheddAddress: + mountPath: "/config" + contents: | + <172.24.49.173:5935?addrs=172.24.49.173-5935&alias=sdfiana012.sdf.slac.stanford.edu> + $CondorVersion: 23.0.12 2024-06-13 BuildID: 739441 PackageID: 23.0.12-1 $ + $CondorPlatform: x86_64_AlmaLinux8 $ diff --git a/environments/values-usdf-cm-dev.yaml b/environments/values-usdf-cm-dev.yaml index 365566c1f2..7ca7a8afd7 100644 --- a/environments/values-usdf-cm-dev.yaml +++ b/environments/values-usdf-cm-dev.yaml @@ -10,3 +10,5 @@ applications: cert-manager: false gafaelfawr: false ingress-nginx: false + + cm-service: true diff --git a/environments/values-usdf-cm.yaml b/environments/values-usdf-cm.yaml index 3eac3a6f9d..2fb30966d6 100644 --- a/environments/values-usdf-cm.yaml +++ b/environments/values-usdf-cm.yaml @@ -10,5 +10,3 @@ applications: cert-manager: false gafaelfawr: false ingress-nginx: false - - cm-service: true From e0310aa9096188418b8477b616f461b9043f312b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 07:18:35 +0000 Subject: [PATCH 120/567] Update Helm release argo-workflows to v0.42.3 --- applications/argo-workflows/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argo-workflows/Chart.yaml 
b/applications/argo-workflows/Chart.yaml index fa244f7232..28c6a47cac 100644 --- a/applications/argo-workflows/Chart.yaml +++ b/applications/argo-workflows/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-workflows - version: 0.42.2 + version: 0.42.3 repository: https://argoproj.github.io/argo-helm From b81297c4dbea4fd3cf8fe92c58f7a6e4d1a301c9 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 07:18:40 +0000 Subject: [PATCH 121/567] Update Helm release telegraf to v1.8.54 --- applications/telegraf/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/telegraf/Chart.yaml b/applications/telegraf/Chart.yaml index 407d6c1e7b..33c097cea8 100644 --- a/applications/telegraf/Chart.yaml +++ b/applications/telegraf/Chart.yaml @@ -8,7 +8,7 @@ sources: - https://github.com/influxdata/helm-charts dependencies: - name: telegraf - version: 1.8.53 + version: 1.8.54 repository: https://helm.influxdata.com/ annotations: phalanx.lsst.io/docs: | From a492c69081e1abb594c251013c4f6220e2610efe Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 10:16:05 +0000 Subject: [PATCH 122/567] Update Helm release telegraf-ds to v1.1.34 --- applications/telegraf-ds/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/telegraf-ds/Chart.yaml b/applications/telegraf-ds/Chart.yaml index b6e5adade2..8cb53aec89 100644 --- a/applications/telegraf-ds/Chart.yaml +++ b/applications/telegraf-ds/Chart.yaml @@ -8,7 +8,7 @@ sources: - https://github.com/influxdata/helm-charts dependencies: - name: telegraf-ds - version: 1.1.33 + version: 1.1.34 repository: https://helm.influxdata.com/ annotations: phalanx.lsst.io/docs: | From f56eda3bf6eefb66e3fb12a69e954f9d9f65fc4f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" 
<29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 10:16:09 +0000 Subject: [PATCH 123/567] Update Helm release argo-cd to v7.6.1 --- applications/argocd/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argocd/Chart.yaml b/applications/argocd/Chart.yaml index 81ca4bd156..56c9e07f2f 100644 --- a/applications/argocd/Chart.yaml +++ b/applications/argocd/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-cd - version: 7.5.2 + version: 7.6.1 repository: https://argoproj.github.io/argo-helm From 6ae010f8857684a2053c02be832a5ef29ad0f280 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 15:43:00 +0000 Subject: [PATCH 124/567] Update Helm release connect to v1.16.0 --- applications/onepassword-connect/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/onepassword-connect/Chart.yaml b/applications/onepassword-connect/Chart.yaml index ea91cd2183..7cb6ff21d9 100644 --- a/applications/onepassword-connect/Chart.yaml +++ b/applications/onepassword-connect/Chart.yaml @@ -6,7 +6,7 @@ version: 1.0.0 dependencies: - name: connect - version: 1.15.1 + version: 1.16.0 repository: https://1password.github.io/connect-helm-charts/ annotations: From ed68f98695a2c9a1968ad6fd8f88d23aa6b9cf8b Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 3 Sep 2024 14:25:22 -0700 Subject: [PATCH 125/567] Add ghostwriter --- applications/ghostwriter/.helmignore | 23 ++++++ applications/ghostwriter/Chart.yaml | 8 ++ applications/ghostwriter/secret.yaml | 8 ++ .../ghostwriter/templates/_helpers.tpl | 26 ++++++ .../ghostwriter/templates/configmap.yaml | 9 +++ .../ghostwriter/templates/deployment.yaml | 81 +++++++++++++++++++ .../ghostwriter/templates/ingress.yaml | 30 +++++++ .../ghostwriter/templates/networkpolicy.yaml | 21 +++++ .../ghostwriter/templates/service.yaml | 15 ++++ 
.../ghostwriter/templates/vault-secrets.yaml | 11 +++ applications/ghostwriter/values-idfdev.yaml | 10 +++ applications/ghostwriter/values.yaml | 73 +++++++++++++++++ docs/applications/ghostwriter/index.rst | 16 ++++ docs/applications/ghostwriter/values.md | 12 +++ docs/applications/infrastructure.rst | 1 + environments/README.md | 1 + .../infrastructure/ghostwriter.yaml | 34 ++++++++ environments/values-idfdev.yaml | 1 + environments/values.yaml | 3 + 19 files changed, 383 insertions(+) create mode 100644 applications/ghostwriter/.helmignore create mode 100644 applications/ghostwriter/Chart.yaml create mode 100644 applications/ghostwriter/secret.yaml create mode 100644 applications/ghostwriter/templates/_helpers.tpl create mode 100644 applications/ghostwriter/templates/configmap.yaml create mode 100644 applications/ghostwriter/templates/deployment.yaml create mode 100644 applications/ghostwriter/templates/ingress.yaml create mode 100644 applications/ghostwriter/templates/networkpolicy.yaml create mode 100644 applications/ghostwriter/templates/service.yaml create mode 100644 applications/ghostwriter/templates/vault-secrets.yaml create mode 100644 applications/ghostwriter/values-idfdev.yaml create mode 100644 applications/ghostwriter/values.yaml create mode 100644 docs/applications/ghostwriter/index.rst create mode 100644 docs/applications/ghostwriter/values.md create mode 100644 environments/templates/applications/infrastructure/ghostwriter.yaml diff --git a/applications/ghostwriter/.helmignore b/applications/ghostwriter/.helmignore new file mode 100644 index 0000000000..0e8a0eb36f --- /dev/null +++ b/applications/ghostwriter/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. 
+.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/applications/ghostwriter/Chart.yaml b/applications/ghostwriter/Chart.yaml new file mode 100644 index 0000000000..0ea46c3e3d --- /dev/null +++ b/applications/ghostwriter/Chart.yaml @@ -0,0 +1,8 @@ +apiVersion: v2 +appVersion: 0.1.0 +description: URL rewriter/personalizer +name: ghostwriter +sources: +- https://github.com/lsst-sqre/ghostwriter +type: application +version: 1.0.0 diff --git a/applications/ghostwriter/secret.yaml b/applications/ghostwriter/secret.yaml new file mode 100644 index 0000000000..e0f4154904 --- /dev/null +++ b/applications/ghostwriter/secret.yaml @@ -0,0 +1,8 @@ +slack-webhook: + description: >- + Slack web hook used to report internal errors to Slack. This secret may be + changed at any time. + if: config.slackAlerts + copy: + application: mobu + key: app-alert-webhook diff --git a/applications/ghostwriter/templates/_helpers.tpl b/applications/ghostwriter/templates/_helpers.tpl new file mode 100644 index 0000000000..51a900690a --- /dev/null +++ b/applications/ghostwriter/templates/_helpers.tpl @@ -0,0 +1,26 @@ +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "ghostwriter.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "ghostwriter.labels" -}} +helm.sh/chart: {{ include "ghostwriter.chart" . }} +{{ include "ghostwriter.selectorLabels" . 
}} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "ghostwriter.selectorLabels" -}} +app.kubernetes.io/name: "ghostwriter" +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} diff --git a/applications/ghostwriter/templates/configmap.yaml b/applications/ghostwriter/templates/configmap.yaml new file mode 100644 index 0000000000..151a90e1e5 --- /dev/null +++ b/applications/ghostwriter/templates/configmap.yaml @@ -0,0 +1,9 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: "ghostwriter-config" + labels: + {{- include "ghostwriter.labels" . | nindent 4 }} +data: + routing.yaml: |- + {{- toYaml .Values.mapping | nindent 4 }} diff --git a/applications/ghostwriter/templates/deployment.yaml b/applications/ghostwriter/templates/deployment.yaml new file mode 100644 index 0000000000..8e3ddf9fa5 --- /dev/null +++ b/applications/ghostwriter/templates/deployment.yaml @@ -0,0 +1,81 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: "ghostwriter" + labels: + {{- include "ghostwriter.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: + {{- include "ghostwriter.selectorLabels" . | nindent 6 }} + template: + metadata: + {{- with .Values.podAnnotations }} + annotations: + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "ghostwriter.selectorLabels" . | nindent 8 }} + spec: + {{- with .Values.affinity }} + affinity: + {{- toYaml . 
| nindent 8 }} + {{- end }} + automountServiceAccountToken: false + containers: + - name: {{ .Chart.Name }} + imagePullPolicy: {{ .Values.image.pullPolicy }} + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + env: + - name: "GHOSTWRITER_ENVIRONMENT_URL" + value: {{ .Values.global.baseUrl | quote }} + {{- if .Values.config.slackAlerts }} + - name: "GHOSTWRITER_ALERT_HOOK" + valueFrom: + secretKeyRef: + name: "ghostwriter-secret" + key: "slack-webhook" + {{- end }} + {{- if .Values.config.debug }} + - name: GHOSTWRITER_LOG_LEVEL + value: "DEBUG" + - name: GHOSTWRITER_LOGGING_PROFILE + value: "development" + {{- end }} + ports: + - name: "http" + containerPort: 8080 + protocol: "TCP" + readinessProbe: + httpGet: + path: "/" + port: "http" + resources: + {{- toYaml .Values.resources | nindent 12 }} + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - "all" + readOnlyRootFilesystem: true + volumeMounts: + - name: "config" + mountPath: "/etc/ghostwriter" + readOnly: true + volumes: + - name: "config" + configMap: + name: "ghostwriter-config" + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} + securityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 1000 diff --git a/applications/ghostwriter/templates/ingress.yaml b/applications/ghostwriter/templates/ingress.yaml new file mode 100644 index 0000000000..0d18079b28 --- /dev/null +++ b/applications/ghostwriter/templates/ingress.yaml @@ -0,0 +1,30 @@ +apiVersion: gafaelfawr.lsst.io/v1alpha1 +kind: GafaelfawrIngress +metadata: + name: "ghostwriter" + labels: + {{- include "ghostwriter.labels" . 
| nindent 4 }} +config: + baseUrl: {{ .Values.global.baseUrl | quote }} + scopes: + all: + - "read:image" +template: + metadata: + name: "ghostwriter" + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . | nindent 6 }} + {{- end }} + spec: + rules: + - host: {{ required "global.host must be set" .Values.global.host | quote }} + http: + paths: + - path: "/ghostwriter" + pathType: "Prefix" + backend: + service: + name: "ghostwriter" + port: + number: 8080 diff --git a/applications/ghostwriter/templates/networkpolicy.yaml b/applications/ghostwriter/templates/networkpolicy.yaml new file mode 100644 index 0000000000..b4a5ecb1e5 --- /dev/null +++ b/applications/ghostwriter/templates/networkpolicy.yaml @@ -0,0 +1,21 @@ +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: "ghostwriter" +spec: + podSelector: + matchLabels: + {{- include "ghostwriter.selectorLabels" . | nindent 6 }} + policyTypes: + - "Ingress" + ingress: + # Allow inbound access from pods (in any namespace) labeled + # gafaelfawr.lsst.io/ingress: true. + - from: + - namespaceSelector: {} + podSelector: + matchLabels: + gafaelfawr.lsst.io/ingress: "true" + ports: + - protocol: "TCP" + port: 8080 diff --git a/applications/ghostwriter/templates/service.yaml b/applications/ghostwriter/templates/service.yaml new file mode 100644 index 0000000000..ced6204a96 --- /dev/null +++ b/applications/ghostwriter/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: "ghostwriter" + labels: + {{- include "ghostwriter.labels" . | nindent 4 }} +spec: + type: "ClusterIP" + ports: + - port: 8080 + targetPort: "http" + protocol: "TCP" + name: "http" + selector: + {{- include "ghostwriter.selectorLabels" . 
| nindent 4 }} diff --git a/applications/ghostwriter/templates/vault-secrets.yaml b/applications/ghostwriter/templates/vault-secrets.yaml new file mode 100644 index 0000000000..785ca96b8a --- /dev/null +++ b/applications/ghostwriter/templates/vault-secrets.yaml @@ -0,0 +1,11 @@ +{{- if .Values.config.slackAlerts }} +apiVersion: ricoberger.de/v1alpha1 +kind: VaultSecret +metadata: + name: ghostwriter-secret + labels: + {{- include "ghostwriter.labels" . | nindent 4 }} +spec: + path: "{{ .Values.global.vaultSecretsPath }}/ghostwriter" + type: "Opaque" +{{- end }} diff --git a/applications/ghostwriter/values-idfdev.yaml b/applications/ghostwriter/values-idfdev.yaml new file mode 100644 index 0000000000..b220357eef --- /dev/null +++ b/applications/ghostwriter/values-idfdev.yaml @@ -0,0 +1,10 @@ +image: + # pullPolicy: "Always" + tag: "tickets-dm-46010" +config: + slackAlerts: true + debug: true +mapping: + routes: + - source_prefix: "/tutorials/" + target: "${base_url}/nb/user/${user}/lab/tree/${path}.ipynb" diff --git a/applications/ghostwriter/values.yaml b/applications/ghostwriter/values.yaml new file mode 100644 index 0000000000..75247c6676 --- /dev/null +++ b/applications/ghostwriter/values.yaml @@ -0,0 +1,73 @@ +# Default values for ghostwriter. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. + +# -- ghostwriter configuration +config: + # -- Whether to send alerts and status to Slack. 
+ slackAlerts: false + + # -- If set to true, enable verbose logging and disable structured JSON + # logging + debug: false + +# -- ghostwriter URL mapping +mapping: + # routes for URL rewriting + # @default -- None; must be set for each environment + routes: [] + +# -- Number of web deployment pods to start +replicaCount: 1 + +image: + # -- Image to use in the ghostwriter deployment + repository: "ghcr.io/lsst-sqre/ghostwriter" + + # -- Pull policy for the ghostwriter image + pullPolicy: "IfNotPresent" + + # -- Tag of image to use + # @default -- The appVersion of the chart + tag: null + +ingress: + # -- Additional annotations for the ingress rule + annotations: {} + +# -- Affinity rules for the ghostwriter deployment pod +affinity: {} + +# -- Node selection rules for the ghostwriter deployment pod +nodeSelector: {} + +# -- Annotations for the ghostwriter deployment pod +podAnnotations: {} + +# -- Resource limits and requests for the ghostwriter deployment pod +# @default -- See `values.yaml` +resources: + limits: + cpu: "1" + memory: "2Gi" + requests: + cpu: "10m" + memory: "128Mi" + +# -- Tolerations for the ghostwriter deployment pod +tolerations: [] + +# The following will be set by parameters injected by Argo CD and should not +# be set in the individual environment values files. +global: + # -- Base URL for the environment + # @default -- Set by Argo CD + baseUrl: null + + # -- Host name for ingress + # @default -- Set by Argo CD + host: null + + # -- Base path for Vault secrets + # @default -- Set by Argo CD + vaultSecretsPath: null diff --git a/docs/applications/ghostwriter/index.rst b/docs/applications/ghostwriter/index.rst new file mode 100644 index 0000000000..fc1adf2cc9 --- /dev/null +++ b/docs/applications/ghostwriter/index.rst @@ -0,0 +1,16 @@ +.. px-app:: ghostwriter + +####################################### +ghostwriter — URL rewriter/personalizer +####################################### + +.. 
jinja:: ghostwriter + :file: applications/_summary.rst.jinja + +Guides +====== + +.. toctree:: + :maxdepth: 1 + + values \ No newline at end of file diff --git a/docs/applications/ghostwriter/values.md b/docs/applications/ghostwriter/values.md new file mode 100644 index 0000000000..0f3a1e3f75 --- /dev/null +++ b/docs/applications/ghostwriter/values.md @@ -0,0 +1,12 @@ +```{px-app-values} ghostwriter +``` + +# ghostwriter Helm values reference + +Helm values reference table for the {px-app}`ghostwriter` application. + +```{include} ../../../applications/ghostwriter/README.md +--- +start-after: "## Values" +--- +``` \ No newline at end of file diff --git a/docs/applications/infrastructure.rst b/docs/applications/infrastructure.rst index 158629e5e5..bb614908d1 100644 --- a/docs/applications/infrastructure.rst +++ b/docs/applications/infrastructure.rst @@ -12,6 +12,7 @@ Argo CD project: ``infrastructure`` argocd/index cert-manager/index + ghostwriter/index ingress-nginx/index gafaelfawr/index mobu/index diff --git a/environments/README.md b/environments/README.md index 56c5604c22..9ec20b4fda 100644 --- a/environments/README.md +++ b/environments/README.md @@ -21,6 +21,7 @@ | applications.fastapi-bootcamp | bool | `false` | Enable the fastapi-bootcamp application | | applications.filestore-backup | bool | `false` | Enable the filestore-backup application | | applications.gafaelfawr | bool | `true` | Enable the Gafaelfawr application. This is required by Phalanx since most other applications use `GafaelfawrIngress` | +| applications.ghostwriter | bool | `false` | Enable the ghostwriter application | | applications.giftless | bool | `false` | Enable the giftless application | | applications.hips | bool | `false` | Enable the HiPS application | | applications.ingress-nginx | bool | `true` | Enable the ingress-nginx application. 
This is required for all environments, but is still configurable because currently USDF uses an unsupported configuration with ingress-nginx deployed in a different cluster. | diff --git a/environments/templates/applications/infrastructure/ghostwriter.yaml b/environments/templates/applications/infrastructure/ghostwriter.yaml new file mode 100644 index 0000000000..5d993e0b88 --- /dev/null +++ b/environments/templates/applications/infrastructure/ghostwriter.yaml @@ -0,0 +1,34 @@ +{{- if (index .Values "applications" "ghostwriter") -}} +apiVersion: v1 +kind: Namespace +metadata: + name: "ghostwriter" +--- +apiVersion: argoproj.io/v1alpha1 +kind: Application +metadata: + name: "ghostwriter" + namespace: "argocd" + finalizers: + - "resources-finalizer.argocd.argoproj.io" +spec: + destination: + namespace: "ghostwriter" + server: "https://kubernetes.default.svc" + project: "infrastructure" + source: + path: "applications/ghostwriter" + repoURL: {{ .Values.repoUrl | quote }} + targetRevision: {{ .Values.targetRevision | quote }} + helm: + parameters: + - name: "global.host" + value: {{ .Values.fqdn | quote }} + - name: "global.baseUrl" + value: "https://{{ .Values.fqdn }}" + - name: "global.vaultSecretsPath" + value: {{ .Values.vaultPathPrefix | quote }} + valueFiles: + - "values.yaml" + - "values-{{ .Values.name }}.yaml" +{{- end -}} \ No newline at end of file diff --git a/environments/values-idfdev.yaml b/environments/values-idfdev.yaml index b0a52056de..39c028701b 100644 --- a/environments/values-idfdev.yaml +++ b/environments/values-idfdev.yaml @@ -16,6 +16,7 @@ applications: butler: true datalinker: true filestore-backup: true + ghostwriter: true hips: true jira-data-proxy: true mobu: true diff --git a/environments/values.yaml b/environments/values.yaml index fa0b156f5d..b7774a1d1e 100644 --- a/environments/values.yaml +++ b/environments/values.yaml @@ -82,6 +82,9 @@ applications: # most other applications use `GafaelfawrIngress` gafaelfawr: true + # -- Enable the 
ghostwriter application + ghostwriter: false + # -- Enable the giftless application giftless: false From d031befc8372556b2dad92e18a4ebe6bde5c0707 Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 3 Sep 2024 14:36:15 -0700 Subject: [PATCH 126/567] no slack alerts ghostwriter/idfdev --- applications/ghostwriter/values-idfdev.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/ghostwriter/values-idfdev.yaml b/applications/ghostwriter/values-idfdev.yaml index b220357eef..34ad1d9ae8 100644 --- a/applications/ghostwriter/values-idfdev.yaml +++ b/applications/ghostwriter/values-idfdev.yaml @@ -1,8 +1,8 @@ image: - # pullPolicy: "Always" + pullPolicy: "Always" tag: "tickets-dm-46010" config: - slackAlerts: true + # slackAlerts: true debug: true mapping: routes: From d43325d3a4e3a80e97d248506a14b6214b960827 Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 3 Sep 2024 14:43:55 -0700 Subject: [PATCH 127/567] Add delegated scopes --- applications/ghostwriter/templates/ingress.yaml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/applications/ghostwriter/templates/ingress.yaml b/applications/ghostwriter/templates/ingress.yaml index 0d18079b28..00b1749a52 100644 --- a/applications/ghostwriter/templates/ingress.yaml +++ b/applications/ghostwriter/templates/ingress.yaml @@ -9,6 +9,16 @@ config: scopes: all: - "read:image" + delegate: + internal: + service: "ghostwriter" + scopes: + - "read:image" + - "exec:notebook" + - "exec:portal" + - "read:image" + - "read:tap" + - "write:files" template: metadata: name: "ghostwriter" From e1fbc45760d4600669878872da8a7b9637a02fea Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 3 Sep 2024 14:46:31 -0700 Subject: [PATCH 128/567] Add dummy config.yaml --- applications/ghostwriter/templates/configmap.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/applications/ghostwriter/templates/configmap.yaml b/applications/ghostwriter/templates/configmap.yaml index 
151a90e1e5..ba63bffdea 100644 --- a/applications/ghostwriter/templates/configmap.yaml +++ b/applications/ghostwriter/templates/configmap.yaml @@ -7,3 +7,5 @@ metadata: data: routing.yaml: |- {{- toYaml .Values.mapping | nindent 4 }} + config.yaml: |- + # Empty: values will be taken from environment From c0107daaf130d8b296053a7fa8fdc35fd9ab2ece Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 3 Sep 2024 15:13:18 -0700 Subject: [PATCH 129/567] try 'notebook' style delegation --- applications/ghostwriter/templates/ingress.yaml | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/applications/ghostwriter/templates/ingress.yaml b/applications/ghostwriter/templates/ingress.yaml index 00b1749a52..1570a890a7 100644 --- a/applications/ghostwriter/templates/ingress.yaml +++ b/applications/ghostwriter/templates/ingress.yaml @@ -9,16 +9,8 @@ config: scopes: all: - "read:image" - delegate: - internal: - service: "ghostwriter" - scopes: - - "read:image" - - "exec:notebook" - - "exec:portal" - - "read:image" - - "read:tap" - - "write:files" + delegate: + notebook: {} template: metadata: name: "ghostwriter" From dfdb68a262fc170e4ea31840a2b66e53ec4b298d Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 3 Sep 2024 16:05:57 -0700 Subject: [PATCH 130/567] Add path for tutorial nb rewrite --- applications/ghostwriter/values-idfdev.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/ghostwriter/values-idfdev.yaml b/applications/ghostwriter/values-idfdev.yaml index 34ad1d9ae8..2ec32258e6 100644 --- a/applications/ghostwriter/values-idfdev.yaml +++ b/applications/ghostwriter/values-idfdev.yaml @@ -7,4 +7,4 @@ config: mapping: routes: - source_prefix: "/tutorials/" - target: "${base_url}/nb/user/${user}/lab/tree/${path}.ipynb" + target: "${base_url}/nb/user/${user}/lab/tree/notebooks/tutorial-notebooks/${path}.ipynb" From d51328aae6af5c7e78060c9dbb404c37415ceb95 Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 3 
Sep 2024 16:45:17 -0700 Subject: [PATCH 131/567] Add top-level ingresses to ghostwriter --- applications/ghostwriter/README.md | 29 +++++++++++++++ .../ghostwriter/templates/deployment.yaml | 3 +- .../templates/ingress-toplevel.yaml | 36 +++++++++++++++++++ 3 files changed, 67 insertions(+), 1 deletion(-) create mode 100644 applications/ghostwriter/README.md create mode 100644 applications/ghostwriter/templates/ingress-toplevel.yaml diff --git a/applications/ghostwriter/README.md b/applications/ghostwriter/README.md new file mode 100644 index 0000000000..ef7ad713f4 --- /dev/null +++ b/applications/ghostwriter/README.md @@ -0,0 +1,29 @@ +# ghostwriter + +URL rewriter/personalizer + +## Source Code + +* + +## Values + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| affinity | object | `{}` | Affinity rules for the ghostwriter deployment pod | +| config | object | `{"debug":false,"slackAlerts":false}` | ghostwriter configuration | +| config.debug | bool | `false` | If set to true, enable verbose logging and disable structured JSON logging | +| config.slackAlerts | bool | `false` | Whether to send alerts and status to Slack. 
| +| global.baseUrl | string | Set by Argo CD | Base URL for the environment | +| global.host | string | Set by Argo CD | Host name for ingress | +| global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | +| image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the ghostwriter image | +| image.repository | string | `"ghcr.io/lsst-sqre/ghostwriter"` | Image to use in the ghostwriter deployment | +| image.tag | string | The appVersion of the chart | Tag of image to use | +| ingress.annotations | object | `{}` | Additional annotations for the ingress rule | +| mapping | object | `{"routes":[]}` | ghostwriter URL mapping | +| nodeSelector | object | `{}` | Node selection rules for the ghostwriter deployment pod | +| podAnnotations | object | `{}` | Annotations for the ghostwriter deployment pod | +| replicaCount | int | `1` | Number of web deployment pods to start | +| resources | object | See `values.yaml` | Resource limits and requests for the ghostwriter deployment pod | +| tolerations | list | `[]` | Tolerations for the ghostwriter deployment pod | diff --git a/applications/ghostwriter/templates/deployment.yaml b/applications/ghostwriter/templates/deployment.yaml index 8e3ddf9fa5..8c945dc7b4 100644 --- a/applications/ghostwriter/templates/deployment.yaml +++ b/applications/ghostwriter/templates/deployment.yaml @@ -11,8 +11,9 @@ spec: {{- include "ghostwriter.selectorLabels" . | nindent 6 }} template: metadata: - {{- with .Values.podAnnotations }} annotations: + checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} + {{- with .Values.podAnnotations }} {{- toYaml . | nindent 8 }} {{- end }} labels: diff --git a/applications/ghostwriter/templates/ingress-toplevel.yaml b/applications/ghostwriter/templates/ingress-toplevel.yaml new file mode 100644 index 0000000000..b659bc7cec --- /dev/null +++ b/applications/ghostwriter/templates/ingress-toplevel.yaml @@ -0,0 +1,36 @@ +{{- $root := . 
-}} +{{- range $route := $root.Values.mapping.routes }} +{{- $source := $route.source_prefix | trimAll "/" }} +{{- $res_src := trimPrefix "/" $source | replace "/" "-" }} +apiVersion: gafaelfawr.lsst.io/v1alpha1 +kind: GafaelfawrIngress +metadata: + name: "ghostwriter-{{ $res_src }}" +config: + baseUrl: {{ $root.Values.global.baseUrl | quote }} + scopes: + all: + - "read:image" + delegate: + notebook: {} +template: + metadata: + name: "ghostwriter-{{ $res_src }}" + annotations: + nginx.ingress.kubernetes.io/rewrite-target: "/ghostwriter/rewrite/$1" + {{- with $root.Values.ingress.annotations }} + {{- toYaml . | nindent 6 }} + {{- end }} + spec: + rules: + - host: {{ required "global.host must be set" $root.Values.global.host | quote }} + http: + paths: + - path: "/({{ $source }}/.*)" + pathType: "ImplementationSpecific" + backend: + service: + name: "ghostwriter" + port: + number: 8080 +{{- end }} From 78464434850a20fcf6813bacf19ea064274bd615 Mon Sep 17 00:00:00 2001 From: adam Date: Wed, 4 Sep 2024 12:54:47 -0700 Subject: [PATCH 132/567] Try adding a hook for ghostwriter --- applications/ghostwriter/values-idfdev.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/applications/ghostwriter/values-idfdev.yaml b/applications/ghostwriter/values-idfdev.yaml index 2ec32258e6..235801a95c 100644 --- a/applications/ghostwriter/values-idfdev.yaml +++ b/applications/ghostwriter/values-idfdev.yaml @@ -8,3 +8,5 @@ mapping: routes: - source_prefix: "/tutorials/" target: "${base_url}/nb/user/${user}/lab/tree/notebooks/tutorial-notebooks/${path}.ipynb" + hooks: + - "ensure_running_lab" From 4ecd1f226007f3023da57e4685aa9516616609ee Mon Sep 17 00:00:00 2001 From: adam Date: Wed, 4 Sep 2024 15:55:51 -0700 Subject: [PATCH 133/567] Add query target for ghostwriter --- applications/ghostwriter/values-idfdev.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/applications/ghostwriter/values-idfdev.yaml b/applications/ghostwriter/values-idfdev.yaml index 
235801a95c..f97559f62e 100644 --- a/applications/ghostwriter/values-idfdev.yaml +++ b/applications/ghostwriter/values-idfdev.yaml @@ -10,3 +10,8 @@ mapping: target: "${base_url}/nb/user/${user}/lab/tree/notebooks/tutorial-notebooks/${path}.ipynb" hooks: - "ensure_running_lab" + - source_prefix: "/queries/" + target: "${base_url}/nb/user/${user}/lab/tree/queries/portal_${path}.ipynb" + hooks: + - "ensure_running_lab" + - "portal_query" From f3b772a51218f9b635e67378e3d227f1eca74497 Mon Sep 17 00:00:00 2001 From: adam Date: Wed, 4 Sep 2024 16:27:23 -0700 Subject: [PATCH 134/567] separate multiple ingresses --- applications/ghostwriter/templates/ingress-toplevel.yaml | 1 + applications/ghostwriter/values-idfdev.yaml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/applications/ghostwriter/templates/ingress-toplevel.yaml b/applications/ghostwriter/templates/ingress-toplevel.yaml index b659bc7cec..fb8ebbf3b5 100644 --- a/applications/ghostwriter/templates/ingress-toplevel.yaml +++ b/applications/ghostwriter/templates/ingress-toplevel.yaml @@ -33,4 +33,5 @@ template: name: "ghostwriter" port: number: 8080 +--- {{- end }} diff --git a/applications/ghostwriter/values-idfdev.yaml b/applications/ghostwriter/values-idfdev.yaml index f97559f62e..3386860209 100644 --- a/applications/ghostwriter/values-idfdev.yaml +++ b/applications/ghostwriter/values-idfdev.yaml @@ -11,7 +11,7 @@ mapping: hooks: - "ensure_running_lab" - source_prefix: "/queries/" - target: "${base_url}/nb/user/${user}/lab/tree/queries/portal_${path}.ipynb" + target: "${base_url}/nb/user/${user}/lab/tree/notebooks/queries/portal_${path}.ipynb" hooks: - "ensure_running_lab" - "portal_query" From 388f8bb8d8ca0eaa5c266afd5850161441c9b043 Mon Sep 17 00:00:00 2001 From: adam Date: Wed, 4 Sep 2024 16:35:04 -0700 Subject: [PATCH 135/567] Add exec:notebook to delegated portal scope, so they can hit '/queries/query-id' --- applications/portal/templates/ingress.yaml | 1 + 1 file changed, 1 insertion(+) 
diff --git a/applications/portal/templates/ingress.yaml b/applications/portal/templates/ingress.yaml index 0d7d6fc957..4edef4ab7b 100644 --- a/applications/portal/templates/ingress.yaml +++ b/applications/portal/templates/ingress.yaml @@ -18,6 +18,7 @@ config: - "read:image" - "read:tap" - "write:files" + - "exec:notebook" template: metadata: name: {{ include "portal.fullname" . }} From 7dd7939bf742b50e171609ed23a2bd9d2e8acad1 Mon Sep 17 00:00:00 2001 From: adam Date: Mon, 9 Sep 2024 12:02:03 -0700 Subject: [PATCH 136/567] Add tutorial-on-demand to ghostwriter --- applications/ghostwriter/values-idfdev.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/applications/ghostwriter/values-idfdev.yaml b/applications/ghostwriter/values-idfdev.yaml index 3386860209..4bd576564d 100644 --- a/applications/ghostwriter/values-idfdev.yaml +++ b/applications/ghostwriter/values-idfdev.yaml @@ -15,3 +15,8 @@ mapping: hooks: - "ensure_running_lab" - "portal_query" + - source_prefix: "/tutorials-on-demand/" + target: "${base_url}/nb/user/${user}/lab/tree/notebooks/tutorials-on-demand/${path}.ipynb" + hooks: + - "ensure_running_lab" + - "tutorial_on_demand" From f0a70ee0908ef73c45d110314c8f237dc970e9c9 Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Wed, 11 Sep 2024 15:54:09 -0700 Subject: [PATCH 137/567] Add system-test to ghostwriter --- applications/ghostwriter/values-idfdev.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/applications/ghostwriter/values-idfdev.yaml b/applications/ghostwriter/values-idfdev.yaml index 4bd576564d..d00bee4314 100644 --- a/applications/ghostwriter/values-idfdev.yaml +++ b/applications/ghostwriter/values-idfdev.yaml @@ -20,3 +20,8 @@ mapping: hooks: - "ensure_running_lab" - "tutorial_on_demand" + - source_prefix: "/system-test/" + target: "${base_url}/nb/user/${user}/lab/tree/notebooks/on-demand/${path}.ipynb" + hooks: + - "ensure_running_lab" + - "system_test" From dd8a5600e217dbc2fbe13b0291029c5247b80869 Mon Sep 17 00:00:00 2001 
From: adam Date: Wed, 11 Sep 2024 21:45:50 -0700 Subject: [PATCH 138/567] Add generic github notebook hook --- applications/ghostwriter/values-idfdev.yaml | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/applications/ghostwriter/values-idfdev.yaml b/applications/ghostwriter/values-idfdev.yaml index d00bee4314..706121ea96 100644 --- a/applications/ghostwriter/values-idfdev.yaml +++ b/applications/ghostwriter/values-idfdev.yaml @@ -6,22 +6,18 @@ config: debug: true mapping: routes: - - source_prefix: "/tutorials/" - target: "${base_url}/nb/user/${user}/lab/tree/notebooks/tutorial-notebooks/${path}.ipynb" - hooks: - - "ensure_running_lab" - source_prefix: "/queries/" target: "${base_url}/nb/user/${user}/lab/tree/notebooks/queries/portal_${path}.ipynb" hooks: - "ensure_running_lab" - "portal_query" - - source_prefix: "/tutorials-on-demand/" - target: "${base_url}/nb/user/${user}/lab/tree/notebooks/tutorials-on-demand/${path}.ipynb" + - source_prefix: "/notebooks/github.com/" + target: "${base_url}/nb/user/${user}/lab/tree/notebooks/on-demand/github.com/${path}.ipynb" hooks: - "ensure_running_lab" - - "tutorial_on_demand" + - "github_notebook" + # Two convenience routes that themselves just use the github_notebook hook - source_prefix: "/system-test/" - target: "${base_url}/nb/user/${user}/lab/tree/notebooks/on-demand/${path}.ipynb" - hooks: - - "ensure_running_lab" - - "system_test" + target: "${base_url}/notebooks/github.com/lsst-sqre/system-test/${path}" + - source_prefix: "/tutorials/" + target: "${base_url}/notebooks/github.com/rubin-dp0/tutorial-notebooks/${path}" From 62d4ebb56ce147016dd098dc7d0a144d674cc947 Mon Sep 17 00:00:00 2001 From: adam Date: Mon, 23 Sep 2024 13:37:31 -0700 Subject: [PATCH 139/567] Move to Ghostwriter release version, enable in int/prod --- applications/ghostwriter/values-idfdev.yaml | 3 --- applications/ghostwriter/values-idfint.yaml | 17 +++++++++++++++++ applications/ghostwriter/values-idfprod.yaml | 
17 +++++++++++++++++ environments/values-idfint.yaml | 1 + environments/values-idfprod.yaml | 1 + 5 files changed, 36 insertions(+), 3 deletions(-) create mode 100644 applications/ghostwriter/values-idfint.yaml create mode 100644 applications/ghostwriter/values-idfprod.yaml diff --git a/applications/ghostwriter/values-idfdev.yaml b/applications/ghostwriter/values-idfdev.yaml index 706121ea96..a2ee3d93ca 100644 --- a/applications/ghostwriter/values-idfdev.yaml +++ b/applications/ghostwriter/values-idfdev.yaml @@ -1,6 +1,3 @@ -image: - pullPolicy: "Always" - tag: "tickets-dm-46010" config: # slackAlerts: true debug: true diff --git a/applications/ghostwriter/values-idfint.yaml b/applications/ghostwriter/values-idfint.yaml new file mode 100644 index 0000000000..364d12c05b --- /dev/null +++ b/applications/ghostwriter/values-idfint.yaml @@ -0,0 +1,17 @@ +mapping: + routes: + - source_prefix: "/queries/" + target: "${base_url}/nb/user/${user}/lab/tree/notebooks/queries/portal_${path}.ipynb" + hooks: + - "ensure_running_lab" + - "portal_query" + - source_prefix: "/notebooks/github.com/" + target: "${base_url}/nb/user/${user}/lab/tree/notebooks/on-demand/github.com/${path}.ipynb" + hooks: + - "ensure_running_lab" + - "github_notebook" + # Two convenience routes that themselves just use the github_notebook hook + - source_prefix: "/system-test/" + target: "${base_url}/notebooks/github.com/lsst-sqre/system-test/${path}" + - source_prefix: "/tutorials/" + target: "${base_url}/notebooks/github.com/rubin-dp0/tutorial-notebooks/${path}" diff --git a/applications/ghostwriter/values-idfprod.yaml b/applications/ghostwriter/values-idfprod.yaml new file mode 100644 index 0000000000..364d12c05b --- /dev/null +++ b/applications/ghostwriter/values-idfprod.yaml @@ -0,0 +1,17 @@ +mapping: + routes: + - source_prefix: "/queries/" + target: "${base_url}/nb/user/${user}/lab/tree/notebooks/queries/portal_${path}.ipynb" + hooks: + - "ensure_running_lab" + - "portal_query" + - source_prefix: 
"/notebooks/github.com/" + target: "${base_url}/nb/user/${user}/lab/tree/notebooks/on-demand/github.com/${path}.ipynb" + hooks: + - "ensure_running_lab" + - "github_notebook" + # Two convenience routes that themselves just use the github_notebook hook + - source_prefix: "/system-test/" + target: "${base_url}/notebooks/github.com/lsst-sqre/system-test/${path}" + - source_prefix: "/tutorials/" + target: "${base_url}/notebooks/github.com/rubin-dp0/tutorial-notebooks/${path}" diff --git a/environments/values-idfint.yaml b/environments/values-idfint.yaml index 27a63ea967..15190999da 100644 --- a/environments/values-idfint.yaml +++ b/environments/values-idfint.yaml @@ -16,6 +16,7 @@ applications: butler: true datalinker: true filestore-backup: true + ghostwriter: true hips: true mobu: true nublado: true diff --git a/environments/values-idfprod.yaml b/environments/values-idfprod.yaml index 611286d511..af3b77877c 100644 --- a/environments/values-idfprod.yaml +++ b/environments/values-idfprod.yaml @@ -17,6 +17,7 @@ applications: butler: true datalinker: true filestore-backup: true + ghostwriter: true hips: true mobu: true nublado: true From 4f49b48120b78fc8e78f5ee6d23084e09e462b64 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 23 Sep 2024 13:42:52 -0700 Subject: [PATCH 140/567] BTS: Change ack for CSC producers. 
--- charts/csc_shared/templates/configmap-env.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/charts/csc_shared/templates/configmap-env.yaml b/charts/csc_shared/templates/configmap-env.yaml index cefb956f2c..8e7f7485f8 100644 --- a/charts/csc_shared/templates/configmap-env.yaml +++ b/charts/csc_shared/templates/configmap-env.yaml @@ -10,5 +10,6 @@ data: LSST_KAFKA_REPLICATION_FACTOR: {{ $.Values.global.controlSystem.kafkaTopicReplicationFactor | quote }} LSST_KAFKA_SECURITY_USERNAME: ts-salkafka LSST_SCHEMA_REGISTRY_URL: {{ $.Values.global.controlSystem.schemaRegistryUrl }} + LSST_KAFKA_PRODUCER_WAIT_ACKS: "1" S3_ENDPOINT_URL: {{ $.Values.global.controlSystem.s3EndpointUrl }} {{- end }} From ea700e3af952a5c63a039c0f1642f95931de8eb7 Mon Sep 17 00:00:00 2001 From: adam Date: Mon, 23 Sep 2024 13:46:56 -0700 Subject: [PATCH 141/567] Rename secret.yaml -> secrets.yaml (ghostwriter) --- applications/ghostwriter/{secret.yaml => secrets.yaml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename applications/ghostwriter/{secret.yaml => secrets.yaml} (100%) diff --git a/applications/ghostwriter/secret.yaml b/applications/ghostwriter/secrets.yaml similarity index 100% rename from applications/ghostwriter/secret.yaml rename to applications/ghostwriter/secrets.yaml From f220731a27eb1421f8bf24bebfca7e884e42e901 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Mon, 23 Sep 2024 14:50:30 -0700 Subject: [PATCH 142/567] Fix context deadline exceeded error - Reducing the batch size configuration in Telegraf from 5000 to 2500 messages fixed the problem. It seems that 5000 messages * avg size of M1M3 messages was too large for a request to the InfluxDB API. 
--- applications/sasquatch/values-base.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 576b753ae3..a28a51be3b 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -172,6 +172,7 @@ telegraf-kafka-consumer: database: "efd" topicRegexps: | [ "lsst.sal.MTM1M3" ] + metric_batch_size: 2500 debug: true m2: enabled: true From edfa77c7b4a91d339c7f63bbd6ff9319abd07de9 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 23 Sep 2024 15:11:46 -0700 Subject: [PATCH 143/567] Update Python and pre-commit dependencies --- .pre-commit-config.yaml | 2 +- requirements/dev.txt | 360 +++++++++++++++++++++------------------- requirements/main.txt | 204 +++++++++++------------ requirements/tox.txt | 74 ++++----- 4 files changed, 328 insertions(+), 312 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 32bdfa9de8..c684835a13 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,7 +46,7 @@ repos: - --template-files=../helm-docs.md.gotmpl - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.5 + rev: v0.6.7 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/requirements/dev.txt b/requirements/dev.txt index fd796c0977..a8d900ddd8 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -378,73 +378,80 @@ graphviz==0.20.3 \ --hash=sha256:09d6bc81e6a9fa392e7ba52135a9d49f1ed62526f96499325930e87ca1b5925d \ --hash=sha256:81f848f2904515d8cd359cc611faba817598d2feaac4027b266aa3eda7b3dde5 # via diagrams -greenlet==3.1.0 ; (python_full_version < '3.13' and platform_machine == 'AMD64') or (python_full_version < '3.13' and platform_machine == 'WIN32') or (python_full_version < '3.13' and platform_machine == 'aarch64') or (python_full_version < '3.13' and platform_machine == 'amd64') or (python_full_version < '3.13' and platform_machine == 'ppc64le') or (python_full_version < 
'3.13' and platform_machine == 'win32') or (python_full_version < '3.13' and platform_machine == 'x86_64') \ - --hash=sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9 \ - --hash=sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17 \ - --hash=sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc \ - --hash=sha256:13ff8c8e54a10472ce3b2a2da007f915175192f18e6495bad50486e87c7f6637 \ - --hash=sha256:1544b8dd090b494c55e60c4ff46e238be44fdc472d2589e943c241e0169bcea2 \ - --hash=sha256:184258372ae9e1e9bddce6f187967f2e08ecd16906557c4320e3ba88a93438c3 \ - --hash=sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6 \ - --hash=sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b \ - --hash=sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf \ - --hash=sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27 \ - --hash=sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1 \ - --hash=sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc \ - --hash=sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a \ - --hash=sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b \ - --hash=sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d \ - --hash=sha256:2d004db911ed7b6218ec5c5bfe4cf70ae8aa2223dffbb5b3c69e342bb253cb28 \ - --hash=sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303 \ - --hash=sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99 \ - --hash=sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f \ - --hash=sha256:44cd313629ded43bb3b98737bba2f3e2c2c8679b55ea29ed73daea6b755fe8e7 \ - --hash=sha256:4a3dae7492d16e85ea6045fd11cb8e782b63eac8c8d520c3a92c02ac4573b0a6 \ - --hash=sha256:4b5ea3664eed571779403858d7cd0a9b0ebf50d57d2cdeafc7748e09ef8cd81a \ - 
--hash=sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc \ - --hash=sha256:5415b9494ff6240b09af06b91a375731febe0090218e2898d2b85f9b92abcda0 \ - --hash=sha256:5fd6e94593f6f9714dbad1aaba734b5ec04593374fa6638df61592055868f8b8 \ - --hash=sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a \ - --hash=sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca \ - --hash=sha256:665b21e95bc0fce5cab03b2e1d90ba9c66c510f1bb5fdc864f3a377d0f553f6b \ - --hash=sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989 \ - --hash=sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19 \ - --hash=sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6 \ - --hash=sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484 \ - --hash=sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd \ - --hash=sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25 \ - --hash=sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b \ - --hash=sha256:7f346d24d74c00b6730440f5eb8ec3fe5774ca8d1c9574e8e57c8671bb51b910 \ - --hash=sha256:81eeec4403a7d7684b5812a8aaa626fa23b7d0848edb3a28d2eb3220daddcbd0 \ - --hash=sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5 \ - --hash=sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345 \ - --hash=sha256:9eb4a1d7399b9f3c7ac68ae6baa6be5f9195d1d08c9ddc45ad559aa6b556bce6 \ - --hash=sha256:a0409bc18a9f85321399c29baf93545152d74a49d92f2f55302f122007cfda00 \ - --hash=sha256:a22f4e26400f7f48faef2d69c20dc055a1f3043d330923f9abe08ea0aecc44df \ - --hash=sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811 \ - --hash=sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca \ - --hash=sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8 \ - --hash=sha256:a8870983af660798dc1b529e1fd6f1cefd94e45135a32e58bd70edd694540f33 \ - 
--hash=sha256:ac0adfdb3a21dc2a24ed728b61e72440d297d0fd3a577389df566651fcd08f97 \ - --hash=sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0 \ - --hash=sha256:b9505a0c8579899057cbefd4ec34d865ab99852baf1ff33a9481eb3924e2da0b \ - --hash=sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682 \ - --hash=sha256:c3967dcc1cd2ea61b08b0b276659242cbce5caca39e7cbc02408222fb9e6ff39 \ - --hash=sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64 \ - --hash=sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f \ - --hash=sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665 \ - --hash=sha256:cfcfb73aed40f550a57ea904629bdaf2e562c68fa1164fa4588e752af6efdc3f \ - --hash=sha256:d0dd943282231480aad5f50f89bdf26690c995e8ff555f26d8a5b9887b559bcc \ - --hash=sha256:d3c59a06c2c28a81a026ff11fbf012081ea34fb9b7052f2ed0366e14896f0a1d \ - --hash=sha256:d45b75b0f3fd8d99f62eb7908cfa6d727b7ed190737dec7fe46d993da550b81a \ - --hash=sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0 \ - --hash=sha256:d58ec349e0c2c0bc6669bf2cd4982d2f93bf067860d23a0ea1fe677b0f0b1e09 \ - --hash=sha256:db1b3ccb93488328c74e97ff888604a8b95ae4f35f4f56677ca57a4fc3a4220b \ - --hash=sha256:dd65695a8df1233309b701dec2539cc4b11e97d4fcc0f4185b4a12ce54db0491 \ - --hash=sha256:f9482c2ed414781c0af0b35d9d575226da6b728bd1a720668fa05837184965b7 \ - --hash=sha256:f9671e7282d8c6fcabc32c0fb8d7c0ea8894ae85cee89c9aadc2d7129e1a9954 \ - --hash=sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501 \ - --hash=sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54 +greenlet==3.1.1 ; (python_full_version < '3.13' and platform_machine == 'AMD64') or (python_full_version < '3.13' and platform_machine == 'WIN32') or (python_full_version < '3.13' and platform_machine == 'aarch64') or (python_full_version < '3.13' and platform_machine == 'amd64') or (python_full_version < '3.13' and platform_machine == 
'ppc64le') or (python_full_version < '3.13' and platform_machine == 'win32') or (python_full_version < '3.13' and platform_machine == 'x86_64') \ + --hash=sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e \ + --hash=sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7 \ + --hash=sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01 \ + --hash=sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1 \ + --hash=sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159 \ + --hash=sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563 \ + --hash=sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83 \ + --hash=sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9 \ + --hash=sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395 \ + --hash=sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa \ + --hash=sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942 \ + --hash=sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1 \ + --hash=sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441 \ + --hash=sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22 \ + --hash=sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9 \ + --hash=sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0 \ + --hash=sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba \ + --hash=sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3 \ + --hash=sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1 \ + --hash=sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6 \ + --hash=sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291 \ + --hash=sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39 \ + 
--hash=sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d \ + --hash=sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467 \ + --hash=sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475 \ + --hash=sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef \ + --hash=sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c \ + --hash=sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511 \ + --hash=sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c \ + --hash=sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822 \ + --hash=sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a \ + --hash=sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8 \ + --hash=sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d \ + --hash=sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01 \ + --hash=sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145 \ + --hash=sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80 \ + --hash=sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13 \ + --hash=sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e \ + --hash=sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b \ + --hash=sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1 \ + --hash=sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef \ + --hash=sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc \ + --hash=sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff \ + --hash=sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120 \ + --hash=sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437 \ + --hash=sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd \ + 
--hash=sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981 \ + --hash=sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36 \ + --hash=sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a \ + --hash=sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798 \ + --hash=sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7 \ + --hash=sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761 \ + --hash=sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0 \ + --hash=sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e \ + --hash=sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af \ + --hash=sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa \ + --hash=sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c \ + --hash=sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42 \ + --hash=sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e \ + --hash=sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81 \ + --hash=sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e \ + --hash=sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617 \ + --hash=sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc \ + --hash=sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de \ + --hash=sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111 \ + --hash=sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383 \ + --hash=sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70 \ + --hash=sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6 \ + --hash=sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4 \ + --hash=sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011 \ + 
--hash=sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803 \ + --hash=sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79 \ + --hash=sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f # via sqlalchemy idna==3.10 \ --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ @@ -505,9 +512,9 @@ jupyter-cache==1.0.0 \ --hash=sha256:594b1c4e29b488b36547e12477645f489dbdc62cc939b2408df5679f79245078 \ --hash=sha256:d0fa7d7533cd5798198d8889318269a8c1382ed3b22f622c09a9356521f48687 # via myst-nb -jupyter-client==8.6.2 \ - --hash=sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df \ - --hash=sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f +jupyter-client==8.6.3 \ + --hash=sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419 \ + --hash=sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f # via # ipykernel # nbclient @@ -689,9 +696,9 @@ pexpect==4.9.0 ; sys_platform != 'emscripten' and sys_platform != 'win32' \ --hash=sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523 \ --hash=sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f # via ipython -platformdirs==4.3.3 \ - --hash=sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5 \ - --hash=sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0 +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via jupyter-core pluggy==1.5.0 \ --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ @@ -744,104 +751,104 @@ pycparser==2.22 ; implementation_name == 'pypy' \ # via # -c requirements/main.txt # cffi -pydantic==2.9.1 \ - --hash=sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2 \ - 
--hash=sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612 +pydantic==2.9.2 \ + --hash=sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f \ + --hash=sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12 # via # -c requirements/main.txt # autodoc-pydantic # documenteer # pydantic-settings -pydantic-core==2.23.3 \ - --hash=sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801 \ - --hash=sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec \ - --hash=sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295 \ - --hash=sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba \ - --hash=sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e \ - --hash=sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e \ - --hash=sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4 \ - --hash=sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211 \ - --hash=sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea \ - --hash=sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c \ - --hash=sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835 \ - --hash=sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d \ - --hash=sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c \ - --hash=sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c \ - --hash=sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61 \ - --hash=sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83 \ - --hash=sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb \ - --hash=sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1 \ - --hash=sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5 \ - 
--hash=sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690 \ - --hash=sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b \ - --hash=sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7 \ - --hash=sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70 \ - --hash=sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a \ - --hash=sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8 \ - --hash=sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd \ - --hash=sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee \ - --hash=sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1 \ - --hash=sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab \ - --hash=sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958 \ - --hash=sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5 \ - --hash=sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b \ - --hash=sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961 \ - --hash=sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c \ - --hash=sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25 \ - --hash=sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4 \ - --hash=sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4 \ - --hash=sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f \ - --hash=sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326 \ - --hash=sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab \ - --hash=sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8 \ - --hash=sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b \ - --hash=sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6 \ - 
--hash=sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8 \ - --hash=sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01 \ - --hash=sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc \ - --hash=sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d \ - --hash=sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e \ - --hash=sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b \ - --hash=sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855 \ - --hash=sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700 \ - --hash=sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a \ - --hash=sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa \ - --hash=sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541 \ - --hash=sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791 \ - --hash=sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162 \ - --hash=sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611 \ - --hash=sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef \ - --hash=sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe \ - --hash=sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5 \ - --hash=sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba \ - --hash=sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28 \ - --hash=sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa \ - --hash=sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27 \ - --hash=sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4 \ - --hash=sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b \ - --hash=sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2 \ - 
--hash=sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c \ - --hash=sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8 \ - --hash=sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb \ - --hash=sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c \ - --hash=sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e \ - --hash=sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305 \ - --hash=sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8 \ - --hash=sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4 \ - --hash=sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433 \ - --hash=sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45 \ - --hash=sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16 \ - --hash=sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed \ - --hash=sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0 \ - --hash=sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d \ - --hash=sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710 \ - --hash=sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48 \ - --hash=sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423 \ - --hash=sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf \ - --hash=sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9 \ - --hash=sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63 \ - --hash=sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5 \ - --hash=sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb +pydantic-core==2.23.4 \ + --hash=sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36 \ + 
--hash=sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05 \ + --hash=sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071 \ + --hash=sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327 \ + --hash=sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c \ + --hash=sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36 \ + --hash=sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29 \ + --hash=sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744 \ + --hash=sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d \ + --hash=sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec \ + --hash=sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e \ + --hash=sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e \ + --hash=sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577 \ + --hash=sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232 \ + --hash=sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863 \ + --hash=sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6 \ + --hash=sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368 \ + --hash=sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480 \ + --hash=sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2 \ + --hash=sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2 \ + --hash=sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6 \ + --hash=sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769 \ + --hash=sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d \ + --hash=sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2 \ + --hash=sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84 \ + 
--hash=sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166 \ + --hash=sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271 \ + --hash=sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5 \ + --hash=sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb \ + --hash=sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13 \ + --hash=sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323 \ + --hash=sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556 \ + --hash=sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665 \ + --hash=sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef \ + --hash=sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb \ + --hash=sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119 \ + --hash=sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126 \ + --hash=sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510 \ + --hash=sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b \ + --hash=sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87 \ + --hash=sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f \ + --hash=sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc \ + --hash=sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8 \ + --hash=sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21 \ + --hash=sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f \ + --hash=sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6 \ + --hash=sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658 \ + --hash=sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b \ + --hash=sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3 \ + 
--hash=sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb \ + --hash=sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59 \ + --hash=sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24 \ + --hash=sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9 \ + --hash=sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3 \ + --hash=sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd \ + --hash=sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753 \ + --hash=sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55 \ + --hash=sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad \ + --hash=sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a \ + --hash=sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605 \ + --hash=sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e \ + --hash=sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b \ + --hash=sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433 \ + --hash=sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8 \ + --hash=sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07 \ + --hash=sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728 \ + --hash=sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0 \ + --hash=sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327 \ + --hash=sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555 \ + --hash=sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64 \ + --hash=sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6 \ + --hash=sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea \ + --hash=sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b \ + 
--hash=sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df \ + --hash=sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e \ + --hash=sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd \ + --hash=sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068 \ + --hash=sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3 \ + --hash=sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040 \ + --hash=sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12 \ + --hash=sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916 \ + --hash=sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f \ + --hash=sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f \ + --hash=sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801 \ + --hash=sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231 \ + --hash=sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5 \ + --hash=sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8 \ + --hash=sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee \ + --hash=sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607 # via # -c requirements/main.txt # pydantic @@ -1252,9 +1259,9 @@ sphinx==8.0.2 \ # sphinxcontrib-youtube # sphinxext-opengraph # sphinxext-rediraffe -sphinx-autodoc-typehints==2.4.1 \ - --hash=sha256:af37abb816ebd2cf56c7a8174fd2f34d0f2f84fbf58265f89429ae107212fe6f \ - --hash=sha256:cfe410920cecf08ade046bb387b0007edb83e992de59686c62d194c762f1e45c +sphinx-autodoc-typehints==2.4.4 \ + --hash=sha256:940de2951fd584d147e46772579fdc904f945c5f1ee1a78c614646abfbbef18b \ + --hash=sha256:e743512da58b67a06579a1462798a6907664ab77460758a43234adeac350afbf # via documenteer sphinx-automodapi==0.18.0 \ --hash=sha256:022860385590768f52d4f6e19abb83b2574772d2721fb4050ecdb6e593a1a440 \ 
@@ -1338,14 +1345,19 @@ sphinxext-rediraffe==0.2.7 \ --hash=sha256:9e430a52d4403847f4ffb3a8dd6dfc34a9fe43525305131f52ed899743a5fd8c # via documenteer sqlalchemy==2.0.35 \ + --hash=sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9 \ --hash=sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00 \ --hash=sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee \ --hash=sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6 \ + --hash=sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1 \ + --hash=sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72 \ --hash=sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf \ --hash=sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8 \ --hash=sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b \ --hash=sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc \ --hash=sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c \ + --hash=sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1 \ + --hash=sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3 \ --hash=sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5 \ --hash=sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90 \ --hash=sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec \ @@ -1355,6 +1367,7 @@ sqlalchemy==2.0.35 \ --hash=sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468 \ --hash=sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3 \ --hash=sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e \ + --hash=sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139 \ --hash=sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff \ 
--hash=sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11 \ --hash=sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01 \ @@ -1365,10 +1378,13 @@ sqlalchemy==2.0.35 \ --hash=sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87 \ --hash=sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e \ --hash=sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1 \ + --hash=sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9 \ --hash=sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f \ --hash=sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0 \ + --hash=sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44 \ --hash=sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936 \ --hash=sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8 \ + --hash=sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea \ --hash=sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f \ --hash=sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4 \ --hash=sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0 \ @@ -1465,9 +1481,9 @@ typed-ast==1.5.5 \ --hash=sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a \ --hash=sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2 # via diagrams -types-pyyaml==6.0.12.20240808 \ - --hash=sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af \ - --hash=sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35 +types-pyyaml==6.0.12.20240917 \ + --hash=sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570 \ + --hash=sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587 # via -r requirements/dev.in typing-extensions==4.12.2 \ 
--hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ diff --git a/requirements/main.txt b/requirements/main.txt index f8cb7df176..10ad6927b2 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -4,9 +4,9 @@ annotated-types==0.7.0 \ --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \ --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89 # via pydantic -anyio==4.4.0 \ - --hash=sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94 \ - --hash=sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7 +anyio==4.6.0 \ + --hash=sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb \ + --hash=sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a # via # httpcore # starlette @@ -249,9 +249,9 @@ cryptography==43.0.1 \ # phalanx (pyproject.toml) # pyjwt # safir -fastapi==0.114.2 \ - --hash=sha256:0adb148b62edb09e8c6eeefa3ea934e8f276dabc038c5a82989ea6346050c3da \ - --hash=sha256:44474a22913057b1acb973ab90f4b671ba5200482e7622816d79105dcece1ac5 +fastapi==0.115.0 \ + --hash=sha256:17ea427674467486e997206a5ab25760f6b09e069f099b96f5b55a32fb6f1631 \ + --hash=sha256:f93b4ca3529a8ebc6fc3fcf710e5efa8de3df9b41570958abf1d97d843138004 # via safir gidgethub==5.3.0 \ --hash=sha256:4dd92f2252d12756b13f9dd15cde322bfb0d625b6fb5d680da1567ec74b462c0 \ @@ -364,103 +364,103 @@ pycparser==2.22 ; platform_python_implementation != 'PyPy' \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pydantic==2.9.1 \ - --hash=sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2 \ - --hash=sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612 +pydantic==2.9.2 \ + --hash=sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f \ + 
--hash=sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12 # via # phalanx (pyproject.toml) # fastapi # safir -pydantic-core==2.23.3 \ - --hash=sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801 \ - --hash=sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec \ - --hash=sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295 \ - --hash=sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba \ - --hash=sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e \ - --hash=sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e \ - --hash=sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4 \ - --hash=sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211 \ - --hash=sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea \ - --hash=sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c \ - --hash=sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835 \ - --hash=sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d \ - --hash=sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c \ - --hash=sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c \ - --hash=sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61 \ - --hash=sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83 \ - --hash=sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb \ - --hash=sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1 \ - --hash=sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5 \ - --hash=sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690 \ - --hash=sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b \ - --hash=sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7 \ - 
--hash=sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70 \ - --hash=sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a \ - --hash=sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8 \ - --hash=sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd \ - --hash=sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee \ - --hash=sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1 \ - --hash=sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab \ - --hash=sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958 \ - --hash=sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5 \ - --hash=sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b \ - --hash=sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961 \ - --hash=sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c \ - --hash=sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25 \ - --hash=sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4 \ - --hash=sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4 \ - --hash=sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f \ - --hash=sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326 \ - --hash=sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab \ - --hash=sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8 \ - --hash=sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b \ - --hash=sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6 \ - --hash=sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8 \ - --hash=sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01 \ - --hash=sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc \ - 
--hash=sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d \ - --hash=sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e \ - --hash=sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b \ - --hash=sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855 \ - --hash=sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700 \ - --hash=sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a \ - --hash=sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa \ - --hash=sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541 \ - --hash=sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791 \ - --hash=sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162 \ - --hash=sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611 \ - --hash=sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef \ - --hash=sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe \ - --hash=sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5 \ - --hash=sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba \ - --hash=sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28 \ - --hash=sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa \ - --hash=sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27 \ - --hash=sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4 \ - --hash=sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b \ - --hash=sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2 \ - --hash=sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c \ - --hash=sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8 \ - --hash=sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb \ - 
--hash=sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c \ - --hash=sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e \ - --hash=sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305 \ - --hash=sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8 \ - --hash=sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4 \ - --hash=sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433 \ - --hash=sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45 \ - --hash=sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16 \ - --hash=sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed \ - --hash=sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0 \ - --hash=sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d \ - --hash=sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710 \ - --hash=sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48 \ - --hash=sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423 \ - --hash=sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf \ - --hash=sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9 \ - --hash=sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63 \ - --hash=sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5 \ - --hash=sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb +pydantic-core==2.23.4 \ + --hash=sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36 \ + --hash=sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05 \ + --hash=sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071 \ + --hash=sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327 \ + 
--hash=sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c \ + --hash=sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36 \ + --hash=sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29 \ + --hash=sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744 \ + --hash=sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d \ + --hash=sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec \ + --hash=sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e \ + --hash=sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e \ + --hash=sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577 \ + --hash=sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232 \ + --hash=sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863 \ + --hash=sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6 \ + --hash=sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368 \ + --hash=sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480 \ + --hash=sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2 \ + --hash=sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2 \ + --hash=sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6 \ + --hash=sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769 \ + --hash=sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d \ + --hash=sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2 \ + --hash=sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84 \ + --hash=sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166 \ + --hash=sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271 \ + --hash=sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5 \ + 
--hash=sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb \ + --hash=sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13 \ + --hash=sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323 \ + --hash=sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556 \ + --hash=sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665 \ + --hash=sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef \ + --hash=sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb \ + --hash=sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119 \ + --hash=sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126 \ + --hash=sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510 \ + --hash=sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b \ + --hash=sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87 \ + --hash=sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f \ + --hash=sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc \ + --hash=sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8 \ + --hash=sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21 \ + --hash=sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f \ + --hash=sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6 \ + --hash=sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658 \ + --hash=sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b \ + --hash=sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3 \ + --hash=sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb \ + --hash=sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59 \ + --hash=sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24 \ + 
--hash=sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9 \ + --hash=sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3 \ + --hash=sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd \ + --hash=sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753 \ + --hash=sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55 \ + --hash=sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad \ + --hash=sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a \ + --hash=sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605 \ + --hash=sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e \ + --hash=sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b \ + --hash=sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433 \ + --hash=sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8 \ + --hash=sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07 \ + --hash=sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728 \ + --hash=sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0 \ + --hash=sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327 \ + --hash=sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555 \ + --hash=sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64 \ + --hash=sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6 \ + --hash=sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea \ + --hash=sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b \ + --hash=sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df \ + --hash=sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e \ + --hash=sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd \ + 
--hash=sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068 \ + --hash=sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3 \ + --hash=sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040 \ + --hash=sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12 \ + --hash=sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916 \ + --hash=sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f \ + --hash=sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f \ + --hash=sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801 \ + --hash=sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231 \ + --hash=sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5 \ + --hash=sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8 \ + --hash=sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee \ + --hash=sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607 # via # pydantic # safir @@ -558,9 +558,9 @@ sniffio==1.3.1 \ # anyio # httpcore # httpx -starlette==0.38.5 \ - --hash=sha256:04a92830a9b6eb1442c766199d62260c3d4dc9c4f9188360626b1e0273cb7077 \ - --hash=sha256:632f420a9d13e3ee2a6f18f437b0a9f1faecb0bc42e1942aa2ea0e379a4c4206 +starlette==0.38.6 \ + --hash=sha256:4517a1409e2e73ee4951214ba012052b9e16f60e90d73cfb06192c19203bbb05 \ + --hash=sha256:863a1588f5574e70a821dadefb41e4881ea451a47a3cd1b4df359d4ffefe5ead # via # fastapi # safir diff --git a/requirements/tox.txt b/requirements/tox.txt index 0db59a8534..f50f47aa68 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -19,9 +19,9 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.16.0 \ - 
--hash=sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec \ - --hash=sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609 +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via # tox # virtualenv @@ -33,9 +33,9 @@ packaging==24.1 \ # pyproject-api # tox # tox-uv -platformdirs==4.3.3 \ - --hash=sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5 \ - --hash=sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0 +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via # -c requirements/dev.txt # tox @@ -46,41 +46,41 @@ pluggy==1.5.0 \ # via # -c requirements/dev.txt # tox -pyproject-api==1.7.1 \ - --hash=sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb \ - --hash=sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827 +pyproject-api==1.8.0 \ + --hash=sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228 \ + --hash=sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496 # via tox -tox==4.18.1 \ - --hash=sha256:35d472032ee1f73fe20c3e0e73d7073a4e85075c86ff02c576f9fc7c6a15a578 \ - --hash=sha256:3c0c96bc3a568a5c7e66387a4cfcf8c875b52e09f4d47c9f7a277ec82f1a0b11 +tox==4.20.0 \ + --hash=sha256:21a8005e3d3fe5658a8e36b8ca3ed13a4230429063c5cc2a2fdac6ee5aa0de34 \ + --hash=sha256:5b78a49b6eaaeab3ae4186415e7c97d524f762ae967c63562687c3e5f0ec23d5 # via # -r requirements/tox.in # tox-uv -tox-uv==1.11.3 \ - --hash=sha256:316f559ae5525edec12791d9e1f393e405ded5b7e7d50fbaee4726676951f49a \ - --hash=sha256:d434787406ff2854600c1ceaa555519080026208cf7f65bb5d4b2d7c9c4776de +tox-uv==1.13.0 \ + --hash=sha256:1037e4abad15a3b708b5970ed7a17a0765d7249b641a92b155bc3343b8b0145b \ + 
--hash=sha256:fb087b8b4ff779c72b48fc72ea1995387bb1c0dfb37910c20e46cef8b5f98c15 # via -r requirements/tox.in -uv==0.4.10 \ - --hash=sha256:0784f75093a75390d8d480cc8a444516e78f08849db9a13c21791a5f651df4a1 \ - --hash=sha256:0f8b9ba4ecfbea343a00e46d509669606e55fe233d800752c4c25650473df358 \ - --hash=sha256:1b6b6c6b8cc0c4e54ab25e3b46e49d1e583e26c194572eb42bfeebf71b39cca2 \ - --hash=sha256:1ff5130b6f3af79c4e47f63db03215aed15e78cb4f1f51682af6f9949c2bcf00 \ - --hash=sha256:2ff29a2f55a697e78d787a41ab41d4b26421d200728289b88b6241d3b486c436 \ - --hash=sha256:30d1f8348a2b18e21a35c97ce42528781f242d0303881fc92fbacdcb653c8bca \ - --hash=sha256:3be73788db9ceacb94a521cf67ca5cc08bac512aef71145b904ab62a3acabdae \ - --hash=sha256:444e1cdb36d7ef103e52185f918800527c255dc369c9f90eb1f198dfa3f4d5bc \ - --hash=sha256:6ba1cc3070e5c63ce0a1421fbed28bd1b3ff520671d7badda11a501504c78394 \ - --hash=sha256:8fa510dfbbde4f8ad5cd2769568c7b0c3e867b74deaf4beabcca79e74e7550cc \ - --hash=sha256:97a1187e11a9df70d55bc577721ad4a19441cda56e4d69fb2f38d88c7650d2a0 \ - --hash=sha256:99954a94dd6c4bff8a9a963c05bc3988214ea39e7511a52fda35112e1a478447 \ - --hash=sha256:a9dc1f8fca5c4a2f73054d9f56c7397e9fc6ba43baefc503d6f0128d72ea662f \ - --hash=sha256:b89dfd213359a23797155ff8175e5202ed6b84aadeb20df92132127608d46acf \ - --hash=sha256:bc87d6c581cfed0979e0f5ee93383d46006c6d4a5e4eb9f43ef13bce61b50cc2 \ - --hash=sha256:bc99e6b45303f0881a8dc199f0b7ea8261dd1779e576e8477a7721ceeeaafcc7 \ - --hash=sha256:e99e3f761875962942e0743b868bd666021d5e14c3df494e820ef8f45fb88578 \ - --hash=sha256:ff9046a8c5e836e892ac7741e672ee016e92e55c659fa8195595df65a1f3accf +uv==0.4.15 \ + --hash=sha256:04858bfd551fabe1635127d9a0afe5c62e1e7d56cf309a9674840c90bfc1f21e \ + --hash=sha256:0e9b78f1a800a4cfdfbdc9ff4e5d4cce34af770f8a1f2b9416b161f294eb3703 \ + --hash=sha256:1401e73f0e8df62b4cfbf394e65a75f18b73bf8a94a6c5653a55bd6fdb8e1bc3 \ + --hash=sha256:1bb79cb06be9bb25a1bf8641bf34593f64a96b3ba66ebd8712954f647d9faa24 \ + 
--hash=sha256:21a3cedb2276d635543a10a11c61f75c6e387110e23e90cdb6c6dd2e1f3c9453 \ + --hash=sha256:27884429b7fed371fe1fcbe829659c4a259463d0ecacb7891d800e4754b5f24c \ + --hash=sha256:4e40deb2cf2cb403dbaf65209d49c45462ebbb1bff290d4c18b902b5b385cdc9 \ + --hash=sha256:6eef6881abf9b858020ffd23f4e5d77423329da2d4a1bc0af6613c2f698c369a \ + --hash=sha256:7fcf7f3812dd173d39273e99fb2abb0814be6133e7a721baa424cbcfd25b483b \ + --hash=sha256:8d45295757f66d1913e5917c06f1974745adad842403d419362491939be889a6 \ + --hash=sha256:8e36b8e07595fc6216d01e729c81a0b4ff029a93cc2ef987a73d3b650d6d559c \ + --hash=sha256:9822fa4db0d8d50abf5eebe081c01666a98120455090d0b71463d01d5d4153c1 \ + --hash=sha256:9e28141883c0aa8525ad5418e519d8791b7dd75f35020d3b1457db89346c5dc8 \ + --hash=sha256:a5920ff4d114025c51d3f925130ca3b0fad277631846b1109347c24948b29159 \ + --hash=sha256:be46b37b569e3c8ffb7d78022bcc0eadeb987109f709c1cec01b00c261ed9595 \ + --hash=sha256:cf7d554656bb8c5b7710300e04d86ab5137ebdd31fe309d66860a9d474b385f8 \ + --hash=sha256:d16ae6b97eb77f478dfe51d6eb3627048d3f47bd04282d3006e6a212e541dba0 \ + --hash=sha256:e32137ba8202b1291e879e8145113bfb543fcc992b5f043852a96d803788b83c # via tox-uv -virtualenv==20.26.4 \ - --hash=sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55 \ - --hash=sha256:c17f4e0f3e6036e9f26700446f85c76ab11df65ff6d8a9cbfad9f71aabfcf23c +virtualenv==20.26.5 \ + --hash=sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6 \ + --hash=sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4 # via tox From 11e360b23201d3777dac67b0c6302ba397d05600 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 23 Sep 2024 15:12:17 -0700 Subject: [PATCH 144/567] Remove now-unncessary Black configuration We still use Black to reformat docs, but there's no need to maintain the file exclusion list since Python linting is done with Ruff. 
--- pyproject.toml | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2911638a09..c503a3df13 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,22 +53,7 @@ build-backend = "setuptools.build_meta" [tool.black] line-length = 79 -target-version = ["py311"] -exclude = ''' -/( - \.eggs - | \.git - | \.mypy_cache - | \.ruff_cache - | \.tox - | \.venv - | _build - | build - | dist -)/ -''' -# Use single-quoted strings so TOML treats the string like a Python r-string -# Multi-line strings are implicitly treated by black as regular expressions +target-version = ["py312"] [tool.coverage.run] parallel = true From 22601c3bac3bfc3ec139c3131171862db4cee569 Mon Sep 17 00:00:00 2001 From: adam Date: Mon, 23 Sep 2024 16:27:52 -0700 Subject: [PATCH 145/567] Remove notebook delegation from portal; adopt newer ghostwriter --- applications/ghostwriter/Chart.yaml | 2 +- applications/portal/templates/ingress.yaml | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/applications/ghostwriter/Chart.yaml b/applications/ghostwriter/Chart.yaml index 0ea46c3e3d..8d923876b0 100644 --- a/applications/ghostwriter/Chart.yaml +++ b/applications/ghostwriter/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: 0.1.0 +appVersion: 0.1.1 description: URL rewriter/personalizer name: ghostwriter sources: diff --git a/applications/portal/templates/ingress.yaml b/applications/portal/templates/ingress.yaml index 4edef4ab7b..0d7d6fc957 100644 --- a/applications/portal/templates/ingress.yaml +++ b/applications/portal/templates/ingress.yaml @@ -18,7 +18,6 @@ config: - "read:image" - "read:tap" - "write:files" - - "exec:notebook" template: metadata: name: {{ include "portal.fullname" . 
}} From d77314bd51d33e5515692055867308de098c2a7e Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 24 Sep 2024 10:18:21 -0300 Subject: [PATCH 146/567] exposurelog: move site-specific environment variables to Values.env Also add `DAF_BUTLER_REPOSITORY_INDEX`, `S3_ENDPOINT_URL` and `PGPASSFILE` env variables for butler access on the usdf-dev --- applications/exposurelog/README.md | 1 + applications/exposurelog/templates/deployment.yaml | 6 ++++-- applications/exposurelog/values-usdfdev.yaml | 9 +++++++++ applications/exposurelog/values.yaml | 3 +++ 4 files changed, 17 insertions(+), 2 deletions(-) diff --git a/applications/exposurelog/README.md b/applications/exposurelog/README.md index be173d896a..c22286d0e7 100644 --- a/applications/exposurelog/README.md +++ b/applications/exposurelog/README.md @@ -32,6 +32,7 @@ Log messages related to an exposure | db.host | string | `"postgres.postgres"` | database host | | db.port | int | `5432` | database port | | db.user | string | `"exposurelog"` | database user | +| env | list | `[]` | Environment variables to set in the exposurelog pod | | fullnameOverride | string | `""` | Override the full name for resources (includes the release name) | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | diff --git a/applications/exposurelog/templates/deployment.yaml b/applications/exposurelog/templates/deployment.yaml index 714ccb5136..c77bb88f7a 100644 --- a/applications/exposurelog/templates/deployment.yaml +++ b/applications/exposurelog/templates/deployment.yaml @@ -77,8 +77,10 @@ spec: value: {{ .Values.db.database | quote }} - name: SITE_ID value: {{ .Values.config.site_id | quote }} - - name: AWS_SHARED_CREDENTIALS_FILE - value: /var/secrets/butler/aws-credentials.ini + {{- range .Values.env }} + - name: {{ .name }} + value: {{ .value | quote }} + {{- end }} volumeMounts: {{- if .Values.config.nfs_path_1 }} - name: volume1 
diff --git a/applications/exposurelog/values-usdfdev.yaml b/applications/exposurelog/values-usdfdev.yaml index 5153d2fde7..6c7663c3ad 100644 --- a/applications/exposurelog/values-usdfdev.yaml +++ b/applications/exposurelog/values-usdfdev.yaml @@ -4,3 +4,12 @@ config: db: host: usdf-summitdb.slac.stanford.edu user: usdf +env: + - name: AWS_SHARED_CREDENTIALS_FILE + value: "/var/secrets/butler/aws-credentials.ini" + - name: DAF_BUTLER_REPOSITORY_INDEX + value: "/project/data-repos.yaml" + - name: S3_ENDPOINT_URL + value: "https://s3dfrgw.slac.stanford.edu" + - name: PGPASSFILE + value: "/var/secrets/butler/postgres-credentials.txt" diff --git a/applications/exposurelog/values.yaml b/applications/exposurelog/values.yaml index 15be2fd7df..426ba95480 100644 --- a/applications/exposurelog/values.yaml +++ b/applications/exposurelog/values.yaml @@ -86,6 +86,9 @@ config: # Sandboxes should use `test`. site_id: "" +# -- Environment variables to set in the exposurelog pod +env: [] + # -- Annotations for the exposurelog pod podAnnotations: {} From 1cdcf79654e1b8c7b13ef28127302de54cb2d279 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 24 Sep 2024 10:20:37 -0300 Subject: [PATCH 147/567] exposurelog: remove `PGPASSWORD` as we are setting `PGPASSFILE` Also move `PGUSER` env to site specific --- applications/exposurelog/templates/deployment.yaml | 7 ------- applications/exposurelog/values-usdfdev.yaml | 2 ++ 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/applications/exposurelog/templates/deployment.yaml b/applications/exposurelog/templates/deployment.yaml index c77bb88f7a..6c50301944 100644 --- a/applications/exposurelog/templates/deployment.yaml +++ b/applications/exposurelog/templates/deployment.yaml @@ -57,18 +57,11 @@ spec: value: {{ .Values.config.butler_uri_2 | quote }} - name: EXPOSURELOG_DB_USER value: {{ .Values.db.user | quote }} - - name: PGUSER - value: {{ .Values.db.user | quote }} - name: EXPOSURELOG_DB_PASSWORD valueFrom: 
secretKeyRef: name: exposurelog key: exposurelog_password - - name: PGPASSWORD - valueFrom: - secretKeyRef: - name: exposurelog - key: exposurelog_password - name: EXPOSURELOG_DB_HOST value: {{ .Values.db.host | quote }} - name: EXPOSURELOG_DB_PORT diff --git a/applications/exposurelog/values-usdfdev.yaml b/applications/exposurelog/values-usdfdev.yaml index 6c7663c3ad..da7cfbcc8d 100644 --- a/applications/exposurelog/values-usdfdev.yaml +++ b/applications/exposurelog/values-usdfdev.yaml @@ -13,3 +13,5 @@ env: value: "https://s3dfrgw.slac.stanford.edu" - name: PGPASSFILE value: "/var/secrets/butler/postgres-credentials.txt" + - name: PGUSER + value: "rubin" From 34f48e9621a237062f79873feb5f1d56875151d9 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 24 Sep 2024 10:24:45 -0300 Subject: [PATCH 148/567] exposurelog: update butler uri on usdfdev --- applications/exposurelog/values-usdfdev.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/exposurelog/values-usdfdev.yaml b/applications/exposurelog/values-usdfdev.yaml index da7cfbcc8d..e914a0e17f 100644 --- a/applications/exposurelog/values-usdfdev.yaml +++ b/applications/exposurelog/values-usdfdev.yaml @@ -1,6 +1,6 @@ config: site_id: usdfdev - butler_uri_1: s3://rubin-summit-users/butler.yaml + butler_uri_1: s3://embargo@rubin-summit-users/butler.yaml db: host: usdf-summitdb.slac.stanford.edu user: usdf From 617dcd682a1fc1d950a70007456f5bc94e283a07 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 24 Sep 2024 11:18:25 -0300 Subject: [PATCH 149/567] exposurelog: Add secretEnv to be configured site-specific --- applications/exposurelog/README.md | 1 + applications/exposurelog/templates/deployment.yaml | 7 +++++++ applications/exposurelog/values.yaml | 3 +++ 3 files changed, 11 insertions(+) diff --git a/applications/exposurelog/README.md b/applications/exposurelog/README.md index c22286d0e7..927c35f2f7 100644 --- a/applications/exposurelog/README.md +++ 
b/applications/exposurelog/README.md @@ -47,5 +47,6 @@ Log messages related to an exposure | podSecurityContext | object | `{}` | Security context for the exposurelog pod | | replicaCount | int | `1` | How many exposurelog pods to run | | resources | object | `{}` | Resource limits and requests for the exposurelog pod | +| secretEnv | list | `[]` | Additional secret environment variables to set in the exposurelog pod | | securityContext | object | `{}` | Security context for the exposurelog deployment | | tolerations | list | `[]` | Tolerations for the exposurelog pod | diff --git a/applications/exposurelog/templates/deployment.yaml b/applications/exposurelog/templates/deployment.yaml index 6c50301944..f738aaa49c 100644 --- a/applications/exposurelog/templates/deployment.yaml +++ b/applications/exposurelog/templates/deployment.yaml @@ -74,6 +74,13 @@ spec: - name: {{ .name }} value: {{ .value | quote }} {{- end }} + {{- range .Values.secretEnv }} + - name: {{ .name }} + valueFrom: + secretKeyRef: + name: {{ .secretName }} + key: {{ .secretKey }} + {{- end }} volumeMounts: {{- if .Values.config.nfs_path_1 }} - name: volume1 diff --git a/applications/exposurelog/values.yaml b/applications/exposurelog/values.yaml index 426ba95480..ece7625737 100644 --- a/applications/exposurelog/values.yaml +++ b/applications/exposurelog/values.yaml @@ -89,6 +89,9 @@ config: # -- Environment variables to set in the exposurelog pod env: [] +# -- Additional secret environment variables to set in the exposurelog pod +secretEnv: [] + # -- Annotations for the exposurelog pod podAnnotations: {} From 590794cf07eedc167eed8639cbe9d9a4330317ee Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 24 Sep 2024 11:19:24 -0300 Subject: [PATCH 150/567] exposurelog: add `PGUSER` and `PGPASSWORD` to base, tucson and summit --- applications/exposurelog/values-base.yaml | 12 ++++++++++++ applications/exposurelog/values-summit.yaml | 12 ++++++++++++ 
.../exposurelog/values-tucson-teststand.yaml | 12 ++++++++++++ 3 files changed, 36 insertions(+) diff --git a/applications/exposurelog/values-base.yaml b/applications/exposurelog/values-base.yaml index c3ff786c6e..3aff3ea83a 100644 --- a/applications/exposurelog/values-base.yaml +++ b/applications/exposurelog/values-base.yaml @@ -6,3 +6,15 @@ config: db: host: postgresdb01.ls.lsst.org + +# We use the same database user and password defined on the db object +# in the values.yaml file. This is due to telescope deployments +# are not using butler access which requires a different user and password. +env: + - name: PGUSER + value: exposurelog + +secretEnv: + - name: PGPASSWORD + secretName: exposurelog + secretKey: exposurelog_password diff --git a/applications/exposurelog/values-summit.yaml b/applications/exposurelog/values-summit.yaml index 636150ebec..dac0d8412a 100644 --- a/applications/exposurelog/values-summit.yaml +++ b/applications/exposurelog/values-summit.yaml @@ -9,3 +9,15 @@ config: butler_uri_2: /volume_2 db: host: postgresdb01.cp.lsst.org + +# We use the same database user and password defined on the db object +# in the values.yaml file. This is due to telescope deployments +# are not using butler access which requires a different user and password. +env: + - name: PGUSER + value: exposurelog + +secretEnv: + - name: PGPASSWORD + secretName: exposurelog + secretKey: exposurelog_password diff --git a/applications/exposurelog/values-tucson-teststand.yaml b/applications/exposurelog/values-tucson-teststand.yaml index 94a3159b2f..9a9f75c408 100644 --- a/applications/exposurelog/values-tucson-teststand.yaml +++ b/applications/exposurelog/values-tucson-teststand.yaml @@ -9,3 +9,15 @@ config: butler_uri_2: /volume_2 db: host: postgresdb01.tu.lsst.org + +# We use the same database user and password defined on the db object +# in the values.yaml file. This is due to telescope deployments +# are not using butler access which requires a different user and password. 
+env: + - name: PGUSER + value: exposurelog + +secretEnv: + - name: PGPASSWORD + secretName: exposurelog + secretKey: exposurelog_password From 5841933f8de95bd51ca6cf5079db438fafeda1b4 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 24 Sep 2024 13:52:45 -0300 Subject: [PATCH 151/567] exposurelog: Update appVersion to 1.3.0 --- applications/exposurelog/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/exposurelog/Chart.yaml b/applications/exposurelog/Chart.yaml index c1a84f7c27..5b095cfd61 100644 --- a/applications/exposurelog/Chart.yaml +++ b/applications/exposurelog/Chart.yaml @@ -12,4 +12,4 @@ version: 1.0.0 # number should be incremented each time you make changes to the # application. Versions are not expected to follow Semantic Versioning. They # should reflect the version the application is using. -appVersion: 1.2.1 +appVersion: 1.3.0 From e974ae28d6a7e630403d7dcd92511b29ed8f2a8e Mon Sep 17 00:00:00 2001 From: dspeck1 Date: Tue, 24 Sep 2024 13:14:42 -0500 Subject: [PATCH 152/567] Add additional test topic for testing job based processing with keda. --- applications/sasquatch/values-usdfdev.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/sasquatch/values-usdfdev.yaml b/applications/sasquatch/values-usdfdev.yaml index 833f333ba7..a08a521d88 100644 --- a/applications/sasquatch/values-usdfdev.yaml +++ b/applications/sasquatch/values-usdfdev.yaml @@ -157,6 +157,7 @@ rest-proxy: kafka: topics: - test.next-visit + - test.next-visit-job topicPrefixes: - test - lsst.dm From d1e260100e1736ea3faa7320b9178ceb34e4d1f2 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Tue, 24 Sep 2024 11:36:07 -0700 Subject: [PATCH 153/567] Remove acks envvar. 
--- charts/csc_shared/templates/configmap-env.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/charts/csc_shared/templates/configmap-env.yaml b/charts/csc_shared/templates/configmap-env.yaml index 8e7f7485f8..cefb956f2c 100644 --- a/charts/csc_shared/templates/configmap-env.yaml +++ b/charts/csc_shared/templates/configmap-env.yaml @@ -10,6 +10,5 @@ data: LSST_KAFKA_REPLICATION_FACTOR: {{ $.Values.global.controlSystem.kafkaTopicReplicationFactor | quote }} LSST_KAFKA_SECURITY_USERNAME: ts-salkafka LSST_SCHEMA_REGISTRY_URL: {{ $.Values.global.controlSystem.schemaRegistryUrl }} - LSST_KAFKA_PRODUCER_WAIT_ACKS: "1" S3_ENDPOINT_URL: {{ $.Values.global.controlSystem.s3EndpointUrl }} {{- end }} From a96f9f9277cd12b580e6b5a13f33f6a85ed87c1c Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 24 Sep 2024 16:05:51 -0300 Subject: [PATCH 154/567] nightreport: add application to usdfdev --- applications/nightreport/values-usdfdev.yaml | 9 +++++++++ environments/values-usdfdev.yaml | 1 + 2 files changed, 10 insertions(+) create mode 100644 applications/nightreport/values-usdfdev.yaml diff --git a/applications/nightreport/values-usdfdev.yaml b/applications/nightreport/values-usdfdev.yaml new file mode 100644 index 0000000000..1fab965ee8 --- /dev/null +++ b/applications/nightreport/values-usdfdev.yaml @@ -0,0 +1,9 @@ +image: + repository: ts-dockerhub.lsst.org/nightreport + tag: c0039 + pullPolicy: Always +config: + site_id: usdfdev +db: + host: usdf-summitdb.slac.stanford.edu + user: usdf diff --git a/environments/values-usdfdev.yaml b/environments/values-usdfdev.yaml index af80333545..9eb8c8c0a7 100644 --- a/environments/values-usdfdev.yaml +++ b/environments/values-usdfdev.yaml @@ -20,6 +20,7 @@ applications: livetap: true mobu: true narrativelog: true + nightreport: true noteburst: true nublado: true obsloctap: true From 6d9ddda9f178d00be5f4302fdde133b485ddaae7 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 24 Sep 2024 17:55:30 -0300 
Subject: [PATCH 155/567] nightreport: update nightreport secrets --- .../nightreport/{secrets.yaml => secrets-usdfdev.yaml} | 3 +++ 1 file changed, 3 insertions(+) rename applications/nightreport/{secrets.yaml => secrets-usdfdev.yaml} (53%) diff --git a/applications/nightreport/secrets.yaml b/applications/nightreport/secrets-usdfdev.yaml similarity index 53% rename from applications/nightreport/secrets.yaml rename to applications/nightreport/secrets-usdfdev.yaml index 7a1e9e4a72..a748a56695 100644 --- a/applications/nightreport/secrets.yaml +++ b/applications/nightreport/secrets-usdfdev.yaml @@ -1,2 +1,5 @@ nightreport_password: description: "Password for the nightreport database." + copy: + application: exposurelog + key: exposurelog_password From e5d123ae84aadb290acaf9da129787b06b9bdaad Mon Sep 17 00:00:00 2001 From: Stelios Voutsinas Date: Tue, 24 Sep 2024 15:44:17 -0700 Subject: [PATCH 156/567] Turn off siav2 app on all IDFs --- environments/values-idfdev.yaml | 2 +- environments/values-idfint.yaml | 2 +- environments/values-idfprod.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/environments/values-idfdev.yaml b/environments/values-idfdev.yaml index 39c028701b..6283dddfbd 100644 --- a/environments/values-idfdev.yaml +++ b/environments/values-idfdev.yaml @@ -25,7 +25,7 @@ applications: portal: true sasquatch: true semaphore: true - siav2: true + siav2: false ssotap: true squareone: true sqlproxy-cross-project: true diff --git a/environments/values-idfint.yaml b/environments/values-idfint.yaml index 15190999da..34696fc711 100644 --- a/environments/values-idfint.yaml +++ b/environments/values-idfint.yaml @@ -23,7 +23,7 @@ applications: plot-navigator: true portal: true sasquatch: true - siav2: true + siav2: false ssotap: true production-tools: true sasquatch-backpack: true diff --git a/environments/values-idfprod.yaml b/environments/values-idfprod.yaml index af3b77877c..0a6a26cc37 100644 --- a/environments/values-idfprod.yaml +++ 
b/environments/values-idfprod.yaml @@ -23,7 +23,7 @@ applications: nublado: true portal: true semaphore: true - siav2: true + siav2: false squareone: true ssotap: true tap: true From 4a0b35b75082e42a4b325c8bf1e65fb2bcb24bfc Mon Sep 17 00:00:00 2001 From: pav511 <38131208+pav511@users.noreply.github.com> Date: Tue, 27 Aug 2024 12:45:03 -0700 Subject: [PATCH 157/567] usdf-cm-dev: enable gf and postgres (pav) --- .../gafaelfawr/values-usdf-cm-dev.yaml | 223 ++++++++++++++++++ applications/postgres/values-usdf-cm-dev.yaml | 5 + environments/values-usdf-cm-dev.yaml | 3 +- 3 files changed, 230 insertions(+), 1 deletion(-) create mode 100644 applications/gafaelfawr/values-usdf-cm-dev.yaml create mode 100644 applications/postgres/values-usdf-cm-dev.yaml diff --git a/applications/gafaelfawr/values-usdf-cm-dev.yaml b/applications/gafaelfawr/values-usdf-cm-dev.yaml new file mode 100644 index 0000000000..18f741fc04 --- /dev/null +++ b/applications/gafaelfawr/values-usdf-cm-dev.yaml @@ -0,0 +1,223 @@ +replicaCount: 2 + +# Use the CSI storage class so that we can use snapshots. 
+redis: + persistence: + storageClass: "wekafs--sdf-k8s01" + +config: + internalDatabase: true + + oidcServer: + enabled: true + + oidc: + clientId: vcluster--usdf-cm-dev + audience: "vcluster--usdf-cm-dev" + loginUrl: "https://dex.slac.stanford.edu/auth" + tokenUrl: "https://dex.slac.stanford.edu/token" + issuer: "https://dex.slac.stanford.edu" + scopes: + - "openid" + - "email" + - "groups" + - "profile" + usernameClaim: "name" + + ldap: + url: ldaps://ldap-unix.slac.stanford.edu:636 + groupBaseDn: ou=Group,dc=slac,dc=stanford,dc=edu + groupObjectClass: posixGroup + groupMemberAttr: memberUid + groupSearchByDn: false + userBaseDn: ou=Accounts,dc=slac,dc=stanford,dc=edu + userSearchAttr: uid + addUserGroup: false + uidAttr: uidNumber + gidAttr: gidNumber + nameAttr: gecos + + groupMapping: + "admin:token": + - "rubinmgr" + - "unix-admin" + "exec:admin": + - "rubinmgr" + - "unix-admin" + "exec:internal-tools": + - "lsst" + - "lsst-ccs" + - "rubin_users" + - "rubin_users-a" + - "rubin_users-b" + - "rubin_users-c" + - "rubin_users-d" + - "rubin_users-e" + - "rubin_users-f" + - "rubin_users-g" + - "rubin_users-h" + - "rubin_users-i" + - "rubin_users-j" + - "rubin_users-k" + - "rubin_users-l" + - "rubin_users-m" + - "rubin_users-n" + - "rubin_users-o" + - "rubin_users-p" + - "rubin_users-q" + - "rubin_users-r" + - "rubin_users-s" + - "rubin_users-t" + - "rubin_users-u" + - "rubin_users-v" + - "rubin_users-w" + - "rubin_users-x" + - "rubin_users-y" + - "rubin_users-z" + - "rubin_admin_datasets" + - "rubin_admin_repos" + - "unix-admin" + "exec:notebook": + - "lsst" + - "lsst-ccs" + - "rubin_users" + - "rubin_users-a" + - "rubin_users-b" + - "rubin_users-c" + - "rubin_users-d" + - "rubin_users-e" + - "rubin_users-f" + - "rubin_users-g" + - "rubin_users-h" + - "rubin_users-i" + - "rubin_users-j" + - "rubin_users-k" + - "rubin_users-l" + - "rubin_users-m" + - "rubin_users-n" + - "rubin_users-o" + - "rubin_users-p" + - "rubin_users-q" + - "rubin_users-r" + - "rubin_users-s" 
+ - "rubin_users-t" + - "rubin_users-u" + - "rubin_users-v" + - "rubin_users-w" + - "rubin_users-x" + - "rubin_users-y" + - "rubin_users-z" + - "rubin_admin_datasets" + - "rubin_admin_repos" + - "unix-admin" + "exec:portal": + - "lsst" + - "lsst-ccs" + - "rubin_users" + - "rubin_users-a" + - "rubin_users-b" + - "rubin_users-c" + - "rubin_users-d" + - "rubin_users-e" + - "rubin_users-f" + - "rubin_users-g" + - "rubin_users-h" + - "rubin_users-i" + - "rubin_users-j" + - "rubin_users-k" + - "rubin_users-l" + - "rubin_users-m" + - "rubin_users-n" + - "rubin_users-o" + - "rubin_users-p" + - "rubin_users-q" + - "rubin_users-r" + - "rubin_users-s" + - "rubin_users-t" + - "rubin_users-u" + - "rubin_users-v" + - "rubin_users-w" + - "rubin_users-x" + - "rubin_users-y" + - "rubin_users-z" + - "rubin_admin_datasets" + - "rubin_admin_repos" + - "unix-admin" + "read:tap": + - "lsst" + - "lsst-ccs" + - "rubin_users" + - "rubin_users-a" + - "rubin_users-b" + - "rubin_users-c" + - "rubin_users-d" + - "rubin_users-e" + - "rubin_users-f" + - "rubin_users-g" + - "rubin_users-h" + - "rubin_users-i" + - "rubin_users-j" + - "rubin_users-k" + - "rubin_users-l" + - "rubin_users-m" + - "rubin_users-n" + - "rubin_users-o" + - "rubin_users-p" + - "rubin_users-q" + - "rubin_users-r" + - "rubin_users-s" + - "rubin_users-t" + - "rubin_users-u" + - "rubin_users-v" + - "rubin_users-w" + - "rubin_users-x" + - "rubin_users-y" + - "rubin_users-z" + - "rubin_admin_datasets" + - "rubin_admin_repos" + - "unix-admin" + "read:image": + - "lsst" + - "lsst-ccs" + - "rubin_users" + - "rubin_users-a" + - "rubin_users-b" + - "rubin_users-c" + - "rubin_users-d" + - "rubin_users-e" + - "rubin_users-f" + - "rubin_users-g" + - "rubin_users-h" + - "rubin_users-i" + - "rubin_users-j" + - "rubin_users-k" + - "rubin_users-l" + - "rubin_users-m" + - "rubin_users-n" + - "rubin_users-o" + - "rubin_users-p" + - "rubin_users-q" + - "rubin_users-r" + - "rubin_users-s" + - "rubin_users-t" + - "rubin_users-u" + - 
"rubin_users-v" + - "rubin_users-w" + - "rubin_users-x" + - "rubin_users-y" + - "rubin_users-z" + - "rubin_admin_datasets" + - "rubin_admin_repos" + - "unix-admin" + "write:sasquatch": + - "rubinmgr" + - "unix-admin" + + initialAdmins: + - "afausti" + - "athor" + - "frossie" + - "jonathansick" + - "rra" + - "ytl" + - "ppascual" diff --git a/applications/postgres/values-usdf-cm-dev.yaml b/applications/postgres/values-usdf-cm-dev.yaml new file mode 100644 index 0000000000..79960946d4 --- /dev/null +++ b/applications/postgres/values-usdf-cm-dev.yaml @@ -0,0 +1,5 @@ +gafaelfawr_db: + user: 'gafaelfawr' + db: 'gafaelfawr' + +postgresStorageClass: 'wekafs--sdf-k8s01' diff --git a/environments/values-usdf-cm-dev.yaml b/environments/values-usdf-cm-dev.yaml index 7ca7a8afd7..79573c97b2 100644 --- a/environments/values-usdf-cm-dev.yaml +++ b/environments/values-usdf-cm-dev.yaml @@ -8,7 +8,8 @@ applications: # This environment uses an ingress managed in a separate Kubernetes cluster, # despite that configuration not being officially supported by Phalanx. 
cert-manager: false - gafaelfawr: false + gafaelfawr: true ingress-nginx: false cm-service: true + postgres: true From d5dd8ca08358a044b6c06a3fc9b4e0d3da2b4c38 Mon Sep 17 00:00:00 2001 From: Fritz Mueller Date: Mon, 23 Sep 2024 14:52:02 -0700 Subject: [PATCH 158/567] cm-service: update to 0.1.2 --- applications/cm-service/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/cm-service/Chart.yaml b/applications/cm-service/Chart.yaml index 9ea2b7b9f1..f6174a96f1 100644 --- a/applications/cm-service/Chart.yaml +++ b/applications/cm-service/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: 0.1.1 +appVersion: 0.1.2 description: Campaign Management for Rubin Data Release Production name: cm-service sources: From 0bc9516b7f6c3cfc7cbbf6747b0d98757f0ab888 Mon Sep 17 00:00:00 2001 From: Fritz Mueller Date: Mon, 23 Sep 2024 16:59:12 -0700 Subject: [PATCH 159/567] cm-service: add back gf ingress --- .../cm-service/templates/ingress.yaml | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 applications/cm-service/templates/ingress.yaml diff --git a/applications/cm-service/templates/ingress.yaml b/applications/cm-service/templates/ingress.yaml new file mode 100644 index 0000000000..882de320dc --- /dev/null +++ b/applications/cm-service/templates/ingress.yaml @@ -0,0 +1,38 @@ +apiVersion: gafaelfawr.lsst.io/v1alpha1 +kind: GafaelfawrIngress +metadata: + name: "cm-service" + labels: + {{- include "cm-service.labels" . | nindent 4 }} +config: + baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true + scopes: + all: + - "exec:internal-tools" +template: + metadata: + name: "cm-service" + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . 
| nindent 6 }} + {{- end }} + spec: + rules: + - host: {{ required "global.host must be set" .Values.global.host | quote }} + http: + paths: + - path: {{ .Values.config.pathPrefix | quote }} + pathType: "Prefix" + backend: + service: + name: "cm-service" + port: + number: 8080 + - path: "/web_app" + pathType: "Prefix" + backend: + service: + name: "cm-service" + port: + number: 8080 From bebfdf10f538b451d2a2ff691db0c962d396e995 Mon Sep 17 00:00:00 2001 From: Jeremy McCormick Date: Wed, 25 Sep 2024 13:39:05 -0500 Subject: [PATCH 160/567] Update sdm_schemas to v3.2.1 --- charts/cadc-tap/README.md | 4 ++-- charts/cadc-tap/values.yaml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/charts/cadc-tap/README.md b/charts/cadc-tap/README.md index f0ab35a1aa..1da63ab1f8 100644 --- a/charts/cadc-tap/README.md +++ b/charts/cadc-tap/README.md @@ -22,7 +22,7 @@ IVOA TAP service | cloudsql.resources | object | See `values.yaml` | Resource limits and requests for the Cloud SQL Proxy container | | cloudsql.serviceAccount | string | None, must be set | The Google service account that has an IAM binding to the `cadc-tap` Kubernetes service accounts and has the `cloudsql.client` role, access | | config.backend | string | None, must be set to `pg` or `qserv` | What type of backend are we connecting to? 
| -| config.datalinkPayloadUrl | string | `"https://github.com/lsst/sdm_schemas/releases/download/3.0.2/datalink-snippets.zip"` | Datalink payload URL | +| config.datalinkPayloadUrl | string | `"https://github.com/lsst/sdm_schemas/releases/download/v3.2.1/datalink-snippets.zip"` | Datalink payload URL | | config.gcsBucket | string | `"async-results.lsst.codes"` | Name of GCS bucket in which to store results | | config.gcsBucketType | string | `"GCS"` | GCS bucket type (GCS or S3) | | config.gcsBucketUrl | string | `"https://tap-files.lsst.codes"` | Base URL for results stored in GCS bucket | @@ -69,7 +69,7 @@ IVOA TAP service | tapSchema.affinity | object | `{}` | Affinity rules for the TAP schema database pod | | tapSchema.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the TAP schema image | | tapSchema.image.repository | string | `"lsstsqre/tap-schema-mock"` | TAP schema image to ue. This must be overridden by each environment with the TAP schema for that environment. | -| tapSchema.image.tag | string | `"3.0.2"` | Tag of TAP schema image | +| tapSchema.image.tag | string | `"v3.2.1"` | Tag of TAP schema image | | tapSchema.nodeSelector | object | `{}` | Node selection rules for the TAP schema database pod | | tapSchema.podAnnotations | object | `{}` | Annotations for the TAP schema database pod | | tapSchema.resources | object | See `values.yaml` | Resource limits and requests for the TAP schema database pod | diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index d61fdf37af..fd8b7e20ce 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -99,7 +99,7 @@ config: tapSchemaAddress: "cadc-tap-schema-db:3306" # -- Datalink payload URL - datalinkPayloadUrl: "https://github.com/lsst/sdm_schemas/releases/download/3.0.2/datalink-snippets.zip" + datalinkPayloadUrl: "https://github.com/lsst/sdm_schemas/releases/download/v3.2.1/datalink-snippets.zip" # -- Name of GCS bucket in which to store results gcsBucket: 
"async-results.lsst.codes" @@ -162,7 +162,7 @@ tapSchema: pullPolicy: "IfNotPresent" # -- Tag of TAP schema image - tag: "3.0.2" + tag: "v3.2.1" # -- Resource limits and requests for the TAP schema database pod # @default -- See `values.yaml` From 5cccf3058b467147c7eafc9bf6cea2d02c4a8cef Mon Sep 17 00:00:00 2001 From: Hsin-Fang Chiang Date: Wed, 25 Sep 2024 14:05:00 -0700 Subject: [PATCH 161/567] Shutdown Prompt Processing LSSTComCamSim prod We want to keep LSSTComCamSim-dev alive for now for testing use. --- .../values-usdfprod-prompt-processing.yaml | 58 ------------------- .../values-usdfprod-prompt-processing.yaml | 2 +- 2 files changed, 1 insertion(+), 59 deletions(-) delete mode 100644 applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml diff --git a/applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml deleted file mode 100644 index 2195736e75..0000000000 --- a/applications/prompt-proto-service-lsstcomcamsim/values-usdfprod-prompt-processing.yaml +++ /dev/null @@ -1,58 +0,0 @@ -prompt-proto-service: - - podAnnotations: - # Expect to need roughly n_detector × request_latency / survey_cadence pods - # For a 30 s ComCam survey with 500 s latency, this is 150 - autoscaling.knative.dev/max-scale: "150" - autoscaling.knative.dev/target-utilization-percentage: "100" - # Update this field if using latest or static image tag in dev - revision: "1" - - worker: - # Embargo rack allows fast cleanup. - grace_period: 20 - - image: - pullPolicy: IfNotPresent - # Overrides the image tag whose default is the chart appVersion. 
- tag: 4.2.0 - - instrument: - pipelines: - main: >- - (survey="BLOCK-297")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/ApPipe.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/SingleFrame.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Isr.yaml] - (survey="")=[] - preprocessing: >- - (survey="BLOCK-297")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Preprocessing.yaml] - (survey="")=[] - calibRepo: s3://rubin-summit-users - - s3: - imageBucket: rubin-summit - endpointUrl: https://sdfembs3.sdf.slac.stanford.edu - - raw_microservice: http://172.24.5.158:8080/presence - - imageNotifications: - kafkaClusterAddress: prompt-processing-2-kafka-bootstrap.kafka:9092 - topic: rubin-summit-notification - - apdb: - config: s3://rubin-summit-users/apdb_config/cassandra/pp_apdb_lsstcomcamsim_or4.py - - alerts: - topic: alerts-simulated - - sasquatch: - endpointUrl: https://usdf-rsp-dev.slac.stanford.edu/sasquatch-rest-proxy - namespace: lsst.prompt.prod - auth_env: false - - logLevel: timer.lsst.activator=DEBUG lsst.diaPipe=VERBOSE lsst.rbClassify=VERBOSE - - knative: - memoryLimit: "16Gi" - - fullnameOverride: "prompt-proto-service-lsstcomcamsim" diff --git a/environments/values-usdfprod-prompt-processing.yaml b/environments/values-usdfprod-prompt-processing.yaml index 7ec0cb921e..b1c1ce92d9 100644 --- a/environments/values-usdfprod-prompt-processing.yaml +++ b/environments/values-usdfprod-prompt-processing.yaml @@ -12,5 +12,5 @@ applications: prompt-proto-service-latiss: true prompt-proto-service-lsstcam: false prompt-proto-service-lsstcomcam: false - prompt-proto-service-lsstcomcamsim: true + prompt-proto-service-lsstcomcamsim: false vault-secrets-operator: false From b49ad6905b8b55cb5de2a00b0cb3e9bdc2526b0e Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Wed, 25 Sep 2024 15:07:35 -0700 Subject: [PATCH 162/567] Remove .pgpass support from Prompt Processing. 
All credentials are now handled through db-auth.yaml, so there's no need to set up a .pgpass file at pod startup. --- .../templates/prompt-proto-service.yaml | 27 ------------------- 1 file changed, 27 deletions(-) diff --git a/charts/prompt-proto-service/templates/prompt-proto-service.yaml b/charts/prompt-proto-service/templates/prompt-proto-service.yaml index 841be47dcb..f08eb4b17e 100644 --- a/charts/prompt-proto-service/templates/prompt-proto-service.yaml +++ b/charts/prompt-proto-service/templates/prompt-proto-service.yaml @@ -13,17 +13,6 @@ spec: spec: containerConcurrency: {{ .Values.containerConcurrency }} initContainers: - - name: init-pgpass - # Make a copy of the read-only secret that's owned by lsst - # lsst account is created by main image with id 1000 - image: busybox - command: ["sh", "-c", "cp -L /app/pg-mount/.pgpass /app/pgsql/ && chown 1000:1000 /app/pgsql/.pgpass && chmod u=r,go-rwx /app/pgsql/.pgpass"] - volumeMounts: - - mountPath: /app/pg-mount - name: pgpass-mount - readOnly: true - - mountPath: /app/pgsql - name: pgpass-credentials-file - name: init-db-auth # Make a copy of the read-only secret that's owned by lsst # lsst account is created by main image with id 1000 @@ -103,8 +92,6 @@ spec: - name: AWS_SHARED_CREDENTIALS_FILE value: /app/s3/credentials {{- end }} - - name: PGPASSFILE - value: /app/pgsql/.pgpass - name: LSST_DB_AUTH value: /app/lsst-credentials/db-auth.yaml - name: AP_KAFKA_PRODUCER_PASSWORD @@ -133,9 +120,6 @@ spec: volumeMounts: - mountPath: /tmp-butler name: ephemeral - - mountPath: /app/pgsql - name: pgpass-credentials-file - readOnly: true - mountPath: /app/lsst-credentials name: db-auth-credentials-file readOnly: true @@ -166,17 +150,6 @@ spec: - name: ephemeral emptyDir: sizeLimit: {{ .Values.knative.ephemeralStorageLimit }} - - name: pgpass-mount - # Temporary mount for .pgpass; cannot be read directly because it's owned by root - secret: - secretName: {{ template "prompt-proto-service.fullname" . 
}}-secret - items: - - key: pgpass_file - path: .pgpass - defaultMode: 0400 # Minimal permissions, as extra protection - - name: pgpass-credentials-file - emptyDir: - sizeLimit: 10Ki # Just a text file! - name: db-auth-mount # Temporary mount for db-auth.yaml; cannot be read directly because it's owned by root secret: From cf9882509e84197554b598b89c0969367affe297 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 24 Sep 2024 13:15:58 -0700 Subject: [PATCH 163/567] Deploy the Telegraf-based connectors at TTS --- .../sasquatch/values-tucson-teststand.yaml | 126 +++++++----------- 1 file changed, 50 insertions(+), 76 deletions(-) diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index 002c0d1bca..1c7732ecef 100644 --- a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -76,122 +76,96 @@ influxdb: hostname: tucson-teststand.lsst.codes telegraf-kafka-consumer: - enabled: false + enabled: true + image: + repo: "docker.io/lsstsqre/telegraf" + tag: "avro-mutex" kafkaConsumers: auxtel: enabled: true + database: "efd" topicRegexps: | - [ ".*ATAOS", ".*ATDome", ".*ATDomeTrajectory", ".*ATHexapod", ".*ATPneumatics", ".*ATPtg", ".*ATMCS" ] + [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] + debug: true maintel: enabled: true + database: "efd" topicRegexps: | - [ ".*MTAOS", ".*MTDome", ".*MTDomeTrajectory", ".*MTPtg" ] + [ "lsst.sal.MTAOS", "lsst.sal.MTDome", "lsst.sal.MTDomeTrajectory", "lsst.sal.MTPtg" ] + debug: true mtmount: enabled: true + database: "efd" topicRegexps: | - [ ".*MTMount" ] - comcam: - enabled: true - topicRegexps: | - [ ".*CCCamera", ".*CCHeaderService", ".*CCOODS" ] + [ "lsst.sal.MTMount" ] + debug: true eas: enabled: true + database: "efd" topicRegexps: | - [ ".*DIMM", ".*DSM", ".*EPM", ".*ESS", ".*WeatherForecast" ] + 
[ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + debug: true latiss: enabled: true + database: "efd" topicRegexps: | - [ ".*ATCamera", ".*ATHeaderService", ".*ATOODS", ".*ATSpectrograph" ] + [ "lsst.sal.ATCamera", "lsst.sal.ATHeaderService", "lsst.sal.ATOODS", "lsst.sal.ATSpectrograph" ] + debug: true m1m3: enabled: true - flush_interval: "1s" - metric_batch_size: 5000 - interval: "0.1s" + database: "efd" topicRegexps: | - [ ".*MTM1M3" ] + [ "lsst.sal.MTM1M3" ] + metric_batch_size: 2500 + debug: true m2: enabled: true + database: "efd" topicRegexps: | - [ ".*MTHexapod", ".*MTM2", ".*MTRotator" ] + [ "lsst.sal.MTHexapod", "lsst.sal.MTM2", "lsst.sal.MTRotator" ] + debug: true obssys: enabled: true + database: "efd" topicRegexps: | - [ ".*Scheduler", ".*Script", ".*ScriptQueue", ".*Watcher" ] + [ "lsst.sal.Scheduler", "lsst.sal.Script", "lsst.sal.ScriptQueue", "lsst.sal.Watcher" ] + debug: true ocps: enabled: true + database: "efd" topicRegexps: | - [ ".*OCPS" ] - calsys: + [ "lsst.sal.OCPS" ] + debug: true + test: enabled: true + database: "efd" topicRegexps: | - [ ".*ATMonochromator", ".*ATWhiteLight", ".*CBP", ".*Electrometer", ".*FiberSpectrograph", ".*LEDProjector", ".*LinearStage", ".*MTReflector", ".*TunableLaser" ] + [ "lsst.sal.Test" ] + debug: true mtaircompressor: enabled: true + database: "efd" topicRegexps: | - [ ".*MTAirCompressor" ] + [ "lsst.sal.MTAirCompressor" ] + debug: true lasertracker: enabled: true + database: "efd" topicRegexps: | - [ ".*LaserTracker" ] - test: + [ "lsst.sal.LaserTracker" ] + debug: true + genericcamera: enabled: true + database: "efd" topicRegexps: | - [ "lsst.sal.Test" ] - genericcamera: + [ "lsst.sal.GCHeaderService", "lsst.sal.GenericCamera" ] + debug: true + lsstcam: enabled: true + database: "efd" topicRegexps: | - [ ".*GCHeaderService", ".*GenericCamera" ] - -kafka-connect-manager: - influxdbSink: - # Based on the kafka producers configuration for the TTS 
- # https://github.com/lsst-ts/argocd-csc/blob/main/apps/kafka-producers/values-tucson-teststand.yaml - connectors: - auxtel: - enabled: true - topicsRegex: ".*ATAOS|.*ATDome|.*ATDomeTrajectory|.*ATHexapod|.*ATPneumatics|.*ATPtg|.*ATMCS" - maintel: - enabled: true - topicsRegex: ".*MTAOS|.*MTDome|.*MTDomeTrajectory|.*MTPtg" - mtmount: - enabled: true - topicsRegex: ".*MTMount" - comcam: - enabled: true - topicsRegex: ".*CCCamera|.*CCHeaderService|.*CCOODS" - eas: - enabled: true - topicsRegex: ".*DIMM|.*DSM|.*EPM|.*ESS|.*WeatherForecast" - latiss: - enabled: true - topicsRegex: ".*ATCamera|.*ATHeaderService|.*ATOODS|.*ATSpectrograph" - m1m3: - enabled: true - topicsRegex: ".*MTM1M3" - m2: - enabled: true - topicsRegex: ".*MTHexapod|.*MTM2|.*MTRotator" - obssys: - enabled: true - topicsRegex: ".*Scheduler|.*Script|.*ScriptQueue|.*Watcher" - ocps: - enabled: true - topicsRegex: ".*OCPS" - test: - enabled: true - topicsRegex: "lsst.sal.Test" - calsys: - enabled: true - topicsRegex: ".*ATMonochromator|.*ATWhiteLight|.*CBP|.*Electrometer|.*FiberSpectrograph|.*LEDProjector|.*LinearStage|.*MTReflector|.*TunableLaser" - mtaircompressor: - enabled: true - topicsRegex: ".*MTAirCompressor" - lasertracker: - enabled: true - topicsRegex: ".*LaserTracker" - genericcamera: - enabled: true - topicsRegex: ".*GCHeaderService|.*GenericCamera" + [ "lsst.sal.MTCamera", "lsst.sal.MTHeaderService", "lsst.sal.MTOODS" ] + debug: true kafdrop: cmdArgs: "--message.format=AVRO --message.keyFormat=DEFAULT --topic.deleteEnabled=false --topic.createEnabled=false" From b6e1aa2e7c4f883f84cb0531b1dba3871ec1ebca Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 24 Sep 2024 13:59:08 -0700 Subject: [PATCH 164/567] Remove LSSTCam --- applications/sasquatch/values-tucson-teststand.yaml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index 1c7732ecef..98e315ef21 100644 --- 
a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -160,12 +160,6 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.sal.GCHeaderService", "lsst.sal.GenericCamera" ] debug: true - lsstcam: - enabled: true - database: "efd" - topicRegexps: | - [ "lsst.sal.MTCamera", "lsst.sal.MTHeaderService", "lsst.sal.MTOODS" ] - debug: true kafdrop: cmdArgs: "--message.format=AVRO --message.keyFormat=DEFAULT --topic.deleteEnabled=false --topic.createEnabled=false" From d4a0494550b08a6f325b0637a562a212fbd1dbc7 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 24 Sep 2024 13:59:51 -0700 Subject: [PATCH 165/567] Add ComCam --- applications/sasquatch/values-tucson-teststand.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index 98e315ef21..aab1e77686 100644 --- a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -136,6 +136,12 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.sal.OCPS" ] debug: true + comcam: + enabled: true + database: "efd" + topicRegexps: | + [ "lsst.sal.CCCamera", "lsst.sal.CCHeaderService", "lsst.sal.CCOODS" ] + debug: true test: enabled: true database: "efd" From 8d2006504a2b458027a7d3f3c0ae2e7c612b298a Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 24 Sep 2024 14:00:32 -0700 Subject: [PATCH 166/567] Add the calibration systems back - RemoveATMonochromator and ATWhiteLight --- applications/sasquatch/values-tucson-teststand.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index aab1e77686..2e148cf30b 100644 --- a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -136,6 +136,12 @@ telegraf-kafka-consumer: topicRegexps: | [ 
"lsst.sal.OCPS" ] debug: true + calsys: + enabled: true + database: "efd" + topicRegexps: | + [ "lsst.sal.CBP", "lsst.sal.Electrometer", "lsst.sal.FiberSpectrograph", "lsst.sal.LEDProjector", "lsst.sal.LinearStage", "lsst.sal.MTReflector", "lsst.sal.TunableLaser" ] + debug: true comcam: enabled: true database: "efd" From 02459083ced1a9fcd9acc09cdc5ea6dc2418de62 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 24 Sep 2024 12:50:48 -0700 Subject: [PATCH 167/567] Add lsst.obsenv namespace for telescope environments --- applications/sasquatch/values-base.yaml | 9 +++++++++ applications/sasquatch/values-summit.yaml | 10 ++++++++++ applications/sasquatch/values-tucson-teststand.yaml | 9 +++++++++ 3 files changed, 28 insertions(+) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index a28a51be3b..8bd0138b2b 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -222,6 +222,14 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.sal.MTCamera", "lsst.sal.MTHeaderService", "lsst.sal.MTOODS" ] debug: true + obsenv: + enabled: true + database: "lsst.obsenv" + timestamp_format: "unix_ms" + timestamp_field: "timestamp" + topicRegexps: | + [ "lsst.obsenv" ] + debug: true kafdrop: cmdArgs: "--message.format=AVRO --message.keyFormat=DEFAULT --topic.deleteEnabled=false --topic.createEnabled=false" @@ -242,6 +250,7 @@ rest-proxy: topicPrefixes: - test - lsst.dm + - lsst.obsenv chronograf: persistence: diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 30b1873a25..fd4905696c 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -276,6 +276,14 @@ telegraf-kafka-consumer-oss: [ "Agent", "Aspic", "Axis", "Canbus", "Cip", "Clamp", "Cold", "Controller", "Cryo", "Gateway", "Hardware", "Hip", "Hook", "Latch", "Location", "Ps", "RTD", "Raft", "Reb", "Segment", "Sensor", "Socket", "Source", 
"Truck" ] topicRegexps: | [ "lsst.MTCamera" ] + oss-obsenv: + enabled: true + database: "lsst.obsenv" + timestamp_format: "unix_ms" + timestamp_field: "timestamp" + topicRegexps: | + [ "lsst.obsenv" ] + debug: true telegraf-kafka-consumer: enabled: true @@ -450,6 +458,7 @@ telegraf-kafka-consumer: [ "lsst.MTCamera" ] debug: true + kafdrop: ingress: enabled: true @@ -466,6 +475,7 @@ rest-proxy: topicPrefixes: - lsst.dm - lsst.backpack + - lsst.obsenv - lsst.ATCamera - lsst.CCCamera - lsst.MTCamera diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index 2e148cf30b..1df0bcd307 100644 --- a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -172,6 +172,14 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.sal.GCHeaderService", "lsst.sal.GenericCamera" ] debug: true + obsenv: + enabled: true + database: "lsst.obsenv" + timestamp_format: "unix_ms" + timestamp_field: "timestamp" + topicRegexps: | + [ "lsst.obsenv" ] + debug: true kafdrop: cmdArgs: "--message.format=AVRO --message.keyFormat=DEFAULT --topic.deleteEnabled=false --topic.createEnabled=false" @@ -191,6 +199,7 @@ rest-proxy: - test.next-visit topicPrefixes: - test + - lsst.obsenv - lsst.dm chronograf: From 8a0eb43692c0a8b8c5fe37dfbd618136dc7774cf Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 25 Sep 2024 16:28:15 -0700 Subject: [PATCH 168/567] Replicate lsst.obsenv topics to USDF --- applications/sasquatch/values-usdfprod.yaml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 4dfe10f35f..bd0516b5a7 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -22,7 +22,7 @@ strimzi-kafka: enabled: true source: bootstrapServer: sasquatch-summit-kafka-bootstrap.lsst.codes:9094 - topicsPattern: 
"registry-schemas, lsst.sal.*, lsst.dm.*, lsst.backpack.*, lsst.ATCamera.*, lsst.CCCamera.*, lsst.MTCamera.*" + topicsPattern: "registry-schemas, lsst.sal.*, lsst.dm.*, lsst.backpack.*, lsst.ATCamera.*, lsst.CCCamera.*, lsst.MTCamera.*, lsst.obsenv.*" resources: requests: cpu: 2 @@ -312,6 +312,14 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.MTCamera" ] debug: true + obsenv: + enabled: true + database: "lsst.obsenv" + timestamp_format: "unix_ms" + timestamp_field: "timestamp" + topicRegexps: | + [ "lsst.obsenv" ] + debug: true kafdrop: ingress: From cd153ba43545ec442355d6c89dc6cc2c4b487511 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 25 Sep 2024 17:31:28 -0700 Subject: [PATCH 169/567] Add lsst.cp namespace for summit environment - This namespace is used by the Calibration Pipeline --- applications/sasquatch/values-summit.yaml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index fd4905696c..7a6158cfef 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -284,6 +284,16 @@ telegraf-kafka-consumer-oss: topicRegexps: | [ "lsst.obsenv" ] debug: true + oss-cp: + enabled: true + database: "lsst.cp" + timestamp_format: "unix" + timestamp_field: "timestamp" + topicRegexps: | + [ "lsst.cp" ] + tags: | + [ "dataset_tag", "band", "instrument", "skymap", "detector", "physical_filter", "tract", "exposure", "patch", "visit", "run", "pipeline" ] + debug: true telegraf-kafka-consumer: enabled: true @@ -476,6 +486,7 @@ rest-proxy: - lsst.dm - lsst.backpack - lsst.obsenv + - lsst.cp - lsst.ATCamera - lsst.CCCamera - lsst.MTCamera From 3d7cffb40a262083abd30bd9a16f27e405938502 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 25 Sep 2024 17:36:36 -0700 Subject: [PATCH 170/567] Enable replication of lsst.cp topics to USDF --- applications/sasquatch/values-usdfprod.yaml | 12 +++++++++++- 1 file changed, 11 
insertions(+), 1 deletion(-) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index bd0516b5a7..d3adca0d93 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -22,7 +22,7 @@ strimzi-kafka: enabled: true source: bootstrapServer: sasquatch-summit-kafka-bootstrap.lsst.codes:9094 - topicsPattern: "registry-schemas, lsst.sal.*, lsst.dm.*, lsst.backpack.*, lsst.ATCamera.*, lsst.CCCamera.*, lsst.MTCamera.*, lsst.obsenv.*" + topicsPattern: "registry-schemas, lsst.sal.*, lsst.dm.*, lsst.backpack.*, lsst.ATCamera.*, lsst.CCCamera.*, lsst.MTCamera.*, lsst.obsenv.*, lsst.cp.*" resources: requests: cpu: 2 @@ -320,6 +320,16 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.obsenv" ] debug: true + cp: + enabled: true + database: "lsst.cp" + timestamp_format: "unix" + timestamp_field: "timestamp" + topicRegexps: | + [ "lsst.cp" ] + tags: | + [ "dataset_tag", "band", "instrument", "skymap", "detector", "physical_filter", "tract", "exposure", "patch", "visit", "run", "pipeline" ] + debug: true kafdrop: ingress: From d851003230502b870ca9bcae1c9c81d0a80f1644 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Date: Thu, 26 Sep 2024 12:53:17 -0300 Subject: [PATCH 171/567] rubintv: update app version for summit and usdf production deployments --- applications/rubintv/values-summit.yaml | 2 +- applications/rubintv/values-usdfprod.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index c1f2cb88ef..07a3594fb2 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -20,7 +20,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.3.0 + tag: v2.3.1 pullPolicy: Always workers: diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index 7349f935f4..9818e96584 
100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -16,7 +16,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.3.0 + tag: v2.3.1 pullPolicy: Always workers: From a694855d5f014ec1e15d040ae1153104efebcee6 Mon Sep 17 00:00:00 2001 From: Hsin-Fang Chiang Date: Thu, 26 Sep 2024 10:27:18 -0700 Subject: [PATCH 172/567] Send production-run alerts to a separate alert topic Both development testing and production prompt processing runs can generate alerts. Currently all LATISS alerts are sent to the "alert-stream-test" topic. This removes "alert-stream-test" as the default topic for all deployments and changes where the LATISS production PP runs send their alerts to. --- applications/prompt-proto-service-hsc-gpu/README.md | 2 +- .../values-usdfdev-prompt-processing.yaml | 3 +++ applications/prompt-proto-service-hsc-gpu/values.yaml | 3 ++- applications/prompt-proto-service-hsc/README.md | 2 +- .../values-usdfdev-prompt-processing.yaml | 3 +++ applications/prompt-proto-service-hsc/values.yaml | 3 ++- applications/prompt-proto-service-latiss/README.md | 2 +- .../values-usdfdev-prompt-processing.yaml | 3 +++ .../values-usdfprod-prompt-processing.yaml | 3 +++ applications/prompt-proto-service-latiss/values.yaml | 3 ++- applications/prompt-proto-service-lsstcam/README.md | 2 +- .../values-usdfdev-prompt-processing.yaml | 3 +++ applications/prompt-proto-service-lsstcam/values.yaml | 3 ++- applications/prompt-proto-service-lsstcomcam/README.md | 2 +- .../values-usdfdev-prompt-processing.yaml | 3 +++ applications/prompt-proto-service-lsstcomcam/values.yaml | 3 ++- applications/prompt-proto-service-lsstcomcamsim/README.md | 2 +- applications/prompt-proto-service-lsstcomcamsim/values.yaml | 1 + 18 files changed, 35 insertions(+), 11 deletions(-) diff --git a/applications/prompt-proto-service-hsc-gpu/README.md b/applications/prompt-proto-service-hsc-gpu/README.md index 76ce7c399a..2415159676 100644 
--- a/applications/prompt-proto-service-hsc-gpu/README.md +++ b/applications/prompt-proto-service-hsc-gpu/README.md @@ -15,7 +15,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.additionalVolumeMounts | list | `[]` | Kubernetes YAML configs for extra container volume(s). Any volumes required by other config options are automatically handled by the Helm chart. | | prompt-proto-service.affinity | object | `{}` | Affinity rules for the prompt processing pods | | prompt-proto-service.alerts.server | string | `"usdf-alert-stream-dev-broker-0.lsst.cloud:9094"` | Server address for the alert stream | -| prompt-proto-service.alerts.topic | string | `"alert-stream-test"` | Topic name where alerts will be sent | +| prompt-proto-service.alerts.topic | string | None, must be set | Topic name where alerts will be sent | | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. 
| diff --git a/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml index fe819556fd..7e9e4e559b 100644 --- a/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml @@ -27,6 +27,9 @@ prompt-proto-service: apdb: config: s3://rubin-pp-dev-users/apdb_config/sql/pp_apdb_hsc-dev.py + alerts: + topic: "alert-stream-test" + sasquatch: endpointUrl: https://usdf-rsp-dev.slac.stanford.edu/sasquatch-rest-proxy auth_env: false diff --git a/applications/prompt-proto-service-hsc-gpu/values.yaml b/applications/prompt-proto-service-hsc-gpu/values.yaml index c838c1475a..7efc93a3bb 100644 --- a/applications/prompt-proto-service-hsc-gpu/values.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values.yaml @@ -99,7 +99,8 @@ prompt-proto-service: # -- Server address for the alert stream server: "usdf-alert-stream-dev-broker-0.lsst.cloud:9094" # -- Topic name where alerts will be sent - topic: "alert-stream-test" + # @default -- None, must be set + topic: "" registry: # -- If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. diff --git a/applications/prompt-proto-service-hsc/README.md b/applications/prompt-proto-service-hsc/README.md index a463a85160..fbb60fceae 100644 --- a/applications/prompt-proto-service-hsc/README.md +++ b/applications/prompt-proto-service-hsc/README.md @@ -15,7 +15,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.additionalVolumeMounts | list | `[]` | Kubernetes YAML configs for extra container volume(s). Any volumes required by other config options are automatically handled by the Helm chart. 
| | prompt-proto-service.affinity | object | `{}` | Affinity rules for the prompt processing pods | | prompt-proto-service.alerts.server | string | `"usdf-alert-stream-dev-broker-0.lsst.cloud:9094"` | Server address for the alert stream | -| prompt-proto-service.alerts.topic | string | `"alert-stream-test"` | Topic name where alerts will be sent | +| prompt-proto-service.alerts.topic | string | None, must be set | Topic name where alerts will be sent | | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | diff --git a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml index f507024096..aba3ca2b2c 100644 --- a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml @@ -28,6 +28,9 @@ prompt-proto-service: apdb: config: s3://rubin-pp-dev-users/apdb_config/sql/pp_apdb_hsc-dev.py + alerts: + topic: "alert-stream-test" + sasquatch: endpointUrl: https://usdf-rsp-dev.slac.stanford.edu/sasquatch-rest-proxy auth_env: false diff --git a/applications/prompt-proto-service-hsc/values.yaml b/applications/prompt-proto-service-hsc/values.yaml index 1361c25215..c3921fcb42 100644 --- a/applications/prompt-proto-service-hsc/values.yaml +++ b/applications/prompt-proto-service-hsc/values.yaml @@ -99,7 +99,8 @@ prompt-proto-service: # -- Server address for the alert stream server: 
"usdf-alert-stream-dev-broker-0.lsst.cloud:9094" # -- Topic name where alerts will be sent - topic: "alert-stream-test" + # @default -- None, must be set + topic: "" registry: # -- If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. diff --git a/applications/prompt-proto-service-latiss/README.md b/applications/prompt-proto-service-latiss/README.md index 579207cc66..941c350a20 100644 --- a/applications/prompt-proto-service-latiss/README.md +++ b/applications/prompt-proto-service-latiss/README.md @@ -15,7 +15,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.additionalVolumeMounts | list | `[]` | Kubernetes YAML configs for extra container volume(s). Any volumes required by other config options are automatically handled by the Helm chart. | | prompt-proto-service.affinity | object | `{}` | Affinity rules for the prompt processing pods | | prompt-proto-service.alerts.server | string | `"usdf-alert-stream-dev-broker-0.lsst.cloud:9094"` | Server address for the alert stream | -| prompt-proto-service.alerts.topic | string | `"alert-stream-test"` | Topic name where alerts will be sent | +| prompt-proto-service.alerts.topic | string | None, must be set | Topic name where alerts will be sent | | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. 
| diff --git a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml index 3c34271230..9e0c60bf5d 100644 --- a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml @@ -28,6 +28,9 @@ prompt-proto-service: apdb: config: s3://rubin-pp-dev-users/apdb_config/cassandra/pp_apdb_latiss-dev.py + alerts: + topic: "alert-stream-test" + sasquatch: endpointUrl: https://usdf-rsp-dev.slac.stanford.edu/sasquatch-rest-proxy auth_env: false diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index 77d8ba6207..07426caa9c 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -58,6 +58,9 @@ prompt-proto-service: apdb: config: s3://rubin-summit-users/apdb_config/cassandra/pp_apdb_latiss.py + alerts: + topic: "latiss-alerts" + sasquatch: endpointUrl: https://usdf-rsp-dev.slac.stanford.edu/sasquatch-rest-proxy namespace: lsst.prompt.prod diff --git a/applications/prompt-proto-service-latiss/values.yaml b/applications/prompt-proto-service-latiss/values.yaml index 5b82a11fed..38fddacd35 100644 --- a/applications/prompt-proto-service-latiss/values.yaml +++ b/applications/prompt-proto-service-latiss/values.yaml @@ -99,7 +99,8 @@ prompt-proto-service: # -- Server address for the alert stream server: "usdf-alert-stream-dev-broker-0.lsst.cloud:9094" # -- Topic name where alerts will be sent - topic: "alert-stream-test" + # @default -- None, must be set + topic: "" registry: # -- If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local 
path where this file is mounted. diff --git a/applications/prompt-proto-service-lsstcam/README.md b/applications/prompt-proto-service-lsstcam/README.md index 419a466c0d..b2d000f026 100644 --- a/applications/prompt-proto-service-lsstcam/README.md +++ b/applications/prompt-proto-service-lsstcam/README.md @@ -15,7 +15,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.additionalVolumeMounts | list | `[]` | Kubernetes YAML configs for extra container volume(s). Any volumes required by other config options are automatically handled by the Helm chart. | | prompt-proto-service.affinity | object | `{}` | Affinity rules for the prompt processing pods | | prompt-proto-service.alerts.server | string | `"usdf-alert-stream-dev-broker-0.lsst.cloud:9094"` | Server address for the alert stream | -| prompt-proto-service.alerts.topic | string | `"alert-stream-test"` | Topic name where alerts will be sent | +| prompt-proto-service.alerts.topic | string | None, must be set | Topic name where alerts will be sent | | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. 
| diff --git a/applications/prompt-proto-service-lsstcam/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-lsstcam/values-usdfdev-prompt-processing.yaml index 32e0705ba0..818307f6ca 100644 --- a/applications/prompt-proto-service-lsstcam/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcam/values-usdfdev-prompt-processing.yaml @@ -22,4 +22,7 @@ prompt-proto-service: kafkaClusterAddress: prompt-processing-kafka-bootstrap.kafka:9092 topic: rubin-prompt-processing + alerts: + topic: "alert-stream-test" + fullnameOverride: "prompt-proto-service-lsstcam" diff --git a/applications/prompt-proto-service-lsstcam/values.yaml b/applications/prompt-proto-service-lsstcam/values.yaml index c0d79823c9..a590661413 100644 --- a/applications/prompt-proto-service-lsstcam/values.yaml +++ b/applications/prompt-proto-service-lsstcam/values.yaml @@ -99,7 +99,8 @@ prompt-proto-service: # -- Server address for the alert stream server: "usdf-alert-stream-dev-broker-0.lsst.cloud:9094" # -- Topic name where alerts will be sent - topic: "alert-stream-test" + # @default -- None, must be set + topic: "" registry: # -- If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. diff --git a/applications/prompt-proto-service-lsstcomcam/README.md b/applications/prompt-proto-service-lsstcomcam/README.md index 71a9b5713d..9e9b55654b 100644 --- a/applications/prompt-proto-service-lsstcomcam/README.md +++ b/applications/prompt-proto-service-lsstcomcam/README.md @@ -15,7 +15,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.additionalVolumeMounts | list | `[]` | Kubernetes YAML configs for extra container volume(s). Any volumes required by other config options are automatically handled by the Helm chart. 
| | prompt-proto-service.affinity | object | `{}` | Affinity rules for the prompt processing pods | | prompt-proto-service.alerts.server | string | `"usdf-alert-stream-dev-broker-0.lsst.cloud:9094"` | Server address for the alert stream | -| prompt-proto-service.alerts.topic | string | `"alert-stream-test"` | Topic name where alerts will be sent | +| prompt-proto-service.alerts.topic | string | None, must be set | Topic name where alerts will be sent | | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml index b6b4ce83dc..45667dadc3 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml @@ -22,4 +22,7 @@ prompt-proto-service: kafkaClusterAddress: prompt-processing-kafka-bootstrap.kafka:9092 topic: rubin-prompt-processing + alerts: + topic: "alert-stream-test" + fullnameOverride: "prompt-proto-service-lsstcomcam" diff --git a/applications/prompt-proto-service-lsstcomcam/values.yaml b/applications/prompt-proto-service-lsstcomcam/values.yaml index 83d6a9616b..7682298e07 100644 --- a/applications/prompt-proto-service-lsstcomcam/values.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values.yaml @@ -99,7 +99,8 @@ prompt-proto-service: # -- Server address for the alert stream 
server: "usdf-alert-stream-dev-broker-0.lsst.cloud:9094" # -- Topic name where alerts will be sent - topic: "alert-stream-test" + # @default -- None, must be set + topic: "" registry: # -- If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. diff --git a/applications/prompt-proto-service-lsstcomcamsim/README.md b/applications/prompt-proto-service-lsstcomcamsim/README.md index 0bf22395e9..6854bea8e2 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/README.md +++ b/applications/prompt-proto-service-lsstcomcamsim/README.md @@ -15,7 +15,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.additionalVolumeMounts | list | `[]` | Kubernetes YAML configs for extra container volume(s). Any volumes required by other config options are automatically handled by the Helm chart. | | prompt-proto-service.affinity | object | `{}` | Affinity rules for the prompt processing pods | | prompt-proto-service.alerts.server | string | `"usdf-alert-stream-dev-broker-0.lsst.cloud:9094"` | Server address for the alert stream | -| prompt-proto-service.alerts.topic | string | `""` | Topic name where alerts will be sent | +| prompt-proto-service.alerts.topic | string | None, must be set | Topic name where alerts will be sent | | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. 
| diff --git a/applications/prompt-proto-service-lsstcomcamsim/values.yaml b/applications/prompt-proto-service-lsstcomcamsim/values.yaml index ae5879d20a..99f8eea75b 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values.yaml @@ -99,6 +99,7 @@ prompt-proto-service: # -- Server address for the alert stream server: "usdf-alert-stream-dev-broker-0.lsst.cloud:9094" # -- Topic name where alerts will be sent + # @default -- None, must be set topic: "" registry: From 3a2a96b58b2e304b360fbffb64adf8b321084f49 Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Mon, 23 Sep 2024 11:49:32 -0400 Subject: [PATCH 173/567] Deploy Squarebot with interaction support This adds support for parsing block_actions interaction events from Slack. See https://github.com/lsst-sqre/squarebot/pull/33 --- applications/squarebot/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/squarebot/Chart.yaml b/applications/squarebot/Chart.yaml index e46b7e53fd..78ee9ca608 100644 --- a/applications/squarebot/Chart.yaml +++ b/applications/squarebot/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v2 name: squarebot version: 1.0.0 -appVersion: "0.9.0" +appVersion: "tickets-DM-46427" description: Squarebot feeds events from services like Slack and GitHub into the SQuaRE Events Kafka message bus running on Roundtable. Backend apps like Templatebot and Unfurlbot can subscribe to these events and take domain-specific action. type: application home: https://squarebot.lsst.io/ From 398e88af917cbc30e2033edf99dd50e5250df2ce Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Mon, 23 Sep 2024 18:19:32 -0400 Subject: [PATCH 174/567] Add block actions topic config for Squarebot This is the name of the Kafka topic for Slack block actions. 
--- applications/squarebot/README.md | 2 +- applications/squarebot/templates/configmap.yaml | 2 +- applications/squarebot/values.yaml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/squarebot/README.md b/applications/squarebot/README.md index 2695e862fb..68ea50f85d 100644 --- a/applications/squarebot/README.md +++ b/applications/squarebot/README.md @@ -19,7 +19,7 @@ Squarebot feeds events from services like Slack and GitHub into the SQuaRE Event | autoscaling.targetCPUUtilizationPercentage | int | `80` | | | config.logLevel | string | `"INFO"` | Logging level: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL" | | config.topics.slackAppMention | string | `"lsst.square-events.squarebot.slack.app.mention"` | Kafka topic name for the Slack `app_mention` events | -| config.topics.slackInteraction | string | `"lsst.square-events.squarebot.slack.interaction"` | Kafka topic for Slack interaction events | +| config.topics.slackBlockActions | string | `"lsst.square-events.squarebot.slack.interaction.block-actions"` | Kafka topic for Slack `block_actions` interaction events | | config.topics.slackMessageChannels | string | `"lsst.square-events.squarebot.slack.message.channels"` | Kafka topic name for the Slack `message.channels` events (public channels) | | config.topics.slackMessageGroups | string | `"lsst.square-events.squarebot.slack.message.groups"` | Kafka topic name for the Slack `message.groups` events (private channels) | | config.topics.slackMessageIm | string | `"lsst.square-events.squarebot.slack.message.im"` | Kafka topic name for the Slack `message.im` events (direct message channels) | diff --git a/applications/squarebot/templates/configmap.yaml b/applications/squarebot/templates/configmap.yaml index b6f81143a8..687d54aede 100644 --- a/applications/squarebot/templates/configmap.yaml +++ b/applications/squarebot/templates/configmap.yaml @@ -14,4 +14,4 @@ data: SQUAREBOT_TOPIC_MESSAGE_GROUPS: {{ 
.Values.config.topics.slackMessageGroups | quote }} SQUAREBOT_TOPIC_MESSAGE_IM: {{ .Values.config.topics.slackMessageIm | quote }} SQUAREBOT_TOPIC_MESSAGE_MPIM: {{ .Values.config.topics.slackMessageMpim | quote }} - SQUAREBOT_TOPIC_INTERACTION: {{ .Values.config.topics.slackInteraction | quote }} + SQUAREBOT_TOPIC_BLOCK_ACTIONS: {{ .Values.config.topics.slackBlockActions | quote }} diff --git a/applications/squarebot/values.yaml b/applications/squarebot/values.yaml index bd00c36a37..276dc85811 100644 --- a/applications/squarebot/values.yaml +++ b/applications/squarebot/values.yaml @@ -107,5 +107,5 @@ config: # -- Kafka topic name for the Slack `message.mpim` events (multi-person direct messages) slackMessageMpim: "lsst.square-events.squarebot.slack.message.mpim" - # -- Kafka topic for Slack interaction events - slackInteraction: "lsst.square-events.squarebot.slack.interaction" + # -- Kafka topic for Slack `block_actions` interaction events + slackBlockActions: "lsst.square-events.squarebot.slack.interaction.block-actions" From 2d5c8720f835a998b667396447c973ef98884de9 Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Tue, 24 Sep 2024 11:09:58 -0400 Subject: [PATCH 175/567] Add block-actions Kafka topic Rename the "interaction" Squarebot Kafka topic to `...interaction.block-actions` since we'll have different topics for each type of interaction. Add topic permissions for squarebot and templatebot. Add configmap configuration for Squarebot ("templatebot's configuration will be in a separate PR). 
--- .../charts/square-events/templates/squarebot-topics.yaml | 2 +- .../charts/square-events/templates/squarebot-user.yaml | 2 +- .../charts/square-events/templates/templatebot-user.yaml | 9 +++++++++ 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/applications/sasquatch/charts/square-events/templates/squarebot-topics.yaml b/applications/sasquatch/charts/square-events/templates/squarebot-topics.yaml index 1517ea6c55..7896298b70 100644 --- a/applications/sasquatch/charts/square-events/templates/squarebot-topics.yaml +++ b/applications/sasquatch/charts/square-events/templates/squarebot-topics.yaml @@ -2,7 +2,7 @@ apiVersion: kafka.strimzi.io/v1beta2 kind: KafkaTopic metadata: - name: "lsst.square-events.squarebot.slack.interaction" + name: "lsst.square-events.squarebot.slack.interaction.block-actions" labels: strimzi.io/cluster: {{ .Values.cluster.name }} spec: diff --git a/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml b/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml index 3b0f8e252a..6353ae2784 100644 --- a/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml +++ b/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml @@ -64,7 +64,7 @@ spec: - "Describe" - resource: type: topic - name: "lsst.square-events.squarebot.slack.interaction" + name: "lsst.square-events.squarebot.slack.interaction.block-actions" patternType: literal type: allow host: "*" diff --git a/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml b/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml index fb46b65e2b..0a00275bb2 100644 --- a/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml +++ b/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml @@ -42,3 +42,12 @@ spec: operations: - "Read" - "Describe" + - resource: + type: topic + name: 
"lsst.square-events.squarebot.slack.interaction.block-actions" + patternType: literal + type: allow + host: "*" + operations: + - "Read" + - "Describe" From 4e838201aef3d03acb719dc06d3be990825251f1 Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Tue, 24 Sep 2024 14:40:50 -0400 Subject: [PATCH 176/567] Configure the block actions topic for templatebot --- applications/templatebot/README.md | 1 + applications/templatebot/templates/configmap.yaml | 1 + applications/templatebot/values.yaml | 3 +++ 3 files changed, 5 insertions(+) diff --git a/applications/templatebot/README.md b/applications/templatebot/README.md index c743d3c467..b0e3509ea8 100644 --- a/applications/templatebot/README.md +++ b/applications/templatebot/README.md @@ -15,6 +15,7 @@ Create new projects | config.logProfile | string | `"production"` | Logging profile (`production` for JSON, `development` for human-friendly) | | config.pathPrefix | string | `"/templatebot"` | URL path prefix | | config.topics.slackAppMention | string | `"lsst.square-events.squarebot.slack.app.mention"` | Kafka topic name for the Slack `app_mention` events | +| config.topics.slackBlockActions | string | `"lsst.square-events.squarebot.slack.interaction.block-actions"` | Kafka topic for Slack `block_actions` interaction events | | config.topics.slackMessageIm | string | `"lsst.square-events.squarebot.slack.message.im"` | Kafka topic name for the Slack `message.im` events (direct message channels) | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | diff --git a/applications/templatebot/templates/configmap.yaml b/applications/templatebot/templates/configmap.yaml index 81782fd7e0..41f0e97266 100644 --- a/applications/templatebot/templates/configmap.yaml +++ b/applications/templatebot/templates/configmap.yaml @@ -11,3 +11,4 @@ data: TEMPLATEBOT_PROFILE: {{ .Values.config.logProfile | quote }} TEMPLATEBOT_APP_MENTION_TOPIC: {{ 
.Values.config.topics.slackAppMention | quote }} TEMPLATEBOT_MESSAGE_IM_TOPIC: {{ .Values.config.topics.slackMessageIm | quote }} + TEMPLATEBOT_BLOCK_ACTIONS_TOPIC: {{ .Values.config.topics.slackBlockActions | quote }} diff --git a/applications/templatebot/values.yaml b/applications/templatebot/values.yaml index cf65f9bab7..de5e85995b 100644 --- a/applications/templatebot/values.yaml +++ b/applications/templatebot/values.yaml @@ -34,6 +34,9 @@ config: # -- Kafka topic name for the Slack `message.im` events (direct message channels) slackMessageIm: "lsst.square-events.squarebot.slack.message.im" + # -- Kafka topic for Slack `block_actions` interaction events + slackBlockActions: "lsst.square-events.squarebot.slack.interaction.block-actions" + ingress: # -- Additional annotations for the ingress rule annotations: {} From d5a10d898e6e25086dadd2b57a8c869e9f91c4de Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Wed, 25 Sep 2024 15:31:51 -0400 Subject: [PATCH 177/567] Add Slack view submission topic for squarebot Squarebot will publish Slack view submission payload messages and templatebot will consume them. 
--- .../square-events/templates/squarebot-topics.yaml | 13 +++++++++++++ .../square-events/templates/squarebot-user.yaml | 9 +++++++++ .../square-events/templates/templatebot-user.yaml | 9 +++++++++ applications/squarebot/README.md | 1 + applications/squarebot/templates/configmap.yaml | 1 + applications/squarebot/values.yaml | 3 +++ applications/templatebot/README.md | 1 + applications/templatebot/templates/configmap.yaml | 1 + applications/templatebot/values.yaml | 3 +++ 9 files changed, 41 insertions(+) diff --git a/applications/sasquatch/charts/square-events/templates/squarebot-topics.yaml b/applications/sasquatch/charts/square-events/templates/squarebot-topics.yaml index 7896298b70..25eba2af35 100644 --- a/applications/sasquatch/charts/square-events/templates/squarebot-topics.yaml +++ b/applications/sasquatch/charts/square-events/templates/squarebot-topics.yaml @@ -14,6 +14,19 @@ spec: --- apiVersion: kafka.strimzi.io/v1beta2 kind: KafkaTopic +metadata: + name: "lsst.square-events.squarebot.slack.interaction.view-submission" + labels: + strimzi.io/cluster: {{ .Values.cluster.name }} +spec: + partitions: 4 + replicas: 3 + config: + # http://kafka.apache.org/documentation/#topicconfigs + retention.ms: 1800000 # 30 minutes +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic metadata: name: "lsst.square-events.squarebot.slack.app.mention" labels: diff --git a/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml b/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml index 6353ae2784..1285a4ec6f 100644 --- a/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml +++ b/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml @@ -71,3 +71,12 @@ spec: operations: - "Write" - "Describe" + - resource: + type: topic + name: "lsst.square-events.squarebot.slack.interaction.view-submission" + patternType: literal + type: allow + host: "*" + operations: + - "Write" + - "Describe" diff --git 
a/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml b/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml index 0a00275bb2..580bfa028f 100644 --- a/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml +++ b/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml @@ -51,3 +51,12 @@ spec: operations: - "Read" - "Describe" + - resource: + type: topic + name: "lsst.square-events.squarebot.slack.interaction.view-submission" + patternType: literal + type: allow + host: "*" + operations: + - "Read" + - "Describe" diff --git a/applications/squarebot/README.md b/applications/squarebot/README.md index 68ea50f85d..8828804def 100644 --- a/applications/squarebot/README.md +++ b/applications/squarebot/README.md @@ -24,6 +24,7 @@ Squarebot feeds events from services like Slack and GitHub into the SQuaRE Event | config.topics.slackMessageGroups | string | `"lsst.square-events.squarebot.slack.message.groups"` | Kafka topic name for the Slack `message.groups` events (private channels) | | config.topics.slackMessageIm | string | `"lsst.square-events.squarebot.slack.message.im"` | Kafka topic name for the Slack `message.im` events (direct message channels) | | config.topics.slackMessageMpim | string | `"lsst.square-events.squarebot.slack.message.mpim"` | Kafka topic name for the Slack `message.mpim` events (multi-person direct messages) | +| config.topics.slackViewSubmission | string | `"lsst.square-events.squarebot.slack.interaction.view-submission"` | Kafka topic for Slack `view_submission` interaction events | | fullnameOverride | string | `""` | Override the full name for resources (includes the release name) | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | diff --git a/applications/squarebot/templates/configmap.yaml b/applications/squarebot/templates/configmap.yaml index 687d54aede..916c526898 
100644 --- a/applications/squarebot/templates/configmap.yaml +++ b/applications/squarebot/templates/configmap.yaml @@ -15,3 +15,4 @@ data: SQUAREBOT_TOPIC_MESSAGE_IM: {{ .Values.config.topics.slackMessageIm | quote }} SQUAREBOT_TOPIC_MESSAGE_MPIM: {{ .Values.config.topics.slackMessageMpim | quote }} SQUAREBOT_TOPIC_BLOCK_ACTIONS: {{ .Values.config.topics.slackBlockActions | quote }} + SQUAREBOT_TOPIC_VIEW_SUBMISSION: {{ .Values.config.topics.slackViewSubmission | quote }} diff --git a/applications/squarebot/values.yaml b/applications/squarebot/values.yaml index 276dc85811..a59e16748f 100644 --- a/applications/squarebot/values.yaml +++ b/applications/squarebot/values.yaml @@ -109,3 +109,6 @@ config: # -- Kafka topic for Slack `block_actions` interaction events slackBlockActions: "lsst.square-events.squarebot.slack.interaction.block-actions" + + # -- Kafka topic for Slack `view_submission` interaction events + slackViewSubmission: "lsst.square-events.squarebot.slack.interaction.view-submission" diff --git a/applications/templatebot/README.md b/applications/templatebot/README.md index b0e3509ea8..fa76b28227 100644 --- a/applications/templatebot/README.md +++ b/applications/templatebot/README.md @@ -17,6 +17,7 @@ Create new projects | config.topics.slackAppMention | string | `"lsst.square-events.squarebot.slack.app.mention"` | Kafka topic name for the Slack `app_mention` events | | config.topics.slackBlockActions | string | `"lsst.square-events.squarebot.slack.interaction.block-actions"` | Kafka topic for Slack `block_actions` interaction events | | config.topics.slackMessageIm | string | `"lsst.square-events.squarebot.slack.message.im"` | Kafka topic name for the Slack `message.im` events (direct message channels) | +| config.topics.slackViewSubmission | string | `"lsst.square-events.squarebot.slack.interaction.view-submission"` | Kafka topic for Slack `view_submission` interaction events | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | 
global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | diff --git a/applications/templatebot/templates/configmap.yaml b/applications/templatebot/templates/configmap.yaml index 41f0e97266..343c47e17b 100644 --- a/applications/templatebot/templates/configmap.yaml +++ b/applications/templatebot/templates/configmap.yaml @@ -12,3 +12,4 @@ data: TEMPLATEBOT_APP_MENTION_TOPIC: {{ .Values.config.topics.slackAppMention | quote }} TEMPLATEBOT_MESSAGE_IM_TOPIC: {{ .Values.config.topics.slackMessageIm | quote }} TEMPLATEBOT_BLOCK_ACTIONS_TOPIC: {{ .Values.config.topics.slackBlockActions | quote }} + TEMPLATEBOT_VIEW_SUBMISSION_TOPIC: {{ .Values.config.topics.slackViewSubmission | quote }} diff --git a/applications/templatebot/values.yaml b/applications/templatebot/values.yaml index de5e85995b..227aa85890 100644 --- a/applications/templatebot/values.yaml +++ b/applications/templatebot/values.yaml @@ -37,6 +37,9 @@ config: # -- Kafka topic for Slack `block_actions` interaction events slackBlockActions: "lsst.square-events.squarebot.slack.interaction.block-actions" + # -- Kafka topic for Slack `view_submission` interaction events + slackViewSubmission: "lsst.square-events.squarebot.slack.interaction.view-submission" + ingress: # -- Additional annotations for the ingress rule annotations: {} From 4f21b8580771444582b569290c002bfbffbadcdb Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Thu, 26 Sep 2024 18:24:18 -0400 Subject: [PATCH 178/567] Update to Squarebot 0.10.0 --- applications/squarebot/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/squarebot/Chart.yaml b/applications/squarebot/Chart.yaml index 78ee9ca608..46f43eabff 100644 --- a/applications/squarebot/Chart.yaml +++ b/applications/squarebot/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v2 name: squarebot version: 1.0.0 -appVersion: "tickets-DM-46427" +appVersion: "0.10.0" description: Squarebot 
feeds events from services like Slack and GitHub into the SQuaRE Events Kafka message bus running on Roundtable. Backend apps like Templatebot and Unfurlbot can subscribe to these events and take domain-specific action. type: application home: https://squarebot.lsst.io/ From 0e7561488aa4ce72d426b840b1e092a23a241b00 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Thu, 26 Sep 2024 16:14:59 -0700 Subject: [PATCH 179/567] Convert manual ingress to GafaelfawrIngress The next release of Gafaelfawr will drop support for manually configuring Kubernetes ingresses to use Gafaelfawr and will require the use of `GafaelfawrIngress` resources. Convert the ingress for the alert-database subchart of alert-stream-broker to use `GafaelfawrIngress`. --- applications/alert-stream-broker/README.md | 1 - .../charts/alert-database/README.md | 1 - .../alert-database/templates/ingress.yaml | 69 ++++++++++--------- .../charts/alert-database/values.yaml | 3 - .../alert-stream-broker/values-usdfdev.yaml | 1 - 5 files changed, 38 insertions(+), 37 deletions(-) diff --git a/applications/alert-stream-broker/README.md b/applications/alert-stream-broker/README.md index 7cf49c19d9..7331c27c92 100644 --- a/applications/alert-stream-broker/README.md +++ b/applications/alert-stream-broker/README.md @@ -32,7 +32,6 @@ Alert transmission to community brokers | alert-database.ingester.serviceAccountName | string | `"alert-database-ingester"` | The name of the Kubernetes ServiceAccount (*not* the Google Cloud IAM service account!) which is used by the alert database ingester. 
| | alert-database.ingress.annotations | object | `{}` | | | alert-database.ingress.enabled | bool | `true` | Whether to create an ingress | -| alert-database.ingress.gafaelfawrAuthQuery | string | `"scope=read:alertdb"` | Query string for Gafaelfawr to authorize access | | alert-database.ingress.host | string | None, must be set if the ingress is enabled | Hostname for the ingress | | alert-database.ingress.path | string | `"/alertdb"` | Subpath to host the alert database application under the ingress | | alert-database.ingress.tls | list | `[]` | Configures TLS for the ingress if needed. If multiple ingresses share the same hostname, only one of them needs a TLS configuration. | diff --git a/applications/alert-stream-broker/charts/alert-database/README.md b/applications/alert-stream-broker/charts/alert-database/README.md index eca25aabf3..04107a4e53 100644 --- a/applications/alert-stream-broker/charts/alert-database/README.md +++ b/applications/alert-stream-broker/charts/alert-database/README.md @@ -23,7 +23,6 @@ Archival database of alerts sent through the alert stream. | ingester.serviceAccountName | string | `"alert-database-ingester"` | The name of the Kubernetes ServiceAccount (*not* the Google Cloud IAM service account!) which is used by the alert database ingester. | | ingress.annotations | object | `{}` | | | ingress.enabled | bool | `true` | Whether to create an ingress | -| ingress.gafaelfawrAuthQuery | string | `"scope=read:alertdb"` | Query string for Gafaelfawr to authorize access | | ingress.host | string | None, must be set if the ingress is enabled | Hostname for the ingress | | ingress.path | string | `"/alertdb"` | Subpath to host the alert database application under the ingress | | ingress.tls | list | `[]` | Configures TLS for the ingress if needed. If multiple ingresses share the same hostname, only one of them needs a TLS configuration. 
| diff --git a/applications/alert-stream-broker/charts/alert-database/templates/ingress.yaml b/applications/alert-stream-broker/charts/alert-database/templates/ingress.yaml index 083c96d39b..774dadf5a1 100644 --- a/applications/alert-stream-broker/charts/alert-database/templates/ingress.yaml +++ b/applications/alert-stream-broker/charts/alert-database/templates/ingress.yaml @@ -1,38 +1,45 @@ {{- if .Values.ingress.enabled -}} -apiVersion: networking.k8s.io/v1 -kind: Ingress +apiVersion: gafaelfawr.lsst.io/v1alpha1 +kind: GafaelfawrIngress metadata: - annotations: - kubernetes.io/ingress.class: "nginx" - nginx.ingress.kubernetes.io/rewrite-target: /$2 - nginx.ingress.kubernetes.io/auth-method: "GET" - nginx.ingress.kubernetes.io/auth-url: "http://gafaelfawr.gafaelfawr.svc.cluster.local:8080/auth?{{ required "ingress.gafaelfawrAuthQuery must be set" .Values.ingress.gafaelfawrAuthQuery }}" - {{- with .Values.ingress.annotations }} - {{- toYaml . | nindent 4 }} - {{- end }} name: {{ template "alertDatabase.fullname" . }} labels: {{- include "alertDatabase.labels" . | nindent 4 }} -spec: - rules: - - host: {{ required "ingress.host must be set" .Values.ingress.host | quote }} - http: - paths: - - path: "{{ .Values.ingress.path }}(/|$)(.*)" - pathType: Prefix - backend: - service: - name: {{ template "alertDatabase.fullname" . }} - port: - name: http - {{- if .Values.ingress.tls }} - tls: - {{- range .Values.ingress.tls }} - - hosts: - {{- range .hosts }} - - {{ . | quote }} - {{- end }} - secretName: {{ .secretName }} +config: + baseUrl: {{ .Values.global.baseUrl | quote }} + scopes: + all: + - "read:alertdb" +template: + metadata: + name: {{ template "alertDatabase.fullname" . }} + annotations: + nginx.ingress.kubernetes.io/rewrite-target: "/$2" + {{- with .Values.ingress.annotations }} + {{- toYaml . | nindent 6 }} + {{- end }} + labels: + {{- include "alertDatabase.labels" . 
| nindent 4 }} + spec: + rules: + - host: {{ required "ingress.host must be set" .Values.ingress.host | quote }} + http: + paths: + - path: "{{ .Values.ingress.path }}(/|$)(.*)" + pathType: ImplementationSpecific + backend: + service: + name: {{ template "alertDatabase.fullname" . }} + port: + name: http + {{- if .Values.ingress.tls }} + tls: + {{- range .Values.ingress.tls }} + - hosts: + {{- range .hosts }} + - {{ . | quote }} + {{- end }} + secretName: {{ .secretName }} + {{- end }} {{- end }} - {{- end }} {{- end }} diff --git a/applications/alert-stream-broker/charts/alert-database/values.yaml b/applications/alert-stream-broker/charts/alert-database/values.yaml index 867d8c2c0c..45362837da 100644 --- a/applications/alert-stream-broker/charts/alert-database/values.yaml +++ b/applications/alert-stream-broker/charts/alert-database/values.yaml @@ -101,6 +101,3 @@ ingress: # -- Subpath to host the alert database application under the ingress path: "/alertdb" - - # -- Query string for Gafaelfawr to authorize access - gafaelfawrAuthQuery: "scope=read:alertdb" diff --git a/applications/alert-stream-broker/values-usdfdev.yaml b/applications/alert-stream-broker/values-usdfdev.yaml index 77b08617ac..0e01ef35e5 100644 --- a/applications/alert-stream-broker/values-usdfdev.yaml +++ b/applications/alert-stream-broker/values-usdfdev.yaml @@ -110,7 +110,6 @@ alert-database: ingress: enabled: true host: "usdf-rsp-dev.slac.stanford.edu" - gafaelfawrAuthQuery: "scope=read:alertdb" storage: gcp: From 195a44002c48a65130f501d917ea262c7a570413 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Fri, 27 Sep 2024 13:08:06 -0300 Subject: [PATCH 180/567] nightreport: fix summit site_id --- applications/nightreport/values-summit.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/nightreport/values-summit.yaml b/applications/nightreport/values-summit.yaml index 4d12a865e7..9fa3095228 100644 --- a/applications/nightreport/values-summit.yaml +++ 
b/applications/nightreport/values-summit.yaml @@ -3,7 +3,7 @@ image: tag: c0036 pullPolicy: Always config: - site_id: base + site_id: summit db: host: postgresdb01.cp.lsst.org global: From 0bb33e375b3406a37a5a64402a5e84a0fc39cd43 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Fri, 27 Sep 2024 09:28:14 -0700 Subject: [PATCH 181/567] Deploy Prompt Processing 4.5.1 for LATISS. --- .../values-usdfprod-prompt-processing.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index 07426caa9c..a187cd1136 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.5.0 + tag: 4.5.1 instrument: pipelines: From a526dbea4e2a735f172deb71941ba8e6c5d9b29f Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Fri, 27 Sep 2024 09:28:48 -0700 Subject: [PATCH 182/567] Document block IDs for LATISS Prompt Processing. LATISS has now completely moved away from human-readable block names, so the only way to identify a block is to look it up on Jira. 
--- .../values-usdfprod-prompt-processing.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index a187cd1136..ec4b9cc3f7 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -18,6 +18,9 @@ prompt-proto-service: instrument: pipelines: + # BLOCK-306 is photographic imaging + # BLOCK-T17 is daytime checkout + # BLOCK-271 is photon transfer curve calibrations # BLOCK-295 is the daily calibration sequence as of May 27, 2024 main: >- (survey="BLOCK-306")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/ApPipe.yaml, From 4152d49d7ce5d36d747f1bb520e1c99b8a1030d9 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 10:12:59 +0000 Subject: [PATCH 183/567] chore(deps): update confluentinc/cp-kafka-rest docker tag to v7.7.1 --- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/rest-proxy/README.md | 2 +- applications/sasquatch/charts/rest-proxy/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index daaf651fd7..aac4c033b9 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -313,7 +313,7 @@ Rubin Observatory's telemetry service | rest-proxy.heapOptions | string | `"-Xms512M -Xmx512M"` | Kafka REST proxy JVM Heap Option | | rest-proxy.image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | rest-proxy.image.repository | string | `"confluentinc/cp-kafka-rest"` | Kafka REST proxy image repository | -| rest-proxy.image.tag | string | `"7.7.0"` | Kafka REST proxy image tag | +| rest-proxy.image.tag | string | `"7.7.1"` | Kafka REST proxy image tag | | 
rest-proxy.ingress.annotations | object | See `values.yaml` | Additional annotations to add to the ingress | | rest-proxy.ingress.enabled | bool | `false` | Whether to enable the ingress | | rest-proxy.ingress.hostname | string | None, must be set if ingress is enabled | Ingress hostname | diff --git a/applications/sasquatch/charts/rest-proxy/README.md b/applications/sasquatch/charts/rest-proxy/README.md index 2daa2e6d24..eea798d3ae 100644 --- a/applications/sasquatch/charts/rest-proxy/README.md +++ b/applications/sasquatch/charts/rest-proxy/README.md @@ -16,7 +16,7 @@ A subchart to deploy Confluent REST proxy for Sasquatch. | heapOptions | string | `"-Xms512M -Xmx512M"` | Kafka REST proxy JVM Heap Option | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"confluentinc/cp-kafka-rest"` | Kafka REST proxy image repository | -| image.tag | string | `"7.7.0"` | Kafka REST proxy image tag | +| image.tag | string | `"7.7.1"` | Kafka REST proxy image tag | | ingress.annotations | object | See `values.yaml` | Additional annotations to add to the ingress | | ingress.enabled | bool | `false` | Whether to enable the ingress | | ingress.hostname | string | None, must be set if ingress is enabled | Ingress hostname | diff --git a/applications/sasquatch/charts/rest-proxy/values.yaml b/applications/sasquatch/charts/rest-proxy/values.yaml index e396a6e9bf..ef0cd8cbac 100644 --- a/applications/sasquatch/charts/rest-proxy/values.yaml +++ b/applications/sasquatch/charts/rest-proxy/values.yaml @@ -11,7 +11,7 @@ image: pullPolicy: IfNotPresent # -- Kafka REST proxy image tag - tag: 7.7.0 + tag: 7.7.1 service: # -- Kafka REST proxy service port From 0ed022a321bb8c73cec979d89884942f4d60f8c1 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 27 Sep 2024 12:05:24 -0700 Subject: [PATCH 184/567] Change metric_batch_size for USDF M1M3 telegraf connector. 
--- applications/sasquatch/values-usdfprod.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index d3adca0d93..f210ecb710 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -182,6 +182,7 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTM1M3" ] + metric_batch_size: 2500 debug: true m2: enabled: true From 562ebc4f66dfee40e885a41d11752b7e9e75ea24 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 27 Sep 2024 15:48:06 -0700 Subject: [PATCH 185/567] Add a named template for generating the Telegraf configmap - Use it to hash the configmap in the deployment to trigger the deployment restart --- .../templates/_helpers.tpl | 73 +++++++++++++++++++ .../templates/configmap.yaml | 71 +----------------- .../templates/deployment.yaml | 5 +- 3 files changed, 77 insertions(+), 72 deletions(-) create mode 100644 applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl new file mode 100644 index 0000000000..72e8d824c3 --- /dev/null +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl @@ -0,0 +1,73 @@ +{{- define "configmap" -}} +{{- if .value.enabled }} +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: sasquatch-telegraf-{{ .key }} + labels: + app.kubernetes.io/name: sasquatch-telegraf + app.kubernetes.io/instance: sasquatch-telegraf-{{ .key }} + app.kubernetes.io/part-of: sasquatch +data: + telegraf.conf: |+ + [agent] + metric_batch_size = {{ default 5000 .value.metric_batch_size }} + metric_buffer_limit = {{ default 100000 .value.metric_buffer_limit }} + collection_jitter = {{ default "0s" .value.collection_jitter | quote }} + flush_interval = {{ default 
"10s" .value.flush_interval | quote }} + flush_jitter = {{ default "0s" .value.flush_jitter | quote }} + debug = {{ default false .value.debug }} + omit_hostname = true + + [[outputs.influxdb]] + urls = [ + {{ .influxdbUrl | quote }} + ] + database = {{ .value.database | quote }} + username = "${INFLUXDB_USER}" + password = "${INFLUXDB_PASSWORD}" + + [[outputs.influxdb]] + namepass = ["telegraf_*"] + urls = [ + {{ .influxdbUrl | quote }} + ] + database = "telegraf" + username = "${INFLUXDB_USER}" + password = "${INFLUXDB_PASSWORD}" + + [[inputs.kafka_consumer]] + brokers = [ + "sasquatch-kafka-brokers.sasquatch:9092" + ] + consumer_group = "telegraf-kafka-consumer-{{ .key }}" + sasl_mechanism = "SCRAM-SHA-512" + sasl_password = "$TELEGRAF_PASSWORD" + sasl_username = "telegraf" + data_format = "avro" + avro_schema_registry = "http://sasquatch-schema-registry.sasquatch:8081" + avro_timestamp = {{ default "private_efdStamp" .value.timestamp_field | quote }} + avro_timestamp_format = {{ default "unix" .value.timestamp_format | quote }} + avro_union_mode = {{ default "nullable" .value.union_mode | quote }} + avro_field_separator = {{ default "" .value.union_field_separator | quote }} + {{- if .value.fields }} + avro_fields = {{ .value.fields }} + {{- end }} + {{- if .value.tags }} + avro_tags = {{ .value.tags }} + {{- end }} + topic_regexps = {{ .value.topicRegexps }} + offset = {{ default "oldest" .value.offset | quote }} + precision = {{ default "1us" .value.precision | quote }} + max_processing_time = {{ default "5s" .value.max_processing_time | quote }} + consumer_fetch_default = {{ default "20MB" .value.consumer_fetch_default | quote }} + max_undelivered_messages = {{ default 10000 .value.max_undelivered_messages }} + compression_codec = {{ default 3 .value.compression_codec }} + + [[inputs.internal]] + name_prefix = "telegraf_" + collect_memstats = true + tags = { instance = "{{ .key }}" } +{{- end }} +{{- end }} diff --git 
a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml index 5be588773d..6f70b74961 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/configmap.yaml @@ -1,74 +1,5 @@ {{- range $key, $value := .Values.kafkaConsumers }} -{{- if $value.enabled }} ---- -apiVersion: v1 -kind: ConfigMap -metadata: - name: sasquatch-telegraf-{{ $key }} - labels: - app.kubernetes.io/name: sasquatch-telegraf - app.kubernetes.io/instance: sasquatch-telegraf-{{ $key }} - app.kubernetes.io/part-of: sasquatch -data: - telegraf.conf: |+ - [agent] - metric_batch_size = {{ default 5000 $value.metric_batch_size }} - metric_buffer_limit = {{ default 100000 $value.metric_buffer_limit }} - collection_jitter = {{ default "0s" $value.collection_jitter | quote }} - flush_interval = {{ default "10s" $value.flush_interval | quote }} - flush_jitter = {{ default "0s" $value.flush_jitter | quote }} - debug = {{ default false $value.debug }} - omit_hostname = true - [[outputs.influxdb]] - urls = [ - {{ $.Values.influxdb.url | quote }} - ] - database = {{ $value.database | quote }} - username = "${INFLUXDB_USER}" - password = "${INFLUXDB_PASSWORD}" +{{ include "configmap" (dict "key" $key "value" $value "influxdbUrl" $.Values.influxdb.url ) }} - [[outputs.influxdb]] - namepass = ["telegraf_*"] - urls = [ - {{ $.Values.influxdb.url | quote }} - ] - database = "telegraf" - username = "${INFLUXDB_USER}" - password = "${INFLUXDB_PASSWORD}" - - [[inputs.kafka_consumer]] - brokers = [ - "sasquatch-kafka-brokers.sasquatch:9092" - ] - consumer_group = "telegraf-kafka-consumer-{{ $key }}" - sasl_mechanism = "SCRAM-SHA-512" - sasl_password = "$TELEGRAF_PASSWORD" - sasl_username = "telegraf" - data_format = "avro" - avro_schema_registry = "http://sasquatch-schema-registry.sasquatch:8081" - avro_timestamp = 
{{ default "private_efdStamp" $value.timestamp_field | quote }} - avro_timestamp_format = {{ default "unix" $value.timestamp_format | quote }} - avro_union_mode = {{ default "nullable" $value.union_mode | quote }} - avro_field_separator = {{ default "" $value.union_field_separator | quote }} - {{ with $value.fields }} - avro_fields = {{ $value.fields }} - {{ end }} - {{ with $value.tags }} - avro_tags = {{ $value.tags }} - {{ end }} - topic_regexps = {{ $value.topicRegexps }} - offset = {{ default "oldest" $value.offset | quote }} - precision = {{ default "1us" $value.precision | quote }} - max_processing_time = {{ default "5s" $value.max_processing_time | quote }} - consumer_fetch_default = {{ default "20MB" $value.consumer_fetch_default | quote }} - max_undelivered_messages = {{ default 10000 $value.max_undelivered_messages }} - compression_codec = {{ default 3 $value.compression_codec }} - - [[inputs.internal]] - name_prefix = "telegraf_" - collect_memstats = true - tags = { instance = "{{ $key }}" } - -{{- end }} {{- end }} diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/deployment.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/deployment.yaml index 5408f4f93f..f8117c8900 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/deployment.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/deployment.yaml @@ -18,10 +18,11 @@ spec: metadata: labels: app.kubernetes.io/instance: sasquatch-telegraf-{{ $key }} - {{- if $.Values.podAnnotations }} annotations: + checksum/config: {{ include "configmap" (dict "key" $key "value" $value "influxdbUrl" $.Values.influxdb.url ) | sha256sum }} + {{- if $.Values.podAnnotations }} {{- toYaml $.Values.podAnnotations | nindent 8 }} - {{- end }} + {{- end }} spec: securityContext: runAsNonRoot: true From ff7d4f9001f12c9b2e4f8026afc53259c6d119bf Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Mon, 30 Sep 2024 08:04:42 -0700 Subject: 
[PATCH 186/567] Add ConsDb to Tucson TestStand environment --- environments/values-tucson-teststand.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/environments/values-tucson-teststand.yaml b/environments/values-tucson-teststand.yaml index 0bd875b947..ae6ec29038 100644 --- a/environments/values-tucson-teststand.yaml +++ b/environments/values-tucson-teststand.yaml @@ -11,6 +11,7 @@ namespaceLabels: applications: argo-workflows: true + consdb: true exposurelog: true mobu: true narrativelog: true From 53be7755cf78a7cbef953517ff833aba7a98064a Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Mon, 30 Sep 2024 08:11:21 -0700 Subject: [PATCH 187/567] Add configuration for ConsDb on TTS --- .../consdb/values-tucson-teststand.yaml | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 applications/consdb/values-tucson-teststand.yaml diff --git a/applications/consdb/values-tucson-teststand.yaml b/applications/consdb/values-tucson-teststand.yaml new file mode 100644 index 0000000000..21997de89d --- /dev/null +++ b/applications/consdb/values-tucson-teststand.yaml @@ -0,0 +1,21 @@ +db: + user: "oods" + host: "postgresdb01.tu.lsst.org" + database: "exposurelog" +lfa: + s3EndpointUrl: "https://s3.tu.lsst.org" +hinfo: + latiss: + enable: true + tag: "tickets-DM-44551" + logConfig: "consdb.hinfo=DEBUG" + lsstcomcam: + enable: true + tag: "tickets-DM-44551" + logConfig: "consdb.hinfo=DEBUG" + lsstcam: + enable: false + tag: "tickets-DM-44551" +pq: + image: + tag: "main" From 9328a5c70160e768593f3ec5f985dea89646df4b Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Mon, 30 Sep 2024 08:12:17 -0700 Subject: [PATCH 188/567] Add secrets update for ConsDb --- applications/consdb/secrets.yaml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/applications/consdb/secrets.yaml b/applications/consdb/secrets.yaml index 61468589eb..1727b68e71 100644 --- a/applications/consdb/secrets.yaml +++ b/applications/consdb/secrets.yaml @@ -6,7 +6,12 
@@ consdb-password: key: consdb-password oods-password: description: >- - PostgreSQL password for the OODS user Butler database. + PostgreSQL password for the OODS user Butler database. lfa-password: description: >- LFA password +exposurelog-password: + description: "Password for the TTS where we use exposurelog database." + copy: + application: exposure-log + key: exposurelog_password \ No newline at end of file From c3e6e352e9c7ec97c419b40fa726282b71bf8d4e Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Mon, 30 Sep 2024 08:13:09 -0700 Subject: [PATCH 189/567] Add secrets update for ConsDb --- applications/consdb/secrets.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/consdb/secrets.yaml b/applications/consdb/secrets.yaml index 1727b68e71..99a8f6ba13 100644 --- a/applications/consdb/secrets.yaml +++ b/applications/consdb/secrets.yaml @@ -14,4 +14,4 @@ exposurelog-password: description: "Password for the TTS where we use exposurelog database." copy: application: exposure-log - key: exposurelog_password \ No newline at end of file + key: exposurelog_password From e3052ea8294c0fe6548fe78e641437ef5908d5ef Mon Sep 17 00:00:00 2001 From: Brianna Smart Date: Mon, 16 Sep 2024 14:15:14 -0700 Subject: [PATCH 190/567] Update Kafka to 3.7, add kraft, and remove zookeeper We have updated kafka to version 3.7 and swapped to using kraft rather than zookeeper. 
--- applications/alert-stream-broker/README.md | 9 +- .../charts/alert-stream-broker/README.md | 8 +- .../alert-stream-broker/templates/kafka.yaml | 113 +++++++++--------- .../charts/alert-stream-broker/values.yaml | 34 ++++-- .../alert-stream-schema-registry/README.md | 1 + .../templates/schema-registry-server.yaml | 3 +- .../alert-stream-schema-registry/values.yaml | 2 + .../values-usdfdev-alert-stream-broker.yaml | 21 ++-- 8 files changed, 106 insertions(+), 85 deletions(-) diff --git a/applications/alert-stream-broker/README.md b/applications/alert-stream-broker/README.md index 7cf49c19d9..c8043c152d 100644 --- a/applications/alert-stream-broker/README.md +++ b/applications/alert-stream-broker/README.md @@ -72,12 +72,17 @@ Alert transmission to community brokers | alert-stream-broker.kafka.storage.size | string | `"1000Gi"` | Size of the backing storage disk for each of the Kafka brokers. | | alert-stream-broker.kafka.storage.storageClassName | string | `"standard"` | Name of a StorageClass to use when requesting persistent volumes. | | alert-stream-broker.kafka.version | string | `"3.4.0"` | Version of Kafka to deploy. | +| alert-stream-broker.kafkaController.enabled | bool | `false` | Enable Kafka Controller | +| alert-stream-broker.kafkaController.resources | object | See `values.yaml` | Kubernetes requests and limits for the Kafka Controller | +| alert-stream-broker.kafkaController.storage.size | string | `"20Gi"` | Size of the backing storage disk for each of the Kafka controllers | +| alert-stream-broker.kafkaController.storage.storageClassName | string | `""` | Name of a StorageClass to use when requesting persistent volumes | | alert-stream-broker.kafkaExporter | object | `{"enableSaramaLogging":false,"enabled":false,"groupRegex":".*","logLevel":"warning","topicRegex":".*"}` | Kafka JMX Exporter for more detailed diagnostic metrics. 
| | alert-stream-broker.kafkaExporter.enableSaramaLogging | bool | `false` | Enable Sarama logging | | alert-stream-broker.kafkaExporter.enabled | bool | `false` | Enable Kafka exporter. | | alert-stream-broker.kafkaExporter.groupRegex | string | `".*"` | Consumer groups to monitor | | alert-stream-broker.kafkaExporter.logLevel | string | `"warning"` | Log level for Sarama logging | | alert-stream-broker.kafkaExporter.topicRegex | string | `".*"` | Kafka topics to monitor | +| alert-stream-broker.kraft | bool | `true` | | | alert-stream-broker.maxBytesRetained | string | `"100000000000"` | Maximum number of bytes for the replay topic, per partition, per replica. Default is 100GB, but should be lower to not fill storage. | | alert-stream-broker.maxMillisecondsRetained | string | `"5259492000"` | Maximum amount of time to save alerts in the replay topic, in milliseconds. Default is 7 days (604800000). | | alert-stream-broker.nameOverride | string | `""` | | @@ -95,10 +100,8 @@ Alert transmission to community brokers | alert-stream-broker.users[0].readonlyTopics | list | `["alert-stream","alerts-simulated","alert-stream-test"]` | A list of topics that the user should get read-only access to. | | alert-stream-broker.users[0].username | string | `"rubin-testing"` | The username for the user that should be created. | | alert-stream-broker.vaultSecretsPath | string | `""` | Path to the secret resource in Vault | -| alert-stream-broker.zookeeper.replicas | int | `3` | Number of Zookeeper replicas to run. | -| alert-stream-broker.zookeeper.storage.size | string | `"1000Gi"` | Size of the backing storage disk for each of the Zookeeper instances. | -| alert-stream-broker.zookeeper.storage.storageClassName | string | `"standard"` | Name of a StorageClass to use when requesting persistent volumes. | | alert-stream-schema-registry.clusterName | string | `"alert-broker"` | Strimzi "cluster name" of the broker to use as a backend. 
| +| alert-stream-schema-registry.compatibilityLevel | string | `"None"` | | | alert-stream-schema-registry.hostname | string | `"usdf-alert-schemas-dev.slac.stanford.edu"` | Hostname for an ingress which sends traffic to the Schema Registry. | | alert-stream-schema-registry.name | string | `"alert-schema-registry"` | Name used by the registry, and by its users. | | alert-stream-schema-registry.port | int | `8081` | Port where the registry is listening. NOTE: Not actually configurable in strimzi-registry-operator, so this basically cannot be changed. | diff --git a/applications/alert-stream-broker/charts/alert-stream-broker/README.md b/applications/alert-stream-broker/charts/alert-stream-broker/README.md index 4c6a0bcc4f..c44bd492cf 100644 --- a/applications/alert-stream-broker/charts/alert-stream-broker/README.md +++ b/applications/alert-stream-broker/charts/alert-stream-broker/README.md @@ -29,12 +29,17 @@ Kafka broker cluster for distributing alerts | kafka.storage.size | string | `"1000Gi"` | Size of the backing storage disk for each of the Kafka brokers. | | kafka.storage.storageClassName | string | `"standard"` | Name of a StorageClass to use when requesting persistent volumes. | | kafka.version | string | `"3.4.0"` | Version of Kafka to deploy. | +| kafkaController.enabled | bool | `false` | Enable Kafka Controller | +| kafkaController.resources | object | See `values.yaml` | Kubernetes requests and limits for the Kafka Controller | +| kafkaController.storage.size | string | `"20Gi"` | Size of the backing storage disk for each of the Kafka controllers | +| kafkaController.storage.storageClassName | string | `""` | Name of a StorageClass to use when requesting persistent volumes | | kafkaExporter | object | `{"enableSaramaLogging":false,"enabled":false,"groupRegex":".*","logLevel":"warning","topicRegex":".*"}` | Kafka JMX Exporter for more detailed diagnostic metrics. 
| | kafkaExporter.enableSaramaLogging | bool | `false` | Enable Sarama logging | | kafkaExporter.enabled | bool | `false` | Enable Kafka exporter. | | kafkaExporter.groupRegex | string | `".*"` | Consumer groups to monitor | | kafkaExporter.logLevel | string | `"warning"` | Log level for Sarama logging | | kafkaExporter.topicRegex | string | `".*"` | Kafka topics to monitor | +| kraft | bool | `true` | | | maxBytesRetained | string | `"100000000000"` | Maximum number of bytes for the replay topic, per partition, per replica. Default is 100GB, but should be lower to not fill storage. | | maxMillisecondsRetained | string | `"5259492000"` | Maximum amount of time to save alerts in the replay topic, in milliseconds. Default is 7 days (604800000). | | nameOverride | string | `""` | | @@ -52,6 +57,3 @@ Kafka broker cluster for distributing alerts | users[0].readonlyTopics | list | `["alert-stream","alerts-simulated","alert-stream-test"]` | A list of topics that the user should get read-only access to. | | users[0].username | string | `"rubin-testing"` | The username for the user that should be created. | | vaultSecretsPath | string | `""` | Path to the secret resource in Vault | -| zookeeper.replicas | int | `3` | Number of Zookeeper replicas to run. | -| zookeeper.storage.size | string | `"1000Gi"` | Size of the backing storage disk for each of the Zookeeper instances. | -| zookeeper.storage.storageClassName | string | `"standard"` | Name of a StorageClass to use when requesting persistent volumes. 
| diff --git a/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka.yaml b/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka.yaml index a226042239..2ca5b98df1 100644 --- a/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka.yaml +++ b/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka.yaml @@ -1,7 +1,60 @@ +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaNodePool +metadata: + name: controller + labels: + strimzi.io/cluster: {{ .Values.cluster.name }} +spec: + replicas: {{ .Values.kafka.replicas }} + roles: + - controller + storage: + type: jbod + volumes: + - id: 0 + type: persistent-claim + size: {{ .Values.kafkaController.storage.size }} + class: {{ .Values.kafkaController.storage.storageClassName }} + deleteClaim: false + {{- with .Values.kafkaController.resources }} + resources: + {{- toYaml . | nindent 6 }} + {{- end }} +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaNodePool +metadata: + name: kafka + labels: + strimzi.io/cluster: {{ .Values.cluster.name }} + annotations: + strimzi.io/next-node-ids: "[0-99]" +spec: + replicas: {{ .Values.kafka.replicas }} + roles: + - broker + storage: + type: jbod + volumes: + - id: 0 + type: persistent-claim + size: {{ .Values.kafka.storage.size }} + {{- if .Values.kafka.storage.storageClassName }} + class: {{ .Values.kafka.storage.storageClassName }} + {{- end}} + deleteClaim: false + {{- with .Values.kafka.resources }} + resources: + {{- toYaml . 
| nindent 6 }} + {{- end }} +--- apiVersion: kafka.strimzi.io/{{ .Values.strimziAPIVersion }} kind: Kafka metadata: name: {{ .Values.cluster.name }} + annotations: + strimzi.io/kraft: enabled + strimzi.io/node-pools: enabled spec: {{- if .Values.kafkaExporter.enabled }} kafkaExporter: @@ -85,14 +138,15 @@ spec: {{- if .Values.kafka.externalListener.brokers }} brokers: - {{- range $idx, $broker := .Values.kafka.externalListener.brokers }} - - broker: {{ $idx }} + {{- range $broker := .Values.kafka.externalListener.brokers }} + - broker: {{ $broker.broker }} loadBalancerIP: {{ $broker.ip }} advertisedHost: {{ $broker.host }} - annotations: {{ toYaml $broker.annotations | nindent 16 }} - {{- end }} + advertisedPort: 9094 + annotations: + annotations: {{ toYaml $broker.annotations | nindent 16 }} + {{- end }} {{- end }} - {{- if and (.Values.kafka.externalListener.tls.enabled) (.Values.kafka.externalListener.bootstrap.host) }} brokerCertChainAndKey: secretName: {{ .Values.cluster.name }}-external-tls @@ -114,8 +168,6 @@ spec: transaction.state.log.replication.factor: 3 transaction.state.log.min.isr: 2 message.max.bytes: 4194304 # 8 Megabytes. For testing purposes only. 
- log.message.format.version: {{ .Values.kafka.logMessageFormatVersion }} - inter.broker.protocol.version: {{ .Values.kafka.interBrokerProtocolVersion }} ssl.client.auth: required {{- range $key, $value := .Values.kafka.config }} {{ $key }}: {{ $value }} @@ -133,53 +185,6 @@ spec: class: {{ .Values.kafka.storage.storageClassName }} deleteClaim: false - template: - pod: - {{- if .Values.kafka.nodePool.tolerations }} - tolerations: - {{- range $tol := .Values.kafka.nodePool.tolerations }} - - key: {{ $tol.key }} - operator: "Equal" - value: {{ $tol.value }} - effect: {{ $tol.effect }} - {{- end }} - {{- end }} - - {{- if .Values.kafka.nodePool.affinities }} - affinity: - nodeAffinity: - preferredDuringSchedulingIgnoredDuringExecution: - {{- range $affinity := .Values.kafka.nodePool.affinities }} - - weight: 1 - preference: - matchExpressions: - - key: {{ $affinity.key }} - operator: In - values: [{{ $affinity.value }}] - {{- end }} - {{- end }} - - affinity: - podAntiAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - - labelSelector: - matchExpressions: - - key: "app.kubernetes.io/name" - operator: In - values: - - kafka - topologyKey: "kubernetes.io/hostname" - - zookeeper: - replicas: {{ .Values.zookeeper.replicas }} - storage: - # Note that storage is configured per replica. If there are 3 replicas, - # each will get its own PersistentVolumeClaim for the configured size. 
- type: persistent-claim - size: {{ .Values.zookeeper.storage.size }} - class: {{ .Values.zookeeper.storage.storageClassName }} - deleteClaim: false - template: pod: {{- if .Values.kafka.nodePool.tolerations }} diff --git a/applications/alert-stream-broker/charts/alert-stream-broker/values.yaml b/applications/alert-stream-broker/charts/alert-stream-broker/values.yaml index 1757660413..8c5f950fa4 100644 --- a/applications/alert-stream-broker/charts/alert-stream-broker/values.yaml +++ b/applications/alert-stream-broker/charts/alert-stream-broker/values.yaml @@ -121,17 +121,6 @@ users: # matches. groups: ["rubin-testing"] - -zookeeper: - # -- Number of Zookeeper replicas to run. - replicas: 3 - - storage: - # -- Size of the backing storage disk for each of the Zookeeper instances. - size: 1000Gi - # -- Name of a StorageClass to use when requesting persistent volumes. - storageClassName: standard - tls: subject: # -- Organization to use in the 'Subject' field of the broker's TLS certificate. @@ -149,6 +138,29 @@ fullnameOverride: "" nameOverride: "" +kraft: true + +kafkaController: + # -- Enable Kafka Controller + enabled: false + + storage: + # -- Size of the backing storage disk for each of the Kafka controllers + size: 20Gi + + # -- Name of a StorageClass to use when requesting persistent volumes + storageClassName: "" + + # -- Kubernetes requests and limits for the Kafka Controller + # @default -- See `values.yaml` + resources: + requests: + memory: 32Gi + cpu: "4" + limits: + memory: 64Gi + cpu: "8" + # -- Topic used to send test alerts. 
testTopicName: alert-stream-test diff --git a/applications/alert-stream-broker/charts/alert-stream-schema-registry/README.md b/applications/alert-stream-broker/charts/alert-stream-schema-registry/README.md index a31ce78c20..5e7df966e2 100644 --- a/applications/alert-stream-broker/charts/alert-stream-schema-registry/README.md +++ b/applications/alert-stream-broker/charts/alert-stream-schema-registry/README.md @@ -7,6 +7,7 @@ Confluent Schema Registry for managing schema versions for the Alert Stream | Key | Type | Default | Description | |-----|------|---------|-------------| | clusterName | string | `"alert-broker"` | Strimzi "cluster name" of the broker to use as a backend. | +| compatibilityLevel | string | `"None"` | | | hostname | string | `"usdf-alert-schemas-dev.slac.stanford.edu"` | Hostname for an ingress which sends traffic to the Schema Registry. | | name | string | `"alert-schema-registry"` | Name used by the registry, and by its users. | | port | int | `8081` | Port where the registry is listening. NOTE: Not actually configurable in strimzi-registry-operator, so this basically cannot be changed. 
| diff --git a/applications/alert-stream-broker/charts/alert-stream-schema-registry/templates/schema-registry-server.yaml b/applications/alert-stream-broker/charts/alert-stream-schema-registry/templates/schema-registry-server.yaml index f97585bec4..ce73059575 100644 --- a/applications/alert-stream-broker/charts/alert-stream-schema-registry/templates/schema-registry-server.yaml +++ b/applications/alert-stream-broker/charts/alert-stream-schema-registry/templates/schema-registry-server.yaml @@ -9,4 +9,5 @@ metadata: revision: "1" spec: strimzi-version: {{ .Values.strimziAPIVersion }} - listener: internal \ No newline at end of file + listener: internal + compatibilityLevel: none \ No newline at end of file diff --git a/applications/alert-stream-broker/charts/alert-stream-schema-registry/values.yaml b/applications/alert-stream-broker/charts/alert-stream-schema-registry/values.yaml index e77f15f03c..ab28d9c736 100644 --- a/applications/alert-stream-broker/charts/alert-stream-schema-registry/values.yaml +++ b/applications/alert-stream-broker/charts/alert-stream-schema-registry/values.yaml @@ -16,6 +16,8 @@ clusterName: alert-broker # -- Name of the topic used by the Schema Registry to store data. schemaTopic: registry-schemas +compatibilityLevel: None + # -- Hostname for an ingress which sends traffic to the Schema Registry. 
hostname: usdf-alert-schemas-dev.slac.stanford.edu diff --git a/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml b/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml index f779daf70c..c1aa6c112e 100644 --- a/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml +++ b/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml @@ -2,58 +2,53 @@ alert-stream-broker: cluster: name: "alert-broker" - zookeeper: - storage: - size: 1000Gi - storageClassName: wekafs--sdf-k8s01 - kafka: version: 3.7.0 - # -- Encoding version for messages, see - # https://strimzi.io/docs/operators/latest/deploying.html#ref-kafka-versions-str. - logMessageFormatVersion: 3.4 - # -- Version of the protocol for inter-broker communication, see - # https://strimzi.io/docs/operators/latest/deploying.html#ref-kafka-versions-str. - interBrokerProtocolVersion: 3.4 replicas: 6 prometheusScrapingEnabled: true # Addresses based on the state as of 2021-12-02; these were assigned by - # Google and now we're pinning them. + # Square and now we're pinning them. 
externalListener: tls: enabled: false bootstrap: host: usdf-alert-stream-dev.lsst.cloud - ip: "134.79.23.215" + ip: "" annotations: metallb.universe.tf/address-pool: 'sdf-dmz' brokers: - host: usdf-alert-stream-dev-broker-0.lsst.cloud ip: "134.79.23.214" + broker: 6 annotations: metallb.universe.tf/address-pool: 'sdf-dmz' - host: usdf-alert-stream-dev-broker-1.lsst.cloud ip: "134.79.23.216" + broker: 7 annotations: metallb.universe.tf/address-pool: 'sdf-dmz' - host: usdf-alert-stream-dev-broker-2.lsst.cloud ip: "134.79.23.218" + broker: 8 annotations: metallb.universe.tf/address-pool: 'sdf-dmz' - host: usdf-alert-stream-dev-broker-3.lsst.cloud ip: "134.79.23.220" + broker: 9 annotations: metallb.universe.tf/address-pool: 'sdf-dmz' - host: usdf-alert-stream-dev-broker-4.lsst.cloud ip: "134.79.23.217" + broker: 10 annotations: metallb.universe.tf/address-pool: 'sdf-dmz' - host: usdf-alert-stream-dev-broker-5.lsst.cloud ip: "134.79.23.219" + broker: 11 annotations: metallb.universe.tf/address-pool: 'sdf-dmz' From 2d5a5e0cf0a249ad43eccb9fc81d01ce8c611d91 Mon Sep 17 00:00:00 2001 From: Brianna Smart Date: Thu, 26 Sep 2024 16:40:56 -0700 Subject: [PATCH 191/567] Add LATISS topic Add LATISS topic for LATISS production alerts. 
--- .../templates/kafka-topics.yaml | 26 +++++++++++++++++-- .../values-usdfdev-alert-stream-broker.yaml | 14 +++++++--- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka-topics.yaml b/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka-topics.yaml index 98717b9a4a..26c74abe19 100644 --- a/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka-topics.yaml +++ b/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka-topics.yaml @@ -26,6 +26,7 @@ spec: cleanup.policy: "delete" retention.ms: {{ .Values.maxMillisecondsRetained }} # 7 days retention.bytes: {{ .Values.maxBytesRetained }} + compression.type: {{ .Values.topicCompression}} # The default timestamp is the creation time of the alert. # To get the ingestion rate, we need this to be the log # append time, and the header will contain the producer @@ -45,6 +46,7 @@ spec: cleanup.policy: "delete" retention.ms: {{ .Values.maxMillisecondsRetained }} # 7 days retention.bytes: {{ .Values.maxBytesRetained }} + compression.type: {{ .Values.topicCompression}} # The default timestamp is the creation time of the alert. # To get the ingestion rate, we need this to be the log # append time, and the header will contain the producer @@ -64,11 +66,31 @@ spec: cleanup.policy: "delete" retention.ms: {{ .Values.maxMillisecondsRetained }} # 7 days retention.bytes: {{ .Values.maxBytesRetained }} - compression.type: {{ .Values.devTopicCompression}} + compression.type: {{ .Values.topicCompression}} # The default timestamp is the creation time of the alert. 
# To get the ingestion rate, we need this to be the log # append time, and the header will contain the producer # timestamp instead message.timestamp.type: 'LogAppendTime' partitions: {{ .Values.devTopicPartitions }} - replicas: {{ .Values.devTopicReplicas }} \ No newline at end of file + replicas: {{ .Values.devTopicReplicas }} +--- +apiVersion: "kafka.strimzi.io/{{ .Values.strimziAPIVersion }}" +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: "{{ .Values.clusterName }}" + name: "{{ .Values.latissTopicName}}" +spec: + config: + cleanup.policy: "delete" + retention.ms: {{ .Values.maxMillisecondsRetained }} # 7 days + retention.bytes: {{ .Values.maxBytesRetained }} + compression.type: {{ .Values.topicCompression}} + # The default timestamp is the creation time of the alert. + # To get the ingestion rate, we need this to be the log + # append time, and the header will contain the producer + # timestamp instead + message.timestamp.type: 'LogAppendTime' + partitions: {{ .Values.latissTopicPartitions }} + replicas: {{ .Values.latissTopicReplicas }} \ No newline at end of file diff --git a/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml b/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml index c1aa6c112e..0f56055671 100644 --- a/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml +++ b/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml @@ -10,7 +10,7 @@ alert-stream-broker: prometheusScrapingEnabled: true - # Addresses based on the state as of 2021-12-02; these were assigned by + # Addresses based on the state as of 2023; these were assigned by # Square and now we're pinning them. 
externalListener: tls: @@ -106,15 +106,23 @@ alert-stream-broker: groups: ["pittgoogle-idfint"] testTopicName: alert-stream-test - simulatedTopicName: alerts-simulated topicPartitions: 400 topicReplicas: 1 + + simulatedTopicName: alerts-simulated simulatedTopicPartitions: 45 simulatedTopicReplicas: 1 + devTopicName: dev-topic devTopicPartitions: 10 devTopicReplicas: 1 - devTopicCompression: lz4 + + latissTopicName: latiss-alerts + latissTopicPartitions: 45 + latissTopicReplicas: 1 + + # Compression set to snappy to balance alert packet compression speed and size. + topicCompression: snappy alert-stream-schema-registry: hostname: "usdf-alert-schemas-dev.slac.stanford.edu" From e3491f4d73b8d6ca7d54581fab3596fe7a3f82d8 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Tue, 24 Sep 2024 14:10:16 -0700 Subject: [PATCH 192/567] Summit: Update nublado mounts for LSSTComCam. --- applications/nublado/values-summit.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/applications/nublado/values-summit.yaml b/applications/nublado/values-summit.yaml index 6bede5f34d..360e229a8f 100644 --- a/applications/nublado/values-summit.yaml +++ b/applications/nublado/values-summit.yaml @@ -58,8 +58,8 @@ controller: - name: "lsstcomcam" source: type: "nfs" - serverPath: "/repo/LSSTComCam" - server: "comcam-archiver.cp.lsst.org" + serverPath: "/comcam/repo/LSSTComCam" + server: "nfs3.cp.lsst.org" - name: "lsstcam" source: type: "nfs" @@ -78,8 +78,8 @@ controller: - name: "lsstdata-comcam" source: type: "nfs" - serverPath: "/lsstdata" - server: "comcam-archiver.cp.lsst.org" + serverPath: "/comcam/lsstdata" + server: "nfs3.cp.lsst.org" - name: "lsstdata-auxtel" source: type: "nfs" @@ -93,8 +93,8 @@ controller: - name: "lsstdata-base-comcam" source: type: "nfs" - serverPath: "/lsstdata/base/comcam" - server: "comcam-archiver.cp.lsst.org" + serverPath: "/comcam/lsstdata/base/comcam" + server: "nfs3.cp.lsst.org" - name: "lsstdata-base-auxtel" source: type: "nfs" 
From cc608fe27e4b99055f2890e5041d02aa36e80f26 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Tue, 1 Oct 2024 10:25:19 -0700 Subject: [PATCH 193/567] Fix ComCam mounts in UWS. --- applications/uws/values-summit.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/uws/values-summit.yaml b/applications/uws/values-summit.yaml index 41410dc51c..47341768f5 100644 --- a/applications/uws/values-summit.yaml +++ b/applications/uws/values-summit.yaml @@ -33,10 +33,10 @@ uws-api-server: subPath: "" readOnly: false - name: repo-comcam - server: comcam-archiver.cp.lsst.org + server: nfs3.cp.lsst.org claimName: repo-comcam-pvc mountPath: "/repo/LSSTComCam" - exportPath: "/repo/LSSTComCam" + exportPath: "/comcam/repo/LSSTComCam" subPath: "" readOnly: false - name: data-auxtel @@ -47,9 +47,9 @@ uws-api-server: subPath: "" readOnly: true - name: data-comcam - server: comcam-archiver.cp.lsst.org + server: nfs3.cp.lsst.org claimName: data-comcam-pvc mountPath: "/data/lsstdata/base/comcam" - exportPath: "/lsstdata/base/comcam" + exportPath: "/comcam/lsstdata/base/comcam" subPath: "" readOnly: true From 3c2bbdf5064adfc2600a843ae36a8ce19224c378 Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 1 Oct 2024 16:05:17 -0700 Subject: [PATCH 194/567] Bump nublado version --- applications/nublado/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/nublado/Chart.yaml b/applications/nublado/Chart.yaml index 1b0e3dad86..430008b4fc 100644 --- a/applications/nublado/Chart.yaml +++ b/applications/nublado/Chart.yaml @@ -5,7 +5,7 @@ description: JupyterHub and custom spawner for the Rubin Science Platform sources: - https://github.com/lsst-sqre/nublado home: https://nublado.lsst.io/ -appVersion: 7.0.0 +appVersion: 7.2.0 dependencies: - name: jupyterhub From 72cb2aeabf05ba66ea24fbfadc0e29177fe94f90 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 1 Oct 2024 18:59:36 -0300 Subject: [PATCH 195/567] 
rapid-analysis: add application to summit environment --- applications/rapid-analysis/Chart.yaml | 4 + applications/rapid-analysis/README.md | 50 ++++ applications/rapid-analysis/secrets.yaml | 8 + .../rapid-analysis/templates/_helpers.tpl | 124 ++++++++++ .../rapid-analysis/templates/configmap.yaml | 7 + .../rapid-analysis/templates/deployment.yaml | 224 +++++++++++++++++ .../templates/gather-rollup-set.yaml | 231 ++++++++++++++++++ .../templates/gather2a-set.yaml | 231 ++++++++++++++++++ .../templates/mountpoint-pvc.yaml | 26 ++ .../templates/redis-service.yaml | 21 ++ .../templates/redis-statefulset.yaml | 90 +++++++ .../templates/vault-secret.yaml | 44 ++++ .../rapid-analysis/templates/worker-set.yaml | 231 ++++++++++++++++++ .../rapid-analysis/values-summit.yaml | 151 ++++++++++++ applications/rapid-analysis/values.yaml | 130 ++++++++++ docs/applications/rapid-analysis/index.rst | 29 +++ docs/applications/rapid-analysis/values.md | 12 + docs/applications/rubin.rst | 1 + .../applications/rubin/rapid-analysis.yaml | 34 +++ environments/values-summit.yaml | 1 + 20 files changed, 1649 insertions(+) create mode 100644 applications/rapid-analysis/Chart.yaml create mode 100644 applications/rapid-analysis/README.md create mode 100644 applications/rapid-analysis/secrets.yaml create mode 100644 applications/rapid-analysis/templates/_helpers.tpl create mode 100644 applications/rapid-analysis/templates/configmap.yaml create mode 100644 applications/rapid-analysis/templates/deployment.yaml create mode 100644 applications/rapid-analysis/templates/gather-rollup-set.yaml create mode 100644 applications/rapid-analysis/templates/gather2a-set.yaml create mode 100644 applications/rapid-analysis/templates/mountpoint-pvc.yaml create mode 100644 applications/rapid-analysis/templates/redis-service.yaml create mode 100644 applications/rapid-analysis/templates/redis-statefulset.yaml create mode 100644 applications/rapid-analysis/templates/vault-secret.yaml create mode 100644 
applications/rapid-analysis/templates/worker-set.yaml create mode 100644 applications/rapid-analysis/values-summit.yaml create mode 100644 applications/rapid-analysis/values.yaml create mode 100644 docs/applications/rapid-analysis/index.rst create mode 100644 docs/applications/rapid-analysis/values.md create mode 100644 environments/templates/applications/rubin/rapid-analysis.yaml diff --git a/applications/rapid-analysis/Chart.yaml b/applications/rapid-analysis/Chart.yaml new file mode 100644 index 0000000000..c4a7da146e --- /dev/null +++ b/applications/rapid-analysis/Chart.yaml @@ -0,0 +1,4 @@ +apiVersion: v2 +name: rapid-analysis +version: 1.0.0 +description: A Helm chart for deploying the Rapid Analysis services. diff --git a/applications/rapid-analysis/README.md b/applications/rapid-analysis/README.md new file mode 100644 index 0000000000..089df8b114 --- /dev/null +++ b/applications/rapid-analysis/README.md @@ -0,0 +1,50 @@ +# rapid-analysis + +A Helm chart for deploying the Rapid Analysis services. + +## Values + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| affinity | object | `{}` | This specifies the scheduling constraints of the pod. | +| butlerSecret | object | `{}` | This section allows for specification of Butler secret information. If this section is used, it must contain the following attributes: _key_ (The vault key for the Butler secret), _containerPath_ (The directory location for the Butler secret), _dbUser_ (The username for the Butler backend database) | +| credentialFile | string | `""` | The name of the expected credential file for the broadcasters | +| credentialSecretsPath | string | `""` | The key for the credentials including any sub-paths. | +| env | object | `{}` | This section holds a set of key, value pairs for environmental variables (ENV_VAR: value). NOTE: RUN_ARG is taken care of by the chart using _script_. | +| envSecrets | list | `[]` | This section holds specifications for secret injection. 
If this section is used, each object listed must have the following attributes defined: _name_ (The label for the secret), _secretName_ (The name of the vault store reference. Uses the _namespace_ attribute to construct the full name), _secretKey_ (The key in the vault store containing the necessary secret) | +| fullnameOverride | string | `""` | Specify the deployed application name specifically. Overrides all other names. | +| gather2aSet | object | `{}` | This configures a StatefulSet used for visit-level gather processing. | +| gatherRollupSet | object | `{}` | This configures a StatefulSet used for night-summary rollup. | +| image.pullPolicy | string | `"IfNotPresent"` | The policy to apply when pulling an image for deployment. | +| image.repository | string | `"ts-dockerhub.lsst.org/rubintv-broadcaster"` | The Docker registry name for the container image. | +| image.tag | string | `"develop"` | The tag of the container image to use. | +| imagePullSecrets | list | `[]` | The list of pull secrets needed for the images. If this section is used, each object listed can have the following attributes defined: _name_ (The label identifying the pull-secret to use) | +| location | string | `""` | Provide the location where the system is running. | +| nameOverride | string | `""` | Adds an extra string to the release name. | +| namespace | string | `"rapid-analysis"` | This is the namespace where the applications will be deployed. | +| nfsMountpoint | list | `[]` | This section holds the information necessary to create a NFS mount for the container. 
If this section is used, each object listed can have the following attributes defined: _name_ (A label identifier for the mountpoint), _containerPath_ (The path inside the container to mount), _readOnly_ (This sets if the NFS mount is read only or read/write), _server_ (The hostname of the NFS server), _serverPath_ (The path exported by the NFS server) | +| nodeSelector | object | `{}` | This allows the specification of using specific nodes to run the pod. | +| podAnnotations | object | `{}` | This allows the specification of pod annotations. | +| pullSecretsPath | string | `""` | | +| pvcMountpoint | list | `[]` | This section holds information about existing volume claims. If the section is used, each object listed can have the following attributes defined: _name_ (The name ot the persistent volume), _containerPath_ (The path inside the container to mount), _subPath_ (persistent volume subpath, optional) | +| pvcMountpointClaim | list | `[]` | This section holds the information necessary to claim persistent volumes. If the section is used, each object listed can have the following attributes defined: _name_ (The name ot the persistent volume), _containerPath_ (The path inside the container to mount), _subPath_ (persistent volume subpath, optional) | +| redis.affinity | object | `{}` | Affinity rules for the redis pods | +| redis.enabled | bool | `false` | This specifies whether to use redis or not. | +| redis.env | object | `{}` | This section holds a set of key, value pairs for environmental variables (ENV_VAR: value). NOTE: RUN_ARG is taken care of by the chart using _script_. | +| redis.envSecrets | list | `[]` | This section holds specifications for secret injection. If this section is used, each object listed must have the following attributes defined: _name_ (The label for the secret), _secretName_ (The name of the vault store reference. 
Uses the _namespace_ attribute to construct the full name), _secretKey_ (The key in the vault store containing the necessary secret) | +| redis.image.pullPolicy | string | `"IfNotPresent"` | The policy to apply when pulling an image for deployment. | +| redis.image.repository | string | `"docker.io/redis"` | The Docker registry name for the redis container image. | +| redis.image.tag | string | `"latest"` | The tag of the redis container image to use. | +| redis.nodeSelector | object | `{}` | Node selection rules for the redis pods | +| redis.resources | object | `{}` | This allows the specification of resources (CPU, memory) requires to run the redis container. | +| redis.storage.classname | string | `nil` | | +| redis.storage.request | string | `"1Gi"` | The size of the storage request. | +| redis.tolerations | list | `[]` | Toleration specifications for the redis pods | +| resources | object | `{}` | This allows the specification of resources (CPU, memory) requires to run the container. | +| rubinTvSecretsPath | string | `""` | | +| scripts | object | `{}` | List of script objects to run for the broadcaster. This section MUST have the following attribute specified for each entry. _name_ (The full path for the script) The following attributes are optional _resources_ (A resource object specification) _nodeSelector_ (A node selector object specification) _tolerations_ (A list of tolerations) _affinity_ (An affinity object specification) | +| securityContext | object | `{}` | This section allows for specification of security context information. If the section is used, at least one of the following attributes must be specified. _uid_ (User id to run application as), _gid_ (Group id of the user that runs the application), _fid_ (File system context user id), | +| siteTag | string | `""` | A special tag for letting the scripts know where they are running. | +| tolerations | list | `[]` | This specifies the tolerations of the pod for any system taints. 
| +| vaultPrefixPath | string | `""` | The Vault prefix path | +| workerSet | object | `{}` | This configures a StatefulSet used for single frame workers. | diff --git a/applications/rapid-analysis/secrets.yaml b/applications/rapid-analysis/secrets.yaml new file mode 100644 index 0000000000..eda73c3be5 --- /dev/null +++ b/applications/rapid-analysis/secrets.yaml @@ -0,0 +1,8 @@ +redis-password: + description: >- + Password used to authenticate rubintv worker pods to their shared + redis pod. If this secret changes, both the Redis server and all + worker pods will require a restart. + generate: + type: + password diff --git a/applications/rapid-analysis/templates/_helpers.tpl b/applications/rapid-analysis/templates/_helpers.tpl new file mode 100644 index 0000000000..fe0a7eaf8e --- /dev/null +++ b/applications/rapid-analysis/templates/_helpers.tpl @@ -0,0 +1,124 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "rapid-analysis.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "rapid-analysis.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. 
+*/}} +{{- define "rapid-analysis.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "rapid-analysis.labels" -}} +helm.sh/chart: {{ include "rapid-analysis.chart" . }} +{{ include "rapid-analysis.selectorLabels" . }} +{{- end }} + +{{/* +Script name +*/}} +{{- define "rapid-analysis.scriptName" -}} +{{- regexSplit "/" .Values.script.name -1 | last | trimSuffix ".py" | kebabcase }} +{{- end }} + +{{/* +Deployment name +*/}} +{{- define "rapid-analysis.deploymentName" -}} +{{- $name := regexSplit "/" .Values.script.name -1 | last | trimSuffix ".py" | kebabcase }} +{{- $cameraName := regexSplit "/" .Values.script.name -1 | rest | first | lower }} +{{- $camera := "" }} +{{- if eq $cameraName "auxtel" }} +{{- $camera = "at"}} +{{- else if eq $cameraName "comcam" }} +{{- $camera = "cc"}} +{{- else }} +{{- $camera = $cameraName}} +{{- end }} +{{- printf "s-%s-%s" $camera $name }} +{{- end }} + + +{{/* +Selector labels +*/}} +{{- define "rapid-analysis.selectorLabels" -}} +app.kubernetes.io/name: {{ include "rapid-analysis.deploymentName" . }} +app.kubernetes.io/instance: {{ include "rapid-analysis.name" . }} +{{- $values := regexSplit "/" .Values.script.name -1 }} +{{- if eq 1 (len $values) }} +all: misc +{{- else }} +{{- $all_label := lower (index $values 1) }} +{{- $script := index $values 2 }} +{{- if contains "Isr" $script }} +isr: {{ $all_label }} +{{- end }} +all: {{ $all_label }} +{{- if has $all_label (list "auxtel" "comcam" "bot" "ts8") }} +camera: {{ $all_label }} +{{- else }} +{{- if contains "StarTracker" $script }} +camera: startracker +{{- end }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create a default fully qualified app name for redis. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. 
+*/}} +{{- define "rapid-analysis.redis.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- printf "%s-redis" .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s-redis" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Common labels - redis +*/}} +{{- define "rapid-analysis.redis.labels" -}} +helm.sh/chart: {{ include "rapid-analysis.chart" . }} +{{ include "rapid-analysis.redis.selectorLabels" . }} +{{- end }} + +{{/* +Selector labels - redis +*/}} +{{- define "rapid-analysis.redis.selectorLabels" -}} +app.kubernetes.io/name: {{ include "rapid-analysis.name" . }} +app.kubernetes.io/instance: {{ include "rapid-analysis.redis.fullname" . }} +{{- end }} diff --git a/applications/rapid-analysis/templates/configmap.yaml b/applications/rapid-analysis/templates/configmap.yaml new file mode 100644 index 0000000000..65aa6db601 --- /dev/null +++ b/applications/rapid-analysis/templates/configmap.yaml @@ -0,0 +1,7 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: env-configmap + namespace: {{ .Values.namespace }} +data: + GOOGLE_APPLICATION_CREDENTIALS: "/etc/rubintv/creds/{{ .Values.credentialFile }}" diff --git a/applications/rapid-analysis/templates/deployment.yaml b/applications/rapid-analysis/templates/deployment.yaml new file mode 100644 index 0000000000..1f5e13bd87 --- /dev/null +++ b/applications/rapid-analysis/templates/deployment.yaml @@ -0,0 +1,224 @@ +{{ range $script := .Values.scripts }} +{{ $_ := set $.Values "script" $script }} +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "rapid-analysis.deploymentName" $ }} + namespace: {{ $.Values.namespace }} + labels: + {{- include "rapid-analysis.labels" $ | nindent 4 }} +spec: + revisionHistoryLimit: 0 + selector: + matchLabels: + {{- include 
"rapid-analysis.selectorLabels" $ | nindent 6 }} + template: + metadata: + {{- with $.Values.podAnnotations }} + annotations: + {{- toYaml $ | nindent 8 }} + {{- end }} + labels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml $.Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- if $.Values.securityContext }} + securityContext: + {{- if $.Values.securityContext.uid }} + runAsUser: {{ $.Values.securityContext.uid }} + {{- end }} + {{- if $.Values.securityContext.gid }} + runAsGroup: {{ $.Values.securityContext.gid }} + {{- end }} + {{- if $.Values.securityContext.fid }} + fsGroup: {{ $.Values.securityContext.fid }} + {{- end }} + {{- end }} + containers: + - name: {{ include "rapid-analysis.scriptName" $ }} + image: "{{ $.Values.image.repository }}:{{ $.Values.image.tag }}" + imagePullPolicy: {{ $.Values.image.pullPolicy }} + env: + - name: RUN_ARG + value: {{ $script.name }} {{ $.Values.siteTag }} + - name: RAPID_ANALYSIS_LOCATION + value: {{ $.Values.location | upper | quote }} + {{- if or $.Values.env $.Values.envSecrets }} + {{- range $env_var, $env_value := $.Values.env }} + - name: {{ $env_var }} + value: {{ $env_value | quote }} + {{- end }} + {{- range $env := $.Values.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: PGPASSFILE + value: "{{ $.Values.butlerSecret.containerPath }}/postgres-credentials.txt" + - name: PGUSER + value: {{ $.Values.butlerSecret.dbUser | quote }} + - name: AWS_SHARED_CREDENTIALS_FILE + value: "{{ $.Values.butlerSecret.containerPath }}/aws-credentials.ini" + {{- end }} + {{- if $.Values.redis.enabled }} + - name: REDIS_HOST + value: "redis-service" + {{- if $.Values.redis.envSecrets }} + {{- range $env := $.Values.redis.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: 
+ name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- end }} + envFrom: + - configMapRef: + name: env-configmap + volumeMounts: + - name: rubintv-creds + mountPath: "/etc/rubintv/creds" + readOnly: true + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + readOnly: {{ $values.readOnly }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + mountPath: {{ $.Values.butlerSecret.containerPath }} + {{- end }} + {{- if or $.Values.resources $script.resources }} + {{- $resources := "" }} + {{- if $script.resources }} + {{- $resources = $script.resources }} + {{- else }} + {{- $resources = $.Values.resources }} + {{- end }} + resources: + {{- toYaml $resources | nindent 12 }} + {{- end }} + {{- if $.Values.butlerSecret }} + initContainers: + - name: {{ $.Release.Name }}-butler-secret-perm-fixer + image: "alpine:latest" + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /secrets + cat /secrets/aws-credentials.ini > new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + cat /secrets-rubintv/aws-credentials.ini >> new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + mv new-aws-credentials.ini /secrets/aws-credentials.ini + chown 73006:73006 /secrets/* + chmod 0600 /secrets/* + volumeMounts: + - name: {{ $.Release.Name 
}}-raw-butler-secret + mountPath: /secrets-raw + readOnly: true + - name: {{ $.Release.Name }}-butler-secret + mountPath: /secrets + - name: rubintv-aws-creds + mountPath: /secrets-rubintv + readOnly: true + {{- end }} + volumes: + - name: rubintv-creds + secret: + secretName: google-creds + - name: rubintv-aws-creds + secret: + secretName: rubintv-secrets + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + nfs: + path: {{ $values.serverPath }} + readOnly: {{ $values.readOnly }} + server: {{ $values.server }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + emptyDir: {} + - name: {{ $.Release.Name }}-raw-butler-secret + secret: + secretName: butler-secret + defaultMode: 0600 + {{- end }} + {{- if or $.Values.nodeSelector $script.nodeSelector }} + {{- $nodeSelector := "" }} + {{- if $script.nodeSelector }} + {{- $nodeSelector = $script.nodeSelector }} + {{- else }} + {{- $nodeSelector = $.Values.nodeSelector }} + {{- end }} + nodeSelector: + {{- toYaml $nodeSelector | nindent 8 }} + {{- end }} + {{- if or $.Values.affinity $script.affinity }} + {{- $affinity := "" }} + {{- if $script.affinity }} + {{- $affinity = $script.affinity }} + {{- else }} + {{- $affinity = $.Values.affinity }} + {{- end }} + affinity: + {{- toYaml $affinity | nindent 8 }} + {{- end }} + {{- if or $.Values.tolerations $script.tolerations }} + {{- $tolerations := "" }} + {{- if $script.tolerations }} + {{- $tolerations = $script.tolerations }} + {{- else }} + {{- $tolerations = 
$.Values.tolerations }} + {{- end }} + tolerations: + {{- toYaml $tolerations | nindent 8 }} + {{- end }} +{{- end }} diff --git a/applications/rapid-analysis/templates/gather-rollup-set.yaml b/applications/rapid-analysis/templates/gather-rollup-set.yaml new file mode 100644 index 0000000000..ac8958cddf --- /dev/null +++ b/applications/rapid-analysis/templates/gather-rollup-set.yaml @@ -0,0 +1,231 @@ +{{ $_ := set $.Values "script" $.Values.gatherRollupSet }} +{{ $script := $.Values.gatherRollupSet }} +{{- if $script.name }} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "rapid-analysis.deploymentName" $ }}-gatherrollupset + namespace: {{ $.Values.namespace }} + labels: + {{- include "rapid-analysis.labels" $ | nindent 4 }} +spec: + revisionHistoryLimit: 0 + selector: + matchLabels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 6 }} + replicas: {{ ($script.replicas | int) }} + podManagementPolicy: Parallel + template: + metadata: + {{- with $.Values.podAnnotations }} + annotations: + {{- toYaml $ | nindent 8 }} + {{- end }} + labels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml $.Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- if $.Values.securityContext }} + securityContext: + {{- if $.Values.securityContext.uid }} + runAsUser: {{ $.Values.securityContext.uid }} + {{- end }} + {{- if $.Values.securityContext.gid }} + runAsGroup: {{ $.Values.securityContext.gid }} + {{- end }} + {{- if $.Values.securityContext.fid }} + fsGroup: {{ $.Values.securityContext.fid }} + {{- end }} + {{- end }} + containers: + - name: {{ include "rapid-analysis.scriptName" $ }} + image: "{{ $.Values.image.repository }}:{{ $.Values.image.tag }}" + imagePullPolicy: {{ $.Values.image.pullPolicy }} + env: + - name: RUN_ARG + value: {{ $script.name }} + - name: WORKER_NAME + valueFrom: + fieldRef: + fieldPath: 
metadata.labels['statefulset.kubernetes.io/pod-name'] + - name: RAPID_ANALYSIS_LOCATION + value: {{ $.Values.location | upper | quote }} + {{- if or $.Values.env $.Values.envSecrets }} + {{- range $env_var, $env_value := $.Values.env }} + - name: {{ $env_var }} + value: {{ $env_value | quote }} + {{- end }} + {{- range $env := $.Values.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: PGPASSFILE + value: "{{ $.Values.butlerSecret.containerPath }}/postgres-credentials.txt" + - name: PGUSER + value: {{ $.Values.butlerSecret.dbUser | quote }} + - name: AWS_SHARED_CREDENTIALS_FILE + value: "{{ $.Values.butlerSecret.containerPath }}/aws-credentials.ini" + {{- end }} + {{- if $.Values.redis.enabled }} + - name: REDIS_HOST + value: "redis-service" + {{- if $.Values.redis.envSecrets }} + {{- range $env := $.Values.redis.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- end }} + envFrom: + - configMapRef: + name: env-configmap + volumeMounts: + - name: rubintv-creds + mountPath: "/etc/rubintv/creds" + readOnly: true + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + readOnly: {{ $values.readOnly }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end 
}} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + mountPath: {{ $.Values.butlerSecret.containerPath }} + {{- end }} + {{- if or $.Values.resources $script.resources }} + {{- $resources := "" }} + {{- if $script.resources }} + {{- $resources = $script.resources }} + {{- else }} + {{- $resources = $.Values.resources }} + {{- end }} + resources: + {{- toYaml $resources | nindent 12 }} + {{- end }} + {{- if $.Values.butlerSecret }} + initContainers: + - name: {{ $.Release.Name }}-butler-secret-perm-fixer + image: "alpine:latest" + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /secrets + cat /secrets/aws-credentials.ini > new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + cat /secrets-rubintv/aws-credentials.ini >> new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + mv new-aws-credentials.ini /secrets/aws-credentials.ini + chown 73006:73006 /secrets/* + chmod 0600 /secrets/* + volumeMounts: + - name: {{ $.Release.Name }}-raw-butler-secret + mountPath: /secrets-raw + readOnly: true + - name: {{ $.Release.Name }}-butler-secret + mountPath: /secrets + - name: rubintv-aws-creds + mountPath: /secrets-rubintv + readOnly: true + {{- end }} + volumes: + - name: rubintv-creds + secret: + secretName: google-creds + - name: rubintv-aws-creds + secret: + secretName: rubintv-secrets + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + nfs: + path: {{ $values.serverPath }} + readOnly: {{ $values.readOnly }} + server: {{ $values.server }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name 
}} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + emptyDir: {} + - name: {{ $.Release.Name }}-raw-butler-secret + secret: + secretName: butler-secret + defaultMode: 0600 + {{- end }} + {{- if or $.Values.nodeSelector $script.nodeSelector }} + {{- $nodeSelector := "" }} + {{- if $script.nodeSelector }} + {{- $nodeSelector = $script.nodeSelector }} + {{- else }} + {{- $nodeSelector = $.Values.nodeSelector }} + {{- end }} + nodeSelector: + {{- toYaml $nodeSelector | nindent 8 }} + {{- end }} + {{- if or $.Values.affinity $script.affinity }} + {{- $affinity := "" }} + {{- if $script.affinity }} + {{- $affinity = $script.affinity }} + {{- else }} + {{- $affinity = $.Values.affinity }} + {{- end }} + affinity: + {{- toYaml $affinity | nindent 8 }} + {{- end }} + {{- if or $.Values.tolerations $script.tolerations }} + {{- $tolerations := "" }} + {{- if $script.tolerations }} + {{- $tolerations = $script.tolerations }} + {{- else }} + {{- $tolerations = $.Values.tolerations }} + {{- end }} + tolerations: + {{- toYaml $tolerations | nindent 8 }} + {{- end }} +{{- end }} diff --git a/applications/rapid-analysis/templates/gather2a-set.yaml b/applications/rapid-analysis/templates/gather2a-set.yaml new file mode 100644 index 0000000000..2c1fdbee4f --- /dev/null +++ b/applications/rapid-analysis/templates/gather2a-set.yaml @@ -0,0 +1,231 @@ +{{ $_ := set $.Values "script" $.Values.gather2aSet }} +{{ $script := $.Values.gather2aSet }} +{{- if $script.name }} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "rapid-analysis.deploymentName" $ }}-gather2aset + namespace: {{ $.Values.namespace }} + labels: + {{- include "rapid-analysis.labels" $ | nindent 4 }} +spec: + revisionHistoryLimit: 0 + selector: + matchLabels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 6 }} + replicas: {{ ($script.replicas | int) }} + podManagementPolicy: Parallel + template: + metadata: + {{- with 
$.Values.podAnnotations }} + annotations: + {{- toYaml $ | nindent 8 }} + {{- end }} + labels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml $.Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- if $.Values.securityContext }} + securityContext: + {{- if $.Values.securityContext.uid }} + runAsUser: {{ $.Values.securityContext.uid }} + {{- end }} + {{- if $.Values.securityContext.gid }} + runAsGroup: {{ $.Values.securityContext.gid }} + {{- end }} + {{- if $.Values.securityContext.fid }} + fsGroup: {{ $.Values.securityContext.fid }} + {{- end }} + {{- end }} + containers: + - name: {{ include "rapid-analysis.scriptName" $ }} + image: "{{ $.Values.image.repository }}:{{ $.Values.image.tag }}" + imagePullPolicy: {{ $.Values.image.pullPolicy }} + env: + - name: RUN_ARG + value: {{ $script.name }} + - name: WORKER_NAME + valueFrom: + fieldRef: + fieldPath: metadata.labels['statefulset.kubernetes.io/pod-name'] + - name: RAPID_ANALYSIS_LOCATION + value: {{ $.Values.location | upper | quote }} + {{- if or $.Values.env $.Values.envSecrets }} + {{- range $env_var, $env_value := $.Values.env }} + - name: {{ $env_var }} + value: {{ $env_value | quote }} + {{- end }} + {{- range $env := $.Values.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: PGPASSFILE + value: "{{ $.Values.butlerSecret.containerPath }}/postgres-credentials.txt" + - name: PGUSER + value: {{ $.Values.butlerSecret.dbUser | quote }} + - name: AWS_SHARED_CREDENTIALS_FILE + value: "{{ $.Values.butlerSecret.containerPath }}/aws-credentials.ini" + {{- end }} + {{- if $.Values.redis.enabled }} + - name: REDIS_HOST + value: "redis-service" + {{- if $.Values.redis.envSecrets }} + {{- range $env := $.Values.redis.envSecrets }} + - name: {{ $env.name }} + valueFrom: + 
secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- end }} + envFrom: + - configMapRef: + name: env-configmap + volumeMounts: + - name: rubintv-creds + mountPath: "/etc/rubintv/creds" + readOnly: true + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + readOnly: {{ $values.readOnly }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + mountPath: {{ $.Values.butlerSecret.containerPath }} + {{- end }} + {{- if or $.Values.resources $script.resources }} + {{- $resources := "" }} + {{- if $script.resources }} + {{- $resources = $script.resources }} + {{- else }} + {{- $resources = $.Values.resources }} + {{- end }} + resources: + {{- toYaml $resources | nindent 12 }} + {{- end }} + {{- if $.Values.butlerSecret }} + initContainers: + - name: {{ $.Release.Name }}-butler-secret-perm-fixer + image: "alpine:latest" + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /secrets + cat /secrets/aws-credentials.ini > new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + cat /secrets-rubintv/aws-credentials.ini >> new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + mv new-aws-credentials.ini /secrets/aws-credentials.ini + chown 73006:73006 /secrets/* + chmod 0600 /secrets/* + volumeMounts: + - name: {{ $.Release.Name 
}}-raw-butler-secret + mountPath: /secrets-raw + readOnly: true + - name: {{ $.Release.Name }}-butler-secret + mountPath: /secrets + - name: rubintv-aws-creds + mountPath: /secrets-rubintv + readOnly: true + {{- end }} + volumes: + - name: rubintv-creds + secret: + secretName: google-creds + - name: rubintv-aws-creds + secret: + secretName: rubintv-secrets + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + nfs: + path: {{ $values.serverPath }} + readOnly: {{ $values.readOnly }} + server: {{ $values.server }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + emptyDir: {} + - name: {{ $.Release.Name }}-raw-butler-secret + secret: + secretName: butler-secret + defaultMode: 0600 + {{- end }} + {{- if or $.Values.nodeSelector $script.nodeSelector }} + {{- $nodeSelector := "" }} + {{- if $script.nodeSelector }} + {{- $nodeSelector = $script.nodeSelector }} + {{- else }} + {{- $nodeSelector = $.Values.nodeSelector }} + {{- end }} + nodeSelector: + {{- toYaml $nodeSelector | nindent 8 }} + {{- end }} + {{- if or $.Values.affinity $script.affinity }} + {{- $affinity := "" }} + {{- if $script.affinity }} + {{- $affinity = $script.affinity }} + {{- else }} + {{- $affinity = $.Values.affinity }} + {{- end }} + affinity: + {{- toYaml $affinity | nindent 8 }} + {{- end }} + {{- if or $.Values.tolerations $script.tolerations }} + {{- $tolerations := "" }} + {{- if $script.tolerations }} + {{- $tolerations = $script.tolerations }} + {{- else }} + {{- $tolerations = 
$.Values.tolerations }} + {{- end }} + tolerations: + {{- toYaml $tolerations | nindent 8 }} + {{- end }} +{{- end }} diff --git a/applications/rapid-analysis/templates/mountpoint-pvc.yaml b/applications/rapid-analysis/templates/mountpoint-pvc.yaml new file mode 100644 index 0000000000..4cf1a55df3 --- /dev/null +++ b/applications/rapid-analysis/templates/mountpoint-pvc.yaml @@ -0,0 +1,26 @@ +{{- if .Values.pvcMountpointClaim }} +{{- range $values := .Values.pvcMountpointClaim }} +--- +kind: PersistentVolumeClaim +apiVersion: v1 +metadata: + name: {{ $values.name }} + namespace: {{ $.Values.namespace }} + {{- if $values.ids }} + annotations: + {{- if $values.ids.uid }} + pv.beta.kubernetes.io/uid: "{{ $values.ids.uid }}" + {{- end }} + {{- if $values.ids.gid }} + pv.beta.kubernetes.io/gid: "{{ $values.ids.gid }}" + {{- end }} + {{- end }} +spec: + accessModes: + - {{ $values.accessMode | quote }} + resources: + requests: + storage: {{ $values.claimSize }} + storageClassName: {{ $values.name }} +{{- end }} +{{- end }} diff --git a/applications/rapid-analysis/templates/redis-service.yaml b/applications/rapid-analysis/templates/redis-service.yaml new file mode 100644 index 0000000000..0ac2c01ced --- /dev/null +++ b/applications/rapid-analysis/templates/redis-service.yaml @@ -0,0 +1,21 @@ +{{- if .Values.redis.enabled }} +apiVersion: v1 +kind: Service +metadata: + name: redis-service + namespace: {{ .Values.namespace }} + {{- with $.Values.redis.serviceAnnotations }} + annotations: + {{- toYaml $.Values.redis.serviceAnnotations | nindent 4 }} + {{- end }} +spec: + type: LoadBalancer + internalTrafficPolicy: Cluster + selector: + app.kubernetes.io/instance: {{ include "rapid-analysis.redis.fullname" . 
}} + ports: + - name: redis + protocol: TCP + port: {{ .Values.redis.port }} + targetPort: {{ .Values.redis.port }} +{{- end }} diff --git a/applications/rapid-analysis/templates/redis-statefulset.yaml b/applications/rapid-analysis/templates/redis-statefulset.yaml new file mode 100644 index 0000000000..224d83c500 --- /dev/null +++ b/applications/rapid-analysis/templates/redis-statefulset.yaml @@ -0,0 +1,90 @@ +{{- if .Values.redis.enabled }} +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: redis + namespace: {{ .Values.namespace }} + labels: + {{- include "rapid-analysis.redis.labels" . | nindent 4 }} +spec: + serviceName: redis-service + selector: + matchLabels: + {{- include "rapid-analysis.redis.selectorLabels" . | nindent 6 }} + replicas: {{ .Values.redis.replicas | default 1 }} + template: + metadata: + labels: + {{- include "rapid-analysis.redis.selectorLabels" . | nindent 8 }} + spec: + securityContext: + fsGroup: 999 + runAsGroup: 999 + runAsNonRoot: true + runAsUser: 999 + containers: + - name: redis + image: "{{ .Values.redis.image.repository }}:{{ .Values.redis.image.tag }}" + imagePullPolicy: {{ .Values.redis.image.pullPolicy }} + command: [ "redis-server", "--appendonly", "yes", "--requirepass", "$(REDIS_PASSWORD)" ] + ports: + - containerPort: {{ .Values.redis.port }} + env: + {{- range $env_var, $env_value := .Values.redis.env }} + - name: {{ $env_var }} + value: {{ $env_value | quote }} + {{- end }} + {{- range $env := .Values.redis.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + livenessProbe: + exec: + command: + - sh + - '-c' + - 'redis-cli -h $(hostname) -a $(REDIS_PASSWORD) incr health:counter' + failureThreshold: 3 + initialDelaySeconds: 15 + periodSeconds: 60 + successThreshold: 1 + timeoutSeconds: 1 + {{- with $.Values.redis.resources }} + resources: + {{- toYaml $.Values.redis.resources | nindent 10 }} + {{- end }} + securityContext: + 
allowPrivilegeEscalation: false + capabilities: + drop: + - all + volumeMounts: + - mountPath: /data + name: data + {{- with $.Values.redis.nodeSelector }} + nodeSelector: + {{- toYaml $ | nindent 8 }} + {{- end }} + {{- with $.Values.redis.affinity }} + affinity: + {{- toYaml $ | nindent 8 }} + {{- end }} + {{- with $.Values.redis.tolerations }} + tolerations: + {{- toYaml $ | nindent 8 }} + {{- end }} + volumeClaimTemplates: + - metadata: + name: data + spec: + accessModes: [ "ReadWriteOnce" ] + {{- if $.Values.redis.storage.classname }} + storageClassName: {{ $.Values.redis.storage.classname }} + {{- end }} + resources: + requests: + storage: {{ $.Values.redis.storage.request }} +{{- end }} diff --git a/applications/rapid-analysis/templates/vault-secret.yaml b/applications/rapid-analysis/templates/vault-secret.yaml new file mode 100644 index 0000000000..7b3ccf0a19 --- /dev/null +++ b/applications/rapid-analysis/templates/vault-secret.yaml @@ -0,0 +1,44 @@ +--- +apiVersion: ricoberger.de/v1alpha1 +kind: VaultSecret +metadata: + name: pull-secret + namespace: {{ .Values.namespace }} + labels: + app.kubernetes.io/name: {{ include "rapid-analysis.name" . }} +spec: + path: {{ required "vaultSecretsPath must be set" .Values.global.vaultSecretsPath }}/{{ required "pullSecretsPath must be set" .Values.pullSecretsPath }} + type: kubernetes.io/dockerconfigjson +--- +apiVersion: ricoberger.de/v1alpha1 +kind: VaultSecret +metadata: + name: google-creds + namespace: {{ .Values.namespace }} + labels: + app.kubernetes.io/name: {{ include "rapid-analysis.name" . }} +spec: + path: {{ required "vaultSecretsPath must be set" .Values.global.vaultSecretsPath }}/{{ required "credentialSecretsPath must be set" .Values.credentialSecretsPath }} + type: Opaque +--- +apiVersion: ricoberger.de/v1alpha1 +kind: VaultSecret +metadata: + name: butler-secret + namespace: {{ .Values.namespace }} + labels: + app.kubernetes.io/name: {{ include "rapid-analysis.name" . 
}} +spec: + path: {{ required "vaultSecretsPath must be set" .Values.global.vaultSecretsPath }}/{{ required "butlerSecret.key must be set" .Values.butlerSecret.key }} + type: Opaque +--- +apiVersion: ricoberger.de/v1alpha1 +kind: VaultSecret +metadata: + name: rubintv-secrets + namespace: {{ .Values.namespace }} + labels: + app.kubernetes.io/name: {{ include "rapid-analysis.name" . }} +spec: + path: {{ required "vaultSecretsPath must be set" .Values.global.vaultSecretsPath }}/{{ required "rubinTvSecretsPath must be set" .Values.rubinTvSecretsPath }} + type: Opaque diff --git a/applications/rapid-analysis/templates/worker-set.yaml b/applications/rapid-analysis/templates/worker-set.yaml new file mode 100644 index 0000000000..ad87fbc2b8 --- /dev/null +++ b/applications/rapid-analysis/templates/worker-set.yaml @@ -0,0 +1,231 @@ +{{ $_ := set $.Values "script" $.Values.workerSet }} +{{ $script := $.Values.workerSet }} +{{- if $script.name }} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "rapid-analysis.deploymentName" $ }}-workerset + namespace: {{ $.Values.namespace }} + labels: + {{- include "rapid-analysis.labels" $ | nindent 4 }} +spec: + revisionHistoryLimit: 0 + selector: + matchLabels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 6 }} + replicas: {{ ($script.replicas | int) }} + podManagementPolicy: Parallel + template: + metadata: + {{- with $.Values.podAnnotations }} + annotations: + {{- toYaml $ | nindent 8 }} + {{- end }} + labels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml $.Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- if $.Values.securityContext }} + securityContext: + {{- if $.Values.securityContext.uid }} + runAsUser: {{ $.Values.securityContext.uid }} + {{- end }} + {{- if $.Values.securityContext.gid }} + runAsGroup: {{ $.Values.securityContext.gid }} + {{- end }} + {{- if 
$.Values.securityContext.fid }} + fsGroup: {{ $.Values.securityContext.fid }} + {{- end }} + {{- end }} + containers: + - name: {{ include "rapid-analysis.scriptName" $ }} + image: "{{ $.Values.image.repository }}:{{ $.Values.image.tag }}" + imagePullPolicy: {{ $.Values.image.pullPolicy }} + env: + - name: RUN_ARG + value: {{ $script.name }} + - name: WORKER_NAME + valueFrom: + fieldRef: + fieldPath: metadata.labels['statefulset.kubernetes.io/pod-name'] + - name: RAPID_ANALYSIS_LOCATION + value: {{ $.Values.location | upper | quote }} + {{- if or $.Values.env $.Values.envSecrets }} + {{- range $env_var, $env_value := $.Values.env }} + - name: {{ $env_var }} + value: {{ $env_value | quote }} + {{- end }} + {{- range $env := $.Values.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: PGPASSFILE + value: "{{ $.Values.butlerSecret.containerPath }}/postgres-credentials.txt" + - name: PGUSER + value: {{ $.Values.butlerSecret.dbUser | quote }} + - name: AWS_SHARED_CREDENTIALS_FILE + value: "{{ $.Values.butlerSecret.containerPath }}/aws-credentials.ini" + {{- end }} + {{- if $.Values.redis.enabled }} + - name: REDIS_HOST + value: "redis-service" + {{- if $.Values.redis.envSecrets }} + {{- range $env := $.Values.redis.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- end }} + envFrom: + - configMapRef: + name: env-configmap + volumeMounts: + - name: rubintv-creds + mountPath: "/etc/rubintv/creds" + readOnly: true + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + readOnly: {{ $values.readOnly }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ 
$values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + mountPath: {{ $.Values.butlerSecret.containerPath }} + {{- end }} + {{- if or $.Values.resources $script.resources }} + {{- $resources := "" }} + {{- if $script.resources }} + {{- $resources = $script.resources }} + {{- else }} + {{- $resources = $.Values.resources }} + {{- end }} + resources: + {{- toYaml $resources | nindent 12 }} + {{- end }} + {{- if $.Values.butlerSecret }} + initContainers: + - name: {{ $.Release.Name }}-butler-secret-perm-fixer + image: "alpine:latest" + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /secrets + cat /secrets/aws-credentials.ini > new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + cat /secrets-rubintv/aws-credentials.ini >> new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + mv new-aws-credentials.ini /secrets/aws-credentials.ini + chown 73006:73006 /secrets/* + chmod 0600 /secrets/* + volumeMounts: + - name: {{ $.Release.Name }}-raw-butler-secret + mountPath: /secrets-raw + readOnly: true + - name: {{ $.Release.Name }}-butler-secret + mountPath: /secrets + - name: rubintv-aws-creds + mountPath: /secrets-rubintv + readOnly: true + {{- end }} + volumes: + - name: rubintv-creds + secret: + secretName: google-creds + - name: rubintv-aws-creds + secret: + secretName: rubintv-secrets + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + nfs: + path: {{ $values.serverPath }} + readOnly: {{ $values.readOnly }} + server: {{ 
$values.server }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + emptyDir: {} + - name: {{ $.Release.Name }}-raw-butler-secret + secret: + secretName: butler-secret + defaultMode: 0600 + {{- end }} + {{- if or $.Values.nodeSelector $script.nodeSelector }} + {{- $nodeSelector := "" }} + {{- if $script.nodeSelector }} + {{- $nodeSelector = $script.nodeSelector }} + {{- else }} + {{- $nodeSelector = $.Values.nodeSelector }} + {{- end }} + nodeSelector: + {{- toYaml $nodeSelector | nindent 8 }} + {{- end }} + {{- if or $.Values.affinity $script.affinity }} + {{- $affinity := "" }} + {{- if $script.affinity }} + {{- $affinity = $script.affinity }} + {{- else }} + {{- $affinity = $.Values.affinity }} + {{- end }} + affinity: + {{- toYaml $affinity | nindent 8 }} + {{- end }} + {{- if or $.Values.tolerations $script.tolerations }} + {{- $tolerations := "" }} + {{- if $script.tolerations }} + {{- $tolerations = $script.tolerations }} + {{- else }} + {{- $tolerations = $.Values.tolerations }} + {{- end }} + tolerations: + {{- toYaml $tolerations | nindent 8 }} + {{- end }} +{{- end }} diff --git a/applications/rapid-analysis/values-summit.yaml b/applications/rapid-analysis/values-summit.yaml new file mode 100644 index 0000000000..185b063e84 --- /dev/null +++ b/applications/rapid-analysis/values-summit.yaml @@ -0,0 +1,151 @@ +image: + repository: ts-dockerhub.lsst.org/rapid-analysis + tag: c0039 + pullPolicy: Always +location: SUMMIT +env: + DAF_BUTLER_REPOSITORY_INDEX: /project/data-repos.yaml +scripts: +- name: 
summit/auxTel/runBackgroundService.py + resources: + requests: + cpu: 0.5 + memory: 4G + limits: + cpu: 1.0 + memory: 10G +- name: summit/auxTel/runButlerWatcher.py +- name: summit/auxTel/runCalibrateCcdRunner.py +- name: summit/auxTel/runImExaminer.py +- name: summit/auxTel/runIsrRunner.py +- name: summit/auxTel/runMetadataCreator.py +- name: summit/auxTel/runMetadataServer.py +- name: summit/auxTel/runMonitor.py + resources: + requests: + cpu: 0.5 + memory: 1G + limits: + cpu: 1.0 + memory: 10G +- name: summit/auxTel/runMountTorquePlotter.py +- name: summit/auxTel/runNightReporter.py +- name: summit/auxTel/runSpecExaminer.py + resources: + requests: + cpu: 0.5 + memory: 2G + limits: + cpu: 1.0 + memory: 4G +- name: summit/misc/runAllSky.py + resources: + requests: + cpu: 1.0 + memory: 4G + limits: + cpu: 2 + memory: 6G +- name: summit/misc/runStarTracker.py +- name: summit/misc/runStarTrackerCatchup.py +- name: summit/misc/runStarTrackerFast.py +- name: summit/misc/runStarTrackerMetadata.py +- name: summit/misc/runStarTrackerNightReport.py +- name: summit/misc/runStarTrackerWide.py +- name: summit/misc/runTmaTelemetry.py +- name: summit/LSSTComCam/runButlerWatcher.py +- name: summit/LSSTComCam/runHeadNode.py +- name: summit/LSSTComCam/runMetadataServer.py +- name: summit/LSSTComCam/runPlotter.py +workerSet: + name: summit/LSSTComCam/runSfmRunner.py + replicas: 36 + resources: + requests: + cpu: 1.0 + memory: 4G + limits: + cpu: 1.0 + memory: 8G +credentialFile: google_write_creds +pullSecretsPath: pull-secret +rubinTvSecretsPath: rubintv +credentialSecretsPath: rubintv-broadcaster +butlerSecret: + key: butler-secret + containerPath: /home/saluser/.lsst + dbUser: oods +imagePullSecrets: +- name: pull-secret +nfsMountpoint: +- name: auxtel-gen3-data + containerPath: /repo/LATISS + readOnly: false + server: nfs-auxtel.cp.lsst.org + serverPath: /auxtel/repo/LATISS +- name: comcam-gen3-data + containerPath: /repo/LSSTComCam + readOnly: false + server: nfs3.cp.lsst.org 
+ serverPath: /comcam/repo/LSSTComCam +- name: auxtel-data + containerPath: /readonly/lsstdata/auxtel + readOnly: true + server: nfs-auxtel.cp.lsst.org + serverPath: /auxtel/lsstdata +- name: comcam-data + containerPath: /readonly/lsstdata/comcam + readOnly: true + server: nfs3.cp.lsst.org + serverPath: /comcam/lsstdata +- name: project-shared + containerPath: /project + readOnly: false + server: nfs1.cp.lsst.org + serverPath: /project +- name: auxtel-gen3-data-temp + containerPath: /data/lsstdata/base/auxtel + readOnly: true + server: nfs-auxtel.cp.lsst.org + serverPath: /auxtel/lsstdata/base/auxtel +- name: comcam-gen3-data-temp + containerPath: /data/lsstdata/base/comcam + readOnly: true + server: nfs3.cp.lsst.org + serverPath: /comcam/lsstdata/base/comcam +- name: allsky-data + containerPath: /data/allsky + readOnly: true + server: nfs-auxtel.cp.lsst.org + serverPath: /auxtel/allsky +- name: scratch-shared + containerPath: /scratch + readOnly: false + server: nfs1.cp.lsst.org + serverPath: /scratch/rubintv +resources: + requests: + cpu: 0.5 + memory: 1G + limits: + cpu: 1.0 + memory: 2.5G +redis: + enabled: true + port: 6379 + env: + MASTER: true + envSecrets: + - name: REDIS_PASSWORD + secretName: rubintv-secrets + secretKey: redis-password + storage: + classname: rook-ceph-block + request: 10Gi + resources: + requests: + cpu: 100m + memory: 1Gi + limits: + cpu: 1 + memory: 50Gi diff --git a/applications/rapid-analysis/values.yaml b/applications/rapid-analysis/values.yaml new file mode 100644 index 0000000000..7151ddc993 --- /dev/null +++ b/applications/rapid-analysis/values.yaml @@ -0,0 +1,130 @@ +image: + # -- The Docker registry name for the container image. + repository: ts-dockerhub.lsst.org/rubintv-broadcaster + # -- The tag of the container image to use. + tag: develop + # -- The policy to apply when pulling an image for deployment. + pullPolicy: IfNotPresent +# -- This is the namespace where the applications will be deployed. 
+namespace: rapid-analysis +# -- A special tag for letting the scripts know where they are running. +siteTag: "" +# -- Provide the location where the system is running. +location: "" +# -- List of script objects to run for the broadcaster. +# This section MUST have the following attribute specified for each entry. +# _name_ (The full path for the script) +# The following attributes are optional +# _resources_ (A resource object specification) +# _nodeSelector_ (A node selector object specification) +# _tolerations_ (A list of tolerations) +# _affinity_ (An affinity object specification) +scripts: {} +# -- This section holds a set of key, value pairs for environmental variables (ENV_VAR: value). +# NOTE: RUN_ARG is taken care of by the chart using _script_. +env: {} +# -- This section holds specifications for secret injection. +# If this section is used, each object listed must have the following attributes defined: +# _name_ (The label for the secret), +# _secretName_ (The name of the vault store reference. Uses the _namespace_ attribute to construct the full name), +# _secretKey_ (The key in the vault store containing the necessary secret) +envSecrets: [] +# -- The Vault prefix path +vaultPrefixPath: "" +# The key for the pull secrets including any sub-paths. +pullSecretsPath: "" +# Path for the rubin tv specific secrets vault. +rubinTvSecretsPath: "" +# -- This key allows specification of a script to override the entrypoint. +# -- The list of pull secrets needed for the images. +# If this section is used, each object listed can have the following attributes defined: +# _name_ (The label identifying the pull-secret to use) +imagePullSecrets: [] +# -- This section allows for specification of Butler secret information. 
+# If this section is used, it must contain the following attributes: +# _key_ (The vault key for the Butler secret), +# _containerPath_ (The directory location for the Butler secret), +# _dbUser_ (The username for the Butler backend database) +butlerSecret: {} +# -- This section holds the information necessary to create a NFS mount for the container. +# If this section is used, each object listed can have the following attributes defined: +# _name_ (A label identifier for the mountpoint), +# _containerPath_ (The path inside the container to mount), +# _readOnly_ (This sets if the NFS mount is read only or read/write), +# _server_ (The hostname of the NFS server), +# _serverPath_ (The path exported by the NFS server) +nfsMountpoint: [] +# -- This section holds information about existing volume claims. +# If the section is used, each object listed can have the following attributes defined: +# _name_ (The name ot the persistent volume), +# _containerPath_ (The path inside the container to mount), +# _subPath_ (persistent volume subpath, optional) +pvcMountpoint: [] +# -- This section holds the information necessary to claim persistent volumes. +# If the section is used, each object listed can have the following attributes defined: +# _name_ (The name ot the persistent volume), +# _containerPath_ (The path inside the container to mount), +# _subPath_ (persistent volume subpath, optional) +pvcMountpointClaim: [] +# -- The key for the credentials including any sub-paths. +credentialSecretsPath: "" +# -- The name of the expected credential file for the broadcasters +credentialFile: "" +# -- Adds an extra string to the release name. +nameOverride: "" +# -- Specify the deployed application name specifically. Overrides all other names. +fullnameOverride: "" +# -- This allows the specification of pod annotations. +podAnnotations: {} +# -- This allows the specification of resources (CPU, memory) requires to run the container. 
+resources: {} +# -- This allows the specification of using specific nodes to run the pod. +nodeSelector: {} +# -- This specifies the tolerations of the pod for any system taints. +tolerations: [] +# -- This specifies the scheduling constraints of the pod. +affinity: {} +# -- This section allows for specification of security context information. +# If the section is used, at least one of the following attributes must be specified. +# _uid_ (User id to run application as), +# _gid_ (Group id of the user that runs the application), +# _fid_ (File system context user id), +securityContext: {} +# -- This configures a StatefulSet used for single frame workers. +workerSet: {} +# -- This configures a StatefulSet used for visit-level gather processing. +gather2aSet: {} +# -- This configures a StatefulSet used for night-summary rollup. +gatherRollupSet: {} +redis: + # -- This specifies whether to use redis or not. + enabled: false + image: + # -- The Docker registry name for the redis container image. + repository: docker.io/redis + # -- The tag of the redis container image to use. + tag: latest + # -- The policy to apply when pulling an image for deployment. + pullPolicy: IfNotPresent + # -- This section holds a set of key, value pairs for environmental variables (ENV_VAR: value). + # NOTE: RUN_ARG is taken care of by the chart using _script_. + env: {} + # -- This section holds specifications for secret injection. + # If this section is used, each object listed must have the following attributes defined: + # _name_ (The label for the secret), + # _secretName_ (The name of the vault store reference. Uses the _namespace_ attribute to construct the full name), + # _secretKey_ (The key in the vault store containing the necessary secret) + envSecrets: [] + storage: + # str -- The storage class name for the data store request. + classname: + # -- The size of the storage request. 
+ request: 1Gi + # -- This allows the specification of resources (CPU, memory) requires to run the redis container. + resources: {} + # -- Node selection rules for the redis pods + nodeSelector: {} + # -- Toleration specifications for the redis pods + tolerations: [] + # -- Affinity rules for the redis pods + affinity: {} diff --git a/docs/applications/rapid-analysis/index.rst b/docs/applications/rapid-analysis/index.rst new file mode 100644 index 0000000000..fe3d904d0f --- /dev/null +++ b/docs/applications/rapid-analysis/index.rst @@ -0,0 +1,29 @@ +.. px-app:: rapid-analysis + +################################################# +rapid-analysis — Real-time backend of the RubinTV +################################################# + +The Rapid Analysis Framework performes realtime analysis on data from these sources, rendering the outputs destined for RubinTV as PNGs, JPEGs, MP4s, and JSON files, which are put in S3 buckets at the summit and at USDF. +The RubinTV frontend then monitors these buckets and serves these files to users. + +At the summit, the real-time activities currently include: + +.. rst-class:: compact + +- AuxTel observing +- ComCam testing +- All sky camera observations +- StarTracker data taking on the TMA +- TMA testing activities + +.. jinja:: rapid-analysis + :file: applications/_summary.rst.jinja + +Guides +====== + +.. toctree:: + :maxdepth: 1 + + values diff --git a/docs/applications/rapid-analysis/values.md b/docs/applications/rapid-analysis/values.md new file mode 100644 index 0000000000..157ad4ca2c --- /dev/null +++ b/docs/applications/rapid-analysis/values.md @@ -0,0 +1,12 @@ +```{px-app-values} rapid-analysis +``` + +# rapid-analysis Helm values reference + +Helm values reference table for the {px-app}`rapid-analysis` application. 
+ +```{include} ../../../applications/rapid-analysis/README.md +--- +start-after: "## Values" +--- +``` diff --git a/docs/applications/rubin.rst b/docs/applications/rubin.rst index ec91711ff5..a03aad2b38 100644 --- a/docs/applications/rubin.rst +++ b/docs/applications/rubin.rst @@ -19,6 +19,7 @@ Argo CD project: ``rubin`` obsloctap/index plot-navigator/index production-tools/index + rapid-analysis/index rubintv/index rubintv-dev/index schedview-snapshot/index diff --git a/environments/templates/applications/rubin/rapid-analysis.yaml b/environments/templates/applications/rubin/rapid-analysis.yaml new file mode 100644 index 0000000000..8af8557264 --- /dev/null +++ b/environments/templates/applications/rubin/rapid-analysis.yaml @@ -0,0 +1,34 @@ +{{- if (index .Values "applications" "rapid-analysis") -}} +apiVersion: v1 +kind: Namespace +metadata: + name: "rapid-analysis" +--- +apiVersion: argoproj.io/v1alpha1 +kind: Application +metadata: + name: "rapid-analysis" + namespace: "argocd" + finalizers: + - "resources-finalizer.argocd.argoproj.io" +spec: + destination: + namespace: "rapid-analysis" + server: "https://kubernetes.default.svc" + project: "rubin" + source: + path: "applications/rapid-analysis" + repoURL: {{ .Values.repoUrl | quote }} + targetRevision: {{ .Values.targetRevision | quote }} + helm: + parameters: + - name: "global.host" + value: {{ .Values.fqdn | quote }} + - name: "global.baseUrl" + value: "https://{{ .Values.fqdn }}" + - name: "global.vaultSecretsPath" + value: {{ .Values.vaultPathPrefix | quote }} + valueFiles: + - "values.yaml" + - "values-{{ .Values.name }}.yaml" +{{- end -}} diff --git a/environments/values-summit.yaml b/environments/values-summit.yaml index 1077d688a2..ce63e3bd3f 100644 --- a/environments/values-summit.yaml +++ b/environments/values-summit.yaml @@ -14,6 +14,7 @@ applications: nightreport: true nublado: true portal: true + rapid-analysis: true rubintv: true rubintv-dev: true sasquatch: true From 
7db5ced2bab4e5acb604f9d3d005797fb8de097f Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 1 Oct 2024 16:24:26 -0700 Subject: [PATCH 196/567] Makes metric_batch_size=1000 default everywhere --- applications/sasquatch/README.md | 4 ++-- .../sasquatch/charts/telegraf-kafka-consumer/README.md | 2 +- .../charts/telegraf-kafka-consumer/templates/_helpers.tpl | 2 +- .../sasquatch/charts/telegraf-kafka-consumer/values.yaml | 4 ++-- applications/sasquatch/values-base.yaml | 1 - applications/sasquatch/values-tucson-teststand.yaml | 1 - applications/sasquatch/values-usdfprod.yaml | 1 - 7 files changed, 6 insertions(+), 9 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index aac4c033b9..13cbefb735 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -419,7 +419,7 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer.kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | | telegraf-kafka-consumer.kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | | telegraf-kafka-consumer.kafkaConsumers.test.max_undelivered_messages | int | 10000 | Maximum number of undelivered messages. Should be a multiple of metric_batch_size, setting it too low may never flush the broker's messages. | -| telegraf-kafka-consumer.kafkaConsumers.test.metric_batch_size | int | 5000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | +| telegraf-kafka-consumer.kafkaConsumers.test.metric_batch_size | int | 1000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | | telegraf-kafka-consumer.kafkaConsumers.test.metric_buffer_limit | int | 100000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. 
This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | | telegraf-kafka-consumer.kafkaConsumers.test.offset | string | `"oldest"` | Kafka consumer offset. Possible values are `oldest` and `newest`. | | telegraf-kafka-consumer.kafkaConsumers.test.precision | string | "1us" | Data precision. | @@ -456,7 +456,7 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer-oss.kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.max_undelivered_messages | int | 10000 | Maximum number of undelivered messages. Should be a multiple of metric_batch_size, setting it too low may never flush the broker's messages. | -| telegraf-kafka-consumer-oss.kafkaConsumers.test.metric_batch_size | int | 5000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | +| telegraf-kafka-consumer-oss.kafkaConsumers.test.metric_batch_size | int | 1000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.metric_buffer_limit | int | 100000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.offset | string | `"oldest"` | Kafka consumer offset. Possible values are `oldest` and `newest`. | | telegraf-kafka-consumer-oss.kafkaConsumers.test.precision | string | "1us" | Data precision. 
| diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md index e361988887..893371b373 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md @@ -27,7 +27,7 @@ Telegraf is an agent written in Go for collecting, processing, aggregating, and | kafkaConsumers.test.flush_jitter | string | "0s" | Jitter the flush interval by a random amount. This is primarily to avoid large write spikes for users running a large number of telegraf instances. | | kafkaConsumers.test.max_processing_time | string | "5s" | Maximum processing time for a single message. | | kafkaConsumers.test.max_undelivered_messages | int | 10000 | Maximum number of undelivered messages. Should be a multiple of metric_batch_size, setting it too low may never flush the broker's messages. | -| kafkaConsumers.test.metric_batch_size | int | 5000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | +| kafkaConsumers.test.metric_batch_size | int | 1000 | Sends metrics to the output in batches of at most metric_batch_size metrics. | | kafkaConsumers.test.metric_buffer_limit | int | 100000 | Caches metric_buffer_limit metrics for each output, and flushes this buffer on a successful write. This should be a multiple of metric_batch_size and could not be less than 2 times metric_batch_size. | | kafkaConsumers.test.offset | string | `"oldest"` | Kafka consumer offset. Possible values are `oldest` and `newest`. | | kafkaConsumers.test.precision | string | "1us" | Data precision. 
| diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl index 72e8d824c3..11dae28e5a 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl @@ -12,7 +12,7 @@ metadata: data: telegraf.conf: |+ [agent] - metric_batch_size = {{ default 5000 .value.metric_batch_size }} + metric_batch_size = {{ default 1000 .value.metric_batch_size }} metric_buffer_limit = {{ default 100000 .value.metric_buffer_limit }} collection_jitter = {{ default "0s" .value.collection_jitter | quote }} flush_interval = {{ default "10s" .value.flush_interval | quote }} diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml index 28b3081941..2520358ae6 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml @@ -62,8 +62,8 @@ kafkaConsumers: # -- Sends metrics to the output in batches of at most metric_batch_size # metrics. - # @default -- 5000 - metric_batch_size: 5000 + # @default -- 1000 + metric_batch_size: 1000 # -- Caches metric_buffer_limit metrics for each output, and flushes this # buffer on a successful write. 
This should be a multiple of metric_batch_size diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 8bd0138b2b..6b445bdcd9 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -172,7 +172,6 @@ telegraf-kafka-consumer: database: "efd" topicRegexps: | [ "lsst.sal.MTM1M3" ] - metric_batch_size: 2500 debug: true m2: enabled: true diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index 1df0bcd307..21dca61f0c 100644 --- a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -116,7 +116,6 @@ telegraf-kafka-consumer: database: "efd" topicRegexps: | [ "lsst.sal.MTM1M3" ] - metric_batch_size: 2500 debug: true m2: enabled: true diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index f210ecb710..d3adca0d93 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -182,7 +182,6 @@ telegraf-kafka-consumer: timestamp_field: "private_efdStamp" topicRegexps: | [ "lsst.sal.MTM1M3" ] - metric_batch_size: 2500 debug: true m2: enabled: true From c54221a403c2731dd7c114f3e42ce8e34e66aaae Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 1 Oct 2024 16:26:30 -0700 Subject: [PATCH 197/567] Use image with the race condition fix everywhere - Also change pull policy to IfNotPresent --- applications/sasquatch/README.md | 12 ++++++------ .../charts/telegraf-kafka-consumer/README.md | 6 +++--- .../charts/telegraf-kafka-consumer/values.yaml | 6 +++--- applications/sasquatch/values-base.yaml | 3 --- applications/sasquatch/values-tucson-teststand.yaml | 3 --- 5 files changed, 12 insertions(+), 18 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 13cbefb735..7de173c6ca 100644 --- a/applications/sasquatch/README.md +++ 
b/applications/sasquatch/README.md @@ -403,9 +403,9 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer.enabled | bool | `false` | Wether the Telegraf Kafka Consumer is enabled | | telegraf-kafka-consumer.env | list | See `values.yaml` | Telegraf agent enviroment variables | | telegraf-kafka-consumer.envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | -| telegraf-kafka-consumer.image.pullPolicy | string | `"Always"` | Image pull policy | -| telegraf-kafka-consumer.image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | -| telegraf-kafka-consumer.image.tag | string | `"1.30.2-alpine"` | Telegraf image tag | +| telegraf-kafka-consumer.image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | +| telegraf-kafka-consumer.image.repo | string | `"docker.io/lsstsqre/telegraf"` | Telegraf image repository | +| telegraf-kafka-consumer.image.tag | string | `"avro-mutex"` | Telegraf image tag | | telegraf-kafka-consumer.imagePullSecrets | list | `[]` | Secret names to use for Docker pulls | | telegraf-kafka-consumer.influxdb.database | string | `"telegraf-kafka-consumer-v1"` | Name of the InfluxDB v1 database to write to | | telegraf-kafka-consumer.influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | @@ -440,9 +440,9 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer-oss.enabled | bool | `false` | Wether the Telegraf Kafka Consumer is enabled | | telegraf-kafka-consumer-oss.env | list | See `values.yaml` | Telegraf agent enviroment variables | | telegraf-kafka-consumer-oss.envFromSecret | string | `""` | Name of the secret with values to be added to the environment. 
| -| telegraf-kafka-consumer-oss.image.pullPolicy | string | `"Always"` | Image pull policy | -| telegraf-kafka-consumer-oss.image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | -| telegraf-kafka-consumer-oss.image.tag | string | `"1.30.2-alpine"` | Telegraf image tag | +| telegraf-kafka-consumer-oss.image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | +| telegraf-kafka-consumer-oss.image.repo | string | `"docker.io/lsstsqre/telegraf"` | Telegraf image repository | +| telegraf-kafka-consumer-oss.image.tag | string | `"avro-mutex"` | Telegraf image tag | | telegraf-kafka-consumer-oss.imagePullSecrets | list | `[]` | Secret names to use for Docker pulls | | telegraf-kafka-consumer-oss.influxdb.database | string | `"telegraf-kafka-consumer-v1"` | Name of the InfluxDB v1 database to write to | | telegraf-kafka-consumer-oss.influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md index 893371b373..0be7c27bdb 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md @@ -11,9 +11,9 @@ Telegraf is an agent written in Go for collecting, processing, aggregating, and | enabled | bool | `false` | Wether the Telegraf Kafka Consumer is enabled | | env | list | See `values.yaml` | Telegraf agent enviroment variables | | envFromSecret | string | `""` | Name of the secret with values to be added to the environment. 
| -| image.pullPolicy | string | `"Always"` | Image pull policy | -| image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | -| image.tag | string | `"1.30.2-alpine"` | Telegraf image tag | +| image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | +| image.repo | string | `"docker.io/lsstsqre/telegraf"` | Telegraf image repository | +| image.tag | string | `"avro-mutex"` | Telegraf image tag | | imagePullSecrets | list | `[]` | Secret names to use for Docker pulls | | influxdb.database | string | `"telegraf-kafka-consumer-v1"` | Name of the InfluxDB v1 database to write to | | influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml index 2520358ae6..dd0fc7cb4f 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml @@ -5,13 +5,13 @@ enabled: false image: # -- Telegraf image repository - repo: "docker.io/library/telegraf" + repo: "docker.io/lsstsqre/telegraf" # -- Telegraf image tag - tag: "1.30.2-alpine" + tag: "avro-mutex" # -- Image pull policy - pullPolicy: "Always" + pullPolicy: "IfNotPresent" # -- Annotations for telegraf-kafka-consumers pods podAnnotations: {} diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 6b445bdcd9..4440c387ea 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -133,9 +133,6 @@ influxdb: telegraf-kafka-consumer: enabled: true - image: - repo: "docker.io/lsstsqre/telegraf" - tag: "avro-mutex" kafkaConsumers: auxtel: enabled: true diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index 21dca61f0c..64f30615a3 100644 --- 
a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -77,9 +77,6 @@ influxdb: telegraf-kafka-consumer: enabled: true - image: - repo: "docker.io/lsstsqre/telegraf" - tag: "avro-mutex" kafkaConsumers: auxtel: enabled: true From 4f201c16336ad733d13d953876e71aa82f141b34 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 1 Oct 2024 20:37:42 -0300 Subject: [PATCH 198/567] rapid-analysis: slight typo fix for AWS credentials that comes from rubintv vault --- applications/rapid-analysis/templates/deployment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/rapid-analysis/templates/deployment.yaml b/applications/rapid-analysis/templates/deployment.yaml index 1f5e13bd87..d6a44033ca 100644 --- a/applications/rapid-analysis/templates/deployment.yaml +++ b/applications/rapid-analysis/templates/deployment.yaml @@ -138,7 +138,7 @@ spec: cp -RL /secrets-raw/* /secrets cat /secrets/aws-credentials.ini > new-aws-credentials.ini printf "\n" >> new-aws-credentials.ini - cat /secrets-rubintv/aws-credentials.ini >> new-aws-credentials.ini + cat /secrets-rubintv/aws_credentials.ini >> new-aws-credentials.ini printf "\n" >> new-aws-credentials.ini mv new-aws-credentials.ini /secrets/aws-credentials.ini chown 73006:73006 /secrets/* From 08db8a310cfa6bbb70bd432e4dc4b1d8246b8c03 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 2 Oct 2024 01:24:02 +0000 Subject: [PATCH 199/567] Update Helm release argo-workflows to v0.42.5 --- applications/argo-workflows/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argo-workflows/Chart.yaml b/applications/argo-workflows/Chart.yaml index 28c6a47cac..ba374e241b 100644 --- a/applications/argo-workflows/Chart.yaml +++ b/applications/argo-workflows/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: 
argo-workflows - version: 0.42.3 + version: 0.42.5 repository: https://argoproj.github.io/argo-helm From 8d6b15db64d27c28a9e274994f3e9088568ef453 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Date: Wed, 2 Oct 2024 09:07:29 -0300 Subject: [PATCH 200/567] exposurelog: update ComCam nfs mounts --- applications/exposurelog/values-summit.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/exposurelog/values-summit.yaml b/applications/exposurelog/values-summit.yaml index dac0d8412a..61ba1cf848 100644 --- a/applications/exposurelog/values-summit.yaml +++ b/applications/exposurelog/values-summit.yaml @@ -1,7 +1,7 @@ config: site_id: summit - nfs_path_1: /repo/LSSTComCam # Mounted as /volume_1 - nfs_server_1: comcam-archiver.cp.lsst.org + nfs_path_1: /comcam/repo/LSSTComCam # Mounted as /volume_1 + nfs_server_1: nfs3.cp.lsst.org butler_uri_1: /volume_1 nfs_path_2: /auxtel/repo/LATISS # Mounted as /volume_2 From b77bd8f6017c9f5ac3db99e5bcd01a7892c5fed4 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Date: Wed, 2 Oct 2024 10:55:40 -0300 Subject: [PATCH 201/567] rubintv: add application to TTS --- .../rubintv/values-tucson-teststand.yaml | 42 +++++++++++++++++++ environments/values-tucson-teststand.yaml | 1 + 2 files changed, 43 insertions(+) create mode 100644 applications/rubintv/values-tucson-teststand.yaml diff --git a/applications/rubintv/values-tucson-teststand.yaml b/applications/rubintv/values-tucson-teststand.yaml new file mode 100644 index 0000000000..64526e159d --- /dev/null +++ b/applications/rubintv/values-tucson-teststand.yaml @@ -0,0 +1,42 @@ +rubintv: + siteTag: "tucson" + separateSecrets: true + + imagePullSecrets: + - name: pull-secret + + frontend: + debug: true + env: + - name: S3_ENDPOINT_URL + value: &s3E "https://s3.rubintv.tu.lsst.org" + - name: RAPID_ANALYSIS_LOCATION + value: "TTS" + image: + tag: deploy + pullPolicy: Always + + workers: + replicas: 1 + image: + repository: ts-dockerhub.lsst.org/rapid-analysis + 
tag: c0037 + pullPolicy: Always + uid: 73006 + gid: 73006 + scriptsLocation: /repos/rubintv_analysis_service/scripts + script: rubintv_worker.py -a rubintv-dev -p 8080 -c /repos/rubintv_analysis_service/scripts/config-temporal.yaml + env: + - name: S3_ENDPOINT_URL + value: *s3E + - name: DAF_BUTLER_REPOSITORY_INDEX + value: "s3://rubin-summit-users/data-repos.yaml" + - name: DAF_BUTLER_REPOSITORY + value: "/sdf/group/rubin/repo/ir2/butler.yaml" + resources: + limits: + cpu: 2.0 + memory: "8Gi" + +global: + tsVaultSecretsPath: "" diff --git a/environments/values-tucson-teststand.yaml b/environments/values-tucson-teststand.yaml index ae6ec29038..3e47b7fcc5 100644 --- a/environments/values-tucson-teststand.yaml +++ b/environments/values-tucson-teststand.yaml @@ -19,6 +19,7 @@ applications: nublado: true obsenv-management: true portal: true + rubintv: true sasquatch: true squareone: true strimzi: true From a6ed075db6c5ed8c95e11b96fad132b3572e2cdb Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Date: Wed, 2 Oct 2024 11:28:37 -0300 Subject: [PATCH 202/567] rapid-analysis: add application to TTS --- .../values-tucson-teststand.yaml | 121 ++++++++++++++++++ environments/values-tucson-teststand.yaml | 1 + 2 files changed, 122 insertions(+) create mode 100644 applications/rapid-analysis/values-tucson-teststand.yaml diff --git a/applications/rapid-analysis/values-tucson-teststand.yaml b/applications/rapid-analysis/values-tucson-teststand.yaml new file mode 100644 index 0000000000..8604e12165 --- /dev/null +++ b/applications/rapid-analysis/values-tucson-teststand.yaml @@ -0,0 +1,121 @@ +image: + repository: ts-dockerhub.lsst.org/rapid-analysis + tag: c0039 + pullPolicy: Always +env: + DAF_BUTLER_REPOSITORY_INDEX: /project/data-repos.yaml + DEPLOY_BRANCH: deploy-tts +siteTag: tts +location: TTS +scripts: +- name: summit/auxTel/runBackgroundService.py +- name: summit/auxTel/runButlerWatcher.py +- name: summit/auxTel/runCalibrateCcdRunner.py +- name: 
summit/auxTel/runImExaminer.py +- name: summit/auxTel/runIsrRunner.py +- name: summit/auxTel/runMetadataCreator.py +- name: summit/auxTel/runMetadataServer.py +- name: summit/auxTel/runMonitor.py +- name: summit/auxTel/runMountTorquePlotter.py +- name: summit/auxTel/runNightReporter.py +- name: summit/auxTel/runSpecExaminer.py +- name: summit/comCam/runButlerWatcher.py +- name: summit/comCam/runIsrRunner_000.py +- name: summit/comCam/runIsrRunner_001.py +- name: summit/comCam/runIsrRunner_002.py +- name: summit/comCam/runIsrRunner_003.py +- name: summit/comCam/runIsrRunner_004.py +- name: summit/comCam/runIsrRunner_005.py +- name: summit/comCam/runIsrRunner_006.py +- name: summit/comCam/runIsrRunner_007.py +- name: summit/comCam/runIsrRunner_008.py +- name: summit/comCam/runMetadataServer.py +- name: summit/comCam/runPlotter.py + resources: + requests: + cpu: 0.5 + memory: 4G + limits: + cpu: 1.0 + memory: 6G +- name: summit/misc/runTmaTelemetry.py +# TODO: remove google credentials +credentialFile: google_write_creds +vaultPrefixPath: secret/k8s_operator/tucson-teststand.lsst.codes +pullSecretsPath: pull-secret +rubinTvSecretsPath: rubintv +# TODO: remove google credentials +credentialSecretsPath: rubintv +butlerSecret: + key: butler-secret + containerPath: /home/saluser/.lsst + dbUser: oods +imagePullSecrets: +- name: pull-secret +nfsMountpoint: +- name: auxtel-gen3-data + containerPath: /repo/LATISS + readOnly: false + server: nfs-auxtel.tu.lsst.org + serverPath: /auxtel/repo/LATISS +- name: comcam-gen3-data + containerPath: /repo/LSSTComCam + readOnly: false + server: comcam-archiver.tu.lsst.org + serverPath: /repo/LSSTComCam +- name: auxtel-data + containerPath: /readonly/lsstdata/auxtel + readOnly: true + server: nfs-auxtel.tu.lsst.org + serverPath: /auxtel/lsstdata +- name: comcam-data + containerPath: /readonly/lsstdata/comcam + readOnly: true + server: comcam-archiver.tu.lsst.org + serverPath: /lsstdata +- name: project-shared + containerPath: /project + 
readOnly: false + server: nfs-project.tu.lsst.org + serverPath: /project +- name: auxtel-gen3-data-temp + containerPath: /data/lsstdata/TTS/auxtel + readOnly: true + server: nfs-auxtel.tu.lsst.org + serverPath: /auxtel/lsstdata/TTS/auxtel +- name: comcam-gen3-data-temp + containerPath: /data/lsstdata/TTS/comcam + readOnly: true + server: comcam-archiver.tu.lsst.org + serverPath: /lsstdata/TTS/comcam +- name: scratch-shared + containerPath: /scratch + readOnly: false + server: nfs-scratch.tu.lsst.org + serverPath: /scratch/rubintv +resources: + requests: + cpu: 0.5 + memory: 1G + limits: + cpu: 1.0 + memory: 2.5G +redis: + enabled: true + port: 6379 + env: + MASTER: true + envSecrets: + - name: REDIS_PASSWORD + secretName: rubintv-secrets + secretKey: redis-password + storage: + classname: rook-ceph-block + request: 10Gi + resources: + requests: + cpu: 100m + memory: 1Gi + limits: + cpu: 1 + memory: 50Gi diff --git a/environments/values-tucson-teststand.yaml b/environments/values-tucson-teststand.yaml index 3e47b7fcc5..6554e28ab3 100644 --- a/environments/values-tucson-teststand.yaml +++ b/environments/values-tucson-teststand.yaml @@ -19,6 +19,7 @@ applications: nublado: true obsenv-management: true portal: true + rapid-analysis: true rubintv: true sasquatch: true squareone: true From 35ba2224aac75ddbf2fdcba64446deb669fd2dcf Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 1 Oct 2024 16:49:31 -0700 Subject: [PATCH 203/567] Merge eas and auxtel connectors back - They were split in DM-46203 to alleviate the race condition found in the Telegraf Avro parsers. This is fixed in the new image. 
--- applications/sasquatch/values-usdfprod.yaml | 18 ++---------------- 1 file changed, 2 insertions(+), 16 deletions(-) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index d3adca0d93..9e02f4ea5e 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -167,14 +167,7 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.ESS" ] - debug: true - eas2: - enabled: true - database: "efd" - timestamp_field: "private_efdStamp" - topicRegexps: | - [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + [ "lsst.sal.DIMM", "lsst.sal.ESS", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] debug: true m1m3: enabled: true @@ -251,14 +244,7 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory" ] - debug: true - auxtel2: - enabled: true - database: "efd" - timestamp_field: "private_efdStamp" - topicRegexps: | - [ "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] + [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] debug: true latiss: enabled: true From 1edc4677d27c0c2bf96bd7618539e42bb12edbe3 Mon Sep 17 00:00:00 2001 From: pav511 <38131208+pav511@users.noreply.github.com> Date: Wed, 2 Oct 2024 12:45:52 -0700 Subject: [PATCH 204/567] gmegias usdf-rsp-dev argocd --- applications/argocd/values-usdfdev.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/argocd/values-usdfdev.yaml b/applications/argocd/values-usdfdev.yaml index a58a82f77e..effe6126c4 100644 --- a/applications/argocd/values-usdfdev.yaml +++ b/applications/argocd/values-usdfdev.yaml @@ -59,6 +59,7 @@ argo-cd: g, 
spothi@slac.stanford.edu, role:developer g, bbrond@slac.stanford.edu, role:developer g, vbecker@slac.stanford.edu, role:developer + g, gmegias@slac.stanford.edu, role:developer scopes: "[email]" server: From b17071ac599a11db4ab017b78936119c803032e7 Mon Sep 17 00:00:00 2001 From: pav511 <38131208+pav511@users.noreply.github.com> Date: Wed, 2 Oct 2024 13:01:29 -0700 Subject: [PATCH 205/567] salnikov usdf-rsp-dev argocd --- applications/argocd/values-usdfdev.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/argocd/values-usdfdev.yaml b/applications/argocd/values-usdfdev.yaml index effe6126c4..3a05ea872d 100644 --- a/applications/argocd/values-usdfdev.yaml +++ b/applications/argocd/values-usdfdev.yaml @@ -60,6 +60,7 @@ argo-cd: g, bbrond@slac.stanford.edu, role:developer g, vbecker@slac.stanford.edu, role:developer g, gmegias@slac.stanford.edu, role:developer + g, salnikov@slac.stanford.edu, role:developer scopes: "[email]" server: From 8085525817e017819ae8797904eab23020f6580e Mon Sep 17 00:00:00 2001 From: Jeremy McCormick Date: Thu, 26 Sep 2024 12:36:59 -0500 Subject: [PATCH 206/567] Add initial version of ppdb-replication application Add the initial version of an application for replicating data between the APDB and the PPDB. 
--- applications/ppdb-replication/.helmignore | 23 ++++ applications/ppdb-replication/Chart.yaml | 8 ++ applications/ppdb-replication/README.md | 44 +++++++ applications/ppdb-replication/secrets.yaml | 9 ++ .../ppdb-replication/templates/_helpers.tpl | 44 +++++++ .../ppdb-replication/templates/configmap.yaml | 19 +++ .../templates/deployment.yaml | 89 +++++++++++++ .../ppdb-replication/templates/ingress.yaml | 30 +++++ .../templates/networkpolicy.yaml | 21 ++++ .../ppdb-replication/templates/pvc.yaml | 18 +++ .../ppdb-replication/templates/service.yaml | 15 +++ .../templates/vault-secrets.yaml | 9 ++ .../ppdb-replication/values-usdfdev.yaml | 44 +++++++ applications/ppdb-replication/values.yaml | 118 ++++++++++++++++++ docs/applications/ppdb-replication/index.rst | 19 +++ docs/applications/ppdb-replication/values.md | 12 ++ docs/applications/rsp.rst | 1 + docs/applications/rubin.rst | 1 + environments/README.md | 1 + .../applications/rubin/ppdb-replication.yaml | 34 +++++ environments/values-usdfdev.yaml | 1 + environments/values.yaml | 3 + 22 files changed, 563 insertions(+) create mode 100644 applications/ppdb-replication/.helmignore create mode 100644 applications/ppdb-replication/Chart.yaml create mode 100644 applications/ppdb-replication/README.md create mode 100644 applications/ppdb-replication/secrets.yaml create mode 100644 applications/ppdb-replication/templates/_helpers.tpl create mode 100644 applications/ppdb-replication/templates/configmap.yaml create mode 100644 applications/ppdb-replication/templates/deployment.yaml create mode 100644 applications/ppdb-replication/templates/ingress.yaml create mode 100644 applications/ppdb-replication/templates/networkpolicy.yaml create mode 100644 applications/ppdb-replication/templates/pvc.yaml create mode 100644 applications/ppdb-replication/templates/service.yaml create mode 100644 applications/ppdb-replication/templates/vault-secrets.yaml create mode 100644 applications/ppdb-replication/values-usdfdev.yaml create 
mode 100644 applications/ppdb-replication/values.yaml create mode 100644 docs/applications/ppdb-replication/index.rst create mode 100644 docs/applications/ppdb-replication/values.md create mode 100644 environments/templates/applications/rubin/ppdb-replication.yaml diff --git a/applications/ppdb-replication/.helmignore b/applications/ppdb-replication/.helmignore new file mode 100644 index 0000000000..0e8a0eb36f --- /dev/null +++ b/applications/ppdb-replication/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/applications/ppdb-replication/Chart.yaml b/applications/ppdb-replication/Chart.yaml new file mode 100644 index 0000000000..1dd8dce332 --- /dev/null +++ b/applications/ppdb-replication/Chart.yaml @@ -0,0 +1,8 @@ +apiVersion: v2 +appVersion: 0.1.0 +description: Replicates data from the APDB to the PPDB +name: ppdb-replication +sources: +- https://github.com/lsst/dax_ppdb.git +type: application +version: 1.0.0 diff --git a/applications/ppdb-replication/README.md b/applications/ppdb-replication/README.md new file mode 100644 index 0000000000..5598cba6d9 --- /dev/null +++ b/applications/ppdb-replication/README.md @@ -0,0 +1,44 @@ +# ppdb-replication + +Replicates data from the APDB to the PPDB + +## Source Code + +* + +## Values + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| affinity | object | `{}` | Affinity rules for the ppdb-replication deployment pod | +| config.additionalS3ProfileName | string | `nil` | Additional S3 profile name | +| config.additionalS3ProfileUrl | string | `nil` | Additional S3 profile URL | +| config.apdbConfig | string | `nil` | APDB config file 
resource | +| config.apdbIndexUri | string | `nil` | APDB index URI | +| config.checkInterval | string | `nil` | Time to wait before checking for new chunks, if no chunk appears | +| config.disableBucketValidation | int | `1` | Disable bucket validation in LSST S3 tools | +| config.logLevel | string | `"INFO"` | Logging level | +| config.logProfile | string | `"production"` | Logging profile (`production` for JSON, `development` for human-friendly) | +| config.maxWaitTime | string | `nil` | Maximum time to wait before replicating a chunk after next chunk appears | +| config.minWaitTime | string | `nil` | Minimum time to wait before replicating a chunk after next chunk appears | +| config.monLogger | string | `"lsst.dax.ppdb.monitor"` | Name of logger for monitoring | +| config.monRules | string | `nil` | Comma-separated list of monitoring filter rules | +| config.pathPrefix | string | `"/ppdb-replication"` | URL path prefix | +| config.persistentVolumeClaims | list | `[]` | Persistent volume claims | +| config.ppdbConfig | string | `nil` | PPDB config file resource | +| config.s3EndpointUrl | string | `nil` | S3 endpoint URL | +| config.updateExisting | bool | `false` | Allow updates to already replicated data | +| config.volumeMounts | list | `[]` | Volume mounts | +| config.volumes | list | `[]` | Volumes specific to the environment | +| global.baseUrl | string | Set by Argo CD | Base URL for the environment | +| global.host | string | Set by Argo CD | Host name for ingress | +| global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | +| image.pullPolicy | string | `"Always"` | Pull policy for the ppdb-replication image | +| image.repository | string | `"ghcr.io/lsst/ppdb-replication"` | Image to use in the ppdb-replication deployment | +| image.tag | string | The appVersion of the chart | Tag of image to use | +| ingress.annotations | object | `{}` | Additional annotations for the ingress rule | +| nodeSelector | object | `{}` | Node 
selection rules for the ppdb-replication deployment pod | +| podAnnotations | object | `{}` | Annotations for the ppdb-replication deployment pod | +| replicaCount | int | `1` | Number of deployment pods to start | +| resources | object | see `values.yaml` | Resource limits and requests for the ppdb-replication deployment pod | +| tolerations | list | `[]` | Tolerations for the ppdb-replication deployment pod | diff --git a/applications/ppdb-replication/secrets.yaml b/applications/ppdb-replication/secrets.yaml new file mode 100644 index 0000000000..92474ab2c3 --- /dev/null +++ b/applications/ppdb-replication/secrets.yaml @@ -0,0 +1,9 @@ +"aws-credentials.ini": + description: >- + AWS credentials required for acessing configuration files in S3. +"db-auth.yaml": + description: >- + Cassandra database credentials for the APDB. +"postgres-credentials.txt": + description: >- + PostgreSQL credentials in its pgpass format for the PPDB database. diff --git a/applications/ppdb-replication/templates/_helpers.tpl b/applications/ppdb-replication/templates/_helpers.tpl new file mode 100644 index 0000000000..47bdc59cfe --- /dev/null +++ b/applications/ppdb-replication/templates/_helpers.tpl @@ -0,0 +1,44 @@ +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "ppdb-replication.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "ppdb-replication.labels" -}} +helm.sh/chart: {{ include "ppdb-replication.chart" . }} +{{ include "ppdb-replication.selectorLabels" . 
}} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "ppdb-replication.selectorLabels" -}} +app.kubernetes.io/name: "ppdb-replication" +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "ppdb-replication.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} \ No newline at end of file diff --git a/applications/ppdb-replication/templates/configmap.yaml b/applications/ppdb-replication/templates/configmap.yaml new file mode 100644 index 0000000000..a66bacce2c --- /dev/null +++ b/applications/ppdb-replication/templates/configmap.yaml @@ -0,0 +1,19 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: "ppdb-replication" + labels: + {{- include "ppdb-replication.labels" . 
| nindent 4 }} +data: + DAX_APDB_INDEX_URI: {{ .Values.config.apdbIndexUri | quote }} + PPDB_REPLICATION_LOG_LEVEL: {{ .Values.config.logLevel | quote }} + PPDB_REPLICATION_PATH_PREFIX: {{ .Values.config.pathPrefix | quote }} + PPDB_REPLICATION_PROFILE: {{ .Values.config.logProfile | quote }} + PPDB_REPLICATION_APDB_CONFIG: {{ .Values.config.apdbConfig | quote }} + PPDB_REPLICATION_PPDB_CONFIG: {{ .Values.config.ppdbConfig | quote }} + PPDB_REPLICATION_MON_LOGGER: {{ .Values.config.monLogger | quote }} + PPDB_REPLICATION_MON_RULES: {{ .Values.config.monRules | quote }} + PPDB_REPLICATION_UPDATE_EXISTING: {{ .Values.config.updateExisting | quote}} + PPDB_REPLICATION_MIN_WAIT_TIME: {{ .Values.config.minWaitTime | quote }} + PPDB_REPLICATION_MAX_WAIT_TIME: {{ .Values.config.maxWaitTime | quote }} + PPDB_REPLICATION_CHECK_INTERVAL: {{ .Values.config.checkInterval | quote}} diff --git a/applications/ppdb-replication/templates/deployment.yaml b/applications/ppdb-replication/templates/deployment.yaml new file mode 100644 index 0000000000..454ec56b56 --- /dev/null +++ b/applications/ppdb-replication/templates/deployment.yaml @@ -0,0 +1,89 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "ppdb-replication.fullname" . }} + labels: + {{- include "ppdb-replication.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: + {{- include "ppdb-replication.selectorLabels" . | nindent 6 }} + template: + metadata: + labels: + {{- include "ppdb-replication.selectorLabels" . | nindent 8 }} + annotations: + # Force the pod to restart when the config maps are updated. + checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} + {{- with .Values.podAnnotations }} + {{- toYaml . | nindent 8 }} + {{- end }} + spec: + volumes: + - name: "ppdb-replication-secrets-raw" + secret: + secretName: {{ include "ppdb-replication.fullname" . 
}} + - name: "ppdb-replication-secrets" + emptyDir: + sizeLimit: "100Mi" + {{- with .Values.config.volumes }} + {{- . | toYaml | nindent 8 }} + {{- end }} + initContainers: + - name: fix-secret-permissions + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy | quote }} + command: + - "/bin/sh" + - "-c" + - | + cp -RL /tmp/ppdb-replication-secrets-raw/* /app/secrets/ + chmod 0400 /app/secrets/* + securityContext: + runAsNonRoot: false + runAsUser: 0 + runAsGroup: 0 + volumeMounts: + - name: "ppdb-replication-secrets" + mountPath: "/app/secrets" + - name: "ppdb-replication-secrets-raw" + mountPath: "/tmp/ppdb-replication-secrets-raw" + readOnly: true + containers: + - name: {{ .Chart.Name }} + envFrom: + - configMapRef: + name: "ppdb-replication" + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + env: + - name: AWS_SHARED_CREDENTIALS_FILE + value: "/app/secrets/aws-credentials.ini" + - name: PGPASSFILE + value: "/app/secrets/postgres-credentials.txt" + - name: LSST_DB_AUTH + value: "/app/secrets/db-auth.yaml" + - name: S3_ENDPOINT_URL + value: {{ .Values.config.s3EndpointUrl | quote }} + - name: LSST_RESOURCES_S3_PROFILE_{{ .Values.config.additionalS3ProfileName }} + value: {{ .Values.config.additionalS3ProfileUrl | quote }} + - name: LSST_DISABLE_BUCKET_VALIDATION + value: {{ .Values.config.disableBucketValidation | quote }} + volumeMounts: + - name: "ppdb-replication-secrets" + mountPath: "/app/secrets" + readOnly: true + {{- with .Values.config.volumeMounts }} + {{- . | toYaml | nindent 12 }} + {{- end }} + resources: + {{- toYaml .Values.resources | nindent 12 }} + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . 
| nindent 8 }} + {{- end }} diff --git a/applications/ppdb-replication/templates/ingress.yaml b/applications/ppdb-replication/templates/ingress.yaml new file mode 100644 index 0000000000..381bce084c --- /dev/null +++ b/applications/ppdb-replication/templates/ingress.yaml @@ -0,0 +1,30 @@ +apiVersion: gafaelfawr.lsst.io/v1alpha1 +kind: GafaelfawrIngress +metadata: + name: "ppdb-replication" + labels: + {{- include "ppdb-replication.labels" . | nindent 4 }} +config: + baseUrl: {{ .Values.global.baseUrl | quote }} + scopes: + all: + - "read:image" +template: + metadata: + name: "ppdb-replication" + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . | nindent 6 }} + {{- end }} + spec: + rules: + - host: {{ required "global.host must be set" .Values.global.host | quote }} + http: + paths: + - path: {{ .Values.config.pathPrefix | quote }} + pathType: "Prefix" + backend: + service: + name: "ppdb-replication" + port: + number: 8080 diff --git a/applications/ppdb-replication/templates/networkpolicy.yaml b/applications/ppdb-replication/templates/networkpolicy.yaml new file mode 100644 index 0000000000..10ddf62820 --- /dev/null +++ b/applications/ppdb-replication/templates/networkpolicy.yaml @@ -0,0 +1,21 @@ +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: "ppdb-replication" +spec: + podSelector: + matchLabels: + {{- include "ppdb-replication.selectorLabels" . | nindent 6 }} + policyTypes: + - "Ingress" + ingress: + # Allow inbound access from pods (in any namespace) labeled + # gafaelfawr.lsst.io/ingress: true. 
+ - from: + - namespaceSelector: {} + podSelector: + matchLabels: + gafaelfawr.lsst.io/ingress: "true" + ports: + - protocol: "TCP" + port: 8080 diff --git a/applications/ppdb-replication/templates/pvc.yaml b/applications/ppdb-replication/templates/pvc.yaml new file mode 100644 index 0000000000..52af2db47b --- /dev/null +++ b/applications/ppdb-replication/templates/pvc.yaml @@ -0,0 +1,18 @@ +{{- if .Values.config.persistentVolumeClaims }} +{{- $top := . -}} +{{- range $index, $pvc := .Values.config.persistentVolumeClaims }} +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: "{{ $pvc.name }}" +spec: + storageClassName: "{{ $pvc.storageClassName }}" + accessModes: + - ReadOnlyMany + resources: + requests: + storage: 100Mi +{{- end }} +{{- end }} + diff --git a/applications/ppdb-replication/templates/service.yaml b/applications/ppdb-replication/templates/service.yaml new file mode 100644 index 0000000000..27b726bc7b --- /dev/null +++ b/applications/ppdb-replication/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: "ppdb-replication" + labels: + {{- include "ppdb-replication.labels" . | nindent 4 }} +spec: + type: "ClusterIP" + ports: + - port: 8080 + targetPort: "http" + protocol: "TCP" + name: "http" + selector: + {{- include "ppdb-replication.selectorLabels" . | nindent 4 }} diff --git a/applications/ppdb-replication/templates/vault-secrets.yaml b/applications/ppdb-replication/templates/vault-secrets.yaml new file mode 100644 index 0000000000..96c228968f --- /dev/null +++ b/applications/ppdb-replication/templates/vault-secrets.yaml @@ -0,0 +1,9 @@ +apiVersion: ricoberger.de/v1alpha1 +kind: VaultSecret +metadata: + name: {{ include "ppdb-replication.fullname" . }} + labels: + {{- include "ppdb-replication.labels" . 
| nindent 4 }} +spec: + path: "{{ .Values.global.vaultSecretsPath }}/ppdb-replication" + type: Opaque diff --git a/applications/ppdb-replication/values-usdfdev.yaml b/applications/ppdb-replication/values-usdfdev.yaml new file mode 100644 index 0000000000..b373b91d38 --- /dev/null +++ b/applications/ppdb-replication/values-usdfdev.yaml @@ -0,0 +1,44 @@ +config: + + # -- Logging level + logLevel: "INFO" + + # -- Logging profile (`production` for JSON, `development` for + # human-friendly) + logProfile: "development" + + # -- APDB config file resource + apdbConfig: "label:pp-prod:lsstcomcamsim-or4" + + # -- PPDB config file resource + ppdbConfig: "/sdf/group/rubin/user/jeremym/ppdb-replication/config/ppdb-replication-test-1.yaml" + + # -- APDB index URI + apdbIndexUri: "/sdf/group/rubin/shared/apdb_config/apdb-index.yaml" + + # -- S3 endpoint URL + s3EndpointUrl: https://s3dfrgw.slac.stanford.edu + + # -- S3 profile name for additional S3 profile + additionalS3ProfileName: "embargo" + + # -- S3 profile URL for additional S3 profile + additionalS3ProfileUrl: "https://sdfembs3.sdf.slac.stanford.edu" + + volumes: + - name: sdf-group-rubin + persistentVolumeClaim: + claimName: sdf-group-rubin + - name: sdf-data-rubin + persistentVolumeClaim: + claimName: sdf-data-rubin + volumeMounts: + - name: sdf-group-rubin + mountPath: /sdf/group/rubin + - name: sdf-data-rubin + mountPath: /sdf/data/rubin + persistentVolumeClaims: + - name: sdf-group-rubin + storageClassName: sdf-group-rubin + - name: sdf-data-rubin + storageClassName: sdf-data-rubin diff --git a/applications/ppdb-replication/values.yaml b/applications/ppdb-replication/values.yaml new file mode 100644 index 0000000000..fec71e1776 --- /dev/null +++ b/applications/ppdb-replication/values.yaml @@ -0,0 +1,118 @@ +# Default values for ppdb-replication. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. 
+ +# -- Number of deployment pods to start +replicaCount: 1 + +image: + # -- Image to use in the ppdb-replication deployment + repository: "ghcr.io/lsst/ppdb-replication" + + # -- Pull policy for the ppdb-replication image + pullPolicy: "Always" + + # -- Tag of image to use + # @default -- The appVersion of the chart + tag: "main" + +ingress: + # -- Additional annotations for the ingress rule + annotations: {} + +# -- Affinity rules for the ppdb-replication deployment pod +affinity: {} + +# -- Node selection rules for the ppdb-replication deployment pod +nodeSelector: {} + +# -- Annotations for the ppdb-replication deployment pod +podAnnotations: {} + +# -- Resource limits and requests for the ppdb-replication deployment pod +# @default -- see `values.yaml` +resources: + limits: + cpu: "1" + memory: "16.0Gi" + requests: + cpu: "200m" # 20% of a single core + memory: "4.0Gi" + +# -- Tolerations for the ppdb-replication deployment pod +tolerations: [] + +# The following will be set by parameters injected by Argo CD and should not +# be set in the individual environment values files. 
+global: + # -- Base URL for the environment + # @default -- Set by Argo CD + baseUrl: null + + # -- Host name for ingress + # @default -- Set by Argo CD + host: null + + # -- Base path for Vault secrets + # @default -- Set by Argo CD + vaultSecretsPath: null + +# Application-specific configuration +config: + # -- Logging level + logLevel: "INFO" + + # -- Name of logger for monitoring + monLogger: "lsst.dax.ppdb.monitor" + + # -- Logging profile (`production` for JSON, `development` for + # human-friendly) + logProfile: "production" + + # -- URL path prefix + pathPrefix: "/ppdb-replication" + + # -- APDB config file resource + apdbConfig: null + + # -- PPDB config file resource + ppdbConfig: null + + # -- APDB index URI + apdbIndexUri: null + + # -- Comma-separated list of monitoring filter rules + monRules: null + + # -- Allow updates to already replicated data + updateExisting: false + + # -- Minimum time to wait before replicating a chunk after next chunk appears + minWaitTime: null + + # -- Maximum time to wait before replicating a chunk after next chunk appears + maxWaitTime: null + + # -- Time to wait before checking for new chunks, if no chunk appears + checkInterval: null + + # -- S3 endpoint URL + s3EndpointUrl: null + + # -- Additional S3 profile name + additionalS3ProfileName: null + + # -- Additional S3 profile URL + additionalS3ProfileUrl: null + + # -- Disable bucket validation in LSST S3 tools + disableBucketValidation: 1 + + # -- Volumes specific to the environment + volumes: [] + + # -- Volume mounts + volumeMounts: [] + + # -- Persistent volume claims + persistentVolumeClaims: [] diff --git a/docs/applications/ppdb-replication/index.rst b/docs/applications/ppdb-replication/index.rst new file mode 100644 index 0000000000..ea26aae83a --- /dev/null +++ b/docs/applications/ppdb-replication/index.rst @@ -0,0 +1,19 @@ +.. 
px-app:: ppdb-replication + +############################################################ +ppdb-replication — Replicates data from the APDB to the PPDB +############################################################ + +The ``ppdb-replication`` application periodically replicates data from the +Alert Production DataBase (APDB) to the Prompt Products DataBase (PPDB). + +.. jinja:: ppdb-replication + :file: applications/_summary.rst.jinja + +Guides +====== + +.. toctree:: + :maxdepth: 1 + + values \ No newline at end of file diff --git a/docs/applications/ppdb-replication/values.md b/docs/applications/ppdb-replication/values.md new file mode 100644 index 0000000000..425e7f6fd2 --- /dev/null +++ b/docs/applications/ppdb-replication/values.md @@ -0,0 +1,12 @@ +```{px-app-values} ppdb-replication +``` + +# ppdb-replication Helm values reference + +Helm values reference table for the {px-app}`ppdb-replication` application. + +```{include} ../../../applications/ppdb-replication/README.md +--- +start-after: "## Values" +--- +``` \ No newline at end of file diff --git a/docs/applications/rsp.rst b/docs/applications/rsp.rst index 9c631d37af..b395276738 100644 --- a/docs/applications/rsp.rst +++ b/docs/applications/rsp.rst @@ -18,6 +18,7 @@ Argo CD project: ``rsp`` noteburst/index nublado/index portal/index + ppdb-replication/index semaphore/index siav2/index sqlproxy-cross-project/index diff --git a/docs/applications/rubin.rst b/docs/applications/rubin.rst index a03aad2b38..483f483438 100644 --- a/docs/applications/rubin.rst +++ b/docs/applications/rubin.rst @@ -18,6 +18,7 @@ Argo CD project: ``rubin`` nightreport/index obsloctap/index plot-navigator/index + ppdb-replication/index production-tools/index rapid-analysis/index rubintv/index diff --git a/environments/README.md b/environments/README.md index 9ec20b4fda..d44f6c7e2d 100644 --- a/environments/README.md +++ b/environments/README.md @@ -43,6 +43,7 @@ | applications.plot-navigator | bool | `false` | Enable the 
plot-navigator application | | applications.portal | bool | `false` | Enable the portal application | | applications.postgres | bool | `false` | Enable the in-cluster PostgreSQL server. Use of this server is discouraged in favor of using infrastructure SQL, but will remain supported for use cases such as minikube test deployments. | +| applications.ppdb-replication | bool | `false` | Enable the ppdb-replication application | | applications.production-tools | bool | `false` | Enable the production-tools application | | applications.prompt-proto-service-hsc | bool | `false` | Enable the prompt-proto-service-hsc application | | applications.prompt-proto-service-hsc-gpu | bool | `false` | Enable the prompt-proto-service-hsc-gpu application | diff --git a/environments/templates/applications/rubin/ppdb-replication.yaml b/environments/templates/applications/rubin/ppdb-replication.yaml new file mode 100644 index 0000000000..e9685feb11 --- /dev/null +++ b/environments/templates/applications/rubin/ppdb-replication.yaml @@ -0,0 +1,34 @@ +{{- if (index .Values "applications" "ppdb-replication") -}} +apiVersion: v1 +kind: Namespace +metadata: + name: "ppdb-replication" +--- +apiVersion: argoproj.io/v1alpha1 +kind: Application +metadata: + name: "ppdb-replication" + namespace: "argocd" + finalizers: + - "resources-finalizer.argocd.argoproj.io" +spec: + destination: + namespace: "ppdb-replication" + server: "https://kubernetes.default.svc" + project: "rubin" + source: + path: "applications/ppdb-replication" + repoURL: {{ .Values.repoUrl | quote }} + targetRevision: {{ .Values.targetRevision | quote }} + helm: + parameters: + - name: "global.host" + value: {{ .Values.fqdn | quote }} + - name: "global.baseUrl" + value: "https://{{ .Values.fqdn }}" + - name: "global.vaultSecretsPath" + value: {{ .Values.vaultPathPrefix | quote }} + valueFiles: + - "values.yaml" + - "values-{{ .Values.name }}.yaml" +{{- end -}} \ No newline at end of file diff --git a/environments/values-usdfdev.yaml 
b/environments/values-usdfdev.yaml index 9eb8c8c0a7..d186cba8d2 100644 --- a/environments/values-usdfdev.yaml +++ b/environments/values-usdfdev.yaml @@ -27,6 +27,7 @@ applications: plot-navigator: true portal: true postgres: true + ppdb-replication: true rubintv: true sasquatch: true schedview-snapshot: true diff --git a/environments/values.yaml b/environments/values.yaml index b7774a1d1e..cd11a31959 100644 --- a/environments/values.yaml +++ b/environments/values.yaml @@ -153,6 +153,9 @@ applications: # supported for use cases such as minikube test deployments. postgres: false + # -- Enable the ppdb-replication application + ppdb-replication: false + # -- Enable the rubintv application rubintv: false From 4a75b88b0075b3cff6cf10aa454f06ddfe506a55 Mon Sep 17 00:00:00 2001 From: Dan Fuchs Date: Mon, 30 Sep 2024 10:54:27 -0500 Subject: [PATCH 207/567] DM-45522 sasquatch: App metrics events in Sasquatch Sasquatch subchart that creates and configures: * A new telegraf consumer for all app metrics * Per-app Avro tags config for that consumer * KafkaUser and KafkaTopic resources per-app See https://github.com/lsst-sqre/sasquatch/pull/41 for how to use this new subchart. 
review feedback More review feedback --- applications/sasquatch/Chart.yaml | 3 + applications/sasquatch/README.md | 23 +++- .../sasquatch/charts/app-metrics/Chart.yaml | 6 ++ .../sasquatch/charts/app-metrics/README.md | 28 +++++ .../charts/app-metrics/templates/_helpers.tpl | 10 ++ .../app-metrics/templates/kafka-topics.yaml | 15 +++ .../app-metrics/templates/kafka-users.yaml | 31 ++++++ .../templates/telegraf-configmap.yaml | 68 ++++++++++++ .../templates/telegraf-deployment.yaml | 78 ++++++++++++++ .../sasquatch/charts/app-metrics/values.yaml | 102 ++++++++++++++++++ .../sasquatch/charts/strimzi-kafka/README.md | 1 - .../charts/strimzi-kafka/templates/users.yaml | 38 ------- .../charts/strimzi-kafka/values.yaml | 4 - applications/sasquatch/secrets.yaml | 4 - applications/sasquatch/values-idfdev.yaml | 8 -- applications/sasquatch/values.yaml | 7 ++ 16 files changed, 370 insertions(+), 56 deletions(-) create mode 100644 applications/sasquatch/charts/app-metrics/Chart.yaml create mode 100644 applications/sasquatch/charts/app-metrics/README.md create mode 100644 applications/sasquatch/charts/app-metrics/templates/_helpers.tpl create mode 100644 applications/sasquatch/charts/app-metrics/templates/kafka-topics.yaml create mode 100644 applications/sasquatch/charts/app-metrics/templates/kafka-users.yaml create mode 100644 applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml create mode 100644 applications/sasquatch/charts/app-metrics/templates/telegraf-deployment.yaml create mode 100644 applications/sasquatch/charts/app-metrics/values.yaml diff --git a/applications/sasquatch/Chart.yaml b/applications/sasquatch/Chart.yaml index 600032104e..723f35d9f7 100644 --- a/applications/sasquatch/Chart.yaml +++ b/applications/sasquatch/Chart.yaml @@ -56,6 +56,9 @@ dependencies: - name: square-events condition: squareEvents.enabled version: 1.0.0 + - name: app-metrics + condition: app-metrics.enabled + version: 1.0.0 annotations: phalanx.lsst.io/docs: | diff 
--git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index aac4c033b9..e922633932 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -18,6 +18,8 @@ Rubin Observatory's telemetry service | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | +| app-metrics.apps | list | `[]` | The apps to create configuration for. | +| app-metrics.enabled | bool | `false` | Enable the app-metrics subchart with topic, user, and telegraf configurations | | chronograf.enabled | bool | `true` | Whether Chronograf is enabled | | chronograf.env | object | See `values.yaml` | Additional environment variables for Chronograf | | chronograf.envFromSecret | string | `"sasquatch"` | Name of secret to use. The keys `generic_client_id`, `generic_client_secret`, and `token_secret` should be set. | @@ -81,6 +83,26 @@ Rubin Observatory's telemetry service | strimzi-registry-operator.clusterNamespace | string | `"sasquatch"` | Namespace where the Strimzi Kafka cluster is deployed | | strimzi-registry-operator.operatorNamespace | string | `"sasquatch"` | Namespace where the strimzi-registry-operator is deployed | | telegraf-kafka-consumer | object | `{}` | Overrides for telegraf-kafka-consumer configuration | +| app-metrics.affinity | object | `{}` | Affinity for pod assignment | +| app-metrics.apps | list | `[]` | A list of applications that will publish metrics events, and the keys that should be ingested into InfluxDB as tags. The names should be the same as the app names in Phalanx. | +| app-metrics.args | list | `[]` | Arguments passed to the Telegraf agent containers | +| app-metrics.cluster.name | string | `"sasquatch"` | | +| app-metrics.debug | bool | false | Run Telegraf in debug mode. 
| +| app-metrics.env | list | See `values.yaml` | Telegraf agent enviroment variables | +| app-metrics.envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | +| app-metrics.globalAppConfig | object | `{}` | app-metrics configuration in any environment in which the subchart is enabled. This should stay globally specified here, and it shouldn't be overridden. See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | +| app-metrics.globalInfluxTags | list | `["service"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). These will be concatenated with the `influxTags` from `globalAppConfig` | +| app-metrics.image.pullPolicy | string | `"Always"` | Image pull policy | +| app-metrics.image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | +| app-metrics.image.tag | string | `"1.30.2-alpine"` | Telegraf image tag | +| app-metrics.imagePullSecrets | list | `[]` | Secret names to use for Docker pulls | +| app-metrics.influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | +| app-metrics.nodeSelector | object | `{}` | Node labels for pod assignment | +| app-metrics.podAnnotations | object | `{}` | Annotations for telegraf-kafka-consumers pods | +| app-metrics.podLabels | object | `{}` | Labels for telegraf-kafka-consumer pods | +| app-metrics.replicaCount | int | `3` | Number of Telegraf replicas. Multiple replicas increase availability. | +| app-metrics.resources | object | See `values.yaml` | Kubernetes resources requests and limits | +| app-metrics.tolerations | list | `[]` | Tolerations for pod assignment | | influxdb-enterprise.bootstrap.auth.secretName | string | `"sasquatch"` | Enable authentication of the data nodes using this secret, by creating a username and password for an admin account. 
The secret must contain keys `username` and `password`. | | influxdb-enterprise.bootstrap.ddldml.configMap | string | Do not run DDL or DML | A config map containing DDL and DML that define databases, retention policies, and inject some data. The keys `ddl` and `dml` must exist, even if one of them is empty. DDL is executed before DML to ensure databases and retention policies exist. | | influxdb-enterprise.bootstrap.ddldml.resources | object | `{}` | Kubernetes resources and limits for the bootstrap job | @@ -389,7 +411,6 @@ Rubin Observatory's telemetry service | strimzi-kafka.registry.resources | object | See `values.yaml` | Kubernetes requests and limits for the Schema Registry | | strimzi-kafka.registry.schemaTopic | string | `"registry-schemas"` | Name of the topic used by the Schema Registry | | strimzi-kafka.superusers | list | `["kafka-admin"]` | A list of usernames for users who should have global admin permissions. These users will be created, along with their credentials. | -| strimzi-kafka.users.appmetrics.enabled | bool | `false` | Enable user appmetrics | | strimzi-kafka.users.camera.enabled | bool | `false` | Enable user camera, used at the camera environments | | strimzi-kafka.users.consdb.enabled | bool | `false` | Enable user consdb | | strimzi-kafka.users.kafdrop.enabled | bool | `false` | Enable user Kafdrop (deployed by parent Sasquatch chart). | diff --git a/applications/sasquatch/charts/app-metrics/Chart.yaml b/applications/sasquatch/charts/app-metrics/Chart.yaml new file mode 100644 index 0000000000..1152b5b2ca --- /dev/null +++ b/applications/sasquatch/charts/app-metrics/Chart.yaml @@ -0,0 +1,6 @@ +apiVersion: v2 +name: app-metrics +version: 1.0.0 +appVersion: "1.0.0" +description: Kafka topics, users, and a telegraf connector for metrics events. 
+type: application diff --git a/applications/sasquatch/charts/app-metrics/README.md b/applications/sasquatch/charts/app-metrics/README.md new file mode 100644 index 0000000000..1cb6c56b6d --- /dev/null +++ b/applications/sasquatch/charts/app-metrics/README.md @@ -0,0 +1,28 @@ +# app-metrics + +Kafka topics, users, and a telegraf connector for metrics events. + +## Values + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| affinity | object | `{}` | Affinity for pod assignment | +| apps | list | `[]` | A list of applications that will publish metrics events, and the keys that should be ingested into InfluxDB as tags. The names should be the same as the app names in Phalanx. | +| args | list | `[]` | Arguments passed to the Telegraf agent containers | +| cluster.name | string | `"sasquatch"` | | +| debug | bool | false | Run Telegraf in debug mode. | +| env | list | See `values.yaml` | Telegraf agent enviroment variables | +| envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | +| globalAppConfig | object | `{}` | app-metrics configuration in any environment in which the subchart is enabled. This should stay globally specified here, and it shouldn't be overridden. See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | +| globalInfluxTags | list | `["service"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). 
These will be concatenated with the `influxTags` from `globalAppConfig` | +| image.pullPolicy | string | `"Always"` | Image pull policy | +| image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | +| image.tag | string | `"1.30.2-alpine"` | Telegraf image tag | +| imagePullSecrets | list | `[]` | Secret names to use for Docker pulls | +| influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | +| nodeSelector | object | `{}` | Node labels for pod assignment | +| podAnnotations | object | `{}` | Annotations for telegraf-kafka-consumers pods | +| podLabels | object | `{}` | Labels for telegraf-kafka-consumer pods | +| replicaCount | int | `3` | Number of Telegraf replicas. Multiple replicas increase availability. | +| resources | object | See `values.yaml` | Kubernetes resources requests and limits | +| tolerations | list | `[]` | Tolerations for pod assignment | diff --git a/applications/sasquatch/charts/app-metrics/templates/_helpers.tpl b/applications/sasquatch/charts/app-metrics/templates/_helpers.tpl new file mode 100644 index 0000000000..f88a9ae075 --- /dev/null +++ b/applications/sasquatch/charts/app-metrics/templates/_helpers.tpl @@ -0,0 +1,10 @@ +{{/* +Convert a list to a TOML array of quoted string values +*/}} +{{- define "helpers.toTomlArray" -}} +{{- $items := list -}} +{{- range . -}} +{{- $items = (quote .) | append $items -}} +{{- end -}} +[ {{ join ", " $items }} ] +{{- end -}} diff --git a/applications/sasquatch/charts/app-metrics/templates/kafka-topics.yaml b/applications/sasquatch/charts/app-metrics/templates/kafka-topics.yaml new file mode 100644 index 0000000000..70db2590de --- /dev/null +++ b/applications/sasquatch/charts/app-metrics/templates/kafka-topics.yaml @@ -0,0 +1,15 @@ +{{- range .Values.apps }} +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaTopic +metadata: + name: "lsst.square.app-metrics.events.{{ . 
}}" + labels: + strimzi.io/cluster: {{ $.Values.cluster.name }} +spec: + partitions: 10 + replicas: 3 + config: + # http://kafka.apache.org/documentation/#topicconfigs + retention.ms: 86400000 # 1 day +{{- end }} diff --git a/applications/sasquatch/charts/app-metrics/templates/kafka-users.yaml b/applications/sasquatch/charts/app-metrics/templates/kafka-users.yaml new file mode 100644 index 0000000000..9ddab60b5e --- /dev/null +++ b/applications/sasquatch/charts/app-metrics/templates/kafka-users.yaml @@ -0,0 +1,31 @@ +{{- range .Values.apps }} +--- +apiVersion: kafka.strimzi.io/v1beta2 +kind: KafkaUser +metadata: + name: app-metrics-{{ . }} + labels: + strimzi.io/cluster: {{ $.Values.cluster.name }} +spec: + authentication: + type: tls + authorization: + type: simple + acls: + - resource: + type: group + name: app-metrics-events + patternType: prefix + operations: + - "Read" + host: "*" + - resource: + type: topic + name: "lsst.square.app-metrics.events.{{ . }}" + patternType: literal + operations: + - "Describe" + - "Read" + - "Write" + host: "*" +{{- end }} diff --git a/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml b/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml new file mode 100644 index 0000000000..e8a60a4ae3 --- /dev/null +++ b/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml @@ -0,0 +1,68 @@ +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: sasquatch-telegraf-app-metrics + labels: + app.kubernetes.io/name: sasquatch-telegraf-app-metrics + app.kubernetes.io/instance: sasquatch-telegraf-app-metrics + app.kubernetes.io/part-of: sasquatch +data: + telegraf.conf: |+ + [agent] + metric_batch_size = 5000 + metric_buffer_limit = 100000 + collection_jitter = "0s" + flush_interval = "10s" + flush_jitter = "0s" + debug = {{ default false .Values.debug }} + omit_hostname = true + + [[outputs.influxdb]] + urls = [ + {{ .Values.influxdb.url | quote }} + ] + database = 
"telegraf-kafka-app-metrics-consumer" + username = "${INFLUXDB_USER}" + password = "${INFLUXDB_PASSWORD}" + + [[outputs.influxdb]] + namepass = ["telegraf_*"] + urls = [ + {{ .Values.influxdb.url | quote }} + ] + database = "telegraf" + username = "${INFLUXDB_USER}" + password = "${INFLUXDB_PASSWORD}" + + {{- range $index, $app := .Values.apps }} + {{- $globalInfluxTags := $.Values.globalInfluxTags | default list }} + {{- $appInfluxTags := (index $.Values.globalAppConfig $app "influxTags") | default list }} + {{- $influxTags := concat $globalInfluxTags $appInfluxTags }} + [[inputs.kafka_consumer]] + brokers = [ + "sasquatch-kafka-brokers.sasquatch:9092" + ] + consumer_group = "telegraf-kafka-consumer-app-metrics" + sasl_mechanism = "SCRAM-SHA-512" + sasl_password = "$TELEGRAF_PASSWORD" + sasl_username = "telegraf" + data_format = "avro" + avro_schema_registry = "http://sasquatch-schema-registry.sasquatch:8081" + avro_timestamp = "timestamp_ns" + avro_timestamp_format = "unix_ns" + avro_union_mode = "nullable" + avro_tags = {{ include "helpers.toTomlArray" $influxTags }} + topics = [ + "lsst.square.app-metrics.events.{{ $app }}", + ] + max_processing_time = "5s" + consumer_fetch_default = "5MB" + max_undelivered_messages = 10000 + compression_codec = 3 + {{- end }} + + [[inputs.internal]] + name_prefix = "telegraf_" + collect_memstats = true + tags = { instance = "app-metrics" } diff --git a/applications/sasquatch/charts/app-metrics/templates/telegraf-deployment.yaml b/applications/sasquatch/charts/app-metrics/templates/telegraf-deployment.yaml new file mode 100644 index 0000000000..9a0c3dd017 --- /dev/null +++ b/applications/sasquatch/charts/app-metrics/templates/telegraf-deployment.yaml @@ -0,0 +1,78 @@ +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: sasquatch-telegraf-app-metrics + labels: + app.kubernetes.io/name: sasquatch-telegraf-app-metrics + app.kubernetes.io/instance: sasquatch-telegraf-app-metrics + app.kubernetes.io/part-of: sasquatch 
+spec: + replicas: {{ default 1 .Values.replicaCount }} + selector: + matchLabels: + app.kubernetes.io/instance: sasquatch-telegraf-app-metrics + template: + metadata: + labels: + app.kubernetes.io/instance: sasquatch-telegraf-app-metrics + annotations: + checksum/config: {{ include (print $.Template.BasePath "/telegraf-configmap.yaml") $ | sha256sum }} + {{- if .Values.podAnnotations }} + {{- toYaml .Values.podAnnotations | nindent 8 }} + {{- end }} + spec: + securityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 1000 + containers: + - name: telegraf + securityContext: + capabilities: + drop: + - all + readOnlyRootFilesystem: true + allowPrivilegeEscalation: false + image: "{{ .Values.image.repo }}:{{ .Values.image.tag }}" + imagePullPolicy: {{ default "IfNotPresent" .Values.image.pullPolicy | quote }} + {{- if .Values.resources }} + resources: + {{- toYaml .Values.resources | nindent 10 }} + {{- end }} + {{- if .Values.args }} + args: + {{- toYaml .Values.args | nindent 8 }} + {{- end }} + {{- if .Values.env }} + env: + {{- toYaml .Values.env | nindent 8 }} + {{- end }} + {{- if .Values.envFromSecret }} + envFrom: + - secretRef: + name: {{ .Values.envFromSecret }} + {{- end }} + volumeMounts: + - name: config + mountPath: /etc/telegraf + {{- if .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml .Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- if .Values.nodeSelector }} + nodeSelector: + {{- toYaml .Values.nodeSelector | nindent 8 }} + {{- end }} + {{- if .Values.affinity }} + affinity: + {{- toYaml .Values.affinity | nindent 8 }} + {{- end }} + {{- if .Values.tolerations }} + tolerations: + {{- toYaml .Values.tolerations | nindent 8 }} + {{- end }} + volumes: + - name: config + configMap: + name: sasquatch-telegraf-app-metrics diff --git a/applications/sasquatch/charts/app-metrics/values.yaml b/applications/sasquatch/charts/app-metrics/values.yaml new file mode 100644 index 0000000000..d5bc17418f --- /dev/null +++ 
b/applications/sasquatch/charts/app-metrics/values.yaml @@ -0,0 +1,102 @@ +## Default values.yaml for the Metrics Events subchart. + +# -- app-metrics configuration in any environment in which the subchart is +# enabled. This should stay globally specified here, and it shouldn't be +# overridden. +# See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) +# for the structure of this value. +globalAppConfig: {} + +# -- A list of applications that will publish metrics events, and the keys that should be ingested into InfluxDB as tags. +# The names should be the same as the app names in Phalanx. +apps: [] + +# -- Keys in an every event sent by any app that should be recorded in InfluxDB +# as "tags" (vs. "fields"). These will be concatenated with the `influxTags` from +# `globalAppConfig` +globalInfluxTags: ["service"] + +cluster: + # The name of the Strimzi cluster. Synchronize this with the cluster name in + # the parent Sasquatch chart. + name: sasquatch + +# These values refer to the telegraf deployment and config + +image: + # -- Telegraf image repository + repo: "docker.io/library/telegraf" + + # -- Telegraf image tag + tag: "1.30.2-alpine" + + # -- Image pull policy + pullPolicy: "Always" + +# -- Annotations for telegraf-kafka-consumers pods +podAnnotations: {} + +# -- Labels for telegraf-kafka-consumer pods +podLabels: {} + +# -- Secret names to use for Docker pulls +imagePullSecrets: [] + +# -- Arguments passed to the Telegraf agent containers +args: [] + +# -- Telegraf agent enviroment variables +# @default -- See `values.yaml` +env: + - name: TELEGRAF_PASSWORD + valueFrom: + secretKeyRef: + name: sasquatch + # Telegraf KafkaUser password. 
+ key: telegraf-password + - name: INFLUXDB_USER + valueFrom: + secretKeyRef: + name: sasquatch + # InfluxDB v1 user + key: influxdb-user + - name: INFLUXDB_PASSWORD + valueFrom: + secretKeyRef: + name: sasquatch + # InfluxDB v1 password + key: influxdb-password + +# -- Name of the secret with values to be added to the environment. +envFromSecret: "" + +# -- Run Telegraf in debug mode. +# @default -- false +debug: false + +influxdb: + # -- URL of the InfluxDB v1 instance to write to + url: "http://sasquatch-influxdb.sasquatch:8086" + +# -- Number of Telegraf replicas. Multiple replicas increase availability. +replicaCount: 3 + + +# -- Kubernetes resources requests and limits +# @default -- See `values.yaml` +resources: + limits: + cpu: "2" + memory: "4Gi" + requests: + cpu: "0.5" + memory: "1Gi" + +# -- Node labels for pod assignment +nodeSelector: {} + +# -- Affinity for pod assignment +affinity: {} + +# -- Tolerations for pod assignment +tolerations: [] diff --git a/applications/sasquatch/charts/strimzi-kafka/README.md b/applications/sasquatch/charts/strimzi-kafka/README.md index fd425d5279..556761d75d 100644 --- a/applications/sasquatch/charts/strimzi-kafka/README.md +++ b/applications/sasquatch/charts/strimzi-kafka/README.md @@ -65,7 +65,6 @@ A subchart to deploy Strimzi Kafka components for Sasquatch. | registry.resources | object | See `values.yaml` | Kubernetes requests and limits for the Schema Registry | | registry.schemaTopic | string | `"registry-schemas"` | Name of the topic used by the Schema Registry | | superusers | list | `["kafka-admin"]` | A list of usernames for users who should have global admin permissions. These users will be created, along with their credentials. 
| -| users.appmetrics.enabled | bool | `false` | Enable user appmetrics | | users.camera.enabled | bool | `false` | Enable user camera, used at the camera environments | | users.consdb.enabled | bool | `false` | Enable user consdb | | users.kafdrop.enabled | bool | `false` | Enable user Kafdrop (deployed by parent Sasquatch chart). | diff --git a/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml b/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml index 75b9433255..5b30f2a6a3 100644 --- a/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml @@ -300,41 +300,3 @@ spec: host: "*" operation: All {{- end }} -{{- if .Values.users.appmetrics.enabled }} ---- -apiVersion: kafka.strimzi.io/v1beta2 -kind: KafkaUser -metadata: - name: appmetrics - labels: - strimzi.io/cluster: {{ .Values.cluster.name }} -spec: - authentication: - type: scram-sha-512 - password: - valueFrom: - secretKeyRef: - name: sasquatch - key: appmetrics-password - authorization: - type: simple - acls: - - resource: - type: group - name: "*" - patternType: literal - operation: All - - resource: - type: topic - name: "lsst.square.metrics" - patternType: prefix - type: allow - host: "*" - operation: All - - resource: - type: cluster - operations: - - Describe - - DescribeConfigs - # TODO: Any quotas needed? 
-{{- end }} diff --git a/applications/sasquatch/charts/strimzi-kafka/values.yaml b/applications/sasquatch/charts/strimzi-kafka/values.yaml index fa0deaa57b..6d587fd746 100644 --- a/applications/sasquatch/charts/strimzi-kafka/values.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/values.yaml @@ -285,10 +285,6 @@ users: # -- Enable user consdb enabled: false - appmetrics: - # -- Enable user appmetrics - enabled: false - mirrormaker2: # -- Enable replication in the target (passive) cluster diff --git a/applications/sasquatch/secrets.yaml b/applications/sasquatch/secrets.yaml index 7f84437a65..13cf51ef04 100644 --- a/applications/sasquatch/secrets.yaml +++ b/applications/sasquatch/secrets.yaml @@ -69,10 +69,6 @@ ts-salkafka-password: description: >- ts-salkafka KafkaUser password. if: strimzi-kafka.users.tsSalKafka.enabled -appmetrics-password: - description: >- - appmetrics KafkaUser password. - if: strimzi-kafka.users.appmetrics.enabled connect-push-secret: description: >- Write token for pushing generated Strimzi Kafka Connect image to GitHub Container Registry. 
diff --git a/applications/sasquatch/values-idfdev.yaml b/applications/sasquatch/values-idfdev.yaml index 4db585f4d5..6519b85afb 100644 --- a/applications/sasquatch/values-idfdev.yaml +++ b/applications/sasquatch/values-idfdev.yaml @@ -32,8 +32,6 @@ strimzi-kafka: enabled: true kafkaConnectManager: enabled: true - appmetrics: - enabled: true kraft: enabled: true kafkaController: @@ -75,12 +73,6 @@ telegraf-kafka-consumer: replicaCount: 1 topicRegexps: | [ "lsst.Test.*" ] - appmetrics: - enabled: true - database: "metrics" - replicaCount: 1 - topicRegexps: | - [ "lsst.square.metrics.*" ] kafdrop: cmdArgs: "--message.format=AVRO --topic.deleteEnabled=true --topic.createEnabled=true" diff --git a/applications/sasquatch/values.yaml b/applications/sasquatch/values.yaml index d7cb91e266..cc9fff35e6 100644 --- a/applications/sasquatch/values.yaml +++ b/applications/sasquatch/values.yaml @@ -283,3 +283,10 @@ global: # -- Base path for Vault secrets # @default -- Set by Argo CD vaultSecretsPath: "" + +app-metrics: + # -- Enable the app-metrics subchart with topic, user, and telegraf configurations + enabled: false + + # -- The apps to create configuration for. 
+ apps: [] From 86248413594b468e12e33e388d5502063acd0044 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 2 Oct 2024 09:51:04 -0700 Subject: [PATCH 208/567] Adjust telegraf resources on TTS --- applications/sasquatch/values-tucson-teststand.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index 64f30615a3..da095af8b2 100644 --- a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -176,6 +176,13 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.obsenv" ] debug: true + resources: + limits: + cpu: "2" + memory: "2Gi" + requests: + cpu: "1" + memory: "1Gi" kafdrop: cmdArgs: "--message.format=AVRO --message.keyFormat=DEFAULT --topic.deleteEnabled=false --topic.createEnabled=false" From f21231b8577ef38bc99b26b959d49961aa3cf83f Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 2 Oct 2024 16:35:59 -0700 Subject: [PATCH 209/567] Adjust configuration for EAS at TTS --- applications/sasquatch/values-tucson-teststand.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index da095af8b2..03b2703aa2 100644 --- a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -99,6 +99,8 @@ telegraf-kafka-consumer: eas: enabled: true database: "efd" + metric_batch_size: 100 + flush_interval: 20s topicRegexps: | [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] debug: true From c17b02f8fbd75ed8e977649976b192bd45a6086e Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 3 Oct 2024 15:26:20 +0000 Subject: [PATCH 210/567] chore(deps): update helm release argo-cd to v7.6.8 --- applications/argocd/Chart.yaml | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argocd/Chart.yaml b/applications/argocd/Chart.yaml index 56c9e07f2f..0c95280ff9 100644 --- a/applications/argocd/Chart.yaml +++ b/applications/argocd/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-cd - version: 7.6.1 + version: 7.6.8 repository: https://argoproj.github.io/argo-helm From cb576e362d6257735b736d93073c225713b82a63 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 3 Oct 2024 10:46:52 -0700 Subject: [PATCH 211/567] Exclude telegraf internal metrics from default output - Exclude measurements matching "telegraf_" from the default output using the namedrop filter. --- .../charts/app-metrics/templates/telegraf-configmap.yaml | 1 + .../charts/telegraf-kafka-consumer/templates/_helpers.tpl | 1 + 2 files changed, 2 insertions(+) diff --git a/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml b/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml index e8a60a4ae3..4721483645 100644 --- a/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml +++ b/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml @@ -19,6 +19,7 @@ data: omit_hostname = true [[outputs.influxdb]] + namedrop = ["telegraf_*"] urls = [ {{ .Values.influxdb.url | quote }} ] diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl index 11dae28e5a..f2756bc5ee 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl @@ -21,6 +21,7 @@ data: omit_hostname = true [[outputs.influxdb]] + namedrop = ["telegraf_*"] urls = [ {{ .influxdbUrl | quote }} ] From e7f5bdf13e395da321f45069312823b4711dcb53 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" 
<29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 10:10:55 +0000 Subject: [PATCH 212/567] Update nginx Docker tag to v1.27.2 --- applications/love/charts/love-nginx/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/love/charts/love-nginx/values.yaml b/applications/love/charts/love-nginx/values.yaml index 92a9d612bc..d4e96320fa 100644 --- a/applications/love/charts/love-nginx/values.yaml +++ b/applications/love/charts/love-nginx/values.yaml @@ -4,7 +4,7 @@ image: # -- The NGINX image to use repository: nginx # -- The tag to use for the NGINX image - tag: 1.27.1 + tag: 1.27.2 # -- The pull policy on the NGINX image pullPolicy: IfNotPresent # -- Service type specification From d7e0d114818cd64b6d597e85d01213c7d4602dbf Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Mon, 7 Oct 2024 12:54:42 -0400 Subject: [PATCH 213/567] Relabel the "Portal" docs as "Firefly" See https://rubinobs.atlassian.net/browse/SP-1621 --- applications/squareone/values.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/squareone/values.yaml b/applications/squareone/values.yaml index e4746c7090..4e12177617 100644 --- a/applications/squareone/values.yaml +++ b/applications/squareone/values.yaml @@ -222,10 +222,10 @@ config: - ### Portal + ### Firefly - The Portal enables you to explore LSST image and table data in - your browser. + Help pages for Firefly, which enables exploration and visualization + of image and table data in the Portal Aspect. 
From 52150953f00686d775dbc4fdf69e4b34cca76dac Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 7 Oct 2024 14:56:06 -0700 Subject: [PATCH 214/567] Update Helm docs --- applications/love/README.md | 2 +- applications/love/charts/love-nginx/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/love/README.md b/applications/love/README.md index fae75a25ca..3db243aefa 100644 --- a/applications/love/README.md +++ b/applications/love/README.md @@ -146,7 +146,7 @@ Deployment for the LSST Operators Visualization Environment | love-nginx.affinity | object | `{}` | Affinity rules for the NGINX pod | | love-nginx.image.pullPolicy | string | `"IfNotPresent"` | The pull policy on the NGINX image | | love-nginx.image.repository | string | `"nginx"` | The NGINX image to use | -| love-nginx.image.tag | string | `"1.27.1"` | The tag to use for the NGINX image | +| love-nginx.image.tag | string | `"1.27.2"` | The tag to use for the NGINX image | | love-nginx.imagePullSecrets | list | `[]` | The list of pull secrets needed for the images. If this section is used, each object listed can have the following attributes defined: _name_ (The label identifying the pull-secret to use) | | love-nginx.ingress.annotations | object | `{}` | Annotations for the NGINX ingress | | love-nginx.ingress.className | string | `"nginx"` | Assign the Ingress class name | diff --git a/applications/love/charts/love-nginx/README.md b/applications/love/charts/love-nginx/README.md index 5e34e445b9..6a1289a87e 100644 --- a/applications/love/charts/love-nginx/README.md +++ b/applications/love/charts/love-nginx/README.md @@ -9,7 +9,7 @@ Helm chart for the LOVE Nginx server. 
| affinity | object | `{}` | Affinity rules for the NGINX pod | | image.pullPolicy | string | `"IfNotPresent"` | The pull policy on the NGINX image | | image.repository | string | `"nginx"` | The NGINX image to use | -| image.tag | string | `"1.27.1"` | The tag to use for the NGINX image | +| image.tag | string | `"1.27.2"` | The tag to use for the NGINX image | | imagePullSecrets | list | `[]` | The list of pull secrets needed for the images. If this section is used, each object listed can have the following attributes defined: _name_ (The label identifying the pull-secret to use) | | ingress.annotations | object | `{}` | Annotations for the NGINX ingress | | ingress.className | string | `"nginx"` | Assign the Ingress class name | From c7c662ee46dd3310249fd7a9f5c00e8ee7fe1abc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 22:07:30 +0000 Subject: [PATCH 215/567] chore(deps): update redis docker tag to v7.4.1 --- applications/love/charts/love-manager/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/love/charts/love-manager/values.yaml b/applications/love/charts/love-manager/values.yaml index d5534ee77c..c361269f09 100644 --- a/applications/love/charts/love-manager/values.yaml +++ b/applications/love/charts/love-manager/values.yaml @@ -248,7 +248,7 @@ redis: # -- The redis image to use repository: redis # -- The tag to use for the redis image - tag: 7.4.0 + tag: 7.4.1 # -- The pull policy for the redis image pullPolicy: IfNotPresent envSecrets: From 8a018130b4716827e3e6ca487b375a5bac08a928 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 7 Oct 2024 15:09:39 -0700 Subject: [PATCH 216/567] Update pre-commit and Python dependencies --- .pre-commit-config.yaml | 6 +- requirements/dev.txt | 227 ++++++++++++++++++++-------------------- requirements/main.txt | 123 +++++++++++----------- requirements/tox.txt | 50 ++++----- 4 files changed, 206 
insertions(+), 200 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c684835a13..203642570c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: check-merge-conflict - id: check-toml @@ -14,7 +14,7 @@ repos: - -c=.yamllint.yml - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.29.2 + rev: 0.29.3 hooks: - id: check-jsonschema files: ^applications/.*/secrets(-[^./-]+)?\.yaml @@ -46,7 +46,7 @@ repos: - --template-files=../helm-docs.md.gotmpl - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.7 + rev: v0.6.9 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/requirements/dev.txt b/requirements/dev.txt index a8d900ddd8..f45b4fd9e5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -305,29 +305,29 @@ coverage==7.6.1 \ # via # -r requirements/dev.in # pytest-cov -debugpy==1.8.5 \ - --hash=sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c \ - --hash=sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226 \ - --hash=sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c \ - --hash=sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3 \ - --hash=sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a \ - --hash=sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a \ - --hash=sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408 \ - --hash=sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44 \ - --hash=sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156 \ - --hash=sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a \ - --hash=sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c \ - 
--hash=sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7 \ - --hash=sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a \ - --hash=sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf \ - --hash=sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34 \ - --hash=sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0 \ - --hash=sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e \ - --hash=sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb \ - --hash=sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7 \ - --hash=sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b \ - --hash=sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed \ - --hash=sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406 +debugpy==1.8.6 \ + --hash=sha256:0a85707c6a84b0c5b3db92a2df685b5230dd8fb8c108298ba4f11dba157a615a \ + --hash=sha256:22140bc02c66cda6053b6eb56dfe01bbe22a4447846581ba1dd6df2c9f97982d \ + --hash=sha256:30f467c5345d9dfdcc0afdb10e018e47f092e383447500f125b4e013236bf14b \ + --hash=sha256:3358aa619a073b620cd0d51d8a6176590af24abcc3fe2e479929a154bf591b51 \ + --hash=sha256:43996632bee7435583952155c06881074b9a742a86cee74e701d87ca532fe833 \ + --hash=sha256:538c6cdcdcdad310bbefd96d7850be1cd46e703079cc9e67d42a9ca776cdc8a8 \ + --hash=sha256:567419081ff67da766c898ccf21e79f1adad0e321381b0dfc7a9c8f7a9347972 \ + --hash=sha256:5d73d8c52614432f4215d0fe79a7e595d0dd162b5c15233762565be2f014803b \ + --hash=sha256:67479a94cf5fd2c2d88f9615e087fcb4fec169ec780464a3f2ba4a9a2bb79955 \ + --hash=sha256:9fb8653f6cbf1dd0a305ac1aa66ec246002145074ea57933978346ea5afdf70b \ + --hash=sha256:b48892df4d810eff21d3ef37274f4c60d32cdcafc462ad5647239036b0f0649f \ + --hash=sha256:c1cef65cffbc96e7b392d9178dbfd524ab0750da6c0023c027ddcac968fd1caa \ + 
--hash=sha256:c931a9371a86784cee25dec8d65bc2dc7a21f3f1552e3833d9ef8f919d22280a \ + --hash=sha256:c9834dfd701a1f6bf0f7f0b8b1573970ae99ebbeee68314116e0ccc5c78eea3c \ + --hash=sha256:cdaf0b9691879da2d13fa39b61c01887c34558d1ff6e5c30e2eb698f5384cd43 \ + --hash=sha256:db891b141fc6ee4b5fc6d1cc8035ec329cabc64bdd2ae672b4550c87d4ecb128 \ + --hash=sha256:df5dc9eb4ca050273b8e374a4cd967c43be1327eeb42bfe2f58b3cdfe7c68dcb \ + --hash=sha256:e3a82da039cfe717b6fb1886cbbe5c4a3f15d7df4765af857f4307585121c2dd \ + --hash=sha256:e3e182cd98eac20ee23a00653503315085b29ab44ed66269482349d307b08df9 \ + --hash=sha256:e4ce0570aa4aca87137890d23b86faeadf184924ad892d20c54237bcaab75d8f \ + --hash=sha256:f1e60bd06bb3cc5c0e957df748d1fab501e01416c43a7bdc756d2a992ea1b881 \ + --hash=sha256:f7158252803d0752ed5398d291dee4c553bb12d14547c0e1843ab74ee9c31123 # via ipykernel decorator==5.1.1 \ --hash=sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330 \ @@ -478,9 +478,9 @@ ipykernel==6.29.5 \ --hash=sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5 \ --hash=sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215 # via myst-nb -ipython==8.27.0 \ - --hash=sha256:0b99a2dc9f15fd68692e898e5568725c6d49c527d36a9fb5960ffbdeaa82ff7e \ - --hash=sha256:f68b3cb8bde357a5d7adc9598d57e22a45dfbea19eb6b98286fa3b288c9cd55c +ipython==8.28.0 \ + --hash=sha256:0d0d15ca1e01faeb868ef56bc7ee5a0de5bd66885735682e8a322ae289a13d1a \ + --hash=sha256:530ef1e7bb693724d3cdc37287c80b07ad9b25986c007a53aa1857272dac3f35 # via # ipykernel # myst-nb @@ -541,67 +541,68 @@ markdown-it-py==3.0.0 \ # documenteer # mdit-py-plugins # myst-parser -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - 
--hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - 
--hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - 
--hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 +markupsafe==3.0.0 \ + --hash=sha256:03ff62dea2fef3eadf2f1853bc6332bcb0458d9608b11dfb1cd5aeda1c178ea6 \ + --hash=sha256:105ada43a61af22acb8774514c51900dc820c481cc5ba53f17c09d294d9c07ca \ + --hash=sha256:12ddac720b8965332d36196f6f83477c6351ba6a25d4aff91e30708c729350d7 \ + --hash=sha256:1d151b9cf3307e259b749125a5a08c030ba15a8f1d567ca5bfb0e92f35e761f5 \ + --hash=sha256:1ee9790be6f62121c4c58bbced387b0965ab7bffeecb4e17cc42ef290784e363 \ + --hash=sha256:1fd02f47596e00a372f5b4af2b4c45f528bade65c66dfcbc6e1ea1bfda758e98 \ + --hash=sha256:23efb2be7221105c8eb0e905433414d2439cb0a8c5d5ca081c1c72acef0f5613 \ + --hash=sha256:25396abd52b16900932e05b7104bcdc640a4d96c914f39c3b984e5a17b01fba0 \ + --hash=sha256:27d6a73682b99568916c54a4bfced40e7d871ba685b580ea04bbd2e405dfd4c5 \ + --hash=sha256:380faf314c3c84c1682ca672e6280c6c59e92d0bc13dc71758ffa2de3cd4e252 \ + --hash=sha256:3b231255770723f1e125d63c14269bcd8b8136ecfb620b9a18c0297e046d0736 \ + --hash=sha256:3cd0bba31d484fe9b9d77698ddb67c978704603dc10cdc905512af308cfcca6b \ + --hash=sha256:3efde9a8c56c3b6e5f3fa4baea828f8184970c7c78480fedb620d804b1c31e5c \ + --hash=sha256:409535e0521c4630d5b5a1bf284e9d3c76d2fc2f153ebb12cf3827797798cc99 \ + --hash=sha256:494a64efc535e147fcc713dba58eecfce3a79f1e93ebe81995b387f5cd9bc2e1 \ + 
--hash=sha256:4ca04c60006867610a06575b46941ae616b19da0adc85b9f8f3d9cbd7a3da385 \ + --hash=sha256:4deea1d9169578917d1f35cdb581bc7bab56a7e8c5be2633bd1b9549c3c22a01 \ + --hash=sha256:509c424069dd037d078925b6815fc56b7271f3aaec471e55e6fa513b0a80d2aa \ + --hash=sha256:5509a8373fed30b978557890a226c3d30569746c565b9daba69df80c160365a5 \ + --hash=sha256:59420b5a9a5d3fee483a32adb56d7369ae0d630798da056001be1e9f674f3aa6 \ + --hash=sha256:5d207ff5cceef77796f8aacd44263266248cf1fbc601441524d7835613f8abec \ + --hash=sha256:5ddf5cb8e9c00d9bf8b0c75949fb3ff9ea2096ba531693e2e87336d197fdb908 \ + --hash=sha256:63dae84964a9a3d2610808cee038f435d9a111620c37ccf872c2fcaeca6865b3 \ + --hash=sha256:64a7c7856c3a409011139b17d137c2924df4318dab91ee0530800819617c4381 \ + --hash=sha256:64f7d04410be600aa5ec0626d73d43e68a51c86500ce12917e10fd013e258df5 \ + --hash=sha256:658fdf6022740896c403d45148bf0c36978c6b48c9ef8b1f8d0c7a11b6cdea86 \ + --hash=sha256:678fbceb202382aae42c1f0cd9f56b776bc20a58ae5b553ee1fe6b802983a1d6 \ + --hash=sha256:7835de4c56066e096407a1852e5561f6033786dd987fa90dc384e45b9bd21295 \ + --hash=sha256:7c524203207f5b569df06c96dafdc337228921ee8c3cc5f6e891d024c6595352 \ + --hash=sha256:7ed789d0f7f11fcf118cf0acb378743dfdd4215d7f7d18837c88171405c9a452 \ + --hash=sha256:81be2c0084d8c69e97e3c5d73ce9e2a6e523556f2a19c4e195c09d499be2f808 \ + --hash=sha256:81ee9c967956b9ea39b3a5270b7cb1740928d205b0dc72629164ce621b4debf9 \ + --hash=sha256:8219e2207f6c188d15614ea043636c2b36d2d79bf853639c124a179412325a13 \ + --hash=sha256:96e3ed550600185d34429477f1176cedea8293fa40e47fe37a05751bcb64c997 \ + --hash=sha256:98fb3a2bf525ad66db96745707b93ba0f78928b7a1cb2f1cb4b143bc7e2ba3b3 \ + --hash=sha256:9b36473a2d3e882d1873ea906ce54408b9588dc2c65989664e6e7f5a2de353d7 \ + --hash=sha256:9f91c90f8f3bf436f81c12eeb4d79f9ddd263c71125e6ad71341906832a34386 \ + --hash=sha256:a5fd5500d4e4f7cc88d8c0f2e45126c4307ed31e08f8ec521474f2fd99d35ac3 \ + --hash=sha256:a7171d2b869e9be238ea318c196baf58fbf272704e9c1cd4be8c380eea963342 \ + 
--hash=sha256:a80c6740e1bfbe50cea7cbf74f48823bb57bd59d914ee22ff8a81963b08e62d2 \ + --hash=sha256:b2a7afd24d408b907672015555bc10be2382e6c5f62a488e2d452da670bbd389 \ + --hash=sha256:b43ac1eb9f91e0c14aac1d2ef0f76bc7b9ceea51de47536f61268191adf52ad7 \ + --hash=sha256:b6cc46a27d904c9be5732029769acf4b0af69345172ed1ef6d4db0c023ff603b \ + --hash=sha256:b94bec9eda10111ec7102ef909eca4f3c2df979643924bfe58375f560713a7d1 \ + --hash=sha256:bd9b8e458e2bab52f9ad3ab5dc8b689a3c84b12b2a2f64cd9a0dfe209fb6b42f \ + --hash=sha256:c182d45600556917f811aa019d834a89fe4b6f6255da2fd0bdcf80e970f95918 \ + --hash=sha256:c409691696bec2b5e5c9efd9593c99025bf2f317380bf0d993ee0213516d908a \ + --hash=sha256:c5243044a927e8a6bb28517838662a019cd7f73d7f106bbb37ab5e7fa8451a92 \ + --hash=sha256:c8ab7efeff1884c5da8e18f743b667215300e09043820d11723718de0b7db934 \ + --hash=sha256:cb244adf2499aa37d5dc43431990c7f0b632d841af66a51d22bd89c437b60264 \ + --hash=sha256:d261ec38b8a99a39b62e0119ed47fe3b62f7691c500bc1e815265adc016438c1 \ + --hash=sha256:d2c099be5274847d606574234e494f23a359e829ba337ea9037c3a72b0851942 \ + --hash=sha256:d7e63d1977d3806ce0a1a3e0099b089f61abdede5238ca6a3f3bf8877b46d095 \ + --hash=sha256:dba0f83119b9514bc37272ad012f0cc03f0805cc6a2bea7244e19250ac8ff29f \ + --hash=sha256:dcbee57fedc9b2182c54ffc1c5eed316c3da8bbfeda8009e1b5d7220199d15da \ + --hash=sha256:e042ccf8fe5bf8b6a4b38b3f7d618eb10ea20402b0c9f4add9293408de447974 \ + --hash=sha256:e363440c8534bf2f2ef1b8fdc02037eb5fff8fce2a558519b22d6a3a38b3ec5e \ + --hash=sha256:e64b390a306f9e849ee809f92af6a52cda41741c914358e0e9f8499d03741526 \ + --hash=sha256:f0411641d31aa6f7f0cc13f0f18b63b8dc08da5f3a7505972a42ab059f479ba3 \ + --hash=sha256:f1c13c6c908811f867a8e9e66efb2d6c03d1cdd83e92788fe97f693c457dc44f \ + --hash=sha256:f846fd7c241e5bd4161e2a483663eb66e4d8e12130fcdc052f310f388f1d61c6 # via # -c requirements/main.txt # jinja2 @@ -652,9 +653,9 @@ mypy-extensions==1.0.0 \ --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ 
--hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 # via mypy -myst-nb==1.1.1 \ - --hash=sha256:74227c11f76d03494f43b7788659b161b94f4dedef230a2912412bc8c3c9e553 \ - --hash=sha256:8b8f9085287d948eef46cb3764aafc21915e0e981882b8c742719f5b1a84c36f +myst-nb==1.1.2 \ + --hash=sha256:961b4005657029ca89892a4c75edbf0856c54ceaf6172368b46bf7676c1f7700 \ + --hash=sha256:9b7034e5d62640cb6daf03f9ca16ef45d0462fced27944c77aa3f98c7cdcd566 # via documenteer myst-parser==4.0.0 \ --hash=sha256:851c9dfb44e36e56d15d05e72f02b80da21a9e0d07cba96baf5e2d476bb91531 \ @@ -704,9 +705,9 @@ pluggy==1.5.0 \ --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 # via pytest -prompt-toolkit==3.0.47 \ - --hash=sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10 \ - --hash=sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360 +prompt-toolkit==3.0.48 \ + --hash=sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90 \ + --hash=sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e # via ipython psutil==6.0.0 \ --hash=sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35 \ @@ -896,21 +897,25 @@ python-dotenv==1.0.1 \ --hash=sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca \ --hash=sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a # via pydantic-settings -pywin32==306 ; platform_python_implementation != 'PyPy' and sys_platform == 'win32' \ - --hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \ - --hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \ - --hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \ - --hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \ - 
--hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \ - --hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \ - --hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \ - --hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \ - --hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \ - --hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \ - --hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \ - --hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \ - --hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \ - --hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0 +pywin32==307 ; platform_python_implementation != 'PyPy' and sys_platform == 'win32' \ + --hash=sha256:00d047992bb5dcf79f8b9b7c81f72e0130f9fe4b22df613f755ab1cc021d8347 \ + --hash=sha256:05de55a7c110478dc4b202230e98af5e0720855360d2b31a44bb4e296d795fba \ + --hash=sha256:07649ec6b01712f36debf39fc94f3d696a46579e852f60157a729ac039df0815 \ + --hash=sha256:0c12d61e0274e0c62acee79e3e503c312426ddd0e8d4899c626cddc1cafe0ff4 \ + --hash=sha256:13d059fb7f10792542082f5731d5d3d9645320fc38814759313e5ee97c3fac01 \ + --hash=sha256:36e650c5e5e6b29b5d317385b02d20803ddbac5d1031e1f88d20d76676dd103d \ + --hash=sha256:5101472f5180c647d4525a0ed289ec723a26231550dbfd369ec19d5faf60e511 \ + --hash=sha256:55ee87f2f8c294e72ad9d4261ca423022310a6e79fb314a8ca76ab3f493854c6 \ + --hash=sha256:576d09813eaf4c8168d0bfd66fb7cb3b15a61041cf41598c2db4a4583bf832d2 \ + --hash=sha256:7e0b2f93769d450a98ac7a31a087e07b126b6d571e8b4386a5762eb85325270b \ + --hash=sha256:987a86971753ed7fdd52a7fb5747aba955b2c7fbbc3d8b76ec850358c1cc28c3 \ + --hash=sha256:b30c9bdbffda6a260beb2919f918daced23d32c79109412c2085cbc513338a0a \ + --hash=sha256:b53658acbfc6a8241d72cc09e9d1d666be4e6c99376bc59e26cdb6223c4554d2 \ + 
--hash=sha256:e9d5202922e74985b037c9ef46778335c102b74b95cec70f629453dbe7235d87 \ + --hash=sha256:ea4d56e48dc1ab2aa0a5e3c0741ad6e926529510516db7a3b6981a1ae74405e5 \ + --hash=sha256:f8f25d893c1e1ce2d685ef6d0a481e87c6f510d0f3f117932781f412e0eba31b \ + --hash=sha256:fd436897c186a2e693cd0437386ed79f989f4d13d6f353f8787ecbb0ae719398 \ + --hash=sha256:fec5d27cc893178fab299de911b8e4d12c5954e1baf83e8a664311e56a272b75 # via jupyter-core pyyaml==6.0.2 \ --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ @@ -1403,9 +1408,9 @@ tabulate==0.9.0 \ --hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \ --hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f # via jupyter-cache -termcolor==2.4.0 \ - --hash=sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63 \ - --hash=sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a +termcolor==2.5.0 \ + --hash=sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8 \ + --hash=sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f # via pytest-sugar tomlkit==0.13.2 \ --hash=sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde \ diff --git a/requirements/main.txt b/requirements/main.txt index 10ad6927b2..d2304cfef7 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -294,67 +294,68 @@ jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via phalanx (pyproject.toml) -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - 
--hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - 
--hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - 
--hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 +markupsafe==3.0.0 \ + --hash=sha256:03ff62dea2fef3eadf2f1853bc6332bcb0458d9608b11dfb1cd5aeda1c178ea6 \ + --hash=sha256:105ada43a61af22acb8774514c51900dc820c481cc5ba53f17c09d294d9c07ca \ + --hash=sha256:12ddac720b8965332d36196f6f83477c6351ba6a25d4aff91e30708c729350d7 \ + --hash=sha256:1d151b9cf3307e259b749125a5a08c030ba15a8f1d567ca5bfb0e92f35e761f5 \ + --hash=sha256:1ee9790be6f62121c4c58bbced387b0965ab7bffeecb4e17cc42ef290784e363 \ + --hash=sha256:1fd02f47596e00a372f5b4af2b4c45f528bade65c66dfcbc6e1ea1bfda758e98 \ + --hash=sha256:23efb2be7221105c8eb0e905433414d2439cb0a8c5d5ca081c1c72acef0f5613 \ + --hash=sha256:25396abd52b16900932e05b7104bcdc640a4d96c914f39c3b984e5a17b01fba0 \ + --hash=sha256:27d6a73682b99568916c54a4bfced40e7d871ba685b580ea04bbd2e405dfd4c5 \ + --hash=sha256:380faf314c3c84c1682ca672e6280c6c59e92d0bc13dc71758ffa2de3cd4e252 \ + --hash=sha256:3b231255770723f1e125d63c14269bcd8b8136ecfb620b9a18c0297e046d0736 \ + --hash=sha256:3cd0bba31d484fe9b9d77698ddb67c978704603dc10cdc905512af308cfcca6b \ + --hash=sha256:3efde9a8c56c3b6e5f3fa4baea828f8184970c7c78480fedb620d804b1c31e5c \ + --hash=sha256:409535e0521c4630d5b5a1bf284e9d3c76d2fc2f153ebb12cf3827797798cc99 \ + --hash=sha256:494a64efc535e147fcc713dba58eecfce3a79f1e93ebe81995b387f5cd9bc2e1 \ + 
--hash=sha256:4ca04c60006867610a06575b46941ae616b19da0adc85b9f8f3d9cbd7a3da385 \ + --hash=sha256:4deea1d9169578917d1f35cdb581bc7bab56a7e8c5be2633bd1b9549c3c22a01 \ + --hash=sha256:509c424069dd037d078925b6815fc56b7271f3aaec471e55e6fa513b0a80d2aa \ + --hash=sha256:5509a8373fed30b978557890a226c3d30569746c565b9daba69df80c160365a5 \ + --hash=sha256:59420b5a9a5d3fee483a32adb56d7369ae0d630798da056001be1e9f674f3aa6 \ + --hash=sha256:5d207ff5cceef77796f8aacd44263266248cf1fbc601441524d7835613f8abec \ + --hash=sha256:5ddf5cb8e9c00d9bf8b0c75949fb3ff9ea2096ba531693e2e87336d197fdb908 \ + --hash=sha256:63dae84964a9a3d2610808cee038f435d9a111620c37ccf872c2fcaeca6865b3 \ + --hash=sha256:64a7c7856c3a409011139b17d137c2924df4318dab91ee0530800819617c4381 \ + --hash=sha256:64f7d04410be600aa5ec0626d73d43e68a51c86500ce12917e10fd013e258df5 \ + --hash=sha256:658fdf6022740896c403d45148bf0c36978c6b48c9ef8b1f8d0c7a11b6cdea86 \ + --hash=sha256:678fbceb202382aae42c1f0cd9f56b776bc20a58ae5b553ee1fe6b802983a1d6 \ + --hash=sha256:7835de4c56066e096407a1852e5561f6033786dd987fa90dc384e45b9bd21295 \ + --hash=sha256:7c524203207f5b569df06c96dafdc337228921ee8c3cc5f6e891d024c6595352 \ + --hash=sha256:7ed789d0f7f11fcf118cf0acb378743dfdd4215d7f7d18837c88171405c9a452 \ + --hash=sha256:81be2c0084d8c69e97e3c5d73ce9e2a6e523556f2a19c4e195c09d499be2f808 \ + --hash=sha256:81ee9c967956b9ea39b3a5270b7cb1740928d205b0dc72629164ce621b4debf9 \ + --hash=sha256:8219e2207f6c188d15614ea043636c2b36d2d79bf853639c124a179412325a13 \ + --hash=sha256:96e3ed550600185d34429477f1176cedea8293fa40e47fe37a05751bcb64c997 \ + --hash=sha256:98fb3a2bf525ad66db96745707b93ba0f78928b7a1cb2f1cb4b143bc7e2ba3b3 \ + --hash=sha256:9b36473a2d3e882d1873ea906ce54408b9588dc2c65989664e6e7f5a2de353d7 \ + --hash=sha256:9f91c90f8f3bf436f81c12eeb4d79f9ddd263c71125e6ad71341906832a34386 \ + --hash=sha256:a5fd5500d4e4f7cc88d8c0f2e45126c4307ed31e08f8ec521474f2fd99d35ac3 \ + --hash=sha256:a7171d2b869e9be238ea318c196baf58fbf272704e9c1cd4be8c380eea963342 \ + 
--hash=sha256:a80c6740e1bfbe50cea7cbf74f48823bb57bd59d914ee22ff8a81963b08e62d2 \ + --hash=sha256:b2a7afd24d408b907672015555bc10be2382e6c5f62a488e2d452da670bbd389 \ + --hash=sha256:b43ac1eb9f91e0c14aac1d2ef0f76bc7b9ceea51de47536f61268191adf52ad7 \ + --hash=sha256:b6cc46a27d904c9be5732029769acf4b0af69345172ed1ef6d4db0c023ff603b \ + --hash=sha256:b94bec9eda10111ec7102ef909eca4f3c2df979643924bfe58375f560713a7d1 \ + --hash=sha256:bd9b8e458e2bab52f9ad3ab5dc8b689a3c84b12b2a2f64cd9a0dfe209fb6b42f \ + --hash=sha256:c182d45600556917f811aa019d834a89fe4b6f6255da2fd0bdcf80e970f95918 \ + --hash=sha256:c409691696bec2b5e5c9efd9593c99025bf2f317380bf0d993ee0213516d908a \ + --hash=sha256:c5243044a927e8a6bb28517838662a019cd7f73d7f106bbb37ab5e7fa8451a92 \ + --hash=sha256:c8ab7efeff1884c5da8e18f743b667215300e09043820d11723718de0b7db934 \ + --hash=sha256:cb244adf2499aa37d5dc43431990c7f0b632d841af66a51d22bd89c437b60264 \ + --hash=sha256:d261ec38b8a99a39b62e0119ed47fe3b62f7691c500bc1e815265adc016438c1 \ + --hash=sha256:d2c099be5274847d606574234e494f23a359e829ba337ea9037c3a72b0851942 \ + --hash=sha256:d7e63d1977d3806ce0a1a3e0099b089f61abdede5238ca6a3f3bf8877b46d095 \ + --hash=sha256:dba0f83119b9514bc37272ad012f0cc03f0805cc6a2bea7244e19250ac8ff29f \ + --hash=sha256:dcbee57fedc9b2182c54ffc1c5eed316c3da8bbfeda8009e1b5d7220199d15da \ + --hash=sha256:e042ccf8fe5bf8b6a4b38b3f7d618eb10ea20402b0c9f4add9293408de447974 \ + --hash=sha256:e363440c8534bf2f2ef1b8fdc02037eb5fff8fce2a558519b22d6a3a38b3ec5e \ + --hash=sha256:e64b390a306f9e849ee809f92af6a52cda41741c914358e0e9f8499d03741526 \ + --hash=sha256:f0411641d31aa6f7f0cc13f0f18b63b8dc08da5f3a7505972a42ab059f479ba3 \ + --hash=sha256:f1c13c6c908811f867a8e9e66efb2d6c03d1cdd83e92788fe97f693c457dc44f \ + --hash=sha256:f846fd7c241e5bd4161e2a483663eb66e4d8e12130fcdc052f310f388f1d61c6 # via jinja2 onepasswordconnectsdk==1.5.1 \ --hash=sha256:8924c614ffed98f29faada03dba940dc0bc47851b1f5f4ef7e312e43c10ec25b \ diff --git a/requirements/tox.txt 
b/requirements/tox.txt index f50f47aa68..50aab2f22b 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -50,9 +50,9 @@ pyproject-api==1.8.0 \ --hash=sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228 \ --hash=sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496 # via tox -tox==4.20.0 \ - --hash=sha256:21a8005e3d3fe5658a8e36b8ca3ed13a4230429063c5cc2a2fdac6ee5aa0de34 \ - --hash=sha256:5b78a49b6eaaeab3ae4186415e7c97d524f762ae967c63562687c3e5f0ec23d5 +tox==4.21.2 \ + --hash=sha256:13d996adcd792e7c82994b0e116d85efd84f0c6d185254d83d156f73f86b2038 \ + --hash=sha256:49381ff102296753e378fa5ff30e42a35e695f149b4dbf8a2c49d15fdb5797b2 # via # -r requirements/tox.in # tox-uv @@ -60,27 +60,27 @@ tox-uv==1.13.0 \ --hash=sha256:1037e4abad15a3b708b5970ed7a17a0765d7249b641a92b155bc3343b8b0145b \ --hash=sha256:fb087b8b4ff779c72b48fc72ea1995387bb1c0dfb37910c20e46cef8b5f98c15 # via -r requirements/tox.in -uv==0.4.15 \ - --hash=sha256:04858bfd551fabe1635127d9a0afe5c62e1e7d56cf309a9674840c90bfc1f21e \ - --hash=sha256:0e9b78f1a800a4cfdfbdc9ff4e5d4cce34af770f8a1f2b9416b161f294eb3703 \ - --hash=sha256:1401e73f0e8df62b4cfbf394e65a75f18b73bf8a94a6c5653a55bd6fdb8e1bc3 \ - --hash=sha256:1bb79cb06be9bb25a1bf8641bf34593f64a96b3ba66ebd8712954f647d9faa24 \ - --hash=sha256:21a3cedb2276d635543a10a11c61f75c6e387110e23e90cdb6c6dd2e1f3c9453 \ - --hash=sha256:27884429b7fed371fe1fcbe829659c4a259463d0ecacb7891d800e4754b5f24c \ - --hash=sha256:4e40deb2cf2cb403dbaf65209d49c45462ebbb1bff290d4c18b902b5b385cdc9 \ - --hash=sha256:6eef6881abf9b858020ffd23f4e5d77423329da2d4a1bc0af6613c2f698c369a \ - --hash=sha256:7fcf7f3812dd173d39273e99fb2abb0814be6133e7a721baa424cbcfd25b483b \ - --hash=sha256:8d45295757f66d1913e5917c06f1974745adad842403d419362491939be889a6 \ - --hash=sha256:8e36b8e07595fc6216d01e729c81a0b4ff029a93cc2ef987a73d3b650d6d559c \ - --hash=sha256:9822fa4db0d8d50abf5eebe081c01666a98120455090d0b71463d01d5d4153c1 \ - 
--hash=sha256:9e28141883c0aa8525ad5418e519d8791b7dd75f35020d3b1457db89346c5dc8 \ - --hash=sha256:a5920ff4d114025c51d3f925130ca3b0fad277631846b1109347c24948b29159 \ - --hash=sha256:be46b37b569e3c8ffb7d78022bcc0eadeb987109f709c1cec01b00c261ed9595 \ - --hash=sha256:cf7d554656bb8c5b7710300e04d86ab5137ebdd31fe309d66860a9d474b385f8 \ - --hash=sha256:d16ae6b97eb77f478dfe51d6eb3627048d3f47bd04282d3006e6a212e541dba0 \ - --hash=sha256:e32137ba8202b1291e879e8145113bfb543fcc992b5f043852a96d803788b83c +uv==0.4.18 \ + --hash=sha256:0c4cb31594cb2ed21bd3b603a207e99dfb9610c3db44da9dbbff0f237270f582 \ + --hash=sha256:157e4a2c063b270de348862dd31abfe600d5601183fd2a6efe552840ac179626 \ + --hash=sha256:1944c0ee567ca7db60705c5d213a75b25601094b026cc17af3e704651c1e3753 \ + --hash=sha256:1b59d742b81c7acf75a3aac71d9b24e07407e044bebcf39d3fc3c87094014e20 \ + --hash=sha256:3e3ade81af961f48517fcd99318192c9c635ef9a38a7ca65026af0c803c71906 \ + --hash=sha256:4be600474db6733078503012f2811c4383f490f77366e66b5f686316db52c870 \ + --hash=sha256:4ec60141f92c9667548ebad8daf4c13aabdb58b22c21dcd834641e791e55f289 \ + --hash=sha256:5234d47abe339c15c318e8b1bbd136ea61c4574503eda6944a5aaea91b7f6775 \ + --hash=sha256:6566448278b6849846b6c586fc86748c66aa53ed70f5568e713122543cc86a50 \ + --hash=sha256:8250148484e1b0f89ec19467946e86ee303619985c23228b5a2f2d94d15c6d8b \ + --hash=sha256:8af0b60adcfa2e87c77a3008d3ed6e0b577c0535468dc58e06f905ccbd27124f \ + --hash=sha256:954964eff8c7e2bc63dd4beeb8d45bcaddb5149a7ef29a36abd77ec76c8b837e \ + --hash=sha256:96c3ccee0fd8cf0a9d679407e157b76db1a854638a4ba4fa14f4d116b4e39b03 \ + --hash=sha256:ade18dbbeb05c8cba4f842cc15b20e59467069183f348844750901227df5008d \ + --hash=sha256:b08564c8c7e8b3665ad1d6c8924d4654451f96c956eb5f3b8ec995c77734163d \ + --hash=sha256:df225a568da01f3d7e126d886c3694c5a4a7d8b85162a4d6e97822716ca0e7c4 \ + --hash=sha256:f043c3c4514c149a00a86c3bf44df43062416d41002114e60df33895e8511c41 \ + --hash=sha256:fcc606da545d9a5ec5c2209e7eb2a4eb76627ad75df5eb5616c0b40789fe3933 
# via tox-uv -virtualenv==20.26.5 \ - --hash=sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6 \ - --hash=sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4 +virtualenv==20.26.6 \ + --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ + --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 # via tox From 5432744d0c5c8b677bac65bb8aa373254ade0cff Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 7 Oct 2024 15:11:12 -0700 Subject: [PATCH 217/567] Update Helm docs --- applications/love/README.md | 2 +- applications/love/charts/love-manager/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/love/README.md b/applications/love/README.md index 3db243aefa..d0e10912c7 100644 --- a/applications/love/README.md +++ b/applications/love/README.md @@ -126,7 +126,7 @@ Deployment for the LSST Operators Visualization Environment | love-manager.redis.envSecrets.REDIS_PASS | string | `"redis-pass"` | The redis password secret key name | | love-manager.redis.image.pullPolicy | string | `"IfNotPresent"` | The pull policy for the redis image | | love-manager.redis.image.repository | string | `"redis"` | The redis image to use | -| love-manager.redis.image.tag | string | `"7.4.0"` | The tag to use for the redis image | +| love-manager.redis.image.tag | string | `"7.4.1"` | The tag to use for the redis image | | love-manager.redis.nodeSelector | object | `{}` | Node selection rules for the LOVE redis pods | | love-manager.redis.port | int | `6379` | The redis port number | | love-manager.redis.resources | object | `{}` | Resource specifications for the LOVE redis pods | diff --git a/applications/love/charts/love-manager/README.md b/applications/love/charts/love-manager/README.md index 47a93da5c5..21e76ee331 100644 --- a/applications/love/charts/love-manager/README.md +++ b/applications/love/charts/love-manager/README.md @@ -115,7 +115,7 @@ Helm chart 
for the LOVE manager service. | redis.envSecrets.REDIS_PASS | string | `"redis-pass"` | The redis password secret key name | | redis.image.pullPolicy | string | `"IfNotPresent"` | The pull policy for the redis image | | redis.image.repository | string | `"redis"` | The redis image to use | -| redis.image.tag | string | `"7.4.0"` | The tag to use for the redis image | +| redis.image.tag | string | `"7.4.1"` | The tag to use for the redis image | | redis.nodeSelector | object | `{}` | Node selection rules for the LOVE redis pods | | redis.port | int | `6379` | The redis port number | | redis.resources | object | `{}` | Resource specifications for the LOVE redis pods | From 3076791872273db6a4f41b062be81970dcde3e5d Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 09:35:59 -0700 Subject: [PATCH 218/567] Updates for setup. --- .../charts/obsenv-ui/templates/configmap.yaml | 5 +++-- .../obsenv-management/values-tucson-teststand.yaml | 10 ++++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml index 8eab91a720..b2080131d6 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml @@ -5,5 +5,6 @@ metadata: labels: {{- include "obsenv-ui.labels" . 
| nindent 4 }} data: - BASE_URL: {{ .Values.global.basePath | quote }} - OBSENV_API: "obsenv-api:8080" \ No newline at end of file + BASE_URL: {{ .Values.global.baseUrl | quote }} + OBSENV_API: "obsenv-api/obsenv-api" + AUTH_GROUP: "lsst-ts-integration-testing-team" diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index c6fd6cd501..f8375bf376 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -1,3 +1,13 @@ obsenv-api: + image: + repository: mareuter/obsenv-api + tag: develop config: useFakeObsenvManager: true + +obsenv-ui: + image: + repository: mareuter/obsenv-ui + tag: develop + config: + pathPrefix: /obsenv-management From a781389707ca7c193ad94ee9f8920a24740da183 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 10:19:59 -0700 Subject: [PATCH 219/567] Change pull policy. --- applications/obsenv-management/values-tucson-teststand.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index f8375bf376..6634ba8b17 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -2,6 +2,7 @@ obsenv-api: image: repository: mareuter/obsenv-api tag: develop + pullPolicy: Always config: useFakeObsenvManager: true @@ -9,5 +10,6 @@ obsenv-ui: image: repository: mareuter/obsenv-ui tag: develop + pullPolicy: Always config: pathPrefix: /obsenv-management From 24987dc8274eb22ce8877cd7a7e91e9a03d8f8de Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 10:59:44 -0700 Subject: [PATCH 220/567] Fix obsenv-ui readiness probe port. 
--- .../charts/obsenv-ui/templates/deployment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml index 01c1bcc4df..79c524e7e4 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml @@ -33,7 +33,7 @@ spec: imagePullPolicy: {{ .Values.image.pullPolicy }} ports: - name: "http" - containerPort: 8080 + containerPort: 3000 protocol: "TCP" readinessProbe: httpGet: From 5941499d48df070f0663481b0acc79e934897ac8 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 13:41:04 -0700 Subject: [PATCH 221/567] Fixup ingress. --- applications/obsenv-management/values-tucson-teststand.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 6634ba8b17..c7c5b756c7 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -12,4 +12,8 @@ obsenv-ui: tag: develop pullPolicy: Always config: - pathPrefix: /obsenv-management + pathPrefix: /obsenv-management/(.*) + ingress: + annotations: + nginx.ingress.kubernetes.io/rewrite-target: /$1 + nginx.ingress.kubernetes.io/use-regex: "true" From fd371095fe3d90cf03f01ff5b409ff94452ab17c Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 15:10:46 -0700 Subject: [PATCH 222/567] Update obsenv-ui netpol. 
--- .../obsenv-ui/templates/networkpolicy.yaml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/networkpolicy.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/networkpolicy.yaml index 71cfa11d91..6a76476a6f 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/networkpolicy.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/networkpolicy.yaml @@ -8,14 +8,14 @@ spec: {{- include "obsenv-ui.selectorLabels" . | nindent 6 }} policyTypes: - "Ingress" - - "Egress" ingress: + # Allow inbound access from pods (in any namespace) labeled + # gafaelfawr.lsst.io/ingress: true. - from: - - podSelector: - matchLabels: - app.kubernetes.io/name: obsenv-api - egress: - - to: - - podSelector: - matchLabels: - app.kubernetes.io/name: obsenv-api + - namespaceSelector: {} + podSelector: + matchLabels: + gafaelfawr.lsst.io/ingress: "true" + ports: + - protocol: "TCP" + port: 8080 From 96c853ec716c401f92b375d705e33fa10887d332 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 15:33:53 -0700 Subject: [PATCH 223/567] Try another ingress fix. --- applications/obsenv-management/values-tucson-teststand.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index c7c5b756c7..6f6a67ba7c 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -17,3 +17,4 @@ obsenv-ui: annotations: nginx.ingress.kubernetes.io/rewrite-target: /$1 nginx.ingress.kubernetes.io/use-regex: "true" + nginx.ingress.kubernetes.io/proxy-redirect-to: "https://$host/obsenv-management/" From 5f6a5b4ac8813087ebef457c6b6ba4332cf36b1a Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 16:12:31 -0700 Subject: [PATCH 224/567] Try another ingress fix. 
--- applications/obsenv-management/values-tucson-teststand.yaml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 6f6a67ba7c..9c9d5c7509 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -12,9 +12,7 @@ obsenv-ui: tag: develop pullPolicy: Always config: - pathPrefix: /obsenv-management/(.*) + pathPrefix: /obsenv-management ingress: annotations: - nginx.ingress.kubernetes.io/rewrite-target: /$1 - nginx.ingress.kubernetes.io/use-regex: "true" nginx.ingress.kubernetes.io/proxy-redirect-to: "https://$host/obsenv-management/" From 7d54317f76ae96e45ee8cc773ced95c8ba50955d Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 16:17:37 -0700 Subject: [PATCH 225/567] Try another ingress fix. --- applications/obsenv-management/values-tucson-teststand.yaml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 9c9d5c7509..d07688da6c 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -12,7 +12,4 @@ obsenv-ui: tag: develop pullPolicy: Always config: - pathPrefix: /obsenv-management - ingress: - annotations: - nginx.ingress.kubernetes.io/proxy-redirect-to: "https://$host/obsenv-management/" + pathPrefix: /obsenv-ui From 56ec001a9ffdd7406289372349aaf37ed9eac28f Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 16:47:56 -0700 Subject: [PATCH 226/567] Back to the future. 
--- applications/obsenv-management/values-tucson-teststand.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index d07688da6c..6634ba8b17 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -12,4 +12,4 @@ obsenv-ui: tag: develop pullPolicy: Always config: - pathPrefix: /obsenv-ui + pathPrefix: /obsenv-management From abe7d87670af84bf4e7988fab9545b1292cef543 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 16:48:07 -0700 Subject: [PATCH 227/567] Fix obsenv-ui netpol. --- .../charts/obsenv-ui/templates/networkpolicy.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/networkpolicy.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/networkpolicy.yaml index 6a76476a6f..549f78902c 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/networkpolicy.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/networkpolicy.yaml @@ -18,4 +18,4 @@ spec: gafaelfawr.lsst.io/ingress: "true" ports: - protocol: "TCP" - port: 8080 + port: 3000 From 2b41fda50c98fca03f474692fe7cf2e0b252fc73 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 17:08:56 -0700 Subject: [PATCH 228/567] Fix OBSENV api URL. 
--- .../obsenv-management/charts/obsenv-ui/templates/configmap.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml index b2080131d6..052db25066 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml @@ -6,5 +6,5 @@ metadata: {{- include "obsenv-ui.labels" . | nindent 4 }} data: BASE_URL: {{ .Values.global.baseUrl | quote }} - OBSENV_API: "obsenv-api/obsenv-api" + OBSENV_API: "http://obsenv-api/obsenv-api" AUTH_GROUP: "lsst-ts-integration-testing-team" From adb11515cea1da79fff900e920b423eae9a25a6c Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 17:10:27 -0700 Subject: [PATCH 229/567] Change obsenv-ui readiness probe path. --- .../charts/obsenv-ui/templates/deployment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml index 79c524e7e4..8967cde053 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml @@ -37,7 +37,7 @@ spec: protocol: "TCP" readinessProbe: httpGet: - path: "/" + path: "/obsenv-management" port: "http" resources: {{- toYaml .Values.resources | nindent 12 }} From 719b9d57ea3431dcbdc13054c26b37f22d71f427 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 14 Jun 2024 17:27:47 -0700 Subject: [PATCH 230/567] Fix OBSENV api URL. 
--- .../obsenv-management/charts/obsenv-ui/templates/configmap.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml index 052db25066..bb38aed72f 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml @@ -6,5 +6,5 @@ metadata: {{- include "obsenv-ui.labels" . | nindent 4 }} data: BASE_URL: {{ .Values.global.baseUrl | quote }} - OBSENV_API: "http://obsenv-api/obsenv-api" + OBSENV_API: "http://obsenv-api:8080/obsenv-api" AUTH_GROUP: "lsst-ts-integration-testing-team" From 4d5e9348aae87c22c44f8d0f4f4a8261bdb7a678 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 17 Jun 2024 09:55:20 -0700 Subject: [PATCH 231/567] Use delegate fro gafaelfawr. --- .../charts/obsenv-ui/templates/ingress.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml index a0823292d5..7ad0168089 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml @@ -9,6 +9,11 @@ config: scopes: all: - "exec:internal-tools" + delegate: + internal: + service: "gafaelfawr" + scopes: + - "exec:internal-tools" template: metadata: name: "obsenv-ui" From 0f3332a33f8980e188e62f19f0f5eb62e5b3dcbb Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 17 Jun 2024 10:42:33 -0700 Subject: [PATCH 232/567] Ingress work. 
--- .../obsenv-management/charts/obsenv-ui/templates/ingress.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml index 7ad0168089..fd957d413e 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml @@ -11,7 +11,7 @@ config: - "exec:internal-tools" delegate: internal: - service: "gafaelfawr" + service: "obsenv-ui" scopes: - "exec:internal-tools" template: From 2bf2a8020f5c1d48d09a5771ca14901585b1806b Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 17 Jun 2024 11:09:47 -0700 Subject: [PATCH 233/567] Ingress work. --- .../obsenv-management/charts/obsenv-ui/templates/ingress.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml index fd957d413e..d8ddd68c49 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml @@ -11,7 +11,7 @@ config: - "exec:internal-tools" delegate: internal: - service: "obsenv-ui" + service: "obsenv-management" scopes: - "exec:internal-tools" template: From 7f22b51fb683b3fd2d6d4eeb39dc7600848c30e4 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 17 Jun 2024 11:14:40 -0700 Subject: [PATCH 234/567] Ingress work. 
--- .../obsenv-management/charts/obsenv-ui/templates/ingress.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml index d8ddd68c49..7c15c38e72 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml @@ -11,7 +11,7 @@ config: - "exec:internal-tools" delegate: internal: - service: "obsenv-management" + service: "gafaelfawr/gafaelfawr" scopes: - "exec:internal-tools" template: From b57b9030647007015523e3fc1a8461f297ca1e42 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 17 Jun 2024 11:54:33 -0700 Subject: [PATCH 235/567] Ingress work. --- .../charts/obsenv-ui/templates/ingress.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml index 7c15c38e72..5bd2209af2 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml @@ -11,9 +11,8 @@ config: - "exec:internal-tools" delegate: internal: - service: "gafaelfawr/gafaelfawr" - scopes: - - "exec:internal-tools" + service: "obsenv-api" + scopes: [] template: metadata: name: "obsenv-ui" From 9162634d6a664c21d16a8931de2b286a6e760a09 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Thu, 29 Aug 2024 10:41:28 -0700 Subject: [PATCH 236/567] Change repo locations. 
--- .../obsenv-management/values-tucson-teststand.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 6634ba8b17..8e48991f1a 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -1,15 +1,15 @@ obsenv-api: image: - repository: mareuter/obsenv-api - tag: develop + repository: rubin-cr.lsst.org/obsenv-api + tag: tickets-DM-44286-1 pullPolicy: Always config: useFakeObsenvManager: true obsenv-ui: image: - repository: mareuter/obsenv-ui - tag: develop + repository: rubin-cr.lsst.org/obsenv-ui + tag: tickets-DM-44443 pullPolicy: Always config: pathPrefix: /obsenv-management From 60d1f3cb1f50d4d4511497c18a55ac683ade1fe2 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 27 Sep 2024 13:55:02 -0700 Subject: [PATCH 237/567] Updates for new API container. 
--- applications/obsenv-management/README.md | 5 +++++ .../charts/obsenv-api/README.md | 5 +++++ .../charts/obsenv-api/templates/deployment.yaml | 14 ++++++++++++-- .../charts/obsenv-api/values.yaml | 17 +++++++++++++++++ .../values-tucson-teststand.yaml | 4 ++-- 5 files changed, 41 insertions(+), 4 deletions(-) diff --git a/applications/obsenv-management/README.md b/applications/obsenv-management/README.md index 8ef6b00f51..7983fbc0b3 100644 --- a/applications/obsenv-management/README.md +++ b/applications/obsenv-management/README.md @@ -23,10 +23,15 @@ Rubin Observatory Environment Management System | obsenv-api.image.repository | string | `"rubincr.lsst.org/obsenv-api"` | Image to use in the obsenv-api deployment | | obsenv-api.image.tag | string | The appVersion of the chart | Tag of image to use | | obsenv-api.ingress.annotations | object | `{}` | Additional annotations for the ingress rule | +| obsenv-api.nfsMount.containerPath | string | `"/net/obs-env"` | Path to mount obs-env directory into container | +| obsenv-api.nfsMount.server | string | `""` | Server where the data lives | +| obsenv-api.nfsMount.serverPath | string | `"/obs-env"` | Path on the server where the data lives | | obsenv-api.nodeSelector | object | `{}` | Node selection rules for the obsenv-api deployment pod | | obsenv-api.podAnnotations | object | `{}` | Annotations for the obsenv-api deployment pod | | obsenv-api.replicaCount | int | `1` | Number of web deployment pods to start | | obsenv-api.resources | object | See `values.yaml` | Resource limits and requests for the obsenv-api deployment pod | +| obsenv-api.securityContext.group | int | `72089` | Group ID | +| obsenv-api.securityContext.user | int | `72091` | User ID | | obsenv-api.tolerations | list | `[]` | Tolerations for the obsenv-api deployment pod | | obsenv-ui.affinity | object | `{}` | Affinity rules for the obsenv-ui deployment pod | | obsenv-ui.config.logLevel | string | `"INFO"` | Logging level | diff --git 
a/applications/obsenv-management/charts/obsenv-api/README.md b/applications/obsenv-management/charts/obsenv-api/README.md index 47304addfe..3600062904 100644 --- a/applications/obsenv-management/charts/obsenv-api/README.md +++ b/applications/obsenv-management/charts/obsenv-api/README.md @@ -15,8 +15,13 @@ Helm chart for the Observatory Environment Management API. | image.repository | string | `"rubincr.lsst.org/obsenv-api"` | Image to use in the obsenv-api deployment | | image.tag | string | The appVersion of the chart | Tag of image to use | | ingress.annotations | object | `{}` | Additional annotations for the ingress rule | +| nfsMount.containerPath | string | `"/net/obs-env"` | Path to mount obs-env directory into container | +| nfsMount.server | string | `""` | Server where the data lives | +| nfsMount.serverPath | string | `"/obs-env"` | Path on the server where the data lives | | nodeSelector | object | `{}` | Node selection rules for the obsenv-api deployment pod | | podAnnotations | object | `{}` | Annotations for the obsenv-api deployment pod | | replicaCount | int | `1` | Number of web deployment pods to start | | resources | object | See `values.yaml` | Resource limits and requests for the obsenv-api deployment pod | +| securityContext.group | int | `72089` | Group ID | +| securityContext.user | int | `72091` | User ID | | tolerations | list | `[]` | Tolerations for the obsenv-api deployment pod | diff --git a/applications/obsenv-management/charts/obsenv-api/templates/deployment.yaml b/applications/obsenv-management/charts/obsenv-api/templates/deployment.yaml index 5a93a21f4c..188a2647b2 100644 --- a/applications/obsenv-management/charts/obsenv-api/templates/deployment.yaml +++ b/applications/obsenv-management/charts/obsenv-api/templates/deployment.yaml @@ -47,6 +47,10 @@ spec: drop: - "all" readOnlyRootFilesystem: true + volumeMounts: + - name: obsenv + mountPath: {{ .Values.nfsMount.containerPath }} + readOnly: false {{- with .Values.nodeSelector }} 
nodeSelector: {{- toYaml . | nindent 8 }} @@ -57,5 +61,11 @@ spec: {{- end }} securityContext: runAsNonRoot: true - runAsUser: 72091 - runAsGroup: 72089 + runAsUser: {{ .Values.securityContext.user }} + runAsGroup: {{ .Values.securityContext.group }} + volumes: + - name: obsenv + nfs: + path: {{ .Values.nfsMount.serverPath }} + readOnly: false + server: {{ .Values.nfsMount.server }} diff --git a/applications/obsenv-management/charts/obsenv-api/values.yaml b/applications/obsenv-management/charts/obsenv-api/values.yaml index 5a5143e32b..45001095d3 100644 --- a/applications/obsenv-management/charts/obsenv-api/values.yaml +++ b/applications/obsenv-management/charts/obsenv-api/values.yaml @@ -26,6 +26,23 @@ config: # -- Use fake obsenv management system useFakeObsenvManager: false +securityContext: + # -- User ID + user: 72091 + + # -- Group ID + group: 72089 + +nfsMount: + # -- Path to mount obs-env directory into container + containerPath: /net/obs-env + + # -- Path on the server where the data lives + serverPath: /obs-env + + # -- Server where the data lives + server: "" + ingress: # -- Additional annotations for the ingress rule annotations: {} diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 8e48991f1a..971799628c 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -3,8 +3,8 @@ obsenv-api: repository: rubin-cr.lsst.org/obsenv-api tag: tickets-DM-44286-1 pullPolicy: Always - config: - useFakeObsenvManager: true + nfsMount: + server: nfs-obsenv.tu.lsst.org obsenv-ui: image: From 1e11d1dbeebeb9d24e80050ed0467798c99c6527 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 27 Sep 2024 15:43:49 -0700 Subject: [PATCH 238/567] Remove egress policy on obsenv-api. 
--- .../charts/obsenv-api/templates/networkpolicy.yaml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/applications/obsenv-management/charts/obsenv-api/templates/networkpolicy.yaml b/applications/obsenv-management/charts/obsenv-api/templates/networkpolicy.yaml index 33c6be6999..927209fd9a 100644 --- a/applications/obsenv-management/charts/obsenv-api/templates/networkpolicy.yaml +++ b/applications/obsenv-management/charts/obsenv-api/templates/networkpolicy.yaml @@ -15,7 +15,4 @@ spec: matchLabels: app.kubernetes.io/name: obsenv-ui egress: - - to: - - podSelector: - matchLabels: - app.kubernetes.io/name: obsenv-ui + - {} From 37546a485d49383d45ae999f932745caf88d4a55 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 30 Sep 2024 16:07:21 -0700 Subject: [PATCH 239/567] Add login redirect. --- .../obsenv-management/charts/obsenv-ui/templates/ingress.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml index 5bd2209af2..c871bd17d2 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "exec:internal-tools" + loginRedirect: true delegate: internal: service: "obsenv-api" From 791fff55b9cd1b99f37067584c330e5948222711 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 30 Sep 2024 16:09:02 -0700 Subject: [PATCH 240/567] Change API log level. 
--- applications/obsenv-management/values-tucson-teststand.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 971799628c..5dc1e87445 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -3,6 +3,8 @@ obsenv-api: repository: rubin-cr.lsst.org/obsenv-api tag: tickets-DM-44286-1 pullPolicy: Always + config: + logLevel: "DEBUG" nfsMount: server: nfs-obsenv.tu.lsst.org From e830d88e477602b98c8cde8ae01de84ce0298e80 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 30 Sep 2024 16:20:28 -0700 Subject: [PATCH 241/567] Fix login redirect. --- .../obsenv-management/charts/obsenv-ui/templates/ingress.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml index c871bd17d2..de30d87046 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml @@ -6,10 +6,10 @@ metadata: {{- include "obsenv-ui.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - "exec:internal-tools" - loginRedirect: true delegate: internal: service: "obsenv-api" From 1fe333ebc501c9487a3267694950a075960a6b85 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Tue, 1 Oct 2024 05:29:14 -0700 Subject: [PATCH 242/567] Update obsenv-ui configmap. 
--- .../obsenv-management/charts/obsenv-ui/templates/configmap.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml index bb38aed72f..789ed10528 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml @@ -7,4 +7,4 @@ metadata: data: BASE_URL: {{ .Values.global.baseUrl | quote }} OBSENV_API: "http://obsenv-api:8080/obsenv-api" - AUTH_GROUP: "lsst-ts-integration-testing-team" + NEXT_PUBLIC_AUTH_GROUP: "lsst-ts-integration-testing-team" From 9fee42f3f05b22260d8612d74692986855794146 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Wed, 2 Oct 2024 12:30:16 -0700 Subject: [PATCH 243/567] Try different approach to envvars. --- .../charts/obsenv-ui/templates/deployment.yaml | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml index 8967cde053..7716f23f3c 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml @@ -26,9 +26,9 @@ spec: automountServiceAccountToken: false containers: - name: {{ .Chart.Name }} - envFrom: - - configMapRef: - name: "obsenv-ui" + # envFrom: + # - configMapRef: + # name: "obsenv-ui" image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy }} ports: @@ -47,6 +47,9 @@ spec: drop: - "all" readOnlyRootFilesystem: true + volumeMounts: + - name: config-mount + mountPath: /app/.env.production.local {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . 
| nindent 8 }} @@ -59,3 +62,7 @@ spec: runAsNonRoot: true runAsUser: 1001 runAsGroup: 1001 + volumes: + - name: config-mount + configMap: + name: obsenv-ui From b447ed9d79cd7748ed988eb04957a1c664e9feab Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Wed, 2 Oct 2024 14:03:49 -0700 Subject: [PATCH 244/567] Change config file name. --- .../charts/obsenv-ui/templates/deployment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml index 7716f23f3c..16cb1bad7a 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml @@ -49,7 +49,7 @@ spec: readOnlyRootFilesystem: true volumeMounts: - name: config-mount - mountPath: /app/.env.production.local + mountPath: /app/.env.production {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} From bbc5d39a0ec1f432cbdb405b76964713f8a83685 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Wed, 2 Oct 2024 14:41:19 -0700 Subject: [PATCH 245/567] Revert config. 
--- .../obsenv-ui/templates/deployment.yaml | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml index 16cb1bad7a..b0b09681c2 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml @@ -26,9 +26,9 @@ spec: automountServiceAccountToken: false containers: - name: {{ .Chart.Name }} - # envFrom: - # - configMapRef: - # name: "obsenv-ui" + envFrom: + - configMapRef: + name: "obsenv-ui" image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy }} ports: @@ -47,9 +47,9 @@ spec: drop: - "all" readOnlyRootFilesystem: true - volumeMounts: - - name: config-mount - mountPath: /app/.env.production + # volumeMounts: + # - name: config-mount + # mountPath: /app/.env.production {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -62,7 +62,7 @@ spec: runAsNonRoot: true runAsUser: 1001 runAsGroup: 1001 - volumes: - - name: config-mount - configMap: - name: obsenv-ui + # volumes: + # - name: config-mount + # configMap: + # name: obsenv-ui From 6c3873dfc16722fbd4cd2604132d15531909da7a Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 4 Oct 2024 10:53:37 -0700 Subject: [PATCH 246/567] Back to server side var. 
--- .../obsenv-management/charts/obsenv-ui/templates/configmap.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml index 789ed10528..bb38aed72f 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml @@ -7,4 +7,4 @@ metadata: data: BASE_URL: {{ .Values.global.baseUrl | quote }} OBSENV_API: "http://obsenv-api:8080/obsenv-api" - NEXT_PUBLIC_AUTH_GROUP: "lsst-ts-integration-testing-team" + AUTH_GROUP: "lsst-ts-integration-testing-team" From 8ef2c9bacf9b53745b6a6d7a3b990b42293e41a3 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 7 Oct 2024 16:23:20 -0700 Subject: [PATCH 247/567] Update package versions. --- applications/obsenv-management/values-tucson-teststand.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 5dc1e87445..6c0759ce10 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -1,7 +1,7 @@ obsenv-api: image: repository: rubin-cr.lsst.org/obsenv-api - tag: tickets-DM-44286-1 + tag: 0.1.0 pullPolicy: Always config: logLevel: "DEBUG" @@ -11,7 +11,7 @@ obsenv-api: obsenv-ui: image: repository: rubin-cr.lsst.org/obsenv-ui - tag: tickets-DM-44443 + tag: 0.1.0 pullPolicy: Always config: pathPrefix: /obsenv-management From 9f64a736d980b8b9078eccf210ff7a56253d9e26 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 7 Oct 2024 16:25:56 -0700 Subject: [PATCH 248/567] Remove commented out code. 
--- .../charts/obsenv-ui/templates/deployment.yaml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml index b0b09681c2..8967cde053 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml @@ -47,9 +47,6 @@ spec: drop: - "all" readOnlyRootFilesystem: true - # volumeMounts: - # - name: config-mount - # mountPath: /app/.env.production {{- with .Values.nodeSelector }} nodeSelector: {{- toYaml . | nindent 8 }} @@ -62,7 +59,3 @@ spec: runAsNonRoot: true runAsUser: 1001 runAsGroup: 1001 - # volumes: - # - name: config-mount - # configMap: - # name: obsenv-ui From b57b6a7a484ecd104b295b7074c25b1cf6c5a7ae Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Fri, 27 Sep 2024 14:39:54 -0400 Subject: [PATCH 249/567] Add templatebot repo configurations - TEMPLATEBOT_TEMPLATE_REPO_URL - Configure and set TEMPLATEBOT_REPO_CACHE_URL --- applications/templatebot/README.md | 1 + applications/templatebot/templates/configmap.yaml | 1 + applications/templatebot/templates/deployment.yaml | 7 +++++++ applications/templatebot/values.yaml | 3 +++ 4 files changed, 12 insertions(+) diff --git a/applications/templatebot/README.md b/applications/templatebot/README.md index fa76b28227..20f4a576f5 100644 --- a/applications/templatebot/README.md +++ b/applications/templatebot/README.md @@ -14,6 +14,7 @@ Create new projects | config.logLevel | string | `"INFO"` | Logging level | | config.logProfile | string | `"production"` | Logging profile (`production` for JSON, `development` for human-friendly) | | config.pathPrefix | string | `"/templatebot"` | URL path prefix | +| config.templateRepoUrl | string | `"https://github.com/lsst/templates"` | URL for the template repository | | config.topics.slackAppMention | string | 
`"lsst.square-events.squarebot.slack.app.mention"` | Kafka topic name for the Slack `app_mention` events | | config.topics.slackBlockActions | string | `"lsst.square-events.squarebot.slack.interaction.block-actions"` | Kafka topic for Slack `block_actions` interaction events | | config.topics.slackMessageIm | string | `"lsst.square-events.squarebot.slack.message.im"` | Kafka topic name for the Slack `message.im` events (direct message channels) | diff --git a/applications/templatebot/templates/configmap.yaml b/applications/templatebot/templates/configmap.yaml index 343c47e17b..a9ed3ebcb6 100644 --- a/applications/templatebot/templates/configmap.yaml +++ b/applications/templatebot/templates/configmap.yaml @@ -9,6 +9,7 @@ data: TEMPLATEBOT_ENVIRONMENT_URL: {{ .Values.global.baseUrl | quote }} TEMPLATEBOT_PATH_PREFIX: {{ .Values.config.pathPrefix | quote }} TEMPLATEBOT_PROFILE: {{ .Values.config.logProfile | quote }} + TEMPLATEBOT_TEMPLATE_REPO_URL: {{ .Values.config.templateRepoUrl | quote }} TEMPLATEBOT_APP_MENTION_TOPIC: {{ .Values.config.topics.slackAppMention | quote }} TEMPLATEBOT_MESSAGE_IM_TOPIC: {{ .Values.config.topics.slackMessageIm | quote }} TEMPLATEBOT_BLOCK_ACTIONS_TOPIC: {{ .Values.config.topics.slackBlockActions | quote }} diff --git a/applications/templatebot/templates/deployment.yaml b/applications/templatebot/templates/deployment.yaml index 79888b1aff..008eb928a7 100644 --- a/applications/templatebot/templates/deployment.yaml +++ b/applications/templatebot/templates/deployment.yaml @@ -30,6 +30,9 @@ spec: - configMapRef: name: "templatebot" env: + # Writeable directory for caching template repo checkouts + - name: "TEMPLATEBOT_TEMPLATE_CACHE_DIR" + value: "/tmp/template_repo_cache" # Writeable directory for concatenating certs. See "tmp" volume. 
- name: "KAFKA_CERT_TEMP_DIR" value: "/tmp/kafka_certs" @@ -70,6 +73,8 @@ spec: subPath: "ssl.keystore.key" # private key for the consuming client - name: "tmp" mountPath: "/tmp/kafka_certs" + - name: "repo-cache" + mountPath: "/tmp/template_repo_cache" image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy }} ports: @@ -109,3 +114,5 @@ spec: secretName: "templatebot" - name: "tmp" emptyDir: {} + - name: "repo-cache" + emptyDir: {} diff --git a/applications/templatebot/values.yaml b/applications/templatebot/values.yaml index 227aa85890..1a71e8a5e5 100644 --- a/applications/templatebot/values.yaml +++ b/applications/templatebot/values.yaml @@ -27,6 +27,9 @@ config: # -- URL path prefix pathPrefix: "/templatebot" + # -- URL for the template repository + templateRepoUrl: "https://github.com/lsst/templates" + topics: # -- Kafka topic name for the Slack `app_mention` events slackAppMention: "lsst.square-events.squarebot.slack.app.mention" From 098a783b2dcbb46b994f165906183d131f3420d0 Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Tue, 1 Oct 2024 17:46:45 -0400 Subject: [PATCH 250/567] Configure GitHub App secrets for templatebot --- applications/templatebot/templates/deployment.yaml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/applications/templatebot/templates/deployment.yaml b/applications/templatebot/templates/deployment.yaml index 008eb928a7..5ed0bc0c7e 100644 --- a/applications/templatebot/templates/deployment.yaml +++ b/applications/templatebot/templates/deployment.yaml @@ -61,6 +61,16 @@ spec: secretKeyRef: name: "templatebot" key: "TEMPLATEBOT_SLACK_TOKEN" + - name: "TEMPLATEBOT_GITHUB_APP_ID" + valueFrom: + secretKeyRef: + name: "templatebot" + key: "TEMPLATEBOT_GITHUB_APP_ID" + - name: "TEMPLATEBOT_GITHUB_APP_PRIVATE_KEY" + valueFrom: + secretKeyRef: + name: "templatebot" + key: "TEMPLATEBOT_GITHUB_APP_PRIVATE_KEY" volumeMounts: - name: "kafka" mountPath: 
"/etc/kafkacluster/ca.crt" From 89d4c487c824c0d14d8f71e8dd30c9426b3f489a Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Wed, 2 Oct 2024 18:22:23 -0400 Subject: [PATCH 251/567] Add LSST the Docs secrets to templatebot --- applications/templatebot/secrets.yaml | 6 ++++++ applications/templatebot/templates/deployment.yaml | 10 ++++++++++ 2 files changed, 16 insertions(+) diff --git a/applications/templatebot/secrets.yaml b/applications/templatebot/secrets.yaml index 7e672c9ecf..96ead8d3eb 100644 --- a/applications/templatebot/secrets.yaml +++ b/applications/templatebot/secrets.yaml @@ -24,3 +24,9 @@ TEMPLATEBOT_SLACK_TOKEN: copy: application: squarebot key: SQUAREBOT_SLACK_TOKEN +TEMPLATEBOT_LTD_USERNAME: + description: >- + The username for the LSST the Docs admin account. +TEMPLATEBOT_LTD_PASSWORD: + description: >- + The password for the LSST the Docs admin account. diff --git a/applications/templatebot/templates/deployment.yaml b/applications/templatebot/templates/deployment.yaml index 5ed0bc0c7e..435be52e33 100644 --- a/applications/templatebot/templates/deployment.yaml +++ b/applications/templatebot/templates/deployment.yaml @@ -71,6 +71,16 @@ spec: secretKeyRef: name: "templatebot" key: "TEMPLATEBOT_GITHUB_APP_PRIVATE_KEY" + - name: "TEMPLATEBOT_LTD_USERNAME" + valueFrom: + secretKeyRef: + name: "templatebot" + key: "TEMPLATEBOT_LTD_USERNAME" + - name: "TEMPLATEBOT_LTD_PASSWORD" + valueFrom: + secretKeyRef: + name: "templatebot" + key: "TEMPLATEBOT_LTD_PASSWORD" volumeMounts: - name: "kafka" mountPath: "/etc/kafkacluster/ca.crt" From 9a63fe465458e585bb740743dc2bfa66af1962bc Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Mon, 7 Oct 2024 16:54:33 -0400 Subject: [PATCH 252/567] Make tmp writable Necessary for setting up tmp directories for new repositories. 
--- applications/templatebot/templates/deployment.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/applications/templatebot/templates/deployment.yaml b/applications/templatebot/templates/deployment.yaml index 435be52e33..80940f24b9 100644 --- a/applications/templatebot/templates/deployment.yaml +++ b/applications/templatebot/templates/deployment.yaml @@ -91,8 +91,10 @@ spec: - name: "kafka" mountPath: "/etc/kafkauser/user.key" subPath: "ssl.keystore.key" # private key for the consuming client - - name: "tmp" + - name: "kafka-certs-tmp" mountPath: "/tmp/kafka_certs" + - name: "tmp" + mountPath: "/tmp" - name: "repo-cache" mountPath: "/tmp/template_repo_cache" image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" @@ -134,5 +136,7 @@ spec: secretName: "templatebot" - name: "tmp" emptyDir: {} + - name: "kafka-certs-tmp" + emptyDir: {} - name: "repo-cache" emptyDir: {} From 71c755dc3777194d2cad462ed69574e74f9967f5 Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Mon, 7 Oct 2024 17:19:49 -0400 Subject: [PATCH 253/567] Add cookiecutter replay dir as writeable --- applications/templatebot/templates/deployment.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/applications/templatebot/templates/deployment.yaml b/applications/templatebot/templates/deployment.yaml index 80940f24b9..dd30ff8bd7 100644 --- a/applications/templatebot/templates/deployment.yaml +++ b/applications/templatebot/templates/deployment.yaml @@ -97,6 +97,8 @@ spec: mountPath: "/tmp" - name: "repo-cache" mountPath: "/tmp/template_repo_cache" + - name: "cookiecutter-replay-dir" + mountPath: "/home/appuser/.cookiecutter_replay/" image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy }} ports: @@ -136,6 +138,8 @@ spec: secretName: "templatebot" - name: "tmp" emptyDir: {} + - name: "cookiecutter-replay-dir" + emptyDir: {} - name: "kafka-certs-tmp" emptyDir: 
{} - name: "repo-cache" From c7a380cc3c44f3cd15ac46d80b267bf02a68381f Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Mon, 7 Oct 2024 17:48:35 -0400 Subject: [PATCH 254/567] Add GITHUB_APP_USERNAME secret We'll see if correctly setting this allows the committer to be associated with the bot. Like the other squarebot app secrets, this comes from the Squarebot secrets and gets copied to templatebot. --- applications/squarebot/secrets.yaml | 3 +++ applications/templatebot/secrets.yaml | 6 ++++++ applications/templatebot/templates/deployment.yaml | 5 +++++ 3 files changed, 14 insertions(+) diff --git a/applications/squarebot/secrets.yaml b/applications/squarebot/secrets.yaml index 045c074a21..f1f68a460b 100644 --- a/applications/squarebot/secrets.yaml +++ b/applications/squarebot/secrets.yaml @@ -1,6 +1,9 @@ SQUAREBOT_GITHUB_APP_ID: description: >- The ID of the GitHub App shared by all Squarebot services. +SQUAREBOT_GITHUB_APP_USERNAME: + description: >- + The username slug of the GitHub App shared by all Squarebot services. SQUAREBOT_GITHUB_APP_PRIVATE_KEY: description: >- The private key for the GitHub App shared by all Squarebot services. diff --git a/applications/templatebot/secrets.yaml b/applications/templatebot/secrets.yaml index 96ead8d3eb..a472d3d3ba 100644 --- a/applications/templatebot/secrets.yaml +++ b/applications/templatebot/secrets.yaml @@ -1,3 +1,9 @@ +TEMPLATEBOT_GITHUB_APP_USERNAME: + description: >- + The username slug for the GitHub App shared by all Squarebot services. + copy: + application: squarebot + key: SQUAREBOT_GITHUB_APP_USERNAME TEMPLATEBOT_GITHUB_APP_ID: description: >- The ID of the GitHub App shared by all Squarebot services. 
diff --git a/applications/templatebot/templates/deployment.yaml b/applications/templatebot/templates/deployment.yaml index dd30ff8bd7..497485cd31 100644 --- a/applications/templatebot/templates/deployment.yaml +++ b/applications/templatebot/templates/deployment.yaml @@ -66,6 +66,11 @@ spec: secretKeyRef: name: "templatebot" key: "TEMPLATEBOT_GITHUB_APP_ID" + - name: "TEMPLATEBOT_GITHUB_APP_USERNAME" + valueFrom: + secretKeyRef: + name: "templatebot" + key: "TEMPLATEBOT_GITHUB_APP_USERNAME" - name: "TEMPLATEBOT_GITHUB_APP_PRIVATE_KEY" valueFrom: secretKeyRef: From 5ff3b803b6b4d9aadb0d5dc3cd9507b06ca35c9c Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Tue, 8 Oct 2024 16:51:28 -0400 Subject: [PATCH 255/567] Deploy templatebot 0.3.0 --- applications/templatebot/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/templatebot/Chart.yaml b/applications/templatebot/Chart.yaml index c8a3e6c9b1..f8999cdf59 100644 --- a/applications/templatebot/Chart.yaml +++ b/applications/templatebot/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: "tickets-DM-43699" +appVersion: "0.3.0" description: Create new projects name: templatebot sources: From 7b19b6aa68882f63897145753e553cca4df53556 Mon Sep 17 00:00:00 2001 From: Jonathan Sick Date: Tue, 8 Oct 2024 16:52:49 -0400 Subject: [PATCH 256/567] Enable templatebot on roundtable-prod --- environments/values-roundtable-prod.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/environments/values-roundtable-prod.yaml b/environments/values-roundtable-prod.yaml index a70ac6123a..c0625aa465 100644 --- a/environments/values-roundtable-prod.yaml +++ b/environments/values-roundtable-prod.yaml @@ -27,5 +27,6 @@ applications: squarebot: true telegraf: true telegraf-ds: true + templatebot: true unfurlbot: true vault: true From e5f68ff1d187fc38e5a1de00676fabf991c8fb90 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 8 Oct 2024 11:24:00 -0700 Subject: [PATCH 257/567] Record MTVMS telemetry at both 
Summit and USDF --- applications/sasquatch/values-summit.yaml | 7 +++++++ applications/sasquatch/values-usdfprod.yaml | 7 +++++++ 2 files changed, 14 insertions(+) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 7a6158cfef..d95ac527d1 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -401,6 +401,13 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.sal.GIS" ] debug: true + mtvms: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.MTVMS" ] + debug: true lsstcam: enabled: true database: "efd" diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 9e02f4ea5e..58ac5b9707 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -232,6 +232,13 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.sal.GIS" ] debug: true + mtvms: + enabled: true + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.MTVMS" ] + debug: true lsstcam: enabled: true database: "efd" From 0e0ca32df69629a27097a3e453ee6f7ad87ddee0 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 9 Oct 2024 06:34:03 -0700 Subject: [PATCH 258/567] Enable repairer connectors at USDF --- .../templates/_helpers.tpl | 30 +++++++++++++++++++ applications/sasquatch/values-usdfprod.yaml | 18 +++++++++++ 2 files changed, 48 insertions(+) diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl index f2756bc5ee..bb08f3add7 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/templates/_helpers.tpl @@ -66,6 +66,36 @@ data: max_undelivered_messages = {{ default 10000 .value.max_undelivered_messages }} 
compression_codec = {{ default 3 .value.compression_codec }} + {{- if .value.repair }} + [[inputs.kafka_consumer]] + brokers = [ + "sasquatch-kafka-brokers.sasquatch:9092" + ] + consumer_group = "telegraf-kafka-consumer-{{ .key }}-repairer" + sasl_mechanism = "SCRAM-SHA-512" + sasl_password = "$TELEGRAF_PASSWORD" + sasl_username = "telegraf" + data_format = "avro" + avro_schema_registry = "http://sasquatch-schema-registry.sasquatch:8081" + avro_timestamp = {{ default "private_efdStamp" .value.timestamp_field | quote }} + avro_timestamp_format = {{ default "unix" .value.timestamp_format | quote }} + avro_union_mode = {{ default "nullable" .value.union_mode | quote }} + avro_field_separator = {{ default "" .value.union_field_separator | quote }} + {{- if .value.fields }} + avro_fields = {{ .value.fields }} + {{- end }} + {{- if .value.tags }} + avro_tags = {{ .value.tags }} + {{- end }} + topic_regexps = {{ .value.topicRegexps }} + offset = "oldest" + precision = {{ default "1us" .value.precision | quote }} + max_processing_time = {{ default "5s" .value.max_processing_time | quote }} + consumer_fetch_default = {{ default "20MB" .value.consumer_fetch_default | quote }} + max_undelivered_messages = {{ default 10000 .value.max_undelivered_messages }} + compression_codec = {{ default 3 .value.compression_codec }} + {{- end }} + [[inputs.internal]] name_prefix = "telegraf_" collect_memstats = true diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 58ac5b9707..6ecfd65923 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -143,6 +143,7 @@ telegraf-kafka-consumer: # CSC connectors maintel: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -150,6 +151,7 @@ telegraf-kafka-consumer: debug: true mtmount: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -157,6 
+159,7 @@ telegraf-kafka-consumer: debug: true comcam: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -164,6 +167,7 @@ telegraf-kafka-consumer: debug: true eas: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -171,6 +175,7 @@ telegraf-kafka-consumer: debug: true m1m3: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -178,6 +183,7 @@ telegraf-kafka-consumer: debug: true m2: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -185,6 +191,7 @@ telegraf-kafka-consumer: debug: true obssys: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -192,6 +199,7 @@ telegraf-kafka-consumer: debug: true ocps: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -199,6 +207,7 @@ telegraf-kafka-consumer: debug: true pmd: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -206,6 +215,7 @@ telegraf-kafka-consumer: debug: true calsys: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -213,6 +223,7 @@ telegraf-kafka-consumer: debug: true mtaircompressor: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -220,6 +231,7 @@ telegraf-kafka-consumer: debug: true genericcamera: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -227,6 +239,7 @@ telegraf-kafka-consumer: debug: true gis: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -241,6 +254,7 @@ telegraf-kafka-consumer: debug: true lsstcam: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -248,6 +262,7 @@ telegraf-kafka-consumer: debug: true auxtel: 
enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -255,6 +270,7 @@ telegraf-kafka-consumer: debug: true latiss: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -262,6 +278,7 @@ telegraf-kafka-consumer: debug: true test: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -269,6 +286,7 @@ telegraf-kafka-consumer: debug: true lasertracker: enabled: true + repair: true database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | From 4845c00762629d52edaef6f3d59527648d4e9082 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 16:14:53 +0000 Subject: [PATCH 259/567] Update Helm release cert-manager to v1.16.1 --- applications/cert-manager/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/cert-manager/Chart.yaml b/applications/cert-manager/Chart.yaml index 4961f9c5a3..a1e9214962 100644 --- a/applications/cert-manager/Chart.yaml +++ b/applications/cert-manager/Chart.yaml @@ -7,5 +7,5 @@ sources: - https://github.com/cert-manager/cert-manager dependencies: - name: cert-manager - version: v1.15.3 + version: v1.16.1 repository: https://charts.jetstack.io From 4882e835287301855991ea8773aa1712e17bff8b Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 9 Oct 2024 09:41:10 -0700 Subject: [PATCH 260/567] Disable repairer connectors at USDF --- applications/sasquatch/values-usdfprod.yaml | 37 +++++++++++---------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 6ecfd65923..1e4f089dbd 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -143,7 +143,7 @@ telegraf-kafka-consumer: # CSC connectors maintel: enabled: true - repair: true + repair: false 
database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -151,7 +151,7 @@ telegraf-kafka-consumer: debug: true mtmount: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -159,7 +159,7 @@ telegraf-kafka-consumer: debug: true comcam: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -167,7 +167,7 @@ telegraf-kafka-consumer: debug: true eas: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -175,7 +175,7 @@ telegraf-kafka-consumer: debug: true m1m3: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -183,7 +183,7 @@ telegraf-kafka-consumer: debug: true m2: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -191,7 +191,7 @@ telegraf-kafka-consumer: debug: true obssys: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -199,7 +199,7 @@ telegraf-kafka-consumer: debug: true ocps: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -207,7 +207,7 @@ telegraf-kafka-consumer: debug: true pmd: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -215,7 +215,7 @@ telegraf-kafka-consumer: debug: true calsys: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -223,7 +223,7 @@ telegraf-kafka-consumer: debug: true mtaircompressor: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -231,7 +231,7 @@ telegraf-kafka-consumer: debug: true genericcamera: enabled: true - repair: true + repair: false database: "efd" timestamp_field: 
"private_efdStamp" topicRegexps: | @@ -239,7 +239,7 @@ telegraf-kafka-consumer: debug: true gis: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -247,6 +247,7 @@ telegraf-kafka-consumer: debug: true mtvms: enabled: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -254,7 +255,7 @@ telegraf-kafka-consumer: debug: true lsstcam: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -262,7 +263,7 @@ telegraf-kafka-consumer: debug: true auxtel: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -270,7 +271,7 @@ telegraf-kafka-consumer: debug: true latiss: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -278,7 +279,7 @@ telegraf-kafka-consumer: debug: true test: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | @@ -286,7 +287,7 @@ telegraf-kafka-consumer: debug: true lasertracker: enabled: true - repair: true + repair: false database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | From 28a4caed94d1ed5509038a81f3fb066b968a280d Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 9 Oct 2024 11:42:55 -0700 Subject: [PATCH 261/567] Upgrade Telegraf to version 1.32.1 - This version releases the fix for the race condition bug found in the Avro parser. 
--- applications/sasquatch/README.md | 8 ++++---- .../sasquatch/charts/telegraf-kafka-consumer/Chart.yaml | 2 +- .../sasquatch/charts/telegraf-kafka-consumer/README.md | 4 ++-- .../sasquatch/charts/telegraf-kafka-consumer/values.yaml | 4 ++-- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index cde6e253fc..4e1c8cda32 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -425,8 +425,8 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer.env | list | See `values.yaml` | Telegraf agent enviroment variables | | telegraf-kafka-consumer.envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | | telegraf-kafka-consumer.image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | -| telegraf-kafka-consumer.image.repo | string | `"docker.io/lsstsqre/telegraf"` | Telegraf image repository | -| telegraf-kafka-consumer.image.tag | string | `"avro-mutex"` | Telegraf image tag | +| telegraf-kafka-consumer.image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | +| telegraf-kafka-consumer.image.tag | string | `"1.32.1-alpine"` | Telegraf image tag | | telegraf-kafka-consumer.imagePullSecrets | list | `[]` | Secret names to use for Docker pulls | | telegraf-kafka-consumer.influxdb.database | string | `"telegraf-kafka-consumer-v1"` | Name of the InfluxDB v1 database to write to | | telegraf-kafka-consumer.influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | @@ -462,8 +462,8 @@ Rubin Observatory's telemetry service | telegraf-kafka-consumer-oss.env | list | See `values.yaml` | Telegraf agent enviroment variables | | telegraf-kafka-consumer-oss.envFromSecret | string | `""` | Name of the secret with values to be added to the environment. 
| | telegraf-kafka-consumer-oss.image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | -| telegraf-kafka-consumer-oss.image.repo | string | `"docker.io/lsstsqre/telegraf"` | Telegraf image repository | -| telegraf-kafka-consumer-oss.image.tag | string | `"avro-mutex"` | Telegraf image tag | +| telegraf-kafka-consumer-oss.image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | +| telegraf-kafka-consumer-oss.image.tag | string | `"1.32.1-alpine"` | Telegraf image tag | | telegraf-kafka-consumer-oss.imagePullSecrets | list | `[]` | Secret names to use for Docker pulls | | telegraf-kafka-consumer-oss.influxdb.database | string | `"telegraf-kafka-consumer-v1"` | Name of the InfluxDB v1 database to write to | | telegraf-kafka-consumer-oss.influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/Chart.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/Chart.yaml index 92210beefa..9a3b69690e 100755 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/Chart.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/Chart.yaml @@ -4,4 +4,4 @@ version: 1.0.0 description: > Telegraf is an agent written in Go for collecting, processing, aggregating, and writing metrics. This chart deploys multiple instances of the telegraf agent to connect Kafka and InfluxDB in Sasquatch. 
-appVersion: 1.23.3 +appVersion: 1.32.1 diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md index 0be7c27bdb..532358d835 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/README.md +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/README.md @@ -12,8 +12,8 @@ Telegraf is an agent written in Go for collecting, processing, aggregating, and | env | list | See `values.yaml` | Telegraf agent enviroment variables | | envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | -| image.repo | string | `"docker.io/lsstsqre/telegraf"` | Telegraf image repository | -| image.tag | string | `"avro-mutex"` | Telegraf image tag | +| image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | +| image.tag | string | `"1.32.1-alpine"` | Telegraf image tag | | imagePullSecrets | list | `[]` | Secret names to use for Docker pulls | | influxdb.database | string | `"telegraf-kafka-consumer-v1"` | Name of the InfluxDB v1 database to write to | | influxdb.url | string | `"http://sasquatch-influxdb.sasquatch:8086"` | URL of the InfluxDB v1 instance to write to | diff --git a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml index dd0fc7cb4f..41309868b7 100644 --- a/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml +++ b/applications/sasquatch/charts/telegraf-kafka-consumer/values.yaml @@ -5,10 +5,10 @@ enabled: false image: # -- Telegraf image repository - repo: "docker.io/lsstsqre/telegraf" + repo: "docker.io/library/telegraf" # -- Telegraf image tag - tag: "avro-mutex" + tag: "1.32.1-alpine" # -- Image pull policy pullPolicy: "IfNotPresent" From eb0ba489663b048ca2d39c6fbbadda482aeed0c4 Mon Sep 17 00:00:00 2001 From: Dan Fuchs Date: Wed, 2 Oct 
2024 16:57:01 -0500 Subject: [PATCH 262/567] DM-45522 mobu: app metrics sasquatch config --- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/app-metrics/README.md | 2 +- applications/sasquatch/charts/app-metrics/values.yaml | 7 ++++++- applications/sasquatch/values-idfdev.yaml | 5 +++++ 4 files changed, 13 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 4e1c8cda32..10644792ef 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -90,7 +90,7 @@ Rubin Observatory's telemetry service | app-metrics.debug | bool | false | Run Telegraf in debug mode. | | app-metrics.env | list | See `values.yaml` | Telegraf agent enviroment variables | | app-metrics.envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | -| app-metrics.globalAppConfig | object | `{}` | app-metrics configuration in any environment in which the subchart is enabled. This should stay globally specified here, and it shouldn't be overridden. See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | +| app-metrics.globalAppConfig | object | See `values.yaml` | app-metrics configuration in any environment in which the subchart is enabled. This should stay globally specified here, and it shouldn't be overridden. See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | | app-metrics.globalInfluxTags | list | `["service"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). 
These will be concatenated with the `influxTags` from `globalAppConfig` | | app-metrics.image.pullPolicy | string | `"Always"` | Image pull policy | | app-metrics.image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | diff --git a/applications/sasquatch/charts/app-metrics/README.md b/applications/sasquatch/charts/app-metrics/README.md index 1cb6c56b6d..a6a81462af 100644 --- a/applications/sasquatch/charts/app-metrics/README.md +++ b/applications/sasquatch/charts/app-metrics/README.md @@ -13,7 +13,7 @@ Kafka topics, users, and a telegraf connector for metrics events. | debug | bool | false | Run Telegraf in debug mode. | | env | list | See `values.yaml` | Telegraf agent enviroment variables | | envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | -| globalAppConfig | object | `{}` | app-metrics configuration in any environment in which the subchart is enabled. This should stay globally specified here, and it shouldn't be overridden. See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | +| globalAppConfig | object | See `values.yaml` | app-metrics configuration in any environment in which the subchart is enabled. This should stay globally specified here, and it shouldn't be overridden. See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | | globalInfluxTags | list | `["service"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). 
These will be concatenated with the `influxTags` from `globalAppConfig` | | image.pullPolicy | string | `"Always"` | Image pull policy | | image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | diff --git a/applications/sasquatch/charts/app-metrics/values.yaml b/applications/sasquatch/charts/app-metrics/values.yaml index d5bc17418f..285ff3fc75 100644 --- a/applications/sasquatch/charts/app-metrics/values.yaml +++ b/applications/sasquatch/charts/app-metrics/values.yaml @@ -5,7 +5,12 @@ # overridden. # See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) # for the structure of this value. -globalAppConfig: {} +# +# @default -- See `values.yaml` +globalAppConfig: + mobu: + influxTags: + - "type" # -- A list of applications that will publish metrics events, and the keys that should be ingested into InfluxDB as tags. # The names should be the same as the app names in Phalanx. diff --git a/applications/sasquatch/values-idfdev.yaml b/applications/sasquatch/values-idfdev.yaml index 6519b85afb..32d665c6be 100644 --- a/applications/sasquatch/values-idfdev.yaml +++ b/applications/sasquatch/values-idfdev.yaml @@ -108,3 +108,8 @@ chronograf: GENERIC_API_KEY: sub PUBLIC_URL: https://data-dev.lsst.cloud/ STATUS_FEED_URL: https://raw.githubusercontent.com/lsst-sqre/rsp_broadcast/main/jsonfeeds/idfdev.json + +app-metrics: + enabled: true + apps: + - mobu From 0b6f115b8277a3dbea56791349469d0555df1b82 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Wed, 9 Oct 2024 16:24:24 -0700 Subject: [PATCH 263/567] Add consdb to TTS with correct secrets --- applications/consdb/Chart.yaml | 2 +- applications/consdb/README.md | 2 +- applications/consdb/secrets.yaml | 18 ++++++++++++++---- .../consdb/templates/hinfo-deployment.yaml | 12 ++++++------ .../consdb/templates/pq-deployment.yaml | 2 +- .../consdb/templates/vault-secrets.yaml | 11 +---------- applications/consdb/values-base.yaml | 1 + applications/consdb/values-summit.yaml | 1 + 
.../consdb/values-tucson-teststand.yaml | 3 ++- applications/consdb/values-usdfdev.yaml | 1 + applications/consdb/values-usdfprod.yaml | 1 + .../sasquatch/values-tucson-teststand.yaml | 2 ++ docs/developers/helm-chart/values-yaml.rst | 2 +- 13 files changed, 33 insertions(+), 25 deletions(-) diff --git a/applications/consdb/Chart.yaml b/applications/consdb/Chart.yaml index 5bc9793fc7..a6202030c4 100644 --- a/applications/consdb/Chart.yaml +++ b/applications/consdb/Chart.yaml @@ -4,7 +4,7 @@ version: 1.0.0 description: Consolidated Database of Image Metadata type: application appVersion: 1.1.0 -home: consdb.lsst.io +home: https://consdb.lsst.io/ sources: - https://github.com/lsst-dm/consdb annotations: diff --git a/applications/consdb/README.md b/applications/consdb/README.md index 47eaf0d7aa..6a4a392fad 100644 --- a/applications/consdb/README.md +++ b/applications/consdb/README.md @@ -2,7 +2,7 @@ Consolidated Database of Image Metadata -**Homepage:** +**Homepage:** ## Source Code diff --git a/applications/consdb/secrets.yaml b/applications/consdb/secrets.yaml index 99a8f6ba13..3799b8f010 100644 --- a/applications/consdb/secrets.yaml +++ b/applications/consdb/secrets.yaml @@ -4,14 +4,24 @@ consdb-password: copy: application: sasquatch key: consdb-password +exposurelog-password: + description: >- + PostgreSQL password for the exposurelog user exposurelog database. + copy: + application: exposurelog + key: exposurelog_password oods-password: description: >- PostgreSQL password for the OODS user Butler database. lfa-password: description: >- LFA password -exposurelog-password: - description: "Password for the TTS where we use exposurelog database." 
copy: - application: exposure-log - key: exposurelog_password + application: auxtel + key: aws-secret-access-key +lfa-key: + description: >- + LFA key + copy: + application: auxtel + key: aws-access-key-id diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index a47a2327f3..3a9f749110 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -47,7 +47,7 @@ spec: valueFrom: secretKeyRef: name: consdb - key: "oods-password" + key: "{{ .Values.db.passwordkey }}" - name: "DB_USER" value: "{{ .Values.db.user }}" - name: "DB_NAME" @@ -73,7 +73,7 @@ spec: - name: "KAFKA_PASSWORD" valueFrom: secretKeyRef: - name: sasquatch + name: consdb key: "consdb-password" - name: "KAFKA_GROUP_ID" value: "{{ .Values.kafka.group_id }}" @@ -145,7 +145,7 @@ spec: valueFrom: secretKeyRef: name: consdb - key: "oods-password" + key: "{{ .Values.db.passwordkey }}" - name: "DB_USER" value: "{{ .Values.db.user }}" - name: "DB_NAME" @@ -171,7 +171,7 @@ spec: - name: "KAFKA_PASSWORD" valueFrom: secretKeyRef: - name: sasquatch + name: consdb key: "consdb-password" - name: "KAFKA_GROUP_ID" value: "{{ .Values.kafka.group_id }}" @@ -243,7 +243,7 @@ spec: valueFrom: secretKeyRef: name: consdb - key: "oods-password" + key: "{{ .Values.db.passwordkey }}" - name: "DB_USER" value: "{{ .Values.db.user }}" - name: "DB_NAME" @@ -269,7 +269,7 @@ spec: - name: "KAFKA_PASSWORD" valueFrom: secretKeyRef: - name: sasquatch + name: consdb key: "consdb-password" - name: "KAFKA_GROUP_ID" value: "{{ .Values.kafka.group_id }}" diff --git a/applications/consdb/templates/pq-deployment.yaml b/applications/consdb/templates/pq-deployment.yaml index 23ebb7104c..1a4c25e9c6 100644 --- a/applications/consdb/templates/pq-deployment.yaml +++ b/applications/consdb/templates/pq-deployment.yaml @@ -49,7 +49,7 @@ spec: valueFrom: secretKeyRef: name: consdb - key: "oods-password" + key: "{{ 
.Values.db.passwordkey }}" - name: "DB_USER" value: "{{ .Values.db.user }}" - name: "DB_NAME" diff --git a/applications/consdb/templates/vault-secrets.yaml b/applications/consdb/templates/vault-secrets.yaml index f45a4a8a59..a8f56439e3 100644 --- a/applications/consdb/templates/vault-secrets.yaml +++ b/applications/consdb/templates/vault-secrets.yaml @@ -4,16 +4,7 @@ metadata: name: consdb namespace: consdb spec: - path: {{ .Values.global.vaultSecretsPath }}/consdb - type: Opaque ---- -apiVersion: ricoberger.de/v1alpha1 -kind: VaultSecret -metadata: - name: sasquatch - namespace: consdb -spec: - path: {{ .Values.global.vaultSecretsPath }}/sasquatch + path: "{{ .Values.global.vaultSecretsPath }}/consdb" type: Opaque --- apiVersion: ricoberger.de/v1alpha1 diff --git a/applications/consdb/values-base.yaml b/applications/consdb/values-base.yaml index d7e208c1b6..ff9e972974 100644 --- a/applications/consdb/values-base.yaml +++ b/applications/consdb/values-base.yaml @@ -1,5 +1,6 @@ db: user: "oods" + passwordkey: "oods-password" host: "postgresdb01.ls.lsst.org" database: "butler" lfa: diff --git a/applications/consdb/values-summit.yaml b/applications/consdb/values-summit.yaml index 2c17d584a7..4b34180316 100644 --- a/applications/consdb/values-summit.yaml +++ b/applications/consdb/values-summit.yaml @@ -1,5 +1,6 @@ db: user: "oods" + passwordkey: "oods-password" host: "postgresdb01.cp.lsst.org" database: "exposurelog" lfa: diff --git a/applications/consdb/values-tucson-teststand.yaml b/applications/consdb/values-tucson-teststand.yaml index 21997de89d..2e4015013e 100644 --- a/applications/consdb/values-tucson-teststand.yaml +++ b/applications/consdb/values-tucson-teststand.yaml @@ -1,5 +1,6 @@ db: - user: "oods" + user: "exposurelog" + passwordkey: "exposurelog-password" host: "postgresdb01.tu.lsst.org" database: "exposurelog" lfa: diff --git a/applications/consdb/values-usdfdev.yaml b/applications/consdb/values-usdfdev.yaml index 9ae4a6a14f..71174d1244 100644 --- 
a/applications/consdb/values-usdfdev.yaml +++ b/applications/consdb/values-usdfdev.yaml @@ -1,5 +1,6 @@ db: user: "usdf" + passwordkey: "oods-password" host: "usdf-summitdb.slac.stanford.edu" database: "exposurelog" hinfo: diff --git a/applications/consdb/values-usdfprod.yaml b/applications/consdb/values-usdfprod.yaml index 9ae4a6a14f..71174d1244 100644 --- a/applications/consdb/values-usdfprod.yaml +++ b/applications/consdb/values-usdfprod.yaml @@ -1,5 +1,6 @@ db: user: "usdf" + passwordkey: "oods-password" host: "usdf-summitdb.slac.stanford.edu" database: "exposurelog" hinfo: diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index 03b2703aa2..49832205e7 100644 --- a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -49,6 +49,8 @@ strimzi-kafka: enabled: true kafkaConnectManager: enabled: true + consdb: + enabled: true registry: ingress: enabled: true diff --git a/docs/developers/helm-chart/values-yaml.rst b/docs/developers/helm-chart/values-yaml.rst index 7ab926f1c3..f5d55a2e52 100644 --- a/docs/developers/helm-chart/values-yaml.rst +++ b/docs/developers/helm-chart/values-yaml.rst @@ -3,7 +3,7 @@ Write the values.yaml file ########################## The :file:`values.yaml` file contains the customizable settings for your application. -Those settings can be overriden for each environment in :file:`values-{environmet}.yaml`. +Those settings can be overriden for each environment in :file:`values-{environment}.yaml`. As a general rule, only use :file:`values.yaml` settings for things that may vary between Phalanx environments. If something is the same in every Phalanx environment, it can be hard-coded into the Kubernetes resource templates. From e7beee0e06b90385e6eb9c59ef6a3bc6964b3fe8 Mon Sep 17 00:00:00 2001 From: "David H. 
Irving" Date: Mon, 7 Oct 2024 13:53:41 -0700 Subject: [PATCH 264/567] Add additional datastores for dp02 In the Butler configuration used for Butler server and DirectButler, add new relative roots that can be used to reference the DP02 raw image files and files from DP01. Raw images for DP02 and datasets imported from DP01 were stored in a different S3 buckets than the rest of the files, when this repository was first created. These were previously referenced from the Registry DB as absolute URLs, which are going to break when the data moves to its new home at SLAC. --- applications/butler/templates/configmap.yaml | 22 ++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/applications/butler/templates/configmap.yaml b/applications/butler/templates/configmap.yaml index 8529fa2ba0..5bb161ba12 100644 --- a/applications/butler/templates/configmap.yaml +++ b/applications/butler/templates/configmap.yaml @@ -13,6 +13,12 @@ data: datastore_constraints: # One entry per datastore in datastores section # Use empty `-` if no constraint override required + - constraints: + reject: + - all + - constraints: + reject: + - all - constraints: reject: - all @@ -24,6 +30,22 @@ data: name: FileDatastore@s3://butler-us-central1-panda-dev/dc2 cls: lsst.daf.butler.datastores.fileDatastore.FileDatastore root: s3://butler-us-central1-panda-dev/dc2 + - datastore: + # Datasets of type 'raw' are stored in a separate bucket for + # historical reasons. + name: FileDatastore@s3://curation-us-central1-desc-dc2-run22i + cls: lsst.daf.butler.datastores.fileDatastore.FileDatastore + root: s3://curation-us-central1-desc-dc2-run22i/ + records: + table: raw_datastore_records + - datastore: + # Also for historical reasons, some files that originated in DP01 + # are kept in a separate bucket. 
+ name: FileDatastore@s3://butler-us-central1-dp01-desc-dr6 + cls: lsst.daf.butler.datastores.fileDatastore.FileDatastore + root: s3://butler-us-central1-dp01-desc-dr6/ + records: + table: dp01_datastore_records - datastore: name: FileDatastore@s3://butler-us-central1-dp02-user cls: lsst.daf.butler.datastores.fileDatastore.FileDatastore From 9d1c442d3b0e9564baabc182ed7eda66ce83eec8 Mon Sep 17 00:00:00 2001 From: A I Date: Thu, 10 Oct 2024 12:33:12 +0100 Subject: [PATCH 265/567] added dp03 documentation notes --- applications/squareone/values-roe.yaml | 58 ++++++++++++++++++++++++-- 1 file changed, 54 insertions(+), 4 deletions(-) diff --git a/applications/squareone/values-roe.yaml b/applications/squareone/values-roe.yaml index d9f0d841ba..48c18ddf79 100644 --- a/applications/squareone/values-roe.yaml +++ b/applications/squareone/values-roe.yaml @@ -34,27 +34,77 @@ config: ## Data documentation + ### DP0.3 + + + + ### Data Preview 0.3 (DP0.3) + DP0.3 provides a catalog of solar system objects from a simulated + LSST ten-year wide-fast-deep survey by the Solar System Science + Collaboration. + + + + + ### DP0.3 Tutorials + Tutorials for exploring the DP0.3 solar system object dataset on + the Rubin Science Platform. + + + + + ### DP0.3 Catalog Schema + Schema reference for the DP0.3 catalog dataset available through + the Table Access Protocol (TAP) service. + + + + ### DP0.2 + + + + ### Data Preview 0.2 (DP0.2) + DP0.2 is the second phase of the Data Preview 0 program using + simulated images from the DESC DC2 data challenge processed + with version 23.0 of the LSST Science Pipelines. + + + + + ### DP0.2 Tutorials + Tutorials for exploring the DP0.2 dataset on the Rubin Science + Platform. + + + + + ### DP0.2 Catalog Schema + Schema reference for the DP0.2 catalog dataset available through + the Table Access Protocol (TAP) service. + + + ### GaiaXCatwise - Table of counterpart associations between Gaia DR3 and CatWISE2020. 
- Uses probabilistic cross-match algorithms as described by Wilson & Naylor (MNRAS, 2017, 2018a,b) and Wilson (RNAAS, 2022). + Table of counterpart associations between Gaia DR3 and CatWISE2020. + Uses probabilistic cross-match algorithms as described by Wilson & Naylor (MNRAS, 2017, 2018a,b) and Wilson (RNAAS, 2022). Sources are returned either as a pairing, in which the Gaia and WISE objects are the same astrophysical source detected twice, or as non-matches, with that particular object in one of the catalogues having a corresponding flux upper limits in the opposing catalogue, and entries include various pieces of metadata such as the probability of the match/non-match, likelihood of match on purely position or brightness grounds, and information on the level to which objects suffer contamination due to hidden and unresolved background sources. ### VISTA-HSC - + The VISTA-HSC fused dataset is designed to deliver pixel-matched images and multiband catalogues through the integration of data from the Visible and Infrared Survey Telescope for Astronomy (VISTA) in the near-infrared and the Hyper SuprimeCam (HSC) in the optical range. The LSST pipelines will handle the CCD processing of VISTA images, while the calibrated exposures from the final HSC PDR3 will be employed for merging with VISTA. This comprehensive dataset encompasses multiple VISTA surveys, with varying depths. 
- + From a8471bf2bc6bb59b4869f79bad18c29b7e37da3c Mon Sep 17 00:00:00 2001 From: Jeremy McCormick Date: Thu, 10 Oct 2024 12:28:09 -0500 Subject: [PATCH 266/567] Update sdm_schemas to v3.3.0 --- charts/cadc-tap/README.md | 4 ++-- charts/cadc-tap/values.yaml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/charts/cadc-tap/README.md b/charts/cadc-tap/README.md index 1da63ab1f8..a6b4bbf3a9 100644 --- a/charts/cadc-tap/README.md +++ b/charts/cadc-tap/README.md @@ -22,7 +22,7 @@ IVOA TAP service | cloudsql.resources | object | See `values.yaml` | Resource limits and requests for the Cloud SQL Proxy container | | cloudsql.serviceAccount | string | None, must be set | The Google service account that has an IAM binding to the `cadc-tap` Kubernetes service accounts and has the `cloudsql.client` role, access | | config.backend | string | None, must be set to `pg` or `qserv` | What type of backend are we connecting to? | -| config.datalinkPayloadUrl | string | `"https://github.com/lsst/sdm_schemas/releases/download/v3.2.1/datalink-snippets.zip"` | Datalink payload URL | +| config.datalinkPayloadUrl | string | `"https://github.com/lsst/sdm_schemas/releases/download/v3.3.0/datalink-snippets.zip"` | Datalink payload URL | | config.gcsBucket | string | `"async-results.lsst.codes"` | Name of GCS bucket in which to store results | | config.gcsBucketType | string | `"GCS"` | GCS bucket type (GCS or S3) | | config.gcsBucketUrl | string | `"https://tap-files.lsst.codes"` | Base URL for results stored in GCS bucket | @@ -69,7 +69,7 @@ IVOA TAP service | tapSchema.affinity | object | `{}` | Affinity rules for the TAP schema database pod | | tapSchema.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the TAP schema image | | tapSchema.image.repository | string | `"lsstsqre/tap-schema-mock"` | TAP schema image to ue. This must be overridden by each environment with the TAP schema for that environment. 
| -| tapSchema.image.tag | string | `"v3.2.1"` | Tag of TAP schema image | +| tapSchema.image.tag | string | `"v3.3.0"` | Tag of TAP schema image | | tapSchema.nodeSelector | object | `{}` | Node selection rules for the TAP schema database pod | | tapSchema.podAnnotations | object | `{}` | Annotations for the TAP schema database pod | | tapSchema.resources | object | See `values.yaml` | Resource limits and requests for the TAP schema database pod | diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index fd8b7e20ce..33fd500746 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -99,7 +99,7 @@ config: tapSchemaAddress: "cadc-tap-schema-db:3306" # -- Datalink payload URL - datalinkPayloadUrl: "https://github.com/lsst/sdm_schemas/releases/download/v3.2.1/datalink-snippets.zip" + datalinkPayloadUrl: "https://github.com/lsst/sdm_schemas/releases/download/v3.3.0/datalink-snippets.zip" # -- Name of GCS bucket in which to store results gcsBucket: "async-results.lsst.codes" @@ -162,7 +162,7 @@ tapSchema: pullPolicy: "IfNotPresent" # -- Tag of TAP schema image - tag: "v3.2.1" + tag: "v3.3.0" # -- Resource limits and requests for the TAP schema database pod # @default -- See `values.yaml` From 705cd60e5871265744e610d1f16dc86627632b95 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 10 Oct 2024 14:48:47 -0700 Subject: [PATCH 267/567] Remove retention configuration based on size - This is recommended to have better control on data retention in Kafka --- applications/sasquatch/README.md | 1 - applications/sasquatch/charts/strimzi-kafka/README.md | 1 - applications/sasquatch/charts/strimzi-kafka/values.yaml | 4 ---- 3 files changed, 6 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 10644792ef..cba7f89fe3 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -366,7 +366,6 @@ Rubin Observatory's telemetry service | strimzi-kafka.connect.replicas 
| int | `3` | Number of Kafka Connect replicas to run | | strimzi-kafka.cruiseControl | object | `{"enabled":false}` | Configuration for the Kafka Cruise Control | | strimzi-kafka.kafka.affinity | object | See `values.yaml` | Affinity for Kafka pod assignment | -| strimzi-kafka.kafka.config."log.retention.bytes" | string | `"350000000000"` | How much disk space Kafka will ensure is available, set to 70% of the data partition size | | strimzi-kafka.kafka.config."log.retention.hours" | int | `48` | Number of days for a topic's data to be retained | | strimzi-kafka.kafka.config."message.max.bytes" | int | `10485760` | The largest record batch size allowed by Kafka | | strimzi-kafka.kafka.config."offsets.retention.minutes" | int | `2880` | Number of minutes for a consumer group's offsets to be retained | diff --git a/applications/sasquatch/charts/strimzi-kafka/README.md b/applications/sasquatch/charts/strimzi-kafka/README.md index 556761d75d..e4a19243cb 100644 --- a/applications/sasquatch/charts/strimzi-kafka/README.md +++ b/applications/sasquatch/charts/strimzi-kafka/README.md @@ -20,7 +20,6 @@ A subchart to deploy Strimzi Kafka components for Sasquatch. 
| connect.replicas | int | `3` | Number of Kafka Connect replicas to run | | cruiseControl | object | `{"enabled":false}` | Configuration for the Kafka Cruise Control | | kafka.affinity | object | See `values.yaml` | Affinity for Kafka pod assignment | -| kafka.config."log.retention.bytes" | string | `"350000000000"` | How much disk space Kafka will ensure is available, set to 70% of the data partition size | | kafka.config."log.retention.hours" | int | `48` | Number of days for a topic's data to be retained | | kafka.config."message.max.bytes" | int | `10485760` | The largest record batch size allowed by Kafka | | kafka.config."offsets.retention.minutes" | int | `2880` | Number of minutes for a consumer group's offsets to be retained | diff --git a/applications/sasquatch/charts/strimzi-kafka/values.yaml b/applications/sasquatch/charts/strimzi-kafka/values.yaml index 6d587fd746..bbbf61d2be 100644 --- a/applications/sasquatch/charts/strimzi-kafka/values.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/values.yaml @@ -34,10 +34,6 @@ kafka: # -- Number of days for a topic's data to be retained log.retention.hours: 48 - # -- How much disk space Kafka will ensure is available, set to 70% of the - # data partition size - log.retention.bytes: "350000000000" - # -- The largest record batch size allowed by Kafka message.max.bytes: 10485760 From 64a273d4aef5e47e3721accf650e7ac2d094a42d Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 10 Oct 2024 14:51:07 -0700 Subject: [PATCH 268/567] Use log.retention.minutes instead - Using same units for log.retention and offset.retention make it more explicity we are setting the same value for both. 
--- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/strimzi-kafka/README.md | 2 +- applications/sasquatch/charts/strimzi-kafka/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index cba7f89fe3..72b61cfa50 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -366,7 +366,7 @@ Rubin Observatory's telemetry service | strimzi-kafka.connect.replicas | int | `3` | Number of Kafka Connect replicas to run | | strimzi-kafka.cruiseControl | object | `{"enabled":false}` | Configuration for the Kafka Cruise Control | | strimzi-kafka.kafka.affinity | object | See `values.yaml` | Affinity for Kafka pod assignment | -| strimzi-kafka.kafka.config."log.retention.hours" | int | `48` | Number of days for a topic's data to be retained | +| strimzi-kafka.kafka.config."log.retention.minutes" | int | `2880` | Number of days for a topic's data to be retained | | strimzi-kafka.kafka.config."message.max.bytes" | int | `10485760` | The largest record batch size allowed by Kafka | | strimzi-kafka.kafka.config."offsets.retention.minutes" | int | `2880` | Number of minutes for a consumer group's offsets to be retained | | strimzi-kafka.kafka.config."replica.fetch.max.bytes" | int | `10485760` | The number of bytes of messages to attempt to fetch for each partition | diff --git a/applications/sasquatch/charts/strimzi-kafka/README.md b/applications/sasquatch/charts/strimzi-kafka/README.md index e4a19243cb..40ad7ce150 100644 --- a/applications/sasquatch/charts/strimzi-kafka/README.md +++ b/applications/sasquatch/charts/strimzi-kafka/README.md @@ -20,7 +20,7 @@ A subchart to deploy Strimzi Kafka components for Sasquatch. 
| connect.replicas | int | `3` | Number of Kafka Connect replicas to run | | cruiseControl | object | `{"enabled":false}` | Configuration for the Kafka Cruise Control | | kafka.affinity | object | See `values.yaml` | Affinity for Kafka pod assignment | -| kafka.config."log.retention.hours" | int | `48` | Number of days for a topic's data to be retained | +| kafka.config."log.retention.minutes" | int | `2880` | Number of days for a topic's data to be retained | | kafka.config."message.max.bytes" | int | `10485760` | The largest record batch size allowed by Kafka | | kafka.config."offsets.retention.minutes" | int | `2880` | Number of minutes for a consumer group's offsets to be retained | | kafka.config."replica.fetch.max.bytes" | int | `10485760` | The number of bytes of messages to attempt to fetch for each partition | diff --git a/applications/sasquatch/charts/strimzi-kafka/values.yaml b/applications/sasquatch/charts/strimzi-kafka/values.yaml index bbbf61d2be..661fdd5f34 100644 --- a/applications/sasquatch/charts/strimzi-kafka/values.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/values.yaml @@ -32,7 +32,7 @@ kafka: offsets.retention.minutes: 2880 # -- Number of days for a topic's data to be retained - log.retention.hours: 48 + log.retention.minutes: 2880 # -- The largest record batch size allowed by Kafka message.max.bytes: 10485760 From aa440f13b603f18fc03473081890a137d5c97a4e Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 10 Oct 2024 15:16:26 -0700 Subject: [PATCH 269/567] Default configuration in value.yaml --- applications/sasquatch/values-usdfprod.yaml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 1e4f089dbd..4e24aa9705 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -1,13 +1,5 @@ strimzi-kafka: kafka: - minInsyncReplicas: 1 - listeners: - tls: - enabled: true - plain: - 
enabled: true - external: - enabled: true config: # -- Replica lag time can't be smaller than request.timeout.ms configuration in kafka connect. replica.lag.time.max.ms: 120000 From e881f46e76a22bc587171e0508bc110905919b9d Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 10 Oct 2024 15:18:37 -0700 Subject: [PATCH 270/567] Set Kafka retention to 7 days at the Summit --- applications/sasquatch/README.md | 4 ++-- applications/sasquatch/values-summit.yaml | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 72b61cfa50..e400b92449 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -366,9 +366,9 @@ Rubin Observatory's telemetry service | strimzi-kafka.connect.replicas | int | `3` | Number of Kafka Connect replicas to run | | strimzi-kafka.cruiseControl | object | `{"enabled":false}` | Configuration for the Kafka Cruise Control | | strimzi-kafka.kafka.affinity | object | See `values.yaml` | Affinity for Kafka pod assignment | -| strimzi-kafka.kafka.config."log.retention.minutes" | int | `2880` | Number of days for a topic's data to be retained | +| strimzi-kafka.kafka.config."log.retention.minutes" | int | 4320 minutes (3 days) | Number of days for a topic's data to be retained | | strimzi-kafka.kafka.config."message.max.bytes" | int | `10485760` | The largest record batch size allowed by Kafka | -| strimzi-kafka.kafka.config."offsets.retention.minutes" | int | `2880` | Number of minutes for a consumer group's offsets to be retained | +| strimzi-kafka.kafka.config."offsets.retention.minutes" | int | 4320 minutes (3 days) | Number of minutes for a consumer group's offsets to be retained | | strimzi-kafka.kafka.config."replica.fetch.max.bytes" | int | `10485760` | The number of bytes of messages to attempt to fetch for each partition | | strimzi-kafka.kafka.externalListener.bootstrap.annotations | object | `{}` | Annotations that will be 
added to the Ingress, Route, or Service resource | | strimzi-kafka.kafka.externalListener.bootstrap.host | string | Do not configure TLS | Name used for TLS hostname verification | diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index d95ac527d1..628c4e52cf 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -1,5 +1,8 @@ strimzi-kafka: kafka: + config: + log.retention.minutes: 10080 + offsets.retention.minutes: 10080 storage: storageClassName: rook-ceph-block externalListener: From dc68a1877468f3f45c0325c9e4f54e5ff41ea17c Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 10 Oct 2024 15:20:47 -0700 Subject: [PATCH 271/567] Set default retention to 3 days --- applications/sasquatch/charts/strimzi-kafka/README.md | 4 ++-- applications/sasquatch/charts/strimzi-kafka/values.yaml | 6 ++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/applications/sasquatch/charts/strimzi-kafka/README.md b/applications/sasquatch/charts/strimzi-kafka/README.md index 40ad7ce150..10f3965922 100644 --- a/applications/sasquatch/charts/strimzi-kafka/README.md +++ b/applications/sasquatch/charts/strimzi-kafka/README.md @@ -20,9 +20,9 @@ A subchart to deploy Strimzi Kafka components for Sasquatch. 
| connect.replicas | int | `3` | Number of Kafka Connect replicas to run | | cruiseControl | object | `{"enabled":false}` | Configuration for the Kafka Cruise Control | | kafka.affinity | object | See `values.yaml` | Affinity for Kafka pod assignment | -| kafka.config."log.retention.minutes" | int | `2880` | Number of days for a topic's data to be retained | +| kafka.config."log.retention.minutes" | int | 4320 minutes (3 days) | Number of days for a topic's data to be retained | | kafka.config."message.max.bytes" | int | `10485760` | The largest record batch size allowed by Kafka | -| kafka.config."offsets.retention.minutes" | int | `2880` | Number of minutes for a consumer group's offsets to be retained | +| kafka.config."offsets.retention.minutes" | int | 4320 minutes (3 days) | Number of minutes for a consumer group's offsets to be retained | | kafka.config."replica.fetch.max.bytes" | int | `10485760` | The number of bytes of messages to attempt to fetch for each partition | | kafka.externalListener.bootstrap.annotations | object | `{}` | Annotations that will be added to the Ingress, Route, or Service resource | | kafka.externalListener.bootstrap.host | string | Do not configure TLS | Name used for TLS hostname verification | diff --git a/applications/sasquatch/charts/strimzi-kafka/values.yaml b/applications/sasquatch/charts/strimzi-kafka/values.yaml index 661fdd5f34..8f0eab97bf 100644 --- a/applications/sasquatch/charts/strimzi-kafka/values.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/values.yaml @@ -29,10 +29,12 @@ kafka: config: # -- Number of minutes for a consumer group's offsets to be retained - offsets.retention.minutes: 2880 + # @default -- 4320 minutes (3 days) + offsets.retention.minutes: 4320 # -- Number of days for a topic's data to be retained - log.retention.minutes: 2880 + # @default -- 4320 minutes (3 days) + log.retention.minutes: 4320 # -- The largest record batch size allowed by Kafka message.max.bytes: 10485760 From 
ff329a6ca367fc305a08fc9fb34684d3fa423923 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 10 Oct 2024 15:32:46 -0700 Subject: [PATCH 272/567] Set kafka retention to 7 days at USDF --- applications/sasquatch/values-usdfprod.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 4e24aa9705..75afdd9d0e 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -3,6 +3,8 @@ strimzi-kafka: config: # -- Replica lag time can't be smaller than request.timeout.ms configuration in kafka connect. replica.lag.time.max.ms: 120000 + log.retention.minutes: 10080 + offsets.retention.minutes: 10080 connect: enabled: true From 158326c58ebb0114dcbfbfa3575dd9a5e38e498e Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Thu, 10 Oct 2024 15:59:43 -0700 Subject: [PATCH 273/567] Bump version of unfurlbot Pick up the fix for expanding Jira ticket references in bot messages. --- applications/unfurlbot/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/unfurlbot/Chart.yaml b/applications/unfurlbot/Chart.yaml index 7924ca56af..408deb1dad 100644 --- a/applications/unfurlbot/Chart.yaml +++ b/applications/unfurlbot/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: "0.3.0" +appVersion: "0.3.1" description: Squarebot backend that unfurls Jira issues. 
name: unfurlbot sources: From 68b08a1a6561af36dc8e26afcea86d021b709b43 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Fri, 11 Oct 2024 10:32:22 -0300 Subject: [PATCH 274/567] rubintv: update app version for summit and usdf production deployments --- applications/rubintv/values-summit.yaml | 2 +- applications/rubintv/values-usdfprod.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index 07a3594fb2..fdf5ec1e6d 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -20,7 +20,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.3.1 + tag: v2.4.0 pullPolicy: Always workers: diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index 9818e96584..e78b51007c 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -16,7 +16,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.3.1 + tag: v2.4.0 pullPolicy: Always workers: From d29863b4fca52a4a795a6a97e515141a8ccce04f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 14 Oct 2024 09:35:18 +0000 Subject: [PATCH 275/567] chore(deps): update helm release ingress-nginx to v4.11.3 --- applications/ingress-nginx/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/ingress-nginx/Chart.yaml b/applications/ingress-nginx/Chart.yaml index 33658a3bae..06e2fd3e5f 100644 --- a/applications/ingress-nginx/Chart.yaml +++ b/applications/ingress-nginx/Chart.yaml @@ -7,5 +7,5 @@ sources: - https://github.com/kubernetes/ingress-nginx dependencies: - name: ingress-nginx - version: 4.11.2 + version: 4.11.3 repository: https://kubernetes.github.io/ingress-nginx From ceec764f52996fcac780d2507b29371767ab09a2 Mon Sep 17 00:00:00 2001 
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 14 Oct 2024 09:35:26 +0000 Subject: [PATCH 276/567] chore(deps): update helm release redis to v1.0.14 --- applications/checkerboard/Chart.yaml | 2 +- applications/gafaelfawr/Chart.yaml | 2 +- applications/noteburst/Chart.yaml | 2 +- applications/portal/Chart.yaml | 2 +- applications/rubintv-dev/Chart.yaml | 2 +- applications/rubintv/Chart.yaml | 2 +- applications/times-square/Chart.yaml | 2 +- applications/unfurlbot/Chart.yaml | 2 +- applications/vo-cutouts/Chart.yaml | 2 +- charts/rubintv/Chart.yaml | 2 +- starters/fastapi-safir-uws/Chart.yaml | 2 +- 11 files changed, 11 insertions(+), 11 deletions(-) diff --git a/applications/checkerboard/Chart.yaml b/applications/checkerboard/Chart.yaml index 5afe4f9433..827a971e39 100644 --- a/applications/checkerboard/Chart.yaml +++ b/applications/checkerboard/Chart.yaml @@ -8,5 +8,5 @@ appVersion: 0.4.5 dependencies: - name: redis - version: 1.0.13 + version: 1.0.14 repository: https://lsst-sqre.github.io/charts/ diff --git a/applications/gafaelfawr/Chart.yaml b/applications/gafaelfawr/Chart.yaml index f06689002c..11a3345ab7 100644 --- a/applications/gafaelfawr/Chart.yaml +++ b/applications/gafaelfawr/Chart.yaml @@ -9,7 +9,7 @@ appVersion: 11.1.1 dependencies: - name: "redis" - version: 1.0.13 + version: 1.0.14 repository: "https://lsst-sqre.github.io/charts/" annotations: diff --git a/applications/noteburst/Chart.yaml b/applications/noteburst/Chart.yaml index bcac46aef7..d47db25b12 100644 --- a/applications/noteburst/Chart.yaml +++ b/applications/noteburst/Chart.yaml @@ -13,7 +13,7 @@ maintainers: dependencies: - name: redis - version: 1.0.13 + version: 1.0.14 repository: https://lsst-sqre.github.io/charts/ annotations: diff --git a/applications/portal/Chart.yaml b/applications/portal/Chart.yaml index 2c73bbe380..6e3cee19bf 100644 --- a/applications/portal/Chart.yaml +++ b/applications/portal/Chart.yaml @@ -9,7 +9,7 @@ appVersion: 
"portal-2024.2.3" dependencies: - name: redis - version: 1.0.13 + version: 1.0.14 repository: https://lsst-sqre.github.io/charts/ annotations: diff --git a/applications/rubintv-dev/Chart.yaml b/applications/rubintv-dev/Chart.yaml index 3d91759fa2..bdd3d78b9c 100644 --- a/applications/rubintv-dev/Chart.yaml +++ b/applications/rubintv-dev/Chart.yaml @@ -10,5 +10,5 @@ dependencies: version: 1.0.0 repository: "file://../../charts/rubintv" - name: redis - version: 1.0.13 + version: 1.0.14 repository: https://lsst-sqre.github.io/charts/ diff --git a/applications/rubintv/Chart.yaml b/applications/rubintv/Chart.yaml index 3b40aecd66..79e0cf2fe9 100644 --- a/applications/rubintv/Chart.yaml +++ b/applications/rubintv/Chart.yaml @@ -10,5 +10,5 @@ dependencies: version: 1.0.0 repository: "file://../../charts/rubintv" - name: redis - version: 1.0.13 + version: 1.0.14 repository: https://lsst-sqre.github.io/charts/ diff --git a/applications/times-square/Chart.yaml b/applications/times-square/Chart.yaml index 5a48fbb4f9..00567c3136 100644 --- a/applications/times-square/Chart.yaml +++ b/applications/times-square/Chart.yaml @@ -12,7 +12,7 @@ appVersion: "0.13.0" dependencies: - name: redis - version: 1.0.13 + version: 1.0.14 repository: https://lsst-sqre.github.io/charts/ annotations: diff --git a/applications/unfurlbot/Chart.yaml b/applications/unfurlbot/Chart.yaml index 408deb1dad..9f739e437f 100644 --- a/applications/unfurlbot/Chart.yaml +++ b/applications/unfurlbot/Chart.yaml @@ -9,5 +9,5 @@ version: 1.0.0 dependencies: - name: redis - version: 1.0.13 + version: 1.0.14 repository: https://lsst-sqre.github.io/charts/ diff --git a/applications/vo-cutouts/Chart.yaml b/applications/vo-cutouts/Chart.yaml index 4aed5b2fe5..68d876fb99 100644 --- a/applications/vo-cutouts/Chart.yaml +++ b/applications/vo-cutouts/Chart.yaml @@ -8,7 +8,7 @@ appVersion: 3.2.0 dependencies: - name: redis - version: 1.0.13 + version: 1.0.14 repository: https://lsst-sqre.github.io/charts/ annotations: diff 
--git a/charts/rubintv/Chart.yaml b/charts/rubintv/Chart.yaml index 7a4c6d0448..56963770ea 100644 --- a/charts/rubintv/Chart.yaml +++ b/charts/rubintv/Chart.yaml @@ -7,5 +7,5 @@ sources: appVersion: 0.1.0 dependencies: - name: redis - version: 1.0.13 + version: 1.0.14 repository: https://lsst-sqre.github.io/charts/ diff --git a/starters/fastapi-safir-uws/Chart.yaml b/starters/fastapi-safir-uws/Chart.yaml index 4776b010fb..8e30354f9d 100644 --- a/starters/fastapi-safir-uws/Chart.yaml +++ b/starters/fastapi-safir-uws/Chart.yaml @@ -8,5 +8,5 @@ appVersion: 0.1.0 dependencies: - name: redis - version: 1.0.13 + version: 1.0.14 repository: https://lsst-sqre.github.io/charts/ From fb02ed387cd0022673cd8c284ee032105ebbd537 Mon Sep 17 00:00:00 2001 From: Merlin Fisher-Levine Date: Wed, 2 Oct 2024 06:52:44 -0700 Subject: [PATCH 277/567] Update template names --- ...set.yaml => comcam-gather-rollup-set.yaml} | 6 +- ...er2a-set.yaml => comcam-gather2a-set.yaml} | 6 +- ...worker-set.yaml => comcam-worker-set.yaml} | 6 +- .../comcamsim-gather-rollup-set.yaml | 231 ++++++++++++++++++ .../templates/comcamsim-gather2a-set.yaml | 231 ++++++++++++++++++ .../templates/comcamsim-worker-set.yaml | 231 ++++++++++++++++++ .../templates/lsstcam-gather-rollup-set.yaml | 231 ++++++++++++++++++ .../templates/lsstcam-gather2a-set.yaml | 231 ++++++++++++++++++ .../templates/lsstcam-worker-set.yaml | 231 ++++++++++++++++++ 9 files changed, 1395 insertions(+), 9 deletions(-) rename applications/rapid-analysis/templates/{gather-rollup-set.yaml => comcam-gather-rollup-set.yaml} (98%) rename applications/rapid-analysis/templates/{gather2a-set.yaml => comcam-gather2a-set.yaml} (98%) rename applications/rapid-analysis/templates/{worker-set.yaml => comcam-worker-set.yaml} (98%) create mode 100644 applications/rapid-analysis/templates/comcamsim-gather-rollup-set.yaml create mode 100644 applications/rapid-analysis/templates/comcamsim-gather2a-set.yaml create mode 100644 
applications/rapid-analysis/templates/comcamsim-worker-set.yaml create mode 100644 applications/rapid-analysis/templates/lsstcam-gather-rollup-set.yaml create mode 100644 applications/rapid-analysis/templates/lsstcam-gather2a-set.yaml create mode 100644 applications/rapid-analysis/templates/lsstcam-worker-set.yaml diff --git a/applications/rapid-analysis/templates/gather-rollup-set.yaml b/applications/rapid-analysis/templates/comcam-gather-rollup-set.yaml similarity index 98% rename from applications/rapid-analysis/templates/gather-rollup-set.yaml rename to applications/rapid-analysis/templates/comcam-gather-rollup-set.yaml index ac8958cddf..789390a18c 100644 --- a/applications/rapid-analysis/templates/gather-rollup-set.yaml +++ b/applications/rapid-analysis/templates/comcam-gather-rollup-set.yaml @@ -1,5 +1,5 @@ -{{ $_ := set $.Values "script" $.Values.gatherRollupSet }} -{{ $script := $.Values.gatherRollupSet }} +{{ $_ := set $.Values "script" $.Values.comcamGatherRollupSet }} +{{ $script := $.Values.comcamGatherRollupSet }} {{- if $script.name }} --- apiVersion: apps/v1 @@ -20,7 +20,7 @@ spec: metadata: {{- with $.Values.podAnnotations }} annotations: - {{- toYaml $ | nindent 8 }} + {{- toYaml . 
| nindent 8 }} {{- end }} labels: {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} diff --git a/applications/rapid-analysis/templates/gather2a-set.yaml b/applications/rapid-analysis/templates/comcam-gather2a-set.yaml similarity index 98% rename from applications/rapid-analysis/templates/gather2a-set.yaml rename to applications/rapid-analysis/templates/comcam-gather2a-set.yaml index 2c1fdbee4f..4de84b0ce0 100644 --- a/applications/rapid-analysis/templates/gather2a-set.yaml +++ b/applications/rapid-analysis/templates/comcam-gather2a-set.yaml @@ -1,5 +1,5 @@ -{{ $_ := set $.Values "script" $.Values.gather2aSet }} -{{ $script := $.Values.gather2aSet }} +{{ $_ := set $.Values "script" $.Values.comcamGather2aSet }} +{{ $script := $.Values.comcamGather2aSet }} {{- if $script.name }} --- apiVersion: apps/v1 @@ -20,7 +20,7 @@ spec: metadata: {{- with $.Values.podAnnotations }} annotations: - {{- toYaml $ | nindent 8 }} + {{- toYaml . | nindent 8 }} {{- end }} labels: {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} diff --git a/applications/rapid-analysis/templates/worker-set.yaml b/applications/rapid-analysis/templates/comcam-worker-set.yaml similarity index 98% rename from applications/rapid-analysis/templates/worker-set.yaml rename to applications/rapid-analysis/templates/comcam-worker-set.yaml index ad87fbc2b8..245e075aa9 100644 --- a/applications/rapid-analysis/templates/worker-set.yaml +++ b/applications/rapid-analysis/templates/comcam-worker-set.yaml @@ -1,5 +1,5 @@ -{{ $_ := set $.Values "script" $.Values.workerSet }} -{{ $script := $.Values.workerSet }} +{{ $_ := set $.Values "script" $.Values.comcamWorkerSet }} +{{ $script := $.Values.comcamWorkerSet }} {{- if $script.name }} --- apiVersion: apps/v1 @@ -20,7 +20,7 @@ spec: metadata: {{- with $.Values.podAnnotations }} annotations: - {{- toYaml $ | nindent 8 }} + {{- toYaml . 
| nindent 8 }} {{- end }} labels: {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} diff --git a/applications/rapid-analysis/templates/comcamsim-gather-rollup-set.yaml b/applications/rapid-analysis/templates/comcamsim-gather-rollup-set.yaml new file mode 100644 index 0000000000..25af267c09 --- /dev/null +++ b/applications/rapid-analysis/templates/comcamsim-gather-rollup-set.yaml @@ -0,0 +1,231 @@ +{{ $_ := set $.Values "script" $.Values.comcamsimGatherRollupSet }} +{{ $script := $.Values.comcamsimGatherRollupSet }} +{{- if $script.name }} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "rapid-analysis.deploymentName" $ }}-gatherrollupset + namespace: {{ $.Values.namespace }} + labels: + {{- include "rapid-analysis.labels" $ | nindent 4 }} +spec: + revisionHistoryLimit: 0 + selector: + matchLabels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 6 }} + replicas: {{ ($script.replicas | int) }} + podManagementPolicy: Parallel + template: + metadata: + {{- with $.Values.podAnnotations }} + annotations: + {{- toYaml . 
| nindent 8 }} + {{- end }} + labels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml $.Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- if $.Values.securityContext }} + securityContext: + {{- if $.Values.securityContext.uid }} + runAsUser: {{ $.Values.securityContext.uid }} + {{- end }} + {{- if $.Values.securityContext.gid }} + runAsGroup: {{ $.Values.securityContext.gid }} + {{- end }} + {{- if $.Values.securityContext.fid }} + fsGroup: {{ $.Values.securityContext.fid }} + {{- end }} + {{- end }} + containers: + - name: {{ include "rapid-analysis.scriptName" $ }} + image: "{{ $.Values.image.repository }}:{{ $.Values.image.tag }}" + imagePullPolicy: {{ $.Values.image.pullPolicy }} + env: + - name: RUN_ARG + value: {{ $script.name }} + - name: WORKER_NAME + valueFrom: + fieldRef: + fieldPath: metadata.labels['statefulset.kubernetes.io/pod-name'] + - name: RAPID_ANALYSIS_LOCATION + value: {{ $.Values.location | upper | quote }} + {{- if or $.Values.env $.Values.envSecrets }} + {{- range $env_var, $env_value := $.Values.env }} + - name: {{ $env_var }} + value: {{ $env_value | quote }} + {{- end }} + {{- range $env := $.Values.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: PGPASSFILE + value: "{{ $.Values.butlerSecret.containerPath }}/postgres-credentials.txt" + - name: PGUSER + value: {{ $.Values.butlerSecret.dbUser | quote }} + - name: AWS_SHARED_CREDENTIALS_FILE + value: "{{ $.Values.butlerSecret.containerPath }}/aws-credentials.ini" + {{- end }} + {{- if $.Values.redis.enabled }} + - name: REDIS_HOST + value: "redis-service" + {{- if $.Values.redis.envSecrets }} + {{- range $env := $.Values.redis.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ 
$env.secretKey }} + {{- end }} + {{- end }} + {{- end }} + envFrom: + - configMapRef: + name: env-configmap + volumeMounts: + - name: rubintv-creds + mountPath: "/etc/rubintv/creds" + readOnly: true + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + readOnly: {{ $values.readOnly }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + mountPath: {{ $.Values.butlerSecret.containerPath }} + {{- end }} + {{- if or $.Values.resources $script.resources }} + {{- $resources := "" }} + {{- if $script.resources }} + {{- $resources = $script.resources }} + {{- else }} + {{- $resources = $.Values.resources }} + {{- end }} + resources: + {{- toYaml $resources | nindent 12 }} + {{- end }} + {{- if $.Values.butlerSecret }} + initContainers: + - name: {{ $.Release.Name }}-butler-secret-perm-fixer + image: "alpine:latest" + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /secrets + cat /secrets/aws-credentials.ini > new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + cat /secrets-rubintv/aws-credentials.ini >> new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + mv new-aws-credentials.ini /secrets/aws-credentials.ini + chown 73006:73006 /secrets/* + chmod 0600 /secrets/* + volumeMounts: + - name: {{ $.Release.Name }}-raw-butler-secret + mountPath: /secrets-raw + 
readOnly: true + - name: {{ $.Release.Name }}-butler-secret + mountPath: /secrets + - name: rubintv-aws-creds + mountPath: /secrets-rubintv + readOnly: true + {{- end }} + volumes: + - name: rubintv-creds + secret: + secretName: google-creds + - name: rubintv-aws-creds + secret: + secretName: rubintv-secrets + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + nfs: + path: {{ $values.serverPath }} + readOnly: {{ $values.readOnly }} + server: {{ $values.server }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + emptyDir: {} + - name: {{ $.Release.Name }}-raw-butler-secret + secret: + secretName: butler-secret + defaultMode: 0600 + {{- end }} + {{- if or $.Values.nodeSelector $script.nodeSelector }} + {{- $nodeSelector := "" }} + {{- if $script.nodeSelector }} + {{- $nodeSelector = $script.nodeSelector }} + {{- else }} + {{- $nodeSelector = $.Values.nodeSelector }} + {{- end }} + nodeSelector: + {{- toYaml $nodeSelector | nindent 8 }} + {{- end }} + {{- if or $.Values.affinity $script.affinity }} + {{- $affinity := "" }} + {{- if $script.affinity }} + {{- $affinity = $script.affinity }} + {{- else }} + {{- $affinity = $.Values.affinity }} + {{- end }} + affinity: + {{- toYaml $affinity | nindent 8 }} + {{- end }} + {{- if or $.Values.tolerations $script.tolerations }} + {{- $tolerations := "" }} + {{- if $script.tolerations }} + {{- $tolerations = $script.tolerations }} + {{- else }} + {{- $tolerations = $.Values.tolerations }} + {{- end }} + tolerations: 
+ {{- toYaml $tolerations | nindent 8 }} + {{- end }} +{{- end }} diff --git a/applications/rapid-analysis/templates/comcamsim-gather2a-set.yaml b/applications/rapid-analysis/templates/comcamsim-gather2a-set.yaml new file mode 100644 index 0000000000..d3d23908e3 --- /dev/null +++ b/applications/rapid-analysis/templates/comcamsim-gather2a-set.yaml @@ -0,0 +1,231 @@ +{{ $_ := set $.Values "script" $.Values.comcamsimGather2aSet }} +{{ $script := $.Values.comcamsimGather2aSet }} +{{- if $script.name }} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "rapid-analysis.deploymentName" $ }}-gather2aset + namespace: {{ $.Values.namespace }} + labels: + {{- include "rapid-analysis.labels" $ | nindent 4 }} +spec: + revisionHistoryLimit: 0 + selector: + matchLabels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 6 }} + replicas: {{ ($script.replicas | int) }} + podManagementPolicy: Parallel + template: + metadata: + {{- with $.Values.podAnnotations }} + annotations: + {{- toYaml . 
| nindent 8 }} + {{- end }} + labels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml $.Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- if $.Values.securityContext }} + securityContext: + {{- if $.Values.securityContext.uid }} + runAsUser: {{ $.Values.securityContext.uid }} + {{- end }} + {{- if $.Values.securityContext.gid }} + runAsGroup: {{ $.Values.securityContext.gid }} + {{- end }} + {{- if $.Values.securityContext.fid }} + fsGroup: {{ $.Values.securityContext.fid }} + {{- end }} + {{- end }} + containers: + - name: {{ include "rapid-analysis.scriptName" $ }} + image: "{{ $.Values.image.repository }}:{{ $.Values.image.tag }}" + imagePullPolicy: {{ $.Values.image.pullPolicy }} + env: + - name: RUN_ARG + value: {{ $script.name }} + - name: WORKER_NAME + valueFrom: + fieldRef: + fieldPath: metadata.labels['statefulset.kubernetes.io/pod-name'] + - name: RAPID_ANALYSIS_LOCATION + value: {{ $.Values.location | upper | quote }} + {{- if or $.Values.env $.Values.envSecrets }} + {{- range $env_var, $env_value := $.Values.env }} + - name: {{ $env_var }} + value: {{ $env_value | quote }} + {{- end }} + {{- range $env := $.Values.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: PGPASSFILE + value: "{{ $.Values.butlerSecret.containerPath }}/postgres-credentials.txt" + - name: PGUSER + value: {{ $.Values.butlerSecret.dbUser | quote }} + - name: AWS_SHARED_CREDENTIALS_FILE + value: "{{ $.Values.butlerSecret.containerPath }}/aws-credentials.ini" + {{- end }} + {{- if $.Values.redis.enabled }} + - name: REDIS_HOST + value: "redis-service" + {{- if $.Values.redis.envSecrets }} + {{- range $env := $.Values.redis.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ 
$env.secretKey }} + {{- end }} + {{- end }} + {{- end }} + envFrom: + - configMapRef: + name: env-configmap + volumeMounts: + - name: rubintv-creds + mountPath: "/etc/rubintv/creds" + readOnly: true + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + readOnly: {{ $values.readOnly }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + mountPath: {{ $.Values.butlerSecret.containerPath }} + {{- end }} + {{- if or $.Values.resources $script.resources }} + {{- $resources := "" }} + {{- if $script.resources }} + {{- $resources = $script.resources }} + {{- else }} + {{- $resources = $.Values.resources }} + {{- end }} + resources: + {{- toYaml $resources | nindent 12 }} + {{- end }} + {{- if $.Values.butlerSecret }} + initContainers: + - name: {{ $.Release.Name }}-butler-secret-perm-fixer + image: "alpine:latest" + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /secrets + cat /secrets/aws-credentials.ini > new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + cat /secrets-rubintv/aws-credentials.ini >> new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + mv new-aws-credentials.ini /secrets/aws-credentials.ini + chown 73006:73006 /secrets/* + chmod 0600 /secrets/* + volumeMounts: + - name: {{ $.Release.Name }}-raw-butler-secret + mountPath: /secrets-raw + 
readOnly: true + - name: {{ $.Release.Name }}-butler-secret + mountPath: /secrets + - name: rubintv-aws-creds + mountPath: /secrets-rubintv + readOnly: true + {{- end }} + volumes: + - name: rubintv-creds + secret: + secretName: google-creds + - name: rubintv-aws-creds + secret: + secretName: rubintv-secrets + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + nfs: + path: {{ $values.serverPath }} + readOnly: {{ $values.readOnly }} + server: {{ $values.server }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + emptyDir: {} + - name: {{ $.Release.Name }}-raw-butler-secret + secret: + secretName: butler-secret + defaultMode: 0600 + {{- end }} + {{- if or $.Values.nodeSelector $script.nodeSelector }} + {{- $nodeSelector := "" }} + {{- if $script.nodeSelector }} + {{- $nodeSelector = $script.nodeSelector }} + {{- else }} + {{- $nodeSelector = $.Values.nodeSelector }} + {{- end }} + nodeSelector: + {{- toYaml $nodeSelector | nindent 8 }} + {{- end }} + {{- if or $.Values.affinity $script.affinity }} + {{- $affinity := "" }} + {{- if $script.affinity }} + {{- $affinity = $script.affinity }} + {{- else }} + {{- $affinity = $.Values.affinity }} + {{- end }} + affinity: + {{- toYaml $affinity | nindent 8 }} + {{- end }} + {{- if or $.Values.tolerations $script.tolerations }} + {{- $tolerations := "" }} + {{- if $script.tolerations }} + {{- $tolerations = $script.tolerations }} + {{- else }} + {{- $tolerations = $.Values.tolerations }} + {{- end }} + tolerations: 
+ {{- toYaml $tolerations | nindent 8 }} + {{- end }} +{{- end }} diff --git a/applications/rapid-analysis/templates/comcamsim-worker-set.yaml b/applications/rapid-analysis/templates/comcamsim-worker-set.yaml new file mode 100644 index 0000000000..ce31cadd77 --- /dev/null +++ b/applications/rapid-analysis/templates/comcamsim-worker-set.yaml @@ -0,0 +1,231 @@ +{{ $_ := set $.Values "script" $.Values.comcamsimWorkerSet }} +{{ $script := $.Values.comcamsimWorkerSet }} +{{- if $script.name }} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "rapid-analysis.deploymentName" $ }}-workerset + namespace: {{ $.Values.namespace }} + labels: + {{- include "rapid-analysis.labels" $ | nindent 4 }} +spec: + revisionHistoryLimit: 0 + selector: + matchLabels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 6 }} + replicas: {{ ($script.replicas | int) }} + podManagementPolicy: Parallel + template: + metadata: + {{- with $.Values.podAnnotations }} + annotations: + {{- toYaml . 
| nindent 8 }} + {{- end }} + labels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml $.Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- if $.Values.securityContext }} + securityContext: + {{- if $.Values.securityContext.uid }} + runAsUser: {{ $.Values.securityContext.uid }} + {{- end }} + {{- if $.Values.securityContext.gid }} + runAsGroup: {{ $.Values.securityContext.gid }} + {{- end }} + {{- if $.Values.securityContext.fid }} + fsGroup: {{ $.Values.securityContext.fid }} + {{- end }} + {{- end }} + containers: + - name: {{ include "rapid-analysis.scriptName" $ }} + image: "{{ $.Values.image.repository }}:{{ $.Values.image.tag }}" + imagePullPolicy: {{ $.Values.image.pullPolicy }} + env: + - name: RUN_ARG + value: {{ $script.name }} + - name: WORKER_NAME + valueFrom: + fieldRef: + fieldPath: metadata.labels['statefulset.kubernetes.io/pod-name'] + - name: RAPID_ANALYSIS_LOCATION + value: {{ $.Values.location | upper | quote }} + {{- if or $.Values.env $.Values.envSecrets }} + {{- range $env_var, $env_value := $.Values.env }} + - name: {{ $env_var }} + value: {{ $env_value | quote }} + {{- end }} + {{- range $env := $.Values.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: PGPASSFILE + value: "{{ $.Values.butlerSecret.containerPath }}/postgres-credentials.txt" + - name: PGUSER + value: {{ $.Values.butlerSecret.dbUser | quote }} + - name: AWS_SHARED_CREDENTIALS_FILE + value: "{{ $.Values.butlerSecret.containerPath }}/aws-credentials.ini" + {{- end }} + {{- if $.Values.redis.enabled }} + - name: REDIS_HOST + value: "redis-service" + {{- if $.Values.redis.envSecrets }} + {{- range $env := $.Values.redis.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ 
$env.secretKey }} + {{- end }} + {{- end }} + {{- end }} + envFrom: + - configMapRef: + name: env-configmap + volumeMounts: + - name: rubintv-creds + mountPath: "/etc/rubintv/creds" + readOnly: true + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + readOnly: {{ $values.readOnly }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + mountPath: {{ $.Values.butlerSecret.containerPath }} + {{- end }} + {{- if or $.Values.resources $script.resources }} + {{- $resources := "" }} + {{- if $script.resources }} + {{- $resources = $script.resources }} + {{- else }} + {{- $resources = $.Values.resources }} + {{- end }} + resources: + {{- toYaml $resources | nindent 12 }} + {{- end }} + {{- if $.Values.butlerSecret }} + initContainers: + - name: {{ $.Release.Name }}-butler-secret-perm-fixer + image: "alpine:latest" + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /secrets + cat /secrets/aws-credentials.ini > new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + cat /secrets-rubintv/aws-credentials.ini >> new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + mv new-aws-credentials.ini /secrets/aws-credentials.ini + chown 73006:73006 /secrets/* + chmod 0600 /secrets/* + volumeMounts: + - name: {{ $.Release.Name }}-raw-butler-secret + mountPath: /secrets-raw + 
readOnly: true + - name: {{ $.Release.Name }}-butler-secret + mountPath: /secrets + - name: rubintv-aws-creds + mountPath: /secrets-rubintv + readOnly: true + {{- end }} + volumes: + - name: rubintv-creds + secret: + secretName: google-creds + - name: rubintv-aws-creds + secret: + secretName: rubintv-secrets + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + nfs: + path: {{ $values.serverPath }} + readOnly: {{ $values.readOnly }} + server: {{ $values.server }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + emptyDir: {} + - name: {{ $.Release.Name }}-raw-butler-secret + secret: + secretName: butler-secret + defaultMode: 0600 + {{- end }} + {{- if or $.Values.nodeSelector $script.nodeSelector }} + {{- $nodeSelector := "" }} + {{- if $script.nodeSelector }} + {{- $nodeSelector = $script.nodeSelector }} + {{- else }} + {{- $nodeSelector = $.Values.nodeSelector }} + {{- end }} + nodeSelector: + {{- toYaml $nodeSelector | nindent 8 }} + {{- end }} + {{- if or $.Values.affinity $script.affinity }} + {{- $affinity := "" }} + {{- if $script.affinity }} + {{- $affinity = $script.affinity }} + {{- else }} + {{- $affinity = $.Values.affinity }} + {{- end }} + affinity: + {{- toYaml $affinity | nindent 8 }} + {{- end }} + {{- if or $.Values.tolerations $script.tolerations }} + {{- $tolerations := "" }} + {{- if $script.tolerations }} + {{- $tolerations = $script.tolerations }} + {{- else }} + {{- $tolerations = $.Values.tolerations }} + {{- end }} + tolerations: 
+ {{- toYaml $tolerations | nindent 8 }} + {{- end }} +{{- end }} diff --git a/applications/rapid-analysis/templates/lsstcam-gather-rollup-set.yaml b/applications/rapid-analysis/templates/lsstcam-gather-rollup-set.yaml new file mode 100644 index 0000000000..92818aa6b6 --- /dev/null +++ b/applications/rapid-analysis/templates/lsstcam-gather-rollup-set.yaml @@ -0,0 +1,231 @@ +{{ $_ := set $.Values "script" $.Values.lsstcamGatherRollupSet }} +{{ $script := $.Values.lsstcamGatherRollupSet }} +{{- if $script.name }} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "rapid-analysis.deploymentName" $ }}-gatherrollupset + namespace: {{ $.Values.namespace }} + labels: + {{- include "rapid-analysis.labels" $ | nindent 4 }} +spec: + revisionHistoryLimit: 0 + selector: + matchLabels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 6 }} + replicas: {{ ($script.replicas | int) }} + podManagementPolicy: Parallel + template: + metadata: + {{- with $.Values.podAnnotations }} + annotations: + {{- toYaml . 
| nindent 8 }} + {{- end }} + labels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml $.Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- if $.Values.securityContext }} + securityContext: + {{- if $.Values.securityContext.uid }} + runAsUser: {{ $.Values.securityContext.uid }} + {{- end }} + {{- if $.Values.securityContext.gid }} + runAsGroup: {{ $.Values.securityContext.gid }} + {{- end }} + {{- if $.Values.securityContext.fid }} + fsGroup: {{ $.Values.securityContext.fid }} + {{- end }} + {{- end }} + containers: + - name: {{ include "rapid-analysis.scriptName" $ }} + image: "{{ $.Values.image.repository }}:{{ $.Values.image.tag }}" + imagePullPolicy: {{ $.Values.image.pullPolicy }} + env: + - name: RUN_ARG + value: {{ $script.name }} + - name: WORKER_NAME + valueFrom: + fieldRef: + fieldPath: metadata.labels['statefulset.kubernetes.io/pod-name'] + - name: RAPID_ANALYSIS_LOCATION + value: {{ $.Values.location | upper | quote }} + {{- if or $.Values.env $.Values.envSecrets }} + {{- range $env_var, $env_value := $.Values.env }} + - name: {{ $env_var }} + value: {{ $env_value | quote }} + {{- end }} + {{- range $env := $.Values.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: PGPASSFILE + value: "{{ $.Values.butlerSecret.containerPath }}/postgres-credentials.txt" + - name: PGUSER + value: {{ $.Values.butlerSecret.dbUser | quote }} + - name: AWS_SHARED_CREDENTIALS_FILE + value: "{{ $.Values.butlerSecret.containerPath }}/aws-credentials.ini" + {{- end }} + {{- if $.Values.redis.enabled }} + - name: REDIS_HOST + value: "redis-service" + {{- if $.Values.redis.envSecrets }} + {{- range $env := $.Values.redis.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ 
$env.secretKey }} + {{- end }} + {{- end }} + {{- end }} + envFrom: + - configMapRef: + name: env-configmap + volumeMounts: + - name: rubintv-creds + mountPath: "/etc/rubintv/creds" + readOnly: true + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + readOnly: {{ $values.readOnly }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + mountPath: {{ $.Values.butlerSecret.containerPath }} + {{- end }} + {{- if or $.Values.resources $script.resources }} + {{- $resources := "" }} + {{- if $script.resources }} + {{- $resources = $script.resources }} + {{- else }} + {{- $resources = $.Values.resources }} + {{- end }} + resources: + {{- toYaml $resources | nindent 12 }} + {{- end }} + {{- if $.Values.butlerSecret }} + initContainers: + - name: {{ $.Release.Name }}-butler-secret-perm-fixer + image: "alpine:latest" + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /secrets + cat /secrets/aws-credentials.ini > new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + cat /secrets-rubintv/aws-credentials.ini >> new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + mv new-aws-credentials.ini /secrets/aws-credentials.ini + chown 73006:73006 /secrets/* + chmod 0600 /secrets/* + volumeMounts: + - name: {{ $.Release.Name }}-raw-butler-secret + mountPath: /secrets-raw + 
readOnly: true + - name: {{ $.Release.Name }}-butler-secret + mountPath: /secrets + - name: rubintv-aws-creds + mountPath: /secrets-rubintv + readOnly: true + {{- end }} + volumes: + - name: rubintv-creds + secret: + secretName: google-creds + - name: rubintv-aws-creds + secret: + secretName: rubintv-secrets + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + nfs: + path: {{ $values.serverPath }} + readOnly: {{ $values.readOnly }} + server: {{ $values.server }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + emptyDir: {} + - name: {{ $.Release.Name }}-raw-butler-secret + secret: + secretName: butler-secret + defaultMode: 0600 + {{- end }} + {{- if or $.Values.nodeSelector $script.nodeSelector }} + {{- $nodeSelector := "" }} + {{- if $script.nodeSelector }} + {{- $nodeSelector = $script.nodeSelector }} + {{- else }} + {{- $nodeSelector = $.Values.nodeSelector }} + {{- end }} + nodeSelector: + {{- toYaml $nodeSelector | nindent 8 }} + {{- end }} + {{- if or $.Values.affinity $script.affinity }} + {{- $affinity := "" }} + {{- if $script.affinity }} + {{- $affinity = $script.affinity }} + {{- else }} + {{- $affinity = $.Values.affinity }} + {{- end }} + affinity: + {{- toYaml $affinity | nindent 8 }} + {{- end }} + {{- if or $.Values.tolerations $script.tolerations }} + {{- $tolerations := "" }} + {{- if $script.tolerations }} + {{- $tolerations = $script.tolerations }} + {{- else }} + {{- $tolerations = $.Values.tolerations }} + {{- end }} + tolerations: 
+ {{- toYaml $tolerations | nindent 8 }} + {{- end }} +{{- end }} diff --git a/applications/rapid-analysis/templates/lsstcam-gather2a-set.yaml b/applications/rapid-analysis/templates/lsstcam-gather2a-set.yaml new file mode 100644 index 0000000000..c55e8b5715 --- /dev/null +++ b/applications/rapid-analysis/templates/lsstcam-gather2a-set.yaml @@ -0,0 +1,231 @@ +{{ $_ := set $.Values "script" $.Values.lsstcamGather2aSet }} +{{ $script := $.Values.lsstcamGather2aSet }} +{{- if $script.name }} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "rapid-analysis.deploymentName" $ }}-gather2aset + namespace: {{ $.Values.namespace }} + labels: + {{- include "rapid-analysis.labels" $ | nindent 4 }} +spec: + revisionHistoryLimit: 0 + selector: + matchLabels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 6 }} + replicas: {{ ($script.replicas | int) }} + podManagementPolicy: Parallel + template: + metadata: + {{- with $.Values.podAnnotations }} + annotations: + {{- toYaml . 
| nindent 8 }} + {{- end }} + labels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml $.Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- if $.Values.securityContext }} + securityContext: + {{- if $.Values.securityContext.uid }} + runAsUser: {{ $.Values.securityContext.uid }} + {{- end }} + {{- if $.Values.securityContext.gid }} + runAsGroup: {{ $.Values.securityContext.gid }} + {{- end }} + {{- if $.Values.securityContext.fid }} + fsGroup: {{ $.Values.securityContext.fid }} + {{- end }} + {{- end }} + containers: + - name: {{ include "rapid-analysis.scriptName" $ }} + image: "{{ $.Values.image.repository }}:{{ $.Values.image.tag }}" + imagePullPolicy: {{ $.Values.image.pullPolicy }} + env: + - name: RUN_ARG + value: {{ $script.name }} + - name: WORKER_NAME + valueFrom: + fieldRef: + fieldPath: metadata.labels['statefulset.kubernetes.io/pod-name'] + - name: RAPID_ANALYSIS_LOCATION + value: {{ $.Values.location | upper | quote }} + {{- if or $.Values.env $.Values.envSecrets }} + {{- range $env_var, $env_value := $.Values.env }} + - name: {{ $env_var }} + value: {{ $env_value | quote }} + {{- end }} + {{- range $env := $.Values.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: PGPASSFILE + value: "{{ $.Values.butlerSecret.containerPath }}/postgres-credentials.txt" + - name: PGUSER + value: {{ $.Values.butlerSecret.dbUser | quote }} + - name: AWS_SHARED_CREDENTIALS_FILE + value: "{{ $.Values.butlerSecret.containerPath }}/aws-credentials.ini" + {{- end }} + {{- if $.Values.redis.enabled }} + - name: REDIS_HOST + value: "redis-service" + {{- if $.Values.redis.envSecrets }} + {{- range $env := $.Values.redis.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ 
$env.secretKey }} + {{- end }} + {{- end }} + {{- end }} + envFrom: + - configMapRef: + name: env-configmap + volumeMounts: + - name: rubintv-creds + mountPath: "/etc/rubintv/creds" + readOnly: true + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + readOnly: {{ $values.readOnly }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + mountPath: {{ $.Values.butlerSecret.containerPath }} + {{- end }} + {{- if or $.Values.resources $script.resources }} + {{- $resources := "" }} + {{- if $script.resources }} + {{- $resources = $script.resources }} + {{- else }} + {{- $resources = $.Values.resources }} + {{- end }} + resources: + {{- toYaml $resources | nindent 12 }} + {{- end }} + {{- if $.Values.butlerSecret }} + initContainers: + - name: {{ $.Release.Name }}-butler-secret-perm-fixer + image: "alpine:latest" + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /secrets + cat /secrets/aws-credentials.ini > new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + cat /secrets-rubintv/aws-credentials.ini >> new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + mv new-aws-credentials.ini /secrets/aws-credentials.ini + chown 73006:73006 /secrets/* + chmod 0600 /secrets/* + volumeMounts: + - name: {{ $.Release.Name }}-raw-butler-secret + mountPath: /secrets-raw + 
readOnly: true + - name: {{ $.Release.Name }}-butler-secret + mountPath: /secrets + - name: rubintv-aws-creds + mountPath: /secrets-rubintv + readOnly: true + {{- end }} + volumes: + - name: rubintv-creds + secret: + secretName: google-creds + - name: rubintv-aws-creds + secret: + secretName: rubintv-secrets + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + nfs: + path: {{ $values.serverPath }} + readOnly: {{ $values.readOnly }} + server: {{ $values.server }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name }}-butler-secret + emptyDir: {} + - name: {{ $.Release.Name }}-raw-butler-secret + secret: + secretName: butler-secret + defaultMode: 0600 + {{- end }} + {{- if or $.Values.nodeSelector $script.nodeSelector }} + {{- $nodeSelector := "" }} + {{- if $script.nodeSelector }} + {{- $nodeSelector = $script.nodeSelector }} + {{- else }} + {{- $nodeSelector = $.Values.nodeSelector }} + {{- end }} + nodeSelector: + {{- toYaml $nodeSelector | nindent 8 }} + {{- end }} + {{- if or $.Values.affinity $script.affinity }} + {{- $affinity := "" }} + {{- if $script.affinity }} + {{- $affinity = $script.affinity }} + {{- else }} + {{- $affinity = $.Values.affinity }} + {{- end }} + affinity: + {{- toYaml $affinity | nindent 8 }} + {{- end }} + {{- if or $.Values.tolerations $script.tolerations }} + {{- $tolerations := "" }} + {{- if $script.tolerations }} + {{- $tolerations = $script.tolerations }} + {{- else }} + {{- $tolerations = $.Values.tolerations }} + {{- end }} + tolerations: 
+ {{- toYaml $tolerations | nindent 8 }} + {{- end }} +{{- end }} diff --git a/applications/rapid-analysis/templates/lsstcam-worker-set.yaml b/applications/rapid-analysis/templates/lsstcam-worker-set.yaml new file mode 100644 index 0000000000..bc60241909 --- /dev/null +++ b/applications/rapid-analysis/templates/lsstcam-worker-set.yaml @@ -0,0 +1,231 @@ +{{ $_ := set $.Values "script" $.Values.lsstcamWorkerSet }} +{{ $script := $.Values.lsstcamWorkerSet }} +{{- if $script.name }} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "rapid-analysis.deploymentName" $ }}-workerset + namespace: {{ $.Values.namespace }} + labels: + {{- include "rapid-analysis.labels" $ | nindent 4 }} +spec: + revisionHistoryLimit: 0 + selector: + matchLabels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 6 }} + replicas: {{ ($script.replicas | int) }} + podManagementPolicy: Parallel + template: + metadata: + {{- with $.Values.podAnnotations }} + annotations: + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml $.Values.imagePullSecrets | nindent 8 }} + {{- end }} + {{- if $.Values.securityContext }} + securityContext: + {{- if $.Values.securityContext.uid }} + runAsUser: {{ $.Values.securityContext.uid }} + {{- end }} + {{- if $.Values.securityContext.gid }} + runAsGroup: {{ $.Values.securityContext.gid }} + {{- end }} + {{- if $.Values.securityContext.fid }} + fsGroup: {{ $.Values.securityContext.fid }} + {{- end }} + {{- end }} + containers: + - name: {{ include "rapid-analysis.scriptName" $ }} + image: "{{ $.Values.image.repository }}:{{ $.Values.image.tag }}" + imagePullPolicy: {{ $.Values.image.pullPolicy }} + env: + - name: RUN_ARG + value: {{ $script.name }} + - name: WORKER_NAME + valueFrom: + fieldRef: + fieldPath: metadata.labels['statefulset.kubernetes.io/pod-name'] + - name: RAPID_ANALYSIS_LOCATION + 
value: {{ $.Values.location | upper | quote }} + {{- if or $.Values.env $.Values.envSecrets }} + {{- range $env_var, $env_value := $.Values.env }} + - name: {{ $env_var }} + value: {{ $env_value | quote }} + {{- end }} + {{- range $env := $.Values.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: PGPASSFILE + value: "{{ $.Values.butlerSecret.containerPath }}/postgres-credentials.txt" + - name: PGUSER + value: {{ $.Values.butlerSecret.dbUser | quote }} + - name: AWS_SHARED_CREDENTIALS_FILE + value: "{{ $.Values.butlerSecret.containerPath }}/aws-credentials.ini" + {{- end }} + {{- if $.Values.redis.enabled }} + - name: REDIS_HOST + value: "redis-service" + {{- if $.Values.redis.envSecrets }} + {{- range $env := $.Values.redis.envSecrets }} + - name: {{ $env.name }} + valueFrom: + secretKeyRef: + name: {{ $env.secretName }} + key: {{ $env.secretKey }} + {{- end }} + {{- end }} + {{- end }} + envFrom: + - configMapRef: + name: env-configmap + volumeMounts: + - name: rubintv-creds + mountPath: "/etc/rubintv/creds" + readOnly: true + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + readOnly: {{ $values.readOnly }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + mountPath: {{ $values.containerPath }} + {{- if ($values.subPath) }} + subPath: {{ $values.subPath }} + {{- end }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ $.Release.Name 
}}-butler-secret + mountPath: {{ $.Values.butlerSecret.containerPath }} + {{- end }} + {{- if or $.Values.resources $script.resources }} + {{- $resources := "" }} + {{- if $script.resources }} + {{- $resources = $script.resources }} + {{- else }} + {{- $resources = $.Values.resources }} + {{- end }} + resources: + {{- toYaml $resources | nindent 12 }} + {{- end }} + {{- if $.Values.butlerSecret }} + initContainers: + - name: {{ $.Release.Name }}-butler-secret-perm-fixer + image: "alpine:latest" + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /secrets + cat /secrets/aws-credentials.ini > new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + cat /secrets-rubintv/aws-credentials.ini >> new-aws-credentials.ini + printf "\n" >> new-aws-credentials.ini + mv new-aws-credentials.ini /secrets/aws-credentials.ini + chown 73006:73006 /secrets/* + chmod 0600 /secrets/* + volumeMounts: + - name: {{ $.Release.Name }}-raw-butler-secret + mountPath: /secrets-raw + readOnly: true + - name: {{ $.Release.Name }}-butler-secret + mountPath: /secrets + - name: rubintv-aws-creds + mountPath: /secrets-rubintv + readOnly: true + {{- end }} + volumes: + - name: rubintv-creds + secret: + secretName: google-creds + - name: rubintv-aws-creds + secret: + secretName: rubintv-secrets + {{- if $.Values.nfsMountpoint }} + {{- range $values := $.Values.nfsMountpoint }} + - name: {{ $values.name }} + nfs: + path: {{ $values.serverPath }} + readOnly: {{ $values.readOnly }} + server: {{ $values.server }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpoint }} + {{- range $values := $.Values.pvcMountpoint }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.pvcMountpointClaim }} + {{- range $values := $.Values.pvcMountpointClaim }} + - name: {{ $values.name }} + persistentVolumeClaim: + claimName: {{ $values.name }} + {{- end }} + {{- end }} + {{- if $.Values.butlerSecret }} + - name: {{ 
$.Release.Name }}-butler-secret + emptyDir: {} + - name: {{ $.Release.Name }}-raw-butler-secret + secret: + secretName: butler-secret + defaultMode: 0600 + {{- end }} + {{- if or $.Values.nodeSelector $script.nodeSelector }} + {{- $nodeSelector := "" }} + {{- if $script.nodeSelector }} + {{- $nodeSelector = $script.nodeSelector }} + {{- else }} + {{- $nodeSelector = $.Values.nodeSelector }} + {{- end }} + nodeSelector: + {{- toYaml $nodeSelector | nindent 8 }} + {{- end }} + {{- if or $.Values.affinity $script.affinity }} + {{- $affinity := "" }} + {{- if $script.affinity }} + {{- $affinity = $script.affinity }} + {{- else }} + {{- $affinity = $.Values.affinity }} + {{- end }} + affinity: + {{- toYaml $affinity | nindent 8 }} + {{- end }} + {{- if or $.Values.tolerations $script.tolerations }} + {{- $tolerations := "" }} + {{- if $script.tolerations }} + {{- $tolerations = $script.tolerations }} + {{- else }} + {{- $tolerations = $.Values.tolerations }} + {{- end }} + tolerations: + {{- toYaml $tolerations | nindent 8 }} + {{- end }} +{{- end }} From 3713bbde46d3134f3f86e7f89f0e16f1d3f7f66a Mon Sep 17 00:00:00 2001 From: Merlin Fisher-Levine Date: Wed, 2 Oct 2024 06:53:22 -0700 Subject: [PATCH 278/567] Add podAnnotations support - no actual annotations yet --- applications/rapid-analysis/templates/deployment.yaml | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/applications/rapid-analysis/templates/deployment.yaml b/applications/rapid-analysis/templates/deployment.yaml index d6a44033ca..86fa621f86 100644 --- a/applications/rapid-analysis/templates/deployment.yaml +++ b/applications/rapid-analysis/templates/deployment.yaml @@ -15,9 +15,15 @@ spec: {{- include "rapid-analysis.selectorLabels" $ | nindent 6 }} template: metadata: - {{- with $.Values.podAnnotations }} + {{- if or $.Values.podAnnotations $script.podAnnotations }} + {{- $podAnnotations := "" }} + {{- if $script.podAnnotations }} + {{- $podAnnotations = $script.podAnnotations }} 
+ {{- else }} + {{- $podAnnotations = $.Values.podAnnotations }} + {{- end }} annotations: - {{- toYaml $ | nindent 8 }} + {{- toYaml $podAnnotations | nindent 8 }} {{- end }} labels: {{- include "rapid-analysis.selectorLabels" $ | nindent 8 }} From f0e12258d77b8df62df11252c4c32d33f42fe236 Mon Sep 17 00:00:00 2001 From: Merlin Fisher-Levine Date: Wed, 2 Oct 2024 07:18:55 -0700 Subject: [PATCH 279/567] Add LHN annotations to pods --- .../rapid-analysis/values-summit.yaml | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/applications/rapid-analysis/values-summit.yaml b/applications/rapid-analysis/values-summit.yaml index 185b063e84..9700759b38 100644 --- a/applications/rapid-analysis/values-summit.yaml +++ b/applications/rapid-analysis/values-summit.yaml @@ -6,7 +6,12 @@ location: SUMMIT env: DAF_BUTLER_REPOSITORY_INDEX: /project/data-repos.yaml scripts: +### +### AuxTel pods +### - name: summit/auxTel/runBackgroundService.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: requests: cpu: 0.5 @@ -16,11 +21,19 @@ scripts: memory: 10G - name: summit/auxTel/runButlerWatcher.py - name: summit/auxTel/runCalibrateCcdRunner.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runImExaminer.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runIsrRunner.py - name: summit/auxTel/runMetadataCreator.py - name: summit/auxTel/runMetadataServer.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runMonitor.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: requests: cpu: 0.5 @@ -29,8 +42,14 @@ scripts: cpu: 1.0 memory: 10G - name: summit/auxTel/runMountTorquePlotter.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runNightReporter.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runSpecExaminer.py + 
podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: requests: cpu: 0.5 @@ -39,6 +58,8 @@ scripts: cpu: 1.0 memory: 4G - name: summit/misc/runAllSky.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: requests: cpu: 1.0 @@ -47,15 +68,34 @@ scripts: cpu: 2 memory: 6G - name: summit/misc/runStarTracker.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/misc/runStarTrackerCatchup.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/misc/runStarTrackerFast.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/misc/runStarTrackerMetadata.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/misc/runStarTrackerNightReport.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/misc/runStarTrackerWide.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/misc/runTmaTelemetry.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" +### +### ComCam pods +### - name: summit/LSSTComCam/runButlerWatcher.py - name: summit/LSSTComCam/runHeadNode.py - name: summit/LSSTComCam/runMetadataServer.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/LSSTComCam/runPlotter.py workerSet: name: summit/LSSTComCam/runSfmRunner.py From acef76d625cf9e3f27cb0177c9ff9111b41d04e5 Mon Sep 17 00:00:00 2001 From: Merlin Fisher-Levine Date: Wed, 2 Oct 2024 07:19:54 -0700 Subject: [PATCH 280/567] Update template names and add new pods --- .../rapid-analysis/values-summit.yaml | 145 +++++++++++++++++- 1 file changed, 144 insertions(+), 1 deletion(-) diff --git a/applications/rapid-analysis/values-summit.yaml b/applications/rapid-analysis/values-summit.yaml index 9700759b38..614b510b65 100644 --- a/applications/rapid-analysis/values-summit.yaml +++ b/applications/rapid-analysis/values-summit.yaml @@ -57,6 +57,67 @@ 
scripts: limits: cpu: 1.0 memory: 4G +### +### ComCamSim pods +### +- name: summit/LSSTComCamSim/runButlerWatcher.py +- name: summit/LSSTComCamSim/runHeadNode.py +- name: summit/LSSTComCamSim/runMetadataServer.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" +- name: summit/LSSTComCamSim/runPlotter.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" + resources: + requests: + cpu: 0.5 + memory: 4G + limits: + cpu: 1.0 + memory: 6G +- name: summit/LSSTComCamSim/runAosDonutPipeline.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" + resources: + requests: + cpu: 32 + memory: 96G + limits: + cpu: 32 + memory: 96G +- name: summit/LSSTComCamSim/runFocusSweepAnalysis.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" + resources: + requests: + cpu: 0.5 + memory: 1G + limits: + cpu: 1 + memory: 2G +- name: summit/LSSTComCamSim/runMetadataServerAos.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" + resources: + requests: + cpu: 0.25 + memory: 500M + limits: + cpu: .5 + memory: 1G +- name: summit/LSSTComCamSim/runPsfPlotting.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" + resources: + requests: + cpu: .5 + memory: 4G + limits: + cpu: 1 + memory: 6G +### +### Misc pods +### - name: summit/misc/runAllSky.py podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" @@ -97,7 +158,56 @@ scripts: podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/LSSTComCam/runPlotter.py -workerSet: + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" +### +### LSSTCam pods +### +- name: summit/LSSTCam/runButlerWatcher.py +- name: summit/LSSTCam/runHeadNode.py +- name: summit/LSSTCam/runMetadataServer.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" +- name: summit/LSSTCam/runPlotter.py + podAnnotations: + k8s.v1.cni.cncf.io/networks: "kube-system/lhn" +### +### ComCamSim StatefulSets +### +comcamsimWorkerSet: 
+ name: summit/LSSTComCamSim/runSfmRunner.py + replicas: 9 + resources: + requests: + cpu: 1.0 + memory: 4G + limits: + cpu: 1.0 + memory: 8G +comcamsimGather2aSet: + name: summit/LSSTComCamSim/runStep2aWorker.py + replicas: 1 + resources: + requests: + cpu: 1.0 + memory: 4G + limits: + cpu: 1.0 + memory: 8G +comcamsimGatherRollupSet: + name: summit/LSSTComCamSim/runNightlyWorker.py + replicas: 1 + resources: + requests: + cpu: 1.0 + memory: 12G + limits: + cpu: 1.0 + memory: 24G +### +### ComCam StatefulSets +### +comcamWorkerSet: name: summit/LSSTComCam/runSfmRunner.py replicas: 36 resources: @@ -107,6 +217,39 @@ workerSet: limits: cpu: 1.0 memory: 8G +comcamGather2aSet: + name: summit/LSSTComCam/runStep2aWorker.py + replicas: 4 # 4 deep to match comcamWorkerSet + resources: + requests: + cpu: 1.0 + memory: "4G" + limits: + cpu: 1.0 + memory: "8G" +comcamGatherRollupSet: + name: summit/LSSTComCam/runNightlyWorker.py + replicas: 2 # 2 is probably plenty, 1 might even be fine + resources: + requests: + cpu: 1.0 + memory: "12G" + limits: + cpu: 1.0 + memory: "24G" +### +### LSSTCam StatefulSets +### +lsstcamWorkerSet: + name: summit/LSSTCam/runSfmRunner.py + replicas: 1 # 1 deep for now - in-focus chips only + resources: + requests: + cpu: 1.0 + memory: 4G + limits: + cpu: 1.0 + memory: 8G # we should check this value credentialFile: google_write_creds pullSecretsPath: pull-secret rubinTvSecretsPath: rubintv From 26c749084404e6b4dff94ef2b074e3daf064cedf Mon Sep 17 00:00:00 2001 From: Merlin Fisher-Levine Date: Wed, 2 Oct 2024 11:35:37 -0700 Subject: [PATCH 281/567] Use StatefulSets on TTS --- .../values-tucson-teststand.yaml | 69 +++++++++++++++---- 1 file changed, 55 insertions(+), 14 deletions(-) diff --git a/applications/rapid-analysis/values-tucson-teststand.yaml b/applications/rapid-analysis/values-tucson-teststand.yaml index 8604e12165..546a495f2e 100644 --- a/applications/rapid-analysis/values-tucson-teststand.yaml +++ 
b/applications/rapid-analysis/values-tucson-teststand.yaml @@ -9,6 +9,13 @@ siteTag: tts location: TTS scripts: - name: summit/auxTel/runBackgroundService.py + resources: + requests: + cpu: 0.5 + memory: 4G + limits: + cpu: 1.0 + memory: 10G - name: summit/auxTel/runButlerWatcher.py - name: summit/auxTel/runCalibrateCcdRunner.py - name: summit/auxTel/runImExaminer.py @@ -16,29 +23,63 @@ scripts: - name: summit/auxTel/runMetadataCreator.py - name: summit/auxTel/runMetadataServer.py - name: summit/auxTel/runMonitor.py + resources: + requests: + cpu: 0.5 + memory: 1G + limits: + cpu: 1.0 + memory: 10G - name: summit/auxTel/runMountTorquePlotter.py - name: summit/auxTel/runNightReporter.py - name: summit/auxTel/runSpecExaminer.py -- name: summit/comCam/runButlerWatcher.py -- name: summit/comCam/runIsrRunner_000.py -- name: summit/comCam/runIsrRunner_001.py -- name: summit/comCam/runIsrRunner_002.py -- name: summit/comCam/runIsrRunner_003.py -- name: summit/comCam/runIsrRunner_004.py -- name: summit/comCam/runIsrRunner_005.py -- name: summit/comCam/runIsrRunner_006.py -- name: summit/comCam/runIsrRunner_007.py -- name: summit/comCam/runIsrRunner_008.py -- name: summit/comCam/runMetadataServer.py -- name: summit/comCam/runPlotter.py resources: requests: cpu: 0.5 + memory: 2G + limits: + cpu: 1.0 memory: 4G +### +### ComCam pods +### +- name: summit/LSSTComCam/runButlerWatcher.py +- name: summit/LSSTComCam/runHeadNode.py +- name: summit/LSSTComCam/runMetadataServer.py +- name: summit/LSSTComCam/runPlotter.py +### +### ComCam StatefulSets +### +comcamWorkerSet: + name: summit/LSSTComCam/runSfmRunner.py + replicas: 1 + resources: + requests: + cpu: 1.0 + memory: 4G + limits: + cpu: 1.0 + memory: 8G +comcamGather2aSet: + name: summit/LSSTComCam/runStep2aWorker.py + replicas: 1 + resources: + requests: + cpu: 1.0 + memory: "4G" + limits: + cpu: 1.0 + memory: "8G" +comcamGatherRollupSet: + name: summit/LSSTComCam/runNightlyWorker.py + replicas: 1 + resources: + requests: + 
cpu: 1.0 + memory: "12G" limits: cpu: 1.0 - memory: 6G -- name: summit/misc/runTmaTelemetry.py + memory: "24G" # TODO: remove google credentials credentialFile: google_write_creds vaultPrefixPath: secret/k8s_operator/tucson-teststand.lsst.codes From 73605480e880bd64040b6da9cf176f8a9386023d Mon Sep 17 00:00:00 2001 From: Merlin Fisher-Levine Date: Wed, 2 Oct 2024 12:40:39 -0700 Subject: [PATCH 282/567] Reduce the number of pods to a minimum for initial deployment --- applications/rapid-analysis/values-summit.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/applications/rapid-analysis/values-summit.yaml b/applications/rapid-analysis/values-summit.yaml index 614b510b65..6c2c14c23f 100644 --- a/applications/rapid-analysis/values-summit.yaml +++ b/applications/rapid-analysis/values-summit.yaml @@ -176,7 +176,7 @@ scripts: ### comcamsimWorkerSet: name: summit/LSSTComCamSim/runSfmRunner.py - replicas: 9 + replicas: 1 resources: requests: cpu: 1.0 @@ -209,7 +209,7 @@ comcamsimGatherRollupSet: ### comcamWorkerSet: name: summit/LSSTComCam/runSfmRunner.py - replicas: 36 + replicas: 1 resources: requests: cpu: 1.0 @@ -219,7 +219,7 @@ comcamWorkerSet: memory: 8G comcamGather2aSet: name: summit/LSSTComCam/runStep2aWorker.py - replicas: 4 # 4 deep to match comcamWorkerSet + replicas: 1 resources: requests: cpu: 1.0 @@ -229,7 +229,7 @@ comcamGather2aSet: memory: "8G" comcamGatherRollupSet: name: summit/LSSTComCam/runNightlyWorker.py - replicas: 2 # 2 is probably plenty, 1 might even be fine + replicas: 1 resources: requests: cpu: 1.0 @@ -242,7 +242,7 @@ comcamGatherRollupSet: ### lsstcamWorkerSet: name: summit/LSSTCam/runSfmRunner.py - replicas: 1 # 1 deep for now - in-focus chips only + replicas: 1 resources: requests: cpu: 1.0 From ed956587edac8992319ebc21d3e1ed59fb2e8812 Mon Sep 17 00:00:00 2001 From: Merlin Fisher-Levine Date: Wed, 2 Oct 2024 13:50:23 -0700 Subject: [PATCH 283/567] Bring up one pod per CCD for ComCam + ComCamSim --- 
applications/rapid-analysis/values-summit.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/rapid-analysis/values-summit.yaml b/applications/rapid-analysis/values-summit.yaml index 6c2c14c23f..c57872bb53 100644 --- a/applications/rapid-analysis/values-summit.yaml +++ b/applications/rapid-analysis/values-summit.yaml @@ -176,7 +176,7 @@ scripts: ### comcamsimWorkerSet: name: summit/LSSTComCamSim/runSfmRunner.py - replicas: 1 + replicas: 9 resources: requests: cpu: 1.0 @@ -209,7 +209,7 @@ comcamsimGatherRollupSet: ### comcamWorkerSet: name: summit/LSSTComCam/runSfmRunner.py - replicas: 1 + replicas: 9 resources: requests: cpu: 1.0 From 245ab15e6bf8aaa3777033c39fb15b552208c797 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Date: Wed, 2 Oct 2024 18:19:14 -0300 Subject: [PATCH 284/567] rapid-analysis: add LSSTCam NFS mount on summit --- applications/rapid-analysis/values-summit.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/applications/rapid-analysis/values-summit.yaml b/applications/rapid-analysis/values-summit.yaml index c57872bb53..41b0dae2bb 100644 --- a/applications/rapid-analysis/values-summit.yaml +++ b/applications/rapid-analysis/values-summit.yaml @@ -306,6 +306,11 @@ nfsMountpoint: readOnly: false server: nfs1.cp.lsst.org serverPath: /scratch/rubintv +- name: lsstcam-data + containerPath: /repo/LSSTCam + readOnly: false + server: nfs3.cp.lsst.org + serverPath: /lsstcam/repo/LSSTCam resources: requests: cpu: 0.5 From d024e32bc0f6a3417db648a74d15626775379fa2 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Thu, 3 Oct 2024 11:24:18 -0300 Subject: [PATCH 285/567] rapid-analysis: remove unused readonly nfs mounts from tucson --- .../values-tucson-teststand.yaml | 20 ------------------- 1 file changed, 20 deletions(-) diff --git a/applications/rapid-analysis/values-tucson-teststand.yaml b/applications/rapid-analysis/values-tucson-teststand.yaml index 546a495f2e..7789a91b55 100644 --- 
a/applications/rapid-analysis/values-tucson-teststand.yaml +++ b/applications/rapid-analysis/values-tucson-teststand.yaml @@ -104,31 +104,11 @@ nfsMountpoint: readOnly: false server: comcam-archiver.tu.lsst.org serverPath: /repo/LSSTComCam -- name: auxtel-data - containerPath: /readonly/lsstdata/auxtel - readOnly: true - server: nfs-auxtel.tu.lsst.org - serverPath: /auxtel/lsstdata -- name: comcam-data - containerPath: /readonly/lsstdata/comcam - readOnly: true - server: comcam-archiver.tu.lsst.org - serverPath: /lsstdata - name: project-shared containerPath: /project readOnly: false server: nfs-project.tu.lsst.org serverPath: /project -- name: auxtel-gen3-data-temp - containerPath: /data/lsstdata/TTS/auxtel - readOnly: true - server: nfs-auxtel.tu.lsst.org - serverPath: /auxtel/lsstdata/TTS/auxtel -- name: comcam-gen3-data-temp - containerPath: /data/lsstdata/TTS/comcam - readOnly: true - server: comcam-archiver.tu.lsst.org - serverPath: /lsstdata/TTS/comcam - name: scratch-shared containerPath: /scratch readOnly: false From 6718e13ccf5ea42ef5aba880cf88fc2bec2e2cea Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Thu, 3 Oct 2024 11:25:26 -0300 Subject: [PATCH 286/567] rapid-analysis: remove unused `scratch-shared` nfs mounts from tucson --- applications/rapid-analysis/values-tucson-teststand.yaml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/applications/rapid-analysis/values-tucson-teststand.yaml b/applications/rapid-analysis/values-tucson-teststand.yaml index 7789a91b55..1141204745 100644 --- a/applications/rapid-analysis/values-tucson-teststand.yaml +++ b/applications/rapid-analysis/values-tucson-teststand.yaml @@ -109,11 +109,6 @@ nfsMountpoint: readOnly: false server: nfs-project.tu.lsst.org serverPath: /project -- name: scratch-shared - containerPath: /scratch - readOnly: false - server: nfs-scratch.tu.lsst.org - serverPath: /scratch/rubintv resources: requests: cpu: 0.5 From c867386119c4d7fb1406262f6c1ef1a65b32f21f Mon Sep 17 00:00:00 
2001 From: Sebastian Aranda Sanchez Date: Thu, 3 Oct 2024 14:49:35 -0300 Subject: [PATCH 287/567] rapid-analysis: remove unused readonly nfs mounts from summit --- .../rapid-analysis/values-summit.yaml | 20 ------------------- 1 file changed, 20 deletions(-) diff --git a/applications/rapid-analysis/values-summit.yaml b/applications/rapid-analysis/values-summit.yaml index 41b0dae2bb..60e02a6ef7 100644 --- a/applications/rapid-analysis/values-summit.yaml +++ b/applications/rapid-analysis/values-summit.yaml @@ -271,31 +271,11 @@ nfsMountpoint: readOnly: false server: nfs3.cp.lsst.org serverPath: /comcam/repo/LSSTComCam -- name: auxtel-data - containerPath: /readonly/lsstdata/auxtel - readOnly: true - server: nfs-auxtel.cp.lsst.org - serverPath: /auxtel/lsstdata -- name: comcam-data - containerPath: /readonly/lsstdata/comcam - readOnly: true - server: nfs3.cp.lsst.org - serverPath: /comcam/lsstdata - name: project-shared containerPath: /project readOnly: false server: nfs1.cp.lsst.org serverPath: /project -- name: auxtel-gen3-data-temp - containerPath: /data/lsstdata/base/auxtel - readOnly: true - server: nfs-auxtel.cp.lsst.org - serverPath: /auxtel/lsstdata/base/auxtel -- name: comcam-gen3-data-temp - containerPath: /data/lsstdata/base/comcam - readOnly: true - server: nfs3.cp.lsst.org - serverPath: /comcam/lsstdata/base/comcam - name: allsky-data containerPath: /data/allsky readOnly: true From 9ed23e1751ebe03646e92a9cce19cf8801d20e5f Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Thu, 3 Oct 2024 14:50:39 -0300 Subject: [PATCH 288/567] rapid-analysis: remove unused nfs mounts from summit --- applications/rapid-analysis/values-summit.yaml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/applications/rapid-analysis/values-summit.yaml b/applications/rapid-analysis/values-summit.yaml index 60e02a6ef7..2728ef2491 100644 --- a/applications/rapid-analysis/values-summit.yaml +++ b/applications/rapid-analysis/values-summit.yaml @@ -281,11 +281,6 @@ 
nfsMountpoint: readOnly: true server: nfs-auxtel.cp.lsst.org serverPath: /auxtel/allsky -- name: scratch-shared - containerPath: /scratch - readOnly: false - server: nfs1.cp.lsst.org - serverPath: /scratch/rubintv - name: lsstcam-data containerPath: /repo/LSSTCam readOnly: false From 1e0193d43b6e904256f25ca94bc8ecc823a193ee Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Thu, 3 Oct 2024 11:39:31 -0300 Subject: [PATCH 289/567] rapid-analysis: split mounts so pods don't mount everything on tucson and summit --- .../templates/comcam-gather-rollup-set.yaml | 12 ++ .../templates/comcam-gather2a-set.yaml | 12 ++ .../templates/comcam-worker-set.yaml | 12 ++ .../comcamsim-gather-rollup-set.yaml | 12 ++ .../templates/comcamsim-gather2a-set.yaml | 12 ++ .../templates/comcamsim-worker-set.yaml | 12 ++ .../rapid-analysis/templates/deployment.yaml | 12 ++ .../templates/lsstcam-gather-rollup-set.yaml | 12 ++ .../templates/lsstcam-gather2a-set.yaml | 12 ++ .../templates/lsstcam-worker-set.yaml | 12 ++ .../rapid-analysis/values-summit.yaml | 120 ++++++++++++++++++ .../values-tucson-teststand.yaml | 57 +++++++++ 12 files changed, 297 insertions(+) diff --git a/applications/rapid-analysis/templates/comcam-gather-rollup-set.yaml b/applications/rapid-analysis/templates/comcam-gather-rollup-set.yaml index 789390a18c..b2b30498c5 100644 --- a/applications/rapid-analysis/templates/comcam-gather-rollup-set.yaml +++ b/applications/rapid-analysis/templates/comcam-gather-rollup-set.yaml @@ -97,13 +97,16 @@ spec: readOnly: true {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} readOnly: {{ $values.readOnly }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if 
($values.subPath) }} @@ -111,8 +114,10 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -120,6 +125,7 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret mountPath: {{ $.Values.butlerSecret.containerPath }} @@ -169,6 +175,7 @@ spec: secretName: rubintv-secrets {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} nfs: path: {{ $values.serverPath }} @@ -176,20 +183,25 @@ spec: server: {{ $values.server }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret emptyDir: {} diff --git a/applications/rapid-analysis/templates/comcam-gather2a-set.yaml b/applications/rapid-analysis/templates/comcam-gather2a-set.yaml index 4de84b0ce0..46078cb74d 100644 --- a/applications/rapid-analysis/templates/comcam-gather2a-set.yaml +++ b/applications/rapid-analysis/templates/comcam-gather2a-set.yaml @@ -97,13 +97,16 @@ spec: readOnly: true {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} readOnly: {{ 
$values.readOnly }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -111,8 +114,10 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -120,6 +125,7 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret mountPath: {{ $.Values.butlerSecret.containerPath }} @@ -169,6 +175,7 @@ spec: secretName: rubintv-secrets {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} nfs: path: {{ $values.serverPath }} @@ -176,20 +183,25 @@ spec: server: {{ $values.server }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret emptyDir: {} diff --git a/applications/rapid-analysis/templates/comcam-worker-set.yaml b/applications/rapid-analysis/templates/comcam-worker-set.yaml index 245e075aa9..496b40ff33 100644 --- a/applications/rapid-analysis/templates/comcam-worker-set.yaml +++ b/applications/rapid-analysis/templates/comcam-worker-set.yaml 
@@ -97,13 +97,16 @@ spec: readOnly: true {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} readOnly: {{ $values.readOnly }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -111,8 +114,10 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -120,6 +125,7 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret mountPath: {{ $.Values.butlerSecret.containerPath }} @@ -169,6 +175,7 @@ spec: secretName: rubintv-secrets {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} nfs: path: {{ $values.serverPath }} @@ -176,20 +183,25 @@ spec: server: {{ $values.server }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret emptyDir: {} diff --git 
a/applications/rapid-analysis/templates/comcamsim-gather-rollup-set.yaml b/applications/rapid-analysis/templates/comcamsim-gather-rollup-set.yaml index 25af267c09..47fa606e84 100644 --- a/applications/rapid-analysis/templates/comcamsim-gather-rollup-set.yaml +++ b/applications/rapid-analysis/templates/comcamsim-gather-rollup-set.yaml @@ -97,13 +97,16 @@ spec: readOnly: true {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} readOnly: {{ $values.readOnly }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -111,8 +114,10 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -120,6 +125,7 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret mountPath: {{ $.Values.butlerSecret.containerPath }} @@ -169,6 +175,7 @@ spec: secretName: rubintv-secrets {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} nfs: path: {{ $values.serverPath }} @@ -176,20 +183,25 @@ spec: server: {{ $values.server }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := 
$.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret emptyDir: {} diff --git a/applications/rapid-analysis/templates/comcamsim-gather2a-set.yaml b/applications/rapid-analysis/templates/comcamsim-gather2a-set.yaml index d3d23908e3..7588341778 100644 --- a/applications/rapid-analysis/templates/comcamsim-gather2a-set.yaml +++ b/applications/rapid-analysis/templates/comcamsim-gather2a-set.yaml @@ -97,13 +97,16 @@ spec: readOnly: true {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} readOnly: {{ $values.readOnly }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -111,8 +114,10 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -120,6 +125,7 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret mountPath: {{ $.Values.butlerSecret.containerPath }} @@ -169,6 +175,7 @@ spec: secretName: rubintv-secrets {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} nfs: path: {{ $values.serverPath }} @@ -176,20 +183,25 @@ spec: server: {{ $values.server }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} 
{{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret emptyDir: {} diff --git a/applications/rapid-analysis/templates/comcamsim-worker-set.yaml b/applications/rapid-analysis/templates/comcamsim-worker-set.yaml index ce31cadd77..da9bf53328 100644 --- a/applications/rapid-analysis/templates/comcamsim-worker-set.yaml +++ b/applications/rapid-analysis/templates/comcamsim-worker-set.yaml @@ -97,13 +97,16 @@ spec: readOnly: true {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} readOnly: {{ $values.readOnly }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -111,8 +114,10 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -120,6 +125,7 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret mountPath: {{ $.Values.butlerSecret.containerPath }} @@ -169,6 +175,7 @@ spec: secretName: rubintv-secrets {{- if $.Values.nfsMountpoint }} {{- range $values := 
$.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} nfs: path: {{ $values.serverPath }} @@ -176,20 +183,25 @@ spec: server: {{ $values.server }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret emptyDir: {} diff --git a/applications/rapid-analysis/templates/deployment.yaml b/applications/rapid-analysis/templates/deployment.yaml index 86fa621f86..65ae5966b5 100644 --- a/applications/rapid-analysis/templates/deployment.yaml +++ b/applications/rapid-analysis/templates/deployment.yaml @@ -96,13 +96,16 @@ spec: readOnly: true {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} readOnly: {{ $values.readOnly }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -110,8 +113,10 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -119,6 +124,7 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if 
$.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret mountPath: {{ $.Values.butlerSecret.containerPath }} @@ -168,6 +174,7 @@ spec: secretName: rubintv-secrets {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} nfs: path: {{ $values.serverPath }} @@ -175,20 +182,25 @@ spec: server: {{ $values.server }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret emptyDir: {} diff --git a/applications/rapid-analysis/templates/lsstcam-gather-rollup-set.yaml b/applications/rapid-analysis/templates/lsstcam-gather-rollup-set.yaml index 92818aa6b6..97b37a8929 100644 --- a/applications/rapid-analysis/templates/lsstcam-gather-rollup-set.yaml +++ b/applications/rapid-analysis/templates/lsstcam-gather-rollup-set.yaml @@ -97,13 +97,16 @@ spec: readOnly: true {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} readOnly: {{ $values.readOnly }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -111,8 +114,10 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if 
$.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -120,6 +125,7 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret mountPath: {{ $.Values.butlerSecret.containerPath }} @@ -169,6 +175,7 @@ spec: secretName: rubintv-secrets {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} nfs: path: {{ $values.serverPath }} @@ -176,20 +183,25 @@ spec: server: {{ $values.server }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret emptyDir: {} diff --git a/applications/rapid-analysis/templates/lsstcam-gather2a-set.yaml b/applications/rapid-analysis/templates/lsstcam-gather2a-set.yaml index c55e8b5715..5ec06f24a3 100644 --- a/applications/rapid-analysis/templates/lsstcam-gather2a-set.yaml +++ b/applications/rapid-analysis/templates/lsstcam-gather2a-set.yaml @@ -97,13 +97,16 @@ spec: readOnly: true {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} readOnly: {{ $values.readOnly }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- 
range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -111,8 +114,10 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -120,6 +125,7 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret mountPath: {{ $.Values.butlerSecret.containerPath }} @@ -169,6 +175,7 @@ spec: secretName: rubintv-secrets {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} nfs: path: {{ $values.serverPath }} @@ -176,20 +183,25 @@ spec: server: {{ $values.server }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret emptyDir: {} diff --git a/applications/rapid-analysis/templates/lsstcam-worker-set.yaml b/applications/rapid-analysis/templates/lsstcam-worker-set.yaml index bc60241909..82d8f3e911 100644 --- a/applications/rapid-analysis/templates/lsstcam-worker-set.yaml +++ b/applications/rapid-analysis/templates/lsstcam-worker-set.yaml @@ -97,13 +97,16 @@ spec: readOnly: true {{- if $.Values.nfsMountpoint }} {{- range 
$values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} readOnly: {{ $values.readOnly }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -111,8 +114,10 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} mountPath: {{ $values.containerPath }} {{- if ($values.subPath) }} @@ -120,6 +125,7 @@ spec: {{- end }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret mountPath: {{ $.Values.butlerSecret.containerPath }} @@ -169,6 +175,7 @@ spec: secretName: rubintv-secrets {{- if $.Values.nfsMountpoint }} {{- range $values := $.Values.nfsMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} nfs: path: {{ $values.serverPath }} @@ -176,20 +183,25 @@ spec: server: {{ $values.server }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpoint }} {{- range $values := $.Values.pvcMountpoint }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.pvcMountpointClaim }} {{- range $values := $.Values.pvcMountpointClaim }} + {{- if (has $values.name $script.mounts) }} - name: {{ $values.name }} persistentVolumeClaim: claimName: {{ $values.name }} {{- end }} {{- end }} + {{- end }} {{- if $.Values.butlerSecret }} - name: {{ $.Release.Name }}-butler-secret emptyDir: {} diff --git a/applications/rapid-analysis/values-summit.yaml b/applications/rapid-analysis/values-summit.yaml index 2728ef2491..cc24295912 100644 
--- a/applications/rapid-analysis/values-summit.yaml +++ b/applications/rapid-analysis/values-summit.yaml @@ -10,6 +10,10 @@ scripts: ### AuxTel pods ### - name: summit/auxTel/runBackgroundService.py + mounts: + - auxtel-gen3-data + - allsky-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: @@ -20,18 +24,39 @@ scripts: cpu: 1.0 memory: 10G - name: summit/auxTel/runButlerWatcher.py + mounts: + - auxtel-gen3-data + - project-shared - name: summit/auxTel/runCalibrateCcdRunner.py + mounts: + - auxtel-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runImExaminer.py + mounts: + - auxtel-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runIsrRunner.py + mounts: + - auxtel-gen3-data + - project-shared - name: summit/auxTel/runMetadataCreator.py + mounts: + - auxtel-gen3-data + - project-shared - name: summit/auxTel/runMetadataServer.py + mounts: + - auxtel-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runMonitor.py + mounts: + - auxtel-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: @@ -42,12 +67,21 @@ scripts: cpu: 1.0 memory: 10G - name: summit/auxTel/runMountTorquePlotter.py + mounts: + - auxtel-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runNightReporter.py + mounts: + - auxtel-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runSpecExaminer.py + mounts: + - auxtel-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: @@ -61,11 +95,23 @@ scripts: ### ComCamSim pods ### - name: summit/LSSTComCamSim/runButlerWatcher.py + mounts: + - comcam-gen3-data + - project-shared - name: 
summit/LSSTComCamSim/runHeadNode.py + mounts: + - comcam-gen3-data + - project-shared - name: summit/LSSTComCamSim/runMetadataServer.py + mounts: + - comcam-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/LSSTComCamSim/runPlotter.py + mounts: + - comcam-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: @@ -76,6 +122,9 @@ scripts: cpu: 1.0 memory: 6G - name: summit/LSSTComCamSim/runAosDonutPipeline.py + mounts: + - comcam-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: @@ -86,6 +135,9 @@ scripts: cpu: 32 memory: 96G - name: summit/LSSTComCamSim/runFocusSweepAnalysis.py + mounts: + - comcam-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: @@ -96,6 +148,9 @@ scripts: cpu: 1 memory: 2G - name: summit/LSSTComCamSim/runMetadataServerAos.py + mounts: + - comcam-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: @@ -106,6 +161,9 @@ scripts: cpu: .5 memory: 1G - name: summit/LSSTComCamSim/runPsfPlotting.py + mounts: + - comcam-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: @@ -119,6 +177,9 @@ scripts: ### Misc pods ### - name: summit/misc/runAllSky.py + mounts: + - allsky-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" resources: @@ -129,46 +190,84 @@ scripts: cpu: 2 memory: 6G - name: summit/misc/runStarTracker.py + mounts: + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/misc/runStarTrackerCatchup.py + mounts: + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/misc/runStarTrackerFast.py + mounts: + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: 
summit/misc/runStarTrackerMetadata.py + mounts: + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/misc/runStarTrackerNightReport.py + mounts: + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/misc/runStarTrackerWide.py + mounts: + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/misc/runTmaTelemetry.py + mounts: + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" ### ### ComCam pods ### - name: summit/LSSTComCam/runButlerWatcher.py + mounts: + - comcam-gen3-data + - project-shared - name: summit/LSSTComCam/runHeadNode.py + mounts: + - comcam-gen3-data + - project-shared - name: summit/LSSTComCam/runMetadataServer.py + mounts: + - comcam-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/LSSTComCam/runPlotter.py + mounts: + - comcam-gen3-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" ### ### LSSTCam pods ### - name: summit/LSSTCam/runButlerWatcher.py + mounts: + - lsstcam-data + - project-shared - name: summit/LSSTCam/runHeadNode.py + mounts: + - lsstcam-data + - project-shared - name: summit/LSSTCam/runMetadataServer.py + mounts: + - lsstcam-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/LSSTCam/runPlotter.py + mounts: + - lsstcam-data + - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" ### @@ -176,6 +275,9 @@ scripts: ### comcamsimWorkerSet: name: summit/LSSTComCamSim/runSfmRunner.py + mounts: + - comcam-gen3-data + - project-shared replicas: 9 resources: requests: @@ -186,6 +288,9 @@ comcamsimWorkerSet: memory: 8G comcamsimGather2aSet: name: summit/LSSTComCamSim/runStep2aWorker.py + mounts: + - comcam-gen3-data + - project-shared replicas: 1 resources: requests: @@ -196,6 +301,9 @@ comcamsimGather2aSet: memory: 8G 
comcamsimGatherRollupSet: name: summit/LSSTComCamSim/runNightlyWorker.py + mounts: + - comcam-gen3-data + - project-shared replicas: 1 resources: requests: @@ -209,6 +317,9 @@ comcamsimGatherRollupSet: ### comcamWorkerSet: name: summit/LSSTComCam/runSfmRunner.py + mounts: + - comcam-gen3-data + - project-shared replicas: 9 resources: requests: @@ -219,6 +330,9 @@ comcamWorkerSet: memory: 8G comcamGather2aSet: name: summit/LSSTComCam/runStep2aWorker.py + mounts: + - comcam-gen3-data + - project-shared replicas: 1 resources: requests: @@ -229,6 +343,9 @@ comcamGather2aSet: memory: "8G" comcamGatherRollupSet: name: summit/LSSTComCam/runNightlyWorker.py + mounts: + - comcam-gen3-data + - project-shared replicas: 1 resources: requests: @@ -242,6 +359,9 @@ comcamGatherRollupSet: ### lsstcamWorkerSet: name: summit/LSSTCam/runSfmRunner.py + mounts: + - lsstcam-data + - project-shared replicas: 1 resources: requests: diff --git a/applications/rapid-analysis/values-tucson-teststand.yaml b/applications/rapid-analysis/values-tucson-teststand.yaml index 1141204745..7afc7c77a9 100644 --- a/applications/rapid-analysis/values-tucson-teststand.yaml +++ b/applications/rapid-analysis/values-tucson-teststand.yaml @@ -8,7 +8,13 @@ env: siteTag: tts location: TTS scripts: +### +### AuxTel pods +### - name: summit/auxTel/runBackgroundService.py + mounts: + - auxtel-gen3-data + - project-shared resources: requests: cpu: 0.5 @@ -17,12 +23,33 @@ scripts: cpu: 1.0 memory: 10G - name: summit/auxTel/runButlerWatcher.py + mounts: + - auxtel-gen3-data + - project-shared - name: summit/auxTel/runCalibrateCcdRunner.py + mounts: + - auxtel-gen3-data + - project-shared - name: summit/auxTel/runImExaminer.py + mounts: + - auxtel-gen3-data + - project-shared - name: summit/auxTel/runIsrRunner.py + mounts: + - auxtel-gen3-data + - project-shared - name: summit/auxTel/runMetadataCreator.py + mounts: + - auxtel-gen3-data + - project-shared - name: summit/auxTel/runMetadataServer.py + mounts: + - 
auxtel-gen3-data + - project-shared - name: summit/auxTel/runMonitor.py + mounts: + - auxtel-gen3-data + - project-shared resources: requests: cpu: 0.5 @@ -31,8 +58,17 @@ scripts: cpu: 1.0 memory: 10G - name: summit/auxTel/runMountTorquePlotter.py + mounts: + - auxtel-gen3-data + - project-shared - name: summit/auxTel/runNightReporter.py + mounts: + - auxtel-gen3-data + - project-shared - name: summit/auxTel/runSpecExaminer.py + mounts: + - auxtel-gen3-data + - project-shared resources: requests: cpu: 0.5 @@ -44,14 +80,29 @@ scripts: ### ComCam pods ### - name: summit/LSSTComCam/runButlerWatcher.py + mounts: + - comcam-gen3-data + - project-shared - name: summit/LSSTComCam/runHeadNode.py + mounts: + - comcam-gen3-data + - project-shared - name: summit/LSSTComCam/runMetadataServer.py + mounts: + - comcam-gen3-data + - project-shared - name: summit/LSSTComCam/runPlotter.py + mounts: + - comcam-gen3-data + - project-shared ### ### ComCam StatefulSets ### comcamWorkerSet: name: summit/LSSTComCam/runSfmRunner.py + mounts: + - comcam-gen3-data + - project-shared replicas: 1 resources: requests: @@ -62,6 +113,9 @@ comcamWorkerSet: memory: 8G comcamGather2aSet: name: summit/LSSTComCam/runStep2aWorker.py + mounts: + - comcam-gen3-data + - project-shared replicas: 1 resources: requests: @@ -72,6 +126,9 @@ comcamGather2aSet: memory: "8G" comcamGatherRollupSet: name: summit/LSSTComCam/runNightlyWorker.py + mounts: + - comcam-gen3-data + - project-shared replicas: 1 resources: requests: From 947b8b7a4a9ac1e6b43cee9c160026760ea1f089 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Date: Fri, 4 Oct 2024 09:29:28 -0300 Subject: [PATCH 290/567] rapid-analysis: roll back NFS mounts with `data/` prefix on summit --- .../rapid-analysis/values-summit.yaml | 59 +++++++++++++++---- 1 file changed, 49 insertions(+), 10 deletions(-) diff --git a/applications/rapid-analysis/values-summit.yaml b/applications/rapid-analysis/values-summit.yaml index cc24295912..5c4fd7c692 100644 --- 
a/applications/rapid-analysis/values-summit.yaml +++ b/applications/rapid-analysis/values-summit.yaml @@ -11,6 +11,7 @@ scripts: ### - name: summit/auxTel/runBackgroundService.py mounts: + - auxtel-data - auxtel-gen3-data - allsky-data - project-shared @@ -25,36 +26,43 @@ scripts: memory: 10G - name: summit/auxTel/runButlerWatcher.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared - name: summit/auxTel/runCalibrateCcdRunner.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runImExaminer.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runIsrRunner.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared - name: summit/auxTel/runMetadataCreator.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared - name: summit/auxTel/runMetadataServer.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runMonitor.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared podAnnotations: @@ -68,18 +76,21 @@ scripts: memory: 10G - name: summit/auxTel/runMountTorquePlotter.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runNightReporter.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/auxTel/runSpecExaminer.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared podAnnotations: @@ -96,20 +107,24 @@ scripts: ### - name: summit/LSSTComCamSim/runButlerWatcher.py mounts: + - comcam-data - comcam-gen3-data - project-shared - name: summit/LSSTComCamSim/runHeadNode.py mounts: + - comcam-data - comcam-gen3-data - project-shared - name: summit/LSSTComCamSim/runMetadataServer.py 
mounts: + - comcam-data - comcam-gen3-data - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/LSSTComCamSim/runPlotter.py mounts: + - comcam-data - comcam-gen3-data - project-shared podAnnotations: @@ -123,6 +138,7 @@ scripts: memory: 6G - name: summit/LSSTComCamSim/runAosDonutPipeline.py mounts: + - comcam-data - comcam-gen3-data - project-shared podAnnotations: @@ -136,6 +152,7 @@ scripts: memory: 96G - name: summit/LSSTComCamSim/runFocusSweepAnalysis.py mounts: + - comcam-data - comcam-gen3-data - project-shared podAnnotations: @@ -149,6 +166,7 @@ scripts: memory: 2G - name: summit/LSSTComCamSim/runMetadataServerAos.py mounts: + - comcam-data - comcam-gen3-data - project-shared podAnnotations: @@ -162,6 +180,7 @@ scripts: memory: 1G - name: summit/LSSTComCamSim/runPsfPlotting.py mounts: + - comcam-data - comcam-gen3-data - project-shared podAnnotations: @@ -229,20 +248,24 @@ scripts: ### - name: summit/LSSTComCam/runButlerWatcher.py mounts: + - comcam-data - comcam-gen3-data - project-shared - name: summit/LSSTComCam/runHeadNode.py mounts: + - comcam-data - comcam-gen3-data - project-shared - name: summit/LSSTComCam/runMetadataServer.py mounts: + - comcam-data - comcam-gen3-data - project-shared podAnnotations: k8s.v1.cni.cncf.io/networks: "kube-system/lhn" - name: summit/LSSTComCam/runPlotter.py mounts: + - comcam-data - comcam-gen3-data - project-shared podAnnotations: @@ -276,6 +299,7 @@ scripts: comcamsimWorkerSet: name: summit/LSSTComCamSim/runSfmRunner.py mounts: + - comcam-data - comcam-gen3-data - project-shared replicas: 9 @@ -289,6 +313,7 @@ comcamsimWorkerSet: comcamsimGather2aSet: name: summit/LSSTComCamSim/runStep2aWorker.py mounts: + - comcam-data - comcam-gen3-data - project-shared replicas: 1 @@ -302,6 +327,7 @@ comcamsimGather2aSet: comcamsimGatherRollupSet: name: summit/LSSTComCamSim/runNightlyWorker.py mounts: + - comcam-data - comcam-gen3-data - project-shared replicas: 1 @@ -318,6 +344,7 @@ 
comcamsimGatherRollupSet: comcamWorkerSet: name: summit/LSSTComCam/runSfmRunner.py mounts: + - comcam-data - comcam-gen3-data - project-shared replicas: 9 @@ -331,6 +358,7 @@ comcamWorkerSet: comcamGather2aSet: name: summit/LSSTComCam/runStep2aWorker.py mounts: + - comcam-data - comcam-gen3-data - project-shared replicas: 1 @@ -344,6 +372,7 @@ comcamGather2aSet: comcamGatherRollupSet: name: summit/LSSTComCam/runNightlyWorker.py mounts: + - comcam-data - comcam-gen3-data - project-shared replicas: 1 @@ -381,31 +410,41 @@ butlerSecret: imagePullSecrets: - name: pull-secret nfsMountpoint: -- name: auxtel-gen3-data +- name: auxtel-data containerPath: /repo/LATISS readOnly: false server: nfs-auxtel.cp.lsst.org serverPath: /auxtel/repo/LATISS -- name: comcam-gen3-data +- name: auxtel-gen3-data + containerPath: /data/lsstdata/base/auxtel + readOnly: true + server: nfs-auxtel.cp.lsst.org + serverPath: /auxtel/lsstdata/base/auxtel +- name: comcam-data containerPath: /repo/LSSTComCam readOnly: false server: nfs3.cp.lsst.org serverPath: /comcam/repo/LSSTComCam -- name: project-shared - containerPath: /project +- name: comcam-gen3-data + containerPath: /data/lsstdata/base/comcam + readOnly: true + server: nfs3.cp.lsst.org + serverPath: /comcam/lsstdata/base/comcam +- name: lsstcam-data + containerPath: /repo/LSSTCam readOnly: false - server: nfs1.cp.lsst.org - serverPath: /project + server: nfs3.cp.lsst.org + serverPath: /lsstcam/repo/LSSTCam - name: allsky-data containerPath: /data/allsky readOnly: true server: nfs-auxtel.cp.lsst.org serverPath: /auxtel/allsky -- name: lsstcam-data - containerPath: /repo/LSSTCam +- name: project-shared + containerPath: /project readOnly: false - server: nfs3.cp.lsst.org - serverPath: /lsstcam/repo/LSSTCam + server: nfs1.cp.lsst.org + serverPath: /project resources: requests: cpu: 0.5 From 31904d8e6e72f32641198f29cd74ee34e655d91d Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Date: Fri, 4 Oct 2024 09:39:57 -0300 Subject: [PATCH 291/567] 
rapid-analysis: roll back NFS mounts with `data/` prefix on tucson --- .../values-tucson-teststand.yaml | 32 +++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/applications/rapid-analysis/values-tucson-teststand.yaml b/applications/rapid-analysis/values-tucson-teststand.yaml index 7afc7c77a9..f5ff7a7adb 100644 --- a/applications/rapid-analysis/values-tucson-teststand.yaml +++ b/applications/rapid-analysis/values-tucson-teststand.yaml @@ -13,6 +13,7 @@ scripts: ### - name: summit/auxTel/runBackgroundService.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared resources: @@ -24,30 +25,37 @@ scripts: memory: 10G - name: summit/auxTel/runButlerWatcher.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared - name: summit/auxTel/runCalibrateCcdRunner.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared - name: summit/auxTel/runImExaminer.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared - name: summit/auxTel/runIsrRunner.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared - name: summit/auxTel/runMetadataCreator.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared - name: summit/auxTel/runMetadataServer.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared - name: summit/auxTel/runMonitor.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared resources: @@ -59,14 +67,17 @@ scripts: memory: 10G - name: summit/auxTel/runMountTorquePlotter.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared - name: summit/auxTel/runNightReporter.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared - name: summit/auxTel/runSpecExaminer.py mounts: + - auxtel-data - auxtel-gen3-data - project-shared resources: @@ -81,18 +92,22 @@ scripts: ### - name: summit/LSSTComCam/runButlerWatcher.py mounts: + - comcam-data - comcam-gen3-data - project-shared - name: summit/LSSTComCam/runHeadNode.py mounts: + - comcam-data - comcam-gen3-data - project-shared - name: 
summit/LSSTComCam/runMetadataServer.py mounts: + - comcam-data - comcam-gen3-data - project-shared - name: summit/LSSTComCam/runPlotter.py mounts: + - comcam-data - comcam-gen3-data - project-shared ### @@ -101,6 +116,7 @@ scripts: comcamWorkerSet: name: summit/LSSTComCam/runSfmRunner.py mounts: + - comcam-data - comcam-gen3-data - project-shared replicas: 1 @@ -114,6 +130,7 @@ comcamWorkerSet: comcamGather2aSet: name: summit/LSSTComCam/runStep2aWorker.py mounts: + - comcam-data - comcam-gen3-data - project-shared replicas: 1 @@ -127,6 +144,7 @@ comcamGather2aSet: comcamGatherRollupSet: name: summit/LSSTComCam/runNightlyWorker.py mounts: + - comcam-data - comcam-gen3-data - project-shared replicas: 1 @@ -151,16 +169,26 @@ butlerSecret: imagePullSecrets: - name: pull-secret nfsMountpoint: -- name: auxtel-gen3-data +- name: auxtel-data containerPath: /repo/LATISS readOnly: false server: nfs-auxtel.tu.lsst.org serverPath: /auxtel/repo/LATISS -- name: comcam-gen3-data +- name: auxtel-gen3-data + containerPath: /data/lsstdata/TTS/auxtel + readOnly: true + server: nfs-auxtel.tu.lsst.org + serverPath: /auxtel/lsstdata/TTS/auxtel +- name: comcam-data containerPath: /repo/LSSTComCam readOnly: false server: comcam-archiver.tu.lsst.org serverPath: /repo/LSSTComCam +- name: comcam-gen3-data + containerPath: /data/lsstdata/TTS/comcam + readOnly: true + server: comcam-archiver.tu.lsst.org + serverPath: /lsstdata/TTS/comcam - name: project-shared containerPath: /project readOnly: false From fb241c4e7d1dc77aa6157753e5fd9cfb58bb3a4a Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Wed, 9 Oct 2024 12:45:22 -0300 Subject: [PATCH 292/567] rapid-analysis: add LSSTCam gen3-data NFS mount on summit --- applications/rapid-analysis/values-summit.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/applications/rapid-analysis/values-summit.yaml b/applications/rapid-analysis/values-summit.yaml index 5c4fd7c692..f28b339294 100644 --- 
a/applications/rapid-analysis/values-summit.yaml +++ b/applications/rapid-analysis/values-summit.yaml @@ -435,6 +435,11 @@ nfsMountpoint: readOnly: false server: nfs3.cp.lsst.org serverPath: /lsstcam/repo/LSSTCam +- name: lsstcam-gen3-data + containerPath: /data/lsstdata/base/maintel + readOnly: true + server: nfs3.cp.lsst.org + serverPath: /lsstcam/lsstdata/base/maintel - name: allsky-data containerPath: /data/allsky readOnly: true From 21b7b2ce6140e82b0ba380edaedc0999f7c334cf Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 14 Oct 2024 17:12:43 -0700 Subject: [PATCH 293/567] Update pre-commit and Python dependencies --- .pre-commit-config.yaml | 4 +- requirements/dev.txt | 632 ++++++++++++++++++++-------------------- requirements/main.txt | 339 +++++++++++---------- requirements/tox.txt | 50 ++-- 4 files changed, 528 insertions(+), 497 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 203642570c..4149a1f72f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,7 +14,7 @@ repos: - -c=.yamllint.yml - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.29.3 + rev: 0.29.4 hooks: - id: check-jsonschema files: ^applications/.*/secrets(-[^./-]+)?\.yaml @@ -53,7 +53,7 @@ repos: - id: ruff-format - repo: https://github.com/adamchainz/blacken-docs - rev: 1.18.0 + rev: 1.19.0 hooks: - id: blacken-docs additional_dependencies: [black==23.7.0] diff --git a/requirements/dev.txt b/requirements/dev.txt index f45b4fd9e5..024cf825d7 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -114,97 +114,112 @@ cffi==1.17.1 ; implementation_name == 'pypy' \ # via # -c requirements/main.txt # pyzmq -charset-normalizer==3.3.2 \ - --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ - --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ - --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \ - 
--hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \ - --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \ - --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \ - --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \ - --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \ - --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \ - --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \ - --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \ - --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \ - --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \ - --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \ - --hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \ - --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \ - --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \ - --hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \ - --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \ - --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \ - --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \ - --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \ - --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \ - --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \ - --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \ - --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \ - --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \ - 
--hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \ - --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \ - --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \ - --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \ - --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \ - --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \ - --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \ - --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \ - --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \ - --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \ - --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \ - --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \ - --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \ - --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \ - --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \ - --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \ - --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \ - --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \ - --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \ - --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \ - --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \ - --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \ - --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \ - --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \ - 
--hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \ - --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \ - --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \ - --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \ - --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \ - --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \ - --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \ - --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \ - --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \ - --hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \ - --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \ - --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \ - --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \ - --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \ - --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \ - --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \ - --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \ - --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \ - --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \ - --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \ - --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \ - --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \ - --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \ - --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \ - 
--hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \ - --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \ - --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \ - --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \ - --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \ - --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \ - --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \ - --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \ - --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \ - --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \ - --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \ - --hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \ - --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \ - --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ - --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 +charset-normalizer==3.4.0 \ + --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \ + --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \ + --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \ + --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \ + --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \ + --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \ + --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \ + --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \ + 
--hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \ + --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \ + --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \ + --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \ + --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \ + --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \ + --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \ + --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \ + --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \ + --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \ + --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \ + --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \ + --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \ + --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \ + --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \ + --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \ + --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \ + --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \ + --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \ + --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \ + --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \ + --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \ + --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \ + --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \ + 
--hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \ + --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \ + --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \ + --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \ + --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \ + --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \ + --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \ + --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \ + --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \ + --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \ + --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \ + --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \ + --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \ + --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \ + --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \ + --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \ + --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \ + --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \ + --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \ + --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \ + --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \ + --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \ + --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \ + --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \ + 
--hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \ + --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \ + --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \ + --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \ + --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \ + --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \ + --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \ + --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \ + --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \ + --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \ + --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \ + --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \ + --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \ + --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \ + --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \ + --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \ + --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \ + --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \ + --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \ + --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \ + --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \ + --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \ + --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \ + --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \ + 
--hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \ + --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \ + --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \ + --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \ + --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \ + --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \ + --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \ + --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \ + --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \ + --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \ + --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \ + --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \ + --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \ + --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \ + --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \ + --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \ + --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \ + --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \ + --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \ + --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \ + --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \ + --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \ + --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \ + --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ + 
--hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 # via # -c requirements/main.txt # requests @@ -229,105 +244,99 @@ comm==0.2.2 \ --hash=sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e \ --hash=sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3 # via ipykernel -coverage==7.6.1 \ - --hash=sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca \ - --hash=sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d \ - --hash=sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6 \ - --hash=sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989 \ - --hash=sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c \ - --hash=sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b \ - --hash=sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223 \ - --hash=sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f \ - --hash=sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56 \ - --hash=sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3 \ - --hash=sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8 \ - --hash=sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb \ - --hash=sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388 \ - --hash=sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0 \ - --hash=sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a \ - --hash=sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8 \ - --hash=sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f \ - --hash=sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a \ - --hash=sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962 \ - 
--hash=sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8 \ - --hash=sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391 \ - --hash=sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc \ - --hash=sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2 \ - --hash=sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155 \ - --hash=sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb \ - --hash=sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0 \ - --hash=sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c \ - --hash=sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a \ - --hash=sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004 \ - --hash=sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060 \ - --hash=sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232 \ - --hash=sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93 \ - --hash=sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129 \ - --hash=sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163 \ - --hash=sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de \ - --hash=sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6 \ - --hash=sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23 \ - --hash=sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569 \ - --hash=sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d \ - --hash=sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778 \ - --hash=sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d \ - --hash=sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36 \ - --hash=sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a \ - 
--hash=sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6 \ - --hash=sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34 \ - --hash=sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704 \ - --hash=sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106 \ - --hash=sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9 \ - --hash=sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862 \ - --hash=sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b \ - --hash=sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255 \ - --hash=sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16 \ - --hash=sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3 \ - --hash=sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133 \ - --hash=sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb \ - --hash=sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657 \ - --hash=sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d \ - --hash=sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca \ - --hash=sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36 \ - --hash=sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c \ - --hash=sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e \ - --hash=sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff \ - --hash=sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7 \ - --hash=sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5 \ - --hash=sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02 \ - --hash=sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c \ - --hash=sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df \ - 
--hash=sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3 \ - --hash=sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a \ - --hash=sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959 \ - --hash=sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234 \ - --hash=sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc +coverage==7.6.3 \ + --hash=sha256:04f2189716e85ec9192df307f7c255f90e78b6e9863a03223c3b998d24a3c6c6 \ + --hash=sha256:0c6c0f4d53ef603397fc894a895b960ecd7d44c727df42a8d500031716d4e8d2 \ + --hash=sha256:0ca37993206402c6c35dc717f90d4c8f53568a8b80f0bf1a1b2b334f4d488fba \ + --hash=sha256:12f9515d875859faedb4144fd38694a761cd2a61ef9603bf887b13956d0bbfbb \ + --hash=sha256:1990b1f4e2c402beb317840030bb9f1b6a363f86e14e21b4212e618acdfce7f6 \ + --hash=sha256:2341a78ae3a5ed454d524206a3fcb3cec408c2a0c7c2752cd78b606a2ff15af4 \ + --hash=sha256:23bb63ae3f4c645d2d82fa22697364b0046fbafb6261b258a58587441c5f7bd0 \ + --hash=sha256:27bd5f18d8f2879e45724b0ce74f61811639a846ff0e5c0395b7818fae87aec6 \ + --hash=sha256:2dc7d6b380ca76f5e817ac9eef0c3686e7834c8346bef30b041a4ad286449990 \ + --hash=sha256:331b200ad03dbaa44151d74daeb7da2cf382db424ab923574f6ecca7d3b30de3 \ + --hash=sha256:365defc257c687ce3e7d275f39738dcd230777424117a6c76043459db131dd43 \ + --hash=sha256:37be7b5ea3ff5b7c4a9db16074dc94523b5f10dd1f3b362a827af66a55198175 \ + --hash=sha256:3c2e6fa98032fec8282f6b27e3f3986c6e05702828380618776ad794e938f53a \ + --hash=sha256:40e8b1983080439d4802d80b951f4a93d991ef3261f69e81095a66f86cf3c3c6 \ + --hash=sha256:43517e1f6b19f610a93d8227e47790722c8bf7422e46b365e0469fc3d3563d97 \ + --hash=sha256:43b32a06c47539fe275106b376658638b418c7cfdfff0e0259fbf877e845f14b \ + --hash=sha256:43d6a66e33b1455b98fc7312b124296dad97a2e191c80320587234a77b1b736e \ + --hash=sha256:4c59d6a4a4633fad297f943c03d0d2569867bd5372eb5684befdff8df8522e39 \ + 
--hash=sha256:52ac29cc72ee7e25ace7807249638f94c9b6a862c56b1df015d2b2e388e51dbd \ + --hash=sha256:54356a76b67cf8a3085818026bb556545ebb8353951923b88292556dfa9f812d \ + --hash=sha256:583049c63106c0555e3ae3931edab5669668bbef84c15861421b94e121878d3f \ + --hash=sha256:6d99198203f0b9cb0b5d1c0393859555bc26b548223a769baf7e321a627ed4fc \ + --hash=sha256:6da42bbcec130b188169107ecb6ee7bd7b4c849d24c9370a0c884cf728d8e976 \ + --hash=sha256:6e484e479860e00da1f005cd19d1c5d4a813324e5951319ac3f3eefb497cc549 \ + --hash=sha256:70a6756ce66cd6fe8486c775b30889f0dc4cb20c157aa8c35b45fd7868255c5c \ + --hash=sha256:70d24936ca6c15a3bbc91ee9c7fc661132c6f4c9d42a23b31b6686c05073bde5 \ + --hash=sha256:71967c35828c9ff94e8c7d405469a1fb68257f686bca7c1ed85ed34e7c2529c4 \ + --hash=sha256:79644f68a6ff23b251cae1c82b01a0b51bc40c8468ca9585c6c4b1aeee570e0b \ + --hash=sha256:87cd2e29067ea397a47e352efb13f976eb1b03e18c999270bb50589323294c6e \ + --hash=sha256:8d4c6ea0f498c7c79111033a290d060c517853a7bcb2f46516f591dab628ddd3 \ + --hash=sha256:9134032f5aa445ae591c2ba6991d10136a1f533b1d2fa8f8c21126468c5025c6 \ + --hash=sha256:921fbe13492caf6a69528f09d5d7c7d518c8d0e7b9f6701b7719715f29a71e6e \ + --hash=sha256:99670790f21a96665a35849990b1df447993880bb6463a0a1d757897f30da929 \ + --hash=sha256:9975442f2e7a5cfcf87299c26b5a45266ab0696348420049b9b94b2ad3d40234 \ + --hash=sha256:99ded130555c021d99729fabd4ddb91a6f4cc0707df4b1daf912c7850c373b13 \ + --hash=sha256:a3328c3e64ea4ab12b85999eb0779e6139295bbf5485f69d42cf794309e3d007 \ + --hash=sha256:a4fb91d5f72b7e06a14ff4ae5be625a81cd7e5f869d7a54578fc271d08d58ae3 \ + --hash=sha256:aa23ce39661a3e90eea5f99ec59b763b7d655c2cada10729ed920a38bfc2b167 \ + --hash=sha256:aac7501ae73d4a02f4b7ac8fcb9dc55342ca98ffb9ed9f2dfb8a25d53eda0e4d \ + --hash=sha256:ab84a8b698ad5a6c365b08061920138e7a7dd9a04b6feb09ba1bfae68346ce6d \ + --hash=sha256:b4adeb878a374126f1e5cf03b87f66279f479e01af0e9a654cf6d1509af46c40 \ + --hash=sha256:b9853509b4bf57ba7b1f99b9d866c422c9c5248799ab20e652bbb8a184a38181 \ + 
--hash=sha256:bb7d5fe92bd0dc235f63ebe9f8c6e0884f7360f88f3411bfed1350c872ef2054 \ + --hash=sha256:bca4c8abc50d38f9773c1ec80d43f3768df2e8576807d1656016b9d3eeaa96fd \ + --hash=sha256:c222958f59b0ae091f4535851cbb24eb57fc0baea07ba675af718fb5302dddb2 \ + --hash=sha256:c30e42ea11badb147f0d2e387115b15e2bd8205a5ad70d6ad79cf37f6ac08c91 \ + --hash=sha256:c3a79f56dee9136084cf84a6c7c4341427ef36e05ae6415bf7d787c96ff5eaa3 \ + --hash=sha256:c51ef82302386d686feea1c44dbeef744585da16fcf97deea2a8d6c1556f519b \ + --hash=sha256:c77326300b839c44c3e5a8fe26c15b7e87b2f32dfd2fc9fee1d13604347c9b38 \ + --hash=sha256:d33a785ea8354c480515e781554d3be582a86297e41ccbea627a5c632647f2cd \ + --hash=sha256:d546cfa78844b8b9c1c0533de1851569a13f87449897bbc95d698d1d3cb2a30f \ + --hash=sha256:da29ceabe3025a1e5a5aeeb331c5b1af686daab4ff0fb4f83df18b1180ea83e2 \ + --hash=sha256:df8c05a0f574d480947cba11b947dc41b1265d721c3777881da2fb8d3a1ddfba \ + --hash=sha256:e266af4da2c1a4cbc6135a570c64577fd3e6eb204607eaff99d8e9b710003c6f \ + --hash=sha256:e279f3db904e3b55f520f11f983cc8dc8a4ce9b65f11692d4718ed021ec58b83 \ + --hash=sha256:ea52bd218d4ba260399a8ae4bb6b577d82adfc4518b93566ce1fddd4a49d1dce \ + --hash=sha256:ebec65f5068e7df2d49466aab9128510c4867e532e07cb6960075b27658dca38 \ + --hash=sha256:ec1e3b40b82236d100d259854840555469fad4db64f669ab817279eb95cd535c \ + --hash=sha256:ee77c7bef0724165e795b6b7bf9c4c22a9b8468a6bdb9c6b4281293c6b22a90f \ + --hash=sha256:f263b18692f8ed52c8de7f40a0751e79015983dbd77b16906e5b310a39d3ca21 \ + --hash=sha256:f7b26757b22faf88fcf232f5f0e62f6e0fd9e22a8a5d0d5016888cdfe1f6c1c4 \ + --hash=sha256:f7ddb920106bbbbcaf2a274d56f46956bf56ecbde210d88061824a95bdd94e92 # via # -r requirements/dev.in # pytest-cov -debugpy==1.8.6 \ - --hash=sha256:0a85707c6a84b0c5b3db92a2df685b5230dd8fb8c108298ba4f11dba157a615a \ - --hash=sha256:22140bc02c66cda6053b6eb56dfe01bbe22a4447846581ba1dd6df2c9f97982d \ - --hash=sha256:30f467c5345d9dfdcc0afdb10e018e47f092e383447500f125b4e013236bf14b \ - 
--hash=sha256:3358aa619a073b620cd0d51d8a6176590af24abcc3fe2e479929a154bf591b51 \ - --hash=sha256:43996632bee7435583952155c06881074b9a742a86cee74e701d87ca532fe833 \ - --hash=sha256:538c6cdcdcdad310bbefd96d7850be1cd46e703079cc9e67d42a9ca776cdc8a8 \ - --hash=sha256:567419081ff67da766c898ccf21e79f1adad0e321381b0dfc7a9c8f7a9347972 \ - --hash=sha256:5d73d8c52614432f4215d0fe79a7e595d0dd162b5c15233762565be2f014803b \ - --hash=sha256:67479a94cf5fd2c2d88f9615e087fcb4fec169ec780464a3f2ba4a9a2bb79955 \ - --hash=sha256:9fb8653f6cbf1dd0a305ac1aa66ec246002145074ea57933978346ea5afdf70b \ - --hash=sha256:b48892df4d810eff21d3ef37274f4c60d32cdcafc462ad5647239036b0f0649f \ - --hash=sha256:c1cef65cffbc96e7b392d9178dbfd524ab0750da6c0023c027ddcac968fd1caa \ - --hash=sha256:c931a9371a86784cee25dec8d65bc2dc7a21f3f1552e3833d9ef8f919d22280a \ - --hash=sha256:c9834dfd701a1f6bf0f7f0b8b1573970ae99ebbeee68314116e0ccc5c78eea3c \ - --hash=sha256:cdaf0b9691879da2d13fa39b61c01887c34558d1ff6e5c30e2eb698f5384cd43 \ - --hash=sha256:db891b141fc6ee4b5fc6d1cc8035ec329cabc64bdd2ae672b4550c87d4ecb128 \ - --hash=sha256:df5dc9eb4ca050273b8e374a4cd967c43be1327eeb42bfe2f58b3cdfe7c68dcb \ - --hash=sha256:e3a82da039cfe717b6fb1886cbbe5c4a3f15d7df4765af857f4307585121c2dd \ - --hash=sha256:e3e182cd98eac20ee23a00653503315085b29ab44ed66269482349d307b08df9 \ - --hash=sha256:e4ce0570aa4aca87137890d23b86faeadf184924ad892d20c54237bcaab75d8f \ - --hash=sha256:f1e60bd06bb3cc5c0e957df748d1fab501e01416c43a7bdc756d2a992ea1b881 \ - --hash=sha256:f7158252803d0752ed5398d291dee4c553bb12d14547c0e1843ab74ee9c31123 +debugpy==1.8.7 \ + --hash=sha256:11ad72eb9ddb436afb8337891a986302e14944f0f755fd94e90d0d71e9100bba \ + --hash=sha256:171899588bcd412151e593bd40d9907133a7622cd6ecdbdb75f89d1551df13c2 \ + --hash=sha256:18b8f731ed3e2e1df8e9cdaa23fb1fc9c24e570cd0081625308ec51c82efe42e \ + --hash=sha256:29e1571c276d643757ea126d014abda081eb5ea4c851628b33de0c2b6245b037 \ + 
--hash=sha256:2efb84d6789352d7950b03d7f866e6d180284bc02c7e12cb37b489b7083d81aa \ + --hash=sha256:2f729228430ef191c1e4df72a75ac94e9bf77413ce5f3f900018712c9da0aaca \ + --hash=sha256:45c30aaefb3e1975e8a0258f5bbd26cd40cde9bfe71e9e5a7ac82e79bad64e39 \ + --hash=sha256:4b908291a1d051ef3331484de8e959ef3e66f12b5e610c203b5b75d2725613a7 \ + --hash=sha256:4d27d842311353ede0ad572600c62e4bcd74f458ee01ab0dd3a1a4457e7e3706 \ + --hash=sha256:57b00de1c8d2c84a61b90880f7e5b6deaf4c312ecbde3a0e8912f2a56c4ac9ae \ + --hash=sha256:628a11f4b295ffb4141d8242a9bb52b77ad4a63a2ad19217a93be0f77f2c28c9 \ + --hash=sha256:6a9d9d6d31846d8e34f52987ee0f1a904c7baa4912bf4843ab39dadf9b8f3e0d \ + --hash=sha256:6e1c4ffb0c79f66e89dfd97944f335880f0d50ad29525dc792785384923e2211 \ + --hash=sha256:703c1fd62ae0356e194f3e7b7a92acd931f71fe81c4b3be2c17a7b8a4b546ec2 \ + --hash=sha256:85ce9c1d0eebf622f86cc68618ad64bf66c4fc3197d88f74bb695a416837dd55 \ + --hash=sha256:90d93e4f2db442f8222dec5ec55ccfc8005821028982f1968ebf551d32b28907 \ + --hash=sha256:93176e7672551cb5281577cdb62c63aadc87ec036f0c6a486f0ded337c504596 \ + --hash=sha256:95fe04a573b8b22896c404365e03f4eda0ce0ba135b7667a1e57bd079793b96b \ + --hash=sha256:a6cf2510740e0c0b4a40330640e4b454f928c7b99b0c9dbf48b11efba08a8cda \ + --hash=sha256:b12515e04720e9e5c2216cc7086d0edadf25d7ab7e3564ec8b4521cf111b4f8c \ + --hash=sha256:b6db2a370e2700557a976eaadb16243ec9c91bd46f1b3bb15376d7aaa7632c81 \ + --hash=sha256:caf528ff9e7308b74a1749c183d6808ffbedbb9fb6af78b033c28974d9b8831f \ + --hash=sha256:cba1d078cf2e1e0b8402e6bda528bf8fda7ccd158c3dba6c012b7897747c41a0 \ + --hash=sha256:d050a1ec7e925f514f0f6594a1e522580317da31fbda1af71d1530d6ea1f2b40 \ + --hash=sha256:da8df5b89a41f1fd31503b179d0a84a5fdb752dddd5b5388dbd1ae23cda31ce9 \ + --hash=sha256:f2f4349a28e3228a42958f8ddaa6333d6f8282d5edaea456070e48609c5983b7 # via ipykernel decorator==5.1.1 \ --hash=sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330 \ @@ -337,9 +346,9 @@ diagrams==0.23.4 \ 
--hash=sha256:1ba69d98fcf8d768dbddf07d2c77aba6cc95c2e6f90f37146c04c96bc6765450 \ --hash=sha256:b7ada0b119b5189dd021b1dc1467fad3704737452bb18b1e06d05e4d1fa48ed7 # via sphinx-diagrams -documenteer==1.4.0 \ - --hash=sha256:759fdbf4554449a74df9fb10cfe91984bc1272f0a2c6c688817d1a2525c72881 \ - --hash=sha256:e456e21cb6d0be659b5297de87cb3e60d9bf0fffb63e316dbaba20b38a5f70ee +documenteer==1.4.1 \ + --hash=sha256:b8e5a4b253cdf14b1306407f109d2b4811931e827ddef57a8be753232b546ebf \ + --hash=sha256:dadd0777d23aa1cc21bb737ff28b84c6594247f5237d1962334534c9c5cbd766 # via -r requirements/dev.in docutils==0.21.2 \ --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ @@ -504,9 +513,9 @@ jsonschema==4.23.0 \ # via # nbformat # sphinxcontrib-redoc -jsonschema-specifications==2023.12.1 \ - --hash=sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc \ - --hash=sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c +jsonschema-specifications==2024.10.1 \ + --hash=sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272 \ + --hash=sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf # via jsonschema jupyter-cache==1.0.0 \ --hash=sha256:594b1c4e29b488b36547e12477645f489dbdc62cc939b2408df5679f79245078 \ @@ -541,68 +550,68 @@ markdown-it-py==3.0.0 \ # documenteer # mdit-py-plugins # myst-parser -markupsafe==3.0.0 \ - --hash=sha256:03ff62dea2fef3eadf2f1853bc6332bcb0458d9608b11dfb1cd5aeda1c178ea6 \ - --hash=sha256:105ada43a61af22acb8774514c51900dc820c481cc5ba53f17c09d294d9c07ca \ - --hash=sha256:12ddac720b8965332d36196f6f83477c6351ba6a25d4aff91e30708c729350d7 \ - --hash=sha256:1d151b9cf3307e259b749125a5a08c030ba15a8f1d567ca5bfb0e92f35e761f5 \ - --hash=sha256:1ee9790be6f62121c4c58bbced387b0965ab7bffeecb4e17cc42ef290784e363 \ - --hash=sha256:1fd02f47596e00a372f5b4af2b4c45f528bade65c66dfcbc6e1ea1bfda758e98 \ - --hash=sha256:23efb2be7221105c8eb0e905433414d2439cb0a8c5d5ca081c1c72acef0f5613 \ - 
--hash=sha256:25396abd52b16900932e05b7104bcdc640a4d96c914f39c3b984e5a17b01fba0 \ - --hash=sha256:27d6a73682b99568916c54a4bfced40e7d871ba685b580ea04bbd2e405dfd4c5 \ - --hash=sha256:380faf314c3c84c1682ca672e6280c6c59e92d0bc13dc71758ffa2de3cd4e252 \ - --hash=sha256:3b231255770723f1e125d63c14269bcd8b8136ecfb620b9a18c0297e046d0736 \ - --hash=sha256:3cd0bba31d484fe9b9d77698ddb67c978704603dc10cdc905512af308cfcca6b \ - --hash=sha256:3efde9a8c56c3b6e5f3fa4baea828f8184970c7c78480fedb620d804b1c31e5c \ - --hash=sha256:409535e0521c4630d5b5a1bf284e9d3c76d2fc2f153ebb12cf3827797798cc99 \ - --hash=sha256:494a64efc535e147fcc713dba58eecfce3a79f1e93ebe81995b387f5cd9bc2e1 \ - --hash=sha256:4ca04c60006867610a06575b46941ae616b19da0adc85b9f8f3d9cbd7a3da385 \ - --hash=sha256:4deea1d9169578917d1f35cdb581bc7bab56a7e8c5be2633bd1b9549c3c22a01 \ - --hash=sha256:509c424069dd037d078925b6815fc56b7271f3aaec471e55e6fa513b0a80d2aa \ - --hash=sha256:5509a8373fed30b978557890a226c3d30569746c565b9daba69df80c160365a5 \ - --hash=sha256:59420b5a9a5d3fee483a32adb56d7369ae0d630798da056001be1e9f674f3aa6 \ - --hash=sha256:5d207ff5cceef77796f8aacd44263266248cf1fbc601441524d7835613f8abec \ - --hash=sha256:5ddf5cb8e9c00d9bf8b0c75949fb3ff9ea2096ba531693e2e87336d197fdb908 \ - --hash=sha256:63dae84964a9a3d2610808cee038f435d9a111620c37ccf872c2fcaeca6865b3 \ - --hash=sha256:64a7c7856c3a409011139b17d137c2924df4318dab91ee0530800819617c4381 \ - --hash=sha256:64f7d04410be600aa5ec0626d73d43e68a51c86500ce12917e10fd013e258df5 \ - --hash=sha256:658fdf6022740896c403d45148bf0c36978c6b48c9ef8b1f8d0c7a11b6cdea86 \ - --hash=sha256:678fbceb202382aae42c1f0cd9f56b776bc20a58ae5b553ee1fe6b802983a1d6 \ - --hash=sha256:7835de4c56066e096407a1852e5561f6033786dd987fa90dc384e45b9bd21295 \ - --hash=sha256:7c524203207f5b569df06c96dafdc337228921ee8c3cc5f6e891d024c6595352 \ - --hash=sha256:7ed789d0f7f11fcf118cf0acb378743dfdd4215d7f7d18837c88171405c9a452 \ - --hash=sha256:81be2c0084d8c69e97e3c5d73ce9e2a6e523556f2a19c4e195c09d499be2f808 \ - 
--hash=sha256:81ee9c967956b9ea39b3a5270b7cb1740928d205b0dc72629164ce621b4debf9 \ - --hash=sha256:8219e2207f6c188d15614ea043636c2b36d2d79bf853639c124a179412325a13 \ - --hash=sha256:96e3ed550600185d34429477f1176cedea8293fa40e47fe37a05751bcb64c997 \ - --hash=sha256:98fb3a2bf525ad66db96745707b93ba0f78928b7a1cb2f1cb4b143bc7e2ba3b3 \ - --hash=sha256:9b36473a2d3e882d1873ea906ce54408b9588dc2c65989664e6e7f5a2de353d7 \ - --hash=sha256:9f91c90f8f3bf436f81c12eeb4d79f9ddd263c71125e6ad71341906832a34386 \ - --hash=sha256:a5fd5500d4e4f7cc88d8c0f2e45126c4307ed31e08f8ec521474f2fd99d35ac3 \ - --hash=sha256:a7171d2b869e9be238ea318c196baf58fbf272704e9c1cd4be8c380eea963342 \ - --hash=sha256:a80c6740e1bfbe50cea7cbf74f48823bb57bd59d914ee22ff8a81963b08e62d2 \ - --hash=sha256:b2a7afd24d408b907672015555bc10be2382e6c5f62a488e2d452da670bbd389 \ - --hash=sha256:b43ac1eb9f91e0c14aac1d2ef0f76bc7b9ceea51de47536f61268191adf52ad7 \ - --hash=sha256:b6cc46a27d904c9be5732029769acf4b0af69345172ed1ef6d4db0c023ff603b \ - --hash=sha256:b94bec9eda10111ec7102ef909eca4f3c2df979643924bfe58375f560713a7d1 \ - --hash=sha256:bd9b8e458e2bab52f9ad3ab5dc8b689a3c84b12b2a2f64cd9a0dfe209fb6b42f \ - --hash=sha256:c182d45600556917f811aa019d834a89fe4b6f6255da2fd0bdcf80e970f95918 \ - --hash=sha256:c409691696bec2b5e5c9efd9593c99025bf2f317380bf0d993ee0213516d908a \ - --hash=sha256:c5243044a927e8a6bb28517838662a019cd7f73d7f106bbb37ab5e7fa8451a92 \ - --hash=sha256:c8ab7efeff1884c5da8e18f743b667215300e09043820d11723718de0b7db934 \ - --hash=sha256:cb244adf2499aa37d5dc43431990c7f0b632d841af66a51d22bd89c437b60264 \ - --hash=sha256:d261ec38b8a99a39b62e0119ed47fe3b62f7691c500bc1e815265adc016438c1 \ - --hash=sha256:d2c099be5274847d606574234e494f23a359e829ba337ea9037c3a72b0851942 \ - --hash=sha256:d7e63d1977d3806ce0a1a3e0099b089f61abdede5238ca6a3f3bf8877b46d095 \ - --hash=sha256:dba0f83119b9514bc37272ad012f0cc03f0805cc6a2bea7244e19250ac8ff29f \ - --hash=sha256:dcbee57fedc9b2182c54ffc1c5eed316c3da8bbfeda8009e1b5d7220199d15da \ - 
--hash=sha256:e042ccf8fe5bf8b6a4b38b3f7d618eb10ea20402b0c9f4add9293408de447974 \ - --hash=sha256:e363440c8534bf2f2ef1b8fdc02037eb5fff8fce2a558519b22d6a3a38b3ec5e \ - --hash=sha256:e64b390a306f9e849ee809f92af6a52cda41741c914358e0e9f8499d03741526 \ - --hash=sha256:f0411641d31aa6f7f0cc13f0f18b63b8dc08da5f3a7505972a42ab059f479ba3 \ - --hash=sha256:f1c13c6c908811f867a8e9e66efb2d6c03d1cdd83e92788fe97f693c457dc44f \ - --hash=sha256:f846fd7c241e5bd4161e2a483663eb66e4d8e12130fcdc052f310f388f1d61c6 +markupsafe==3.0.1 \ + --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ + --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ + --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ + --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ + --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ + --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ + --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ + --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ + --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ + --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ + --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ + --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ + --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ + --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ + --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ + --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ + --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ + 
--hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ + --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ + --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ + --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ + --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ + --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ + --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ + --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ + --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ + --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ + --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ + --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ + --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ + --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ + --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ + --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ + --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ + --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ + --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ + --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ + --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ + --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ + --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ + --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ + 
--hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ + --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ + --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ + --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ + --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ + --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ + --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ + --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ + --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ + --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ + --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ + --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ + --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ + --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ + --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ + --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ + --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ + --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ + --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ + --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f # via # -c requirements/main.txt # jinja2 @@ -620,34 +629,39 @@ mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -mypy==1.11.2 \ - 
--hash=sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36 \ - --hash=sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce \ - --hash=sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6 \ - --hash=sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b \ - --hash=sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca \ - --hash=sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24 \ - --hash=sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383 \ - --hash=sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7 \ - --hash=sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86 \ - --hash=sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d \ - --hash=sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4 \ - --hash=sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8 \ - --hash=sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987 \ - --hash=sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385 \ - --hash=sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79 \ - --hash=sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef \ - --hash=sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6 \ - --hash=sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70 \ - --hash=sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca \ - --hash=sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70 \ - --hash=sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12 \ - --hash=sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104 \ - --hash=sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a \ - --hash=sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318 \ - 
--hash=sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1 \ - --hash=sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b \ - --hash=sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d +mypy==1.12.0 \ + --hash=sha256:060a07b10e999ac9e7fa249ce2bdcfa9183ca2b70756f3bce9df7a92f78a3c0a \ + --hash=sha256:06de0498798527451ffb60f68db0d368bd2bae2bbfb5237eae616d4330cc87aa \ + --hash=sha256:0eff042d7257f39ba4ca06641d110ca7d2ad98c9c1fb52200fe6b1c865d360ff \ + --hash=sha256:1ebf9e796521f99d61864ed89d1fb2926d9ab6a5fab421e457cd9c7e4dd65aa9 \ + --hash=sha256:20c7c5ce0c1be0b0aea628374e6cf68b420bcc772d85c3c974f675b88e3e6e57 \ + --hash=sha256:233e11b3f73ee1f10efada2e6da0f555b2f3a5316e9d8a4a1224acc10e7181d3 \ + --hash=sha256:2c40658d4fa1ab27cb53d9e2f1066345596af2f8fe4827defc398a09c7c9519b \ + --hash=sha256:2f106db5ccb60681b622ac768455743ee0e6a857724d648c9629a9bd2ac3f721 \ + --hash=sha256:4397081e620dc4dc18e2f124d5e1d2c288194c2c08df6bdb1db31c38cd1fe1ed \ + --hash=sha256:48d3e37dd7d9403e38fa86c46191de72705166d40b8c9f91a3de77350daa0893 \ + --hash=sha256:4ae8959c21abcf9d73aa6c74a313c45c0b5a188752bf37dace564e29f06e9c1b \ + --hash=sha256:4b86de37a0da945f6d48cf110d5206c5ed514b1ca2614d7ad652d4bf099c7de7 \ + --hash=sha256:52b9e1492e47e1790360a43755fa04101a7ac72287b1a53ce817f35899ba0521 \ + --hash=sha256:5bc81701d52cc8767005fdd2a08c19980de9ec61a25dbd2a937dfb1338a826f9 \ + --hash=sha256:5feee5c74eb9749e91b77f60b30771563327329e29218d95bedbe1257e2fe4b0 \ + --hash=sha256:65a22d87e757ccd95cbbf6f7e181e6caa87128255eb2b6be901bb71b26d8a99d \ + --hash=sha256:684a9c508a283f324804fea3f0effeb7858eb03f85c4402a967d187f64562469 \ + --hash=sha256:6b5df6c8a8224f6b86746bda716bbe4dbe0ce89fd67b1fa4661e11bfe38e8ec8 \ + --hash=sha256:6cabe4cda2fa5eca7ac94854c6c37039324baaa428ecbf4de4567279e9810f9e \ + --hash=sha256:77278e8c6ffe2abfba6db4125de55f1024de9a323be13d20e4f73b8ed3402bd1 \ + 
--hash=sha256:8462655b6694feb1c99e433ea905d46c478041a8b8f0c33f1dab00ae881b2164 \ + --hash=sha256:923ea66d282d8af9e0f9c21ffc6653643abb95b658c3a8a32dca1eff09c06475 \ + --hash=sha256:9b9ce1ad8daeb049c0b55fdb753d7414260bad8952645367e70ac91aec90e07e \ + --hash=sha256:a64ee25f05fc2d3d8474985c58042b6759100a475f8237da1f4faf7fcd7e6309 \ + --hash=sha256:bfe012b50e1491d439172c43ccb50db66d23fab714d500b57ed52526a1020bb7 \ + --hash=sha256:c72861b7139a4f738344faa0e150834467521a3fba42dc98264e5aa9507dd601 \ + --hash=sha256:dcfb754dea911039ac12434d1950d69a2f05acd4d56f7935ed402be09fad145e \ + --hash=sha256:dee78a8b9746c30c1e617ccb1307b351ded57f0de0d287ca6276378d770006c0 \ + --hash=sha256:e478601cc3e3fa9d6734d255a59c7a2e5c2934da4378f3dd1e3411ea8a248642 \ + --hash=sha256:eafc1b7319b40ddabdc3db8d7d48e76cfc65bbeeafaa525a4e0fa6b76175467f \ + --hash=sha256:faca7ab947c9f457a08dcb8d9a8664fd438080e002b0fa3e41b0535335edcf7f \ + --hash=sha256:fd313226af375d52e1e36c383f39bf3836e1f192801116b31b090dfcd3ec5266 # via -r requirements/dev.in mypy-extensions==1.0.0 \ --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ @@ -897,25 +911,25 @@ python-dotenv==1.0.1 \ --hash=sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca \ --hash=sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a # via pydantic-settings -pywin32==307 ; platform_python_implementation != 'PyPy' and sys_platform == 'win32' \ - --hash=sha256:00d047992bb5dcf79f8b9b7c81f72e0130f9fe4b22df613f755ab1cc021d8347 \ - --hash=sha256:05de55a7c110478dc4b202230e98af5e0720855360d2b31a44bb4e296d795fba \ - --hash=sha256:07649ec6b01712f36debf39fc94f3d696a46579e852f60157a729ac039df0815 \ - --hash=sha256:0c12d61e0274e0c62acee79e3e503c312426ddd0e8d4899c626cddc1cafe0ff4 \ - --hash=sha256:13d059fb7f10792542082f5731d5d3d9645320fc38814759313e5ee97c3fac01 \ - --hash=sha256:36e650c5e5e6b29b5d317385b02d20803ddbac5d1031e1f88d20d76676dd103d \ - 
--hash=sha256:5101472f5180c647d4525a0ed289ec723a26231550dbfd369ec19d5faf60e511 \ - --hash=sha256:55ee87f2f8c294e72ad9d4261ca423022310a6e79fb314a8ca76ab3f493854c6 \ - --hash=sha256:576d09813eaf4c8168d0bfd66fb7cb3b15a61041cf41598c2db4a4583bf832d2 \ - --hash=sha256:7e0b2f93769d450a98ac7a31a087e07b126b6d571e8b4386a5762eb85325270b \ - --hash=sha256:987a86971753ed7fdd52a7fb5747aba955b2c7fbbc3d8b76ec850358c1cc28c3 \ - --hash=sha256:b30c9bdbffda6a260beb2919f918daced23d32c79109412c2085cbc513338a0a \ - --hash=sha256:b53658acbfc6a8241d72cc09e9d1d666be4e6c99376bc59e26cdb6223c4554d2 \ - --hash=sha256:e9d5202922e74985b037c9ef46778335c102b74b95cec70f629453dbe7235d87 \ - --hash=sha256:ea4d56e48dc1ab2aa0a5e3c0741ad6e926529510516db7a3b6981a1ae74405e5 \ - --hash=sha256:f8f25d893c1e1ce2d685ef6d0a481e87c6f510d0f3f117932781f412e0eba31b \ - --hash=sha256:fd436897c186a2e693cd0437386ed79f989f4d13d6f353f8787ecbb0ae719398 \ - --hash=sha256:fec5d27cc893178fab299de911b8e4d12c5954e1baf83e8a664311e56a272b75 +pywin32==308 ; platform_python_implementation != 'PyPy' and sys_platform == 'win32' \ + --hash=sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47 \ + --hash=sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6 \ + --hash=sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6 \ + --hash=sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed \ + --hash=sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff \ + --hash=sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de \ + --hash=sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e \ + --hash=sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b \ + --hash=sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0 \ + --hash=sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897 \ + --hash=sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a \ + 
--hash=sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920 \ + --hash=sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341 \ + --hash=sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e \ + --hash=sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091 \ + --hash=sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c \ + --hash=sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd \ + --hash=sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4 # via jupyter-core pyyaml==6.0.2 \ --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ @@ -978,6 +992,7 @@ pyyaml==6.0.2 \ # myst-nb # myst-parser # pybtex + # sphinxcontrib-mermaid # sphinxcontrib-redoc pyzmq==26.2.0 \ --hash=sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6 \ @@ -1260,13 +1275,14 @@ sphinx==8.0.2 \ # sphinx-prompt # sphinxcontrib-bibtex # sphinxcontrib-jquery + # sphinxcontrib-mermaid # sphinxcontrib-redoc # sphinxcontrib-youtube # sphinxext-opengraph # sphinxext-rediraffe -sphinx-autodoc-typehints==2.4.4 \ - --hash=sha256:940de2951fd584d147e46772579fdc904f945c5f1ee1a78c614646abfbbef18b \ - --hash=sha256:e743512da58b67a06579a1462798a6907664ab77460758a43234adeac350afbf +sphinx-autodoc-typehints==2.5.0 \ + --hash=sha256:259e1026b218d563d72743f417fcc25906a9614897fe37f91bd8d7d58f748c3b \ + --hash=sha256:53def4753239683835b19bfa8b68c021388bd48a096efcb02cdab508ece27363 # via documenteer sphinx-automodapi==0.18.0 \ --hash=sha256:022860385590768f52d4f6e19abb83b2574772d2721fb4050ecdb6e593a1a440 \ @@ -1322,9 +1338,9 @@ sphinxcontrib-jsmath==1.0.1 \ --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \ --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8 # via sphinx -sphinxcontrib-mermaid==0.9.2 \ - --hash=sha256:252ef13dd23164b28f16d8b0205cf184b9d8e2b714a302274d9f59eb708e77af \ - 
--hash=sha256:6795a72037ca55e65663d2a2c1a043d636dc3d30d418e56dd6087d1459d98a5d +sphinxcontrib-mermaid==1.0.0 \ + --hash=sha256:2e8ab67d3e1e2816663f9347d026a8dee4a858acdd4ad32dd1c808893db88146 \ + --hash=sha256:60b72710ea02087f212028feb09711225fbc2e343a10d34822fe787510e1caa3 # via documenteer sphinxcontrib-qthelp==2.0.0 \ --hash=sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab \ diff --git a/requirements/main.txt b/requirements/main.txt index d2304cfef7..ea6bfef483 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -4,9 +4,9 @@ annotated-types==0.7.0 \ --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \ --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89 # via pydantic -anyio==4.6.0 \ - --hash=sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb \ - --hash=sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a +anyio==4.6.2.post1 \ + --hash=sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c \ + --hash=sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d # via # httpcore # starlette @@ -115,97 +115,112 @@ cffi==1.17.1 ; platform_python_implementation != 'PyPy' \ --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b # via cryptography -charset-normalizer==3.3.2 \ - --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ - --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ - --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \ - --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \ - --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \ - --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \ - 
--hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \ - --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \ - --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \ - --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \ - --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \ - --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \ - --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \ - --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \ - --hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \ - --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \ - --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \ - --hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \ - --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \ - --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \ - --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \ - --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \ - --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \ - --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \ - --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \ - --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \ - --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \ - --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \ - --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \ - --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \ - 
--hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \ - --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \ - --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \ - --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \ - --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \ - --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \ - --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \ - --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \ - --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \ - --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \ - --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \ - --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \ - --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \ - --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \ - --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \ - --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \ - --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \ - --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \ - --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \ - --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \ - --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \ - --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \ - --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \ - --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \ - 
--hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \ - --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \ - --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \ - --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \ - --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \ - --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \ - --hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \ - --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \ - --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \ - --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \ - --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \ - --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \ - --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \ - --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \ - --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \ - --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \ - --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \ - --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \ - --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \ - --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \ - --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \ - --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \ - --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \ - --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \ - 
--hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \ - --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \ - --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \ - --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \ - --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \ - --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \ - --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \ - --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \ - --hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \ - --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \ - --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ - --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 +charset-normalizer==3.4.0 \ + --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \ + --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \ + --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \ + --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \ + --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \ + --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \ + --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \ + --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \ + --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \ + --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \ + --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \ + 
--hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \ + --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \ + --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \ + --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \ + --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \ + --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \ + --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \ + --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \ + --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \ + --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \ + --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \ + --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \ + --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \ + --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \ + --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \ + --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \ + --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \ + --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \ + --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \ + --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \ + --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \ + --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \ + --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \ + --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \ + 
--hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \ + --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \ + --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \ + --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \ + --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \ + --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \ + --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \ + --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \ + --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \ + --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \ + --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \ + --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \ + --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \ + --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \ + --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \ + --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \ + --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \ + --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \ + --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \ + --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \ + --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \ + --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \ + --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \ + --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \ + 
--hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \ + --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \ + --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \ + --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \ + --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \ + --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \ + --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \ + --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \ + --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \ + --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \ + --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \ + --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \ + --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \ + --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \ + --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \ + --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \ + --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \ + --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \ + --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \ + --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \ + --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \ + --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \ + --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \ + --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \ + 
--hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \ + --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \ + --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \ + --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \ + --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \ + --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \ + --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \ + --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \ + --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \ + --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \ + --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \ + --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \ + --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \ + --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \ + --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \ + --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \ + --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \ + --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \ + --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \ + --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \ + --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ + --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 # via requests click==8.1.7 \ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ @@ -249,9 +264,9 @@ cryptography==43.0.1 \ # phalanx 
(pyproject.toml) # pyjwt # safir -fastapi==0.115.0 \ - --hash=sha256:17ea427674467486e997206a5ab25760f6b09e069f099b96f5b55a32fb6f1631 \ - --hash=sha256:f93b4ca3529a8ebc6fc3fcf710e5efa8de3df9b41570958abf1d97d843138004 +fastapi==0.115.2 \ + --hash=sha256:3995739e0b09fa12f984bce8fa9ae197b35d433750d3d312422d846e283697ee \ + --hash=sha256:61704c71286579cc5a598763905928f24ee98bfcc07aabe84cfefb98812bbc86 # via safir gidgethub==5.3.0 \ --hash=sha256:4dd92f2252d12756b13f9dd15cde322bfb0d625b6fb5d680da1567ec74b462c0 \ @@ -294,68 +309,68 @@ jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via phalanx (pyproject.toml) -markupsafe==3.0.0 \ - --hash=sha256:03ff62dea2fef3eadf2f1853bc6332bcb0458d9608b11dfb1cd5aeda1c178ea6 \ - --hash=sha256:105ada43a61af22acb8774514c51900dc820c481cc5ba53f17c09d294d9c07ca \ - --hash=sha256:12ddac720b8965332d36196f6f83477c6351ba6a25d4aff91e30708c729350d7 \ - --hash=sha256:1d151b9cf3307e259b749125a5a08c030ba15a8f1d567ca5bfb0e92f35e761f5 \ - --hash=sha256:1ee9790be6f62121c4c58bbced387b0965ab7bffeecb4e17cc42ef290784e363 \ - --hash=sha256:1fd02f47596e00a372f5b4af2b4c45f528bade65c66dfcbc6e1ea1bfda758e98 \ - --hash=sha256:23efb2be7221105c8eb0e905433414d2439cb0a8c5d5ca081c1c72acef0f5613 \ - --hash=sha256:25396abd52b16900932e05b7104bcdc640a4d96c914f39c3b984e5a17b01fba0 \ - --hash=sha256:27d6a73682b99568916c54a4bfced40e7d871ba685b580ea04bbd2e405dfd4c5 \ - --hash=sha256:380faf314c3c84c1682ca672e6280c6c59e92d0bc13dc71758ffa2de3cd4e252 \ - --hash=sha256:3b231255770723f1e125d63c14269bcd8b8136ecfb620b9a18c0297e046d0736 \ - --hash=sha256:3cd0bba31d484fe9b9d77698ddb67c978704603dc10cdc905512af308cfcca6b \ - --hash=sha256:3efde9a8c56c3b6e5f3fa4baea828f8184970c7c78480fedb620d804b1c31e5c \ - --hash=sha256:409535e0521c4630d5b5a1bf284e9d3c76d2fc2f153ebb12cf3827797798cc99 \ - 
--hash=sha256:494a64efc535e147fcc713dba58eecfce3a79f1e93ebe81995b387f5cd9bc2e1 \ - --hash=sha256:4ca04c60006867610a06575b46941ae616b19da0adc85b9f8f3d9cbd7a3da385 \ - --hash=sha256:4deea1d9169578917d1f35cdb581bc7bab56a7e8c5be2633bd1b9549c3c22a01 \ - --hash=sha256:509c424069dd037d078925b6815fc56b7271f3aaec471e55e6fa513b0a80d2aa \ - --hash=sha256:5509a8373fed30b978557890a226c3d30569746c565b9daba69df80c160365a5 \ - --hash=sha256:59420b5a9a5d3fee483a32adb56d7369ae0d630798da056001be1e9f674f3aa6 \ - --hash=sha256:5d207ff5cceef77796f8aacd44263266248cf1fbc601441524d7835613f8abec \ - --hash=sha256:5ddf5cb8e9c00d9bf8b0c75949fb3ff9ea2096ba531693e2e87336d197fdb908 \ - --hash=sha256:63dae84964a9a3d2610808cee038f435d9a111620c37ccf872c2fcaeca6865b3 \ - --hash=sha256:64a7c7856c3a409011139b17d137c2924df4318dab91ee0530800819617c4381 \ - --hash=sha256:64f7d04410be600aa5ec0626d73d43e68a51c86500ce12917e10fd013e258df5 \ - --hash=sha256:658fdf6022740896c403d45148bf0c36978c6b48c9ef8b1f8d0c7a11b6cdea86 \ - --hash=sha256:678fbceb202382aae42c1f0cd9f56b776bc20a58ae5b553ee1fe6b802983a1d6 \ - --hash=sha256:7835de4c56066e096407a1852e5561f6033786dd987fa90dc384e45b9bd21295 \ - --hash=sha256:7c524203207f5b569df06c96dafdc337228921ee8c3cc5f6e891d024c6595352 \ - --hash=sha256:7ed789d0f7f11fcf118cf0acb378743dfdd4215d7f7d18837c88171405c9a452 \ - --hash=sha256:81be2c0084d8c69e97e3c5d73ce9e2a6e523556f2a19c4e195c09d499be2f808 \ - --hash=sha256:81ee9c967956b9ea39b3a5270b7cb1740928d205b0dc72629164ce621b4debf9 \ - --hash=sha256:8219e2207f6c188d15614ea043636c2b36d2d79bf853639c124a179412325a13 \ - --hash=sha256:96e3ed550600185d34429477f1176cedea8293fa40e47fe37a05751bcb64c997 \ - --hash=sha256:98fb3a2bf525ad66db96745707b93ba0f78928b7a1cb2f1cb4b143bc7e2ba3b3 \ - --hash=sha256:9b36473a2d3e882d1873ea906ce54408b9588dc2c65989664e6e7f5a2de353d7 \ - --hash=sha256:9f91c90f8f3bf436f81c12eeb4d79f9ddd263c71125e6ad71341906832a34386 \ - --hash=sha256:a5fd5500d4e4f7cc88d8c0f2e45126c4307ed31e08f8ec521474f2fd99d35ac3 \ - 
--hash=sha256:a7171d2b869e9be238ea318c196baf58fbf272704e9c1cd4be8c380eea963342 \ - --hash=sha256:a80c6740e1bfbe50cea7cbf74f48823bb57bd59d914ee22ff8a81963b08e62d2 \ - --hash=sha256:b2a7afd24d408b907672015555bc10be2382e6c5f62a488e2d452da670bbd389 \ - --hash=sha256:b43ac1eb9f91e0c14aac1d2ef0f76bc7b9ceea51de47536f61268191adf52ad7 \ - --hash=sha256:b6cc46a27d904c9be5732029769acf4b0af69345172ed1ef6d4db0c023ff603b \ - --hash=sha256:b94bec9eda10111ec7102ef909eca4f3c2df979643924bfe58375f560713a7d1 \ - --hash=sha256:bd9b8e458e2bab52f9ad3ab5dc8b689a3c84b12b2a2f64cd9a0dfe209fb6b42f \ - --hash=sha256:c182d45600556917f811aa019d834a89fe4b6f6255da2fd0bdcf80e970f95918 \ - --hash=sha256:c409691696bec2b5e5c9efd9593c99025bf2f317380bf0d993ee0213516d908a \ - --hash=sha256:c5243044a927e8a6bb28517838662a019cd7f73d7f106bbb37ab5e7fa8451a92 \ - --hash=sha256:c8ab7efeff1884c5da8e18f743b667215300e09043820d11723718de0b7db934 \ - --hash=sha256:cb244adf2499aa37d5dc43431990c7f0b632d841af66a51d22bd89c437b60264 \ - --hash=sha256:d261ec38b8a99a39b62e0119ed47fe3b62f7691c500bc1e815265adc016438c1 \ - --hash=sha256:d2c099be5274847d606574234e494f23a359e829ba337ea9037c3a72b0851942 \ - --hash=sha256:d7e63d1977d3806ce0a1a3e0099b089f61abdede5238ca6a3f3bf8877b46d095 \ - --hash=sha256:dba0f83119b9514bc37272ad012f0cc03f0805cc6a2bea7244e19250ac8ff29f \ - --hash=sha256:dcbee57fedc9b2182c54ffc1c5eed316c3da8bbfeda8009e1b5d7220199d15da \ - --hash=sha256:e042ccf8fe5bf8b6a4b38b3f7d618eb10ea20402b0c9f4add9293408de447974 \ - --hash=sha256:e363440c8534bf2f2ef1b8fdc02037eb5fff8fce2a558519b22d6a3a38b3ec5e \ - --hash=sha256:e64b390a306f9e849ee809f92af6a52cda41741c914358e0e9f8499d03741526 \ - --hash=sha256:f0411641d31aa6f7f0cc13f0f18b63b8dc08da5f3a7505972a42ab059f479ba3 \ - --hash=sha256:f1c13c6c908811f867a8e9e66efb2d6c03d1cdd83e92788fe97f693c457dc44f \ - --hash=sha256:f846fd7c241e5bd4161e2a483663eb66e4d8e12130fcdc052f310f388f1d61c6 +markupsafe==3.0.1 \ + 
--hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ + --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ + --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ + --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ + --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ + --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ + --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ + --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ + --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ + --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ + --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ + --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ + --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ + --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ + --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ + --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ + --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ + --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ + --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ + --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ + --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ + --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ + --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ + --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ + 
--hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ + --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ + --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ + --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ + --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ + --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ + --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ + --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ + --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ + --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ + --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ + --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ + --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ + --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ + --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ + --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ + --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ + --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ + --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ + --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ + --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ + --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ + --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ + --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ + 
--hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ + --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ + --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ + --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ + --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ + --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ + --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ + --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ + --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ + --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ + --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ + --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ + --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f # via jinja2 onepasswordconnectsdk==1.5.1 \ --hash=sha256:8924c614ffed98f29faada03dba940dc0bc47851b1f5f4ef7e312e43c10ec25b \ @@ -559,9 +574,9 @@ sniffio==1.3.1 \ # anyio # httpcore # httpx -starlette==0.38.6 \ - --hash=sha256:4517a1409e2e73ee4951214ba012052b9e16f60e90d73cfb06192c19203bbb05 \ - --hash=sha256:863a1588f5574e70a821dadefb41e4881ea451a47a3cd1b4df359d4ffefe5ead +starlette==0.39.2 \ + --hash=sha256:134dd6deb655a9775991d352312d53f1879775e5cc8a481f966e83416a2c3f71 \ + --hash=sha256:caaa3b87ef8518ef913dac4f073dea44e85f73343ad2bdc17941931835b2a26a # via # fastapi # safir diff --git a/requirements/tox.txt b/requirements/tox.txt index 50aab2f22b..94fa7bbf2f 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -15,9 +15,9 @@ colorama==0.4.6 \ # -c requirements/dev.txt # -c requirements/main.txt # tox -distlib==0.3.8 \ - --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 
\ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv filelock==3.16.1 \ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ @@ -56,29 +56,29 @@ tox==4.21.2 \ # via # -r requirements/tox.in # tox-uv -tox-uv==1.13.0 \ - --hash=sha256:1037e4abad15a3b708b5970ed7a17a0765d7249b641a92b155bc3343b8b0145b \ - --hash=sha256:fb087b8b4ff779c72b48fc72ea1995387bb1c0dfb37910c20e46cef8b5f98c15 +tox-uv==1.15.0 \ + --hash=sha256:a5f08c80a3eabc47881e378700e5044b67ba94b03181ae38246627127f6a9183 \ + --hash=sha256:dfe7e48274248458349b47780da7db815c0156bd3751b6486152bbf01d7672fb # via -r requirements/tox.in -uv==0.4.18 \ - --hash=sha256:0c4cb31594cb2ed21bd3b603a207e99dfb9610c3db44da9dbbff0f237270f582 \ - --hash=sha256:157e4a2c063b270de348862dd31abfe600d5601183fd2a6efe552840ac179626 \ - --hash=sha256:1944c0ee567ca7db60705c5d213a75b25601094b026cc17af3e704651c1e3753 \ - --hash=sha256:1b59d742b81c7acf75a3aac71d9b24e07407e044bebcf39d3fc3c87094014e20 \ - --hash=sha256:3e3ade81af961f48517fcd99318192c9c635ef9a38a7ca65026af0c803c71906 \ - --hash=sha256:4be600474db6733078503012f2811c4383f490f77366e66b5f686316db52c870 \ - --hash=sha256:4ec60141f92c9667548ebad8daf4c13aabdb58b22c21dcd834641e791e55f289 \ - --hash=sha256:5234d47abe339c15c318e8b1bbd136ea61c4574503eda6944a5aaea91b7f6775 \ - --hash=sha256:6566448278b6849846b6c586fc86748c66aa53ed70f5568e713122543cc86a50 \ - --hash=sha256:8250148484e1b0f89ec19467946e86ee303619985c23228b5a2f2d94d15c6d8b \ - --hash=sha256:8af0b60adcfa2e87c77a3008d3ed6e0b577c0535468dc58e06f905ccbd27124f \ - --hash=sha256:954964eff8c7e2bc63dd4beeb8d45bcaddb5149a7ef29a36abd77ec76c8b837e \ - --hash=sha256:96c3ccee0fd8cf0a9d679407e157b76db1a854638a4ba4fa14f4d116b4e39b03 \ - 
--hash=sha256:ade18dbbeb05c8cba4f842cc15b20e59467069183f348844750901227df5008d \ - --hash=sha256:b08564c8c7e8b3665ad1d6c8924d4654451f96c956eb5f3b8ec995c77734163d \ - --hash=sha256:df225a568da01f3d7e126d886c3694c5a4a7d8b85162a4d6e97822716ca0e7c4 \ - --hash=sha256:f043c3c4514c149a00a86c3bf44df43062416d41002114e60df33895e8511c41 \ - --hash=sha256:fcc606da545d9a5ec5c2209e7eb2a4eb76627ad75df5eb5616c0b40789fe3933 +uv==0.4.21 \ + --hash=sha256:0fccf9e232e95917ecbba10767c43dc308e243ea4d17531112a2f4ad63c0d3f1 \ + --hash=sha256:14224075d2edd3d2984391dfcb3138e4840cc998a81c1046cdc746ae1d38cc62 \ + --hash=sha256:19607da8ee024e4ff060804efb8251e3b821cbd7f830b58612600ffe739fd33d \ + --hash=sha256:23d635ef5fe716fb1a1c4b411619f05caa5f9ee669651fcf7a5c00c8a3a1f749 \ + --hash=sha256:343c4ffe77ea93563861b46ed024a90efc162c06749836d9d7a8506db40d4565 \ + --hash=sha256:3d3e35a10f7813d7e540aad24cd3a3e20745a42b671a217e7761686791a562f3 \ + --hash=sha256:45df47a4f43db730bea72bd3150c206d00d1a4d854137ed63dc04bb73032f280 \ + --hash=sha256:58a770b278b0555a966275dbe1461dd6632f938a0aefea89037155dee676c78d \ + --hash=sha256:7d1e239b683fb541cad1ddfa16ef4f8f0681ad666c73f12da17e70edc86aab4b \ + --hash=sha256:9c08b01f8571d2c64d45d569990aa7bffad5eb259cf64bc329d40d8c787fb9ba \ + --hash=sha256:9dcddbb3b6e1662c6db41d63db539742450e2ce17d6c746329c016e3651bfb4a \ + --hash=sha256:a1a9a126ce48f0f0893891adb5a9749220425169092f3e4da1216168736ac16d \ + --hash=sha256:aaff052175df7e43ac2f25849a26a6856dcce498653c69a2f4245cdf47db46f7 \ + --hash=sha256:ba3e3b40cc1d5a980d36589775d6a7e4defa1b33e7e06423af0e395b8e4d9505 \ + --hash=sha256:be55a34aa56192f2fd80a3954ad33e3d4587762f8fffe13a0bdf25da1f34ea5d \ + --hash=sha256:e2d7e9c65e799876a45c9134945d548c3de51e13ee650b58bc936190744a66e1 \ + --hash=sha256:e8efba624edb9ab36e0b3550252dc34b2eb1492c73ca8bfb5faa8148307efa1d \ + --hash=sha256:f787d74abb24532f69cd3029c16edea7544931fd36cc1acda5b3af1cbffa5fb4 # via tox-uv virtualenv==20.26.6 \ 
--hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ From 2e997a5ab93d3ad7a50296b8ac9a8dc94c170882 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Tue, 15 Oct 2024 13:31:46 -0700 Subject: [PATCH 294/567] Update Python dependencies Pick up the new documenteer, which fixes an incompatibility with sphinxcontrib-mermaid. --- requirements/dev.txt | 93 +++++++++++++++++-------------------------- requirements/main.txt | 6 +-- requirements/tox.txt | 44 ++++++++++---------- 3 files changed, 62 insertions(+), 81 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 024cf825d7..a341c2e6d3 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -346,9 +346,9 @@ diagrams==0.23.4 \ --hash=sha256:1ba69d98fcf8d768dbddf07d2c77aba6cc95c2e6f90f37146c04c96bc6765450 \ --hash=sha256:b7ada0b119b5189dd021b1dc1467fad3704737452bb18b1e06d05e4d1fa48ed7 # via sphinx-diagrams -documenteer==1.4.1 \ - --hash=sha256:b8e5a4b253cdf14b1306407f109d2b4811931e827ddef57a8be753232b546ebf \ - --hash=sha256:dadd0777d23aa1cc21bb737ff28b84c6594247f5237d1962334534c9c5cbd766 +documenteer==1.4.2 \ + --hash=sha256:03a4cf3b8ffa4905c59662131f87afe77417238f10e9f01075d849f08a32e99d \ + --hash=sha256:89756cf2026c3e70a36b9d2ecb69c38d58c320554f498be5955ddc815de4b035 # via -r requirements/dev.in docutils==0.21.2 \ --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ @@ -1257,9 +1257,9 @@ soupsieve==2.6 \ --hash=sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb \ --hash=sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9 # via beautifulsoup4 -sphinx==8.0.2 \ - --hash=sha256:0cce1ddcc4fd3532cf1dd283bc7d886758362c5c1de6598696579ce96d8ffa5b \ - --hash=sha256:56173572ae6c1b9a38911786e206a110c9749116745873feae4f9ce88e59391d +sphinx==8.1.3 \ + --hash=sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2 \ + 
--hash=sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927 # via # autodoc-pydantic # documenteer @@ -1365,56 +1365,37 @@ sphinxext-rediraffe==0.2.7 \ --hash=sha256:651dcbfae5ffda9ffd534dfb8025f36120e5efb6ea1a33f5420023862b9f725d \ --hash=sha256:9e430a52d4403847f4ffb3a8dd6dfc34a9fe43525305131f52ed899743a5fd8c # via documenteer -sqlalchemy==2.0.35 \ - --hash=sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9 \ - --hash=sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00 \ - --hash=sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee \ - --hash=sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6 \ - --hash=sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1 \ - --hash=sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72 \ - --hash=sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf \ - --hash=sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8 \ - --hash=sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b \ - --hash=sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc \ - --hash=sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c \ - --hash=sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1 \ - --hash=sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3 \ - --hash=sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5 \ - --hash=sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90 \ - --hash=sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec \ - --hash=sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71 \ - --hash=sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7 \ - --hash=sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b \ - 
--hash=sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468 \ - --hash=sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3 \ - --hash=sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e \ - --hash=sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139 \ - --hash=sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff \ - --hash=sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11 \ - --hash=sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01 \ - --hash=sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62 \ - --hash=sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d \ - --hash=sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a \ - --hash=sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db \ - --hash=sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87 \ - --hash=sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e \ - --hash=sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1 \ - --hash=sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9 \ - --hash=sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f \ - --hash=sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0 \ - --hash=sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44 \ - --hash=sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936 \ - --hash=sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8 \ - --hash=sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea \ - --hash=sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f \ - --hash=sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4 \ - --hash=sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0 \ - 
--hash=sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c \ - --hash=sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f \ - --hash=sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60 \ - --hash=sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2 \ - --hash=sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9 \ - --hash=sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33 +sqlalchemy==2.0.36 \ + --hash=sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436 \ + --hash=sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588 \ + --hash=sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e \ + --hash=sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959 \ + --hash=sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d \ + --hash=sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575 \ + --hash=sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8 \ + --hash=sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7 \ + --hash=sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971 \ + --hash=sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c \ + --hash=sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f \ + --hash=sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e \ + --hash=sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5 \ + --hash=sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793 \ + --hash=sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88 \ + --hash=sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2 \ + --hash=sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28 \ + 
--hash=sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5 \ + --hash=sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a \ + --hash=sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a \ + --hash=sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5 \ + --hash=sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c \ + --hash=sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07 \ + --hash=sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa \ + --hash=sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06 \ + --hash=sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1 \ + --hash=sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687 \ + --hash=sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb \ + --hash=sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44 \ + --hash=sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e # via jupyter-cache stack-data==0.6.3 \ --hash=sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9 \ diff --git a/requirements/main.txt b/requirements/main.txt index ea6bfef483..9e4fbbc2e6 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -574,9 +574,9 @@ sniffio==1.3.1 \ # anyio # httpcore # httpx -starlette==0.39.2 \ - --hash=sha256:134dd6deb655a9775991d352312d53f1879775e5cc8a481f966e83416a2c3f71 \ - --hash=sha256:caaa3b87ef8518ef913dac4f073dea44e85f73343ad2bdc17941931835b2a26a +starlette==0.40.0 \ + --hash=sha256:1a3139688fb298ce5e2d661d37046a66ad996ce94be4d4983be019a23a04ea35 \ + --hash=sha256:c494a22fae73805376ea6bf88439783ecfba9aac88a43911b48c653437e784c4 # via # fastapi # safir diff --git a/requirements/tox.txt b/requirements/tox.txt index 94fa7bbf2f..0ff0174f02 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -50,9 +50,9 @@ pyproject-api==1.8.0 \ 
--hash=sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228 \ --hash=sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496 # via tox -tox==4.21.2 \ - --hash=sha256:13d996adcd792e7c82994b0e116d85efd84f0c6d185254d83d156f73f86b2038 \ - --hash=sha256:49381ff102296753e378fa5ff30e42a35e695f149b4dbf8a2c49d15fdb5797b2 +tox==4.22.0 \ + --hash=sha256:03734d9a9ac138cd1a898a372fb1b8079e2728618ae06dc37cbf3686cfb56eea \ + --hash=sha256:acc6c627cb3316585238d55d2b633e132fea1bdb01b9d93b56bce7caea6ae73d # via # -r requirements/tox.in # tox-uv @@ -60,25 +60,25 @@ tox-uv==1.15.0 \ --hash=sha256:a5f08c80a3eabc47881e378700e5044b67ba94b03181ae38246627127f6a9183 \ --hash=sha256:dfe7e48274248458349b47780da7db815c0156bd3751b6486152bbf01d7672fb # via -r requirements/tox.in -uv==0.4.21 \ - --hash=sha256:0fccf9e232e95917ecbba10767c43dc308e243ea4d17531112a2f4ad63c0d3f1 \ - --hash=sha256:14224075d2edd3d2984391dfcb3138e4840cc998a81c1046cdc746ae1d38cc62 \ - --hash=sha256:19607da8ee024e4ff060804efb8251e3b821cbd7f830b58612600ffe739fd33d \ - --hash=sha256:23d635ef5fe716fb1a1c4b411619f05caa5f9ee669651fcf7a5c00c8a3a1f749 \ - --hash=sha256:343c4ffe77ea93563861b46ed024a90efc162c06749836d9d7a8506db40d4565 \ - --hash=sha256:3d3e35a10f7813d7e540aad24cd3a3e20745a42b671a217e7761686791a562f3 \ - --hash=sha256:45df47a4f43db730bea72bd3150c206d00d1a4d854137ed63dc04bb73032f280 \ - --hash=sha256:58a770b278b0555a966275dbe1461dd6632f938a0aefea89037155dee676c78d \ - --hash=sha256:7d1e239b683fb541cad1ddfa16ef4f8f0681ad666c73f12da17e70edc86aab4b \ - --hash=sha256:9c08b01f8571d2c64d45d569990aa7bffad5eb259cf64bc329d40d8c787fb9ba \ - --hash=sha256:9dcddbb3b6e1662c6db41d63db539742450e2ce17d6c746329c016e3651bfb4a \ - --hash=sha256:a1a9a126ce48f0f0893891adb5a9749220425169092f3e4da1216168736ac16d \ - --hash=sha256:aaff052175df7e43ac2f25849a26a6856dcce498653c69a2f4245cdf47db46f7 \ - --hash=sha256:ba3e3b40cc1d5a980d36589775d6a7e4defa1b33e7e06423af0e395b8e4d9505 \ - 
--hash=sha256:be55a34aa56192f2fd80a3954ad33e3d4587762f8fffe13a0bdf25da1f34ea5d \ - --hash=sha256:e2d7e9c65e799876a45c9134945d548c3de51e13ee650b58bc936190744a66e1 \ - --hash=sha256:e8efba624edb9ab36e0b3550252dc34b2eb1492c73ca8bfb5faa8148307efa1d \ - --hash=sha256:f787d74abb24532f69cd3029c16edea7544931fd36cc1acda5b3af1cbffa5fb4 +uv==0.4.22 \ + --hash=sha256:062a57ac3aab9a7d41e1b6a66948d563bf47478c719894661ea2c5ed6485a146 \ + --hash=sha256:0904c141f9fd7088d7837fb7ac5e43191236ed9cf8edf824ed838bdc77da7406 \ + --hash=sha256:0ff4ff91a25ed633f4d2556777e1b317262c01f71e8f72dfbc540e97e7eb5392 \ + --hash=sha256:455538b910db65f20a70cf806c5e65cc1d80ea7f40a116ba1c3d4bd1dab933d9 \ + --hash=sha256:48232daa35ebd3e963eea236cf33915a8b0c8a3673d5da35d764f8b1fec0b1b2 \ + --hash=sha256:52605e291f7ab1daca682b7a92b926c2f70e1fc86caaa37cbd56b64587730ea2 \ + --hash=sha256:527d785dafa5bf8fa4aba42188787a4b25c11d005a5f4bd8afda6e8c2c231e1b \ + --hash=sha256:63156e306f860d9fa2bb1d7c9af30053b88276004b2790cd9bbf20cc83ce988b \ + --hash=sha256:7041bf9d2d5d391cebca7778207eb88a96537ff2e93df2ff9f41d6c4057252c3 \ + --hash=sha256:71f3faaa94f60d362a6984fdf7675d6d2d244139de91a7d46e2367caf950951e \ + --hash=sha256:765dac79e5c8e2924efbd4663d4e03f5d7689f1baa98223b298fe4292610a25a \ + --hash=sha256:7be7adf47158c456031b2b78742a432260b5c22e9a86784fa57e7a208b0c3206 \ + --hash=sha256:956c4f0a9eddb8e18003bc39d114c78f6d6b4ba2683a262af043770abee44f2e \ + --hash=sha256:9cf96ddcb6ea2743e4c44fa22b08a4f2fd09cc9c5e228e8ab04b0cd08371c868 \ + --hash=sha256:af70ea49389397d0f6ff43827f73e0e71db0fc45cdf50c7dcff8318d726c8224 \ + --hash=sha256:c96eb12d1bdb1a826cba3c38273604629ac51e723d705aed17ae282650d030f0 \ + --hash=sha256:d9a242b3360c3a62e248053b3a6f618dc59cb5c56f4e30748433a19a002e4bf5 \ + --hash=sha256:e18c42cc99bc2a3f91d43aeb2df61a6d259114fca50dd3818879e9ee12064f7f # via tox-uv virtualenv==20.26.6 \ --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ From b64adda961ebbfa0352e200587ca56c982f5c957 Mon 
Sep 17 00:00:00 2001 From: Russ Allbery Date: Tue, 15 Oct 2024 13:36:45 -0700 Subject: [PATCH 295/567] Force a docs build if dependencies change We need to test the documentation build if any dependencies have changed, since they may be Sphinx dependencies. We missed a breaking Sphinx change because we didn't have that test. --- .github/workflows/docs.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index e53b746bf5..b5610d35ce 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -40,6 +40,7 @@ jobs: - "applications/argocd/values-*.yaml" - "applications/gafaelfawr/values-*.yaml" - "environments/values-*.yaml" + - "requirements/*.txt" - "src/phalanx/**" docsSpecific: - "docs/**" From 798ec6bb59cff16e4028d5445554ed7d98373b2a Mon Sep 17 00:00:00 2001 From: Hsin-Fang Chiang Date: Tue, 8 Oct 2024 10:03:06 -0700 Subject: [PATCH 296/567] Use the new dev butler central repo at central_repo_2 --- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfdev-prompt-processing.yaml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml index 7e9e4e559b..08310c2342 100644 --- a/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: pipelines: main: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/ApPipe.yaml] preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/Preprocessing.yaml] - calibRepo: s3://rubin-pp-dev-users/central_repo/ + calibRepo: s3://rubin-pp-dev-users/central_repo_2 s3: imageBucket: rubin-pp-dev diff --git 
a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml index aba3ca2b2c..857f01a6ae 100644 --- a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml @@ -15,7 +15,7 @@ prompt-proto-service: pipelines: main: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/ApPipe.yaml] preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/Preprocessing.yaml] - calibRepo: s3://rubin-pp-dev-users/central_repo/ + calibRepo: s3://rubin-pp-dev-users/central_repo_2 s3: imageBucket: rubin-pp-dev diff --git a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml index 9e0c60bf5d..1d459bf40f 100644 --- a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml @@ -15,7 +15,7 @@ prompt-proto-service: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/ApPipe.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr.yaml] preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Preprocessing.yaml] - calibRepo: s3://rubin-pp-dev-users/central_repo/ + calibRepo: s3://rubin-pp-dev-users/central_repo_2 s3: imageBucket: rubin-pp-dev diff --git a/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml index 86f51c8ce7..9a8af83570 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml @@ -16,7 +16,7 @@ prompt-proto-service: ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/SingleFrame.yaml, 
${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Isr.yaml] preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Preprocessing.yaml] - calibRepo: s3://rubin-pp-dev-users/central_repo/ + calibRepo: s3://rubin-pp-dev-users/central_repo_2 s3: imageBucket: rubin-pp-dev From 4588aee3a2995f66ee4539b8862da7e8fd271433 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Wed, 16 Oct 2024 12:17:47 -0300 Subject: [PATCH 297/567] rubintv: update app version to v2.5.0 for summit and usdf production deployments --- applications/rubintv/values-summit.yaml | 2 +- applications/rubintv/values-usdfprod.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index fdf5ec1e6d..c733551b29 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -20,7 +20,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.4.0 + tag: v2.5.0 pullPolicy: Always workers: diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index e78b51007c..a569e70d35 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -16,7 +16,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.4.0 + tag: v2.5.0 pullPolicy: Always workers: From 7aeda16916a0e92c3ea8811119778533663c9661 Mon Sep 17 00:00:00 2001 From: Dan Fuchs Date: Wed, 16 Oct 2024 12:08:47 -0500 Subject: [PATCH 298/567] DM-45522 sasquatch app metrics: service -> app_name I changed the `service` field on metrics event metadata to `app_name`, but I never changed it here. 
--- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/app-metrics/README.md | 2 +- applications/sasquatch/charts/app-metrics/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index e400b92449..77b7ba8072 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -91,7 +91,7 @@ Rubin Observatory's telemetry service | app-metrics.env | list | See `values.yaml` | Telegraf agent enviroment variables | | app-metrics.envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | | app-metrics.globalAppConfig | object | See `values.yaml` | app-metrics configuration in any environment in which the subchart is enabled. This should stay globally specified here, and it shouldn't be overridden. See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | -| app-metrics.globalInfluxTags | list | `["service"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). These will be concatenated with the `influxTags` from `globalAppConfig` | +| app-metrics.globalInfluxTags | list | `["app_name"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). 
These will be concatenated with the `influxTags` from `globalAppConfig` | | app-metrics.image.pullPolicy | string | `"Always"` | Image pull policy | | app-metrics.image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | | app-metrics.image.tag | string | `"1.30.2-alpine"` | Telegraf image tag | diff --git a/applications/sasquatch/charts/app-metrics/README.md b/applications/sasquatch/charts/app-metrics/README.md index a6a81462af..df8737703a 100644 --- a/applications/sasquatch/charts/app-metrics/README.md +++ b/applications/sasquatch/charts/app-metrics/README.md @@ -14,7 +14,7 @@ Kafka topics, users, and a telegraf connector for metrics events. | env | list | See `values.yaml` | Telegraf agent enviroment variables | | envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | | globalAppConfig | object | See `values.yaml` | app-metrics configuration in any environment in which the subchart is enabled. This should stay globally specified here, and it shouldn't be overridden. See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | -| globalInfluxTags | list | `["service"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). These will be concatenated with the `influxTags` from `globalAppConfig` | +| globalInfluxTags | list | `["app_name"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). 
These will be concatenated with the `influxTags` from `globalAppConfig` | | image.pullPolicy | string | `"Always"` | Image pull policy | | image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | | image.tag | string | `"1.30.2-alpine"` | Telegraf image tag | diff --git a/applications/sasquatch/charts/app-metrics/values.yaml b/applications/sasquatch/charts/app-metrics/values.yaml index 285ff3fc75..93329be191 100644 --- a/applications/sasquatch/charts/app-metrics/values.yaml +++ b/applications/sasquatch/charts/app-metrics/values.yaml @@ -19,7 +19,7 @@ apps: [] # -- Keys in an every event sent by any app that should be recorded in InfluxDB # as "tags" (vs. "fields"). These will be concatenated with the `influxTags` from # `globalAppConfig` -globalInfluxTags: ["service"] +globalInfluxTags: ["app_name"] cluster: # The name of the Strimzi cluster. Synchronize this with the cluster name in From 2b7ea2d3acebc56d5b1b1ffe7fd72e82aa9f64f2 Mon Sep 17 00:00:00 2001 From: Dan Fuchs Date: Wed, 16 Oct 2024 12:11:36 -0500 Subject: [PATCH 299/567] DM-45522 sasquatch app metrics: make InfluxDB database name consistent with the rest of the Sasquatch DBs --- .../charts/app-metrics/templates/telegraf-configmap.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml b/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml index 4721483645..8b04c7de4c 100644 --- a/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml +++ b/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml @@ -23,7 +23,7 @@ data: urls = [ {{ .Values.influxdb.url | quote }} ] - database = "telegraf-kafka-app-metrics-consumer" + database = "lsst.square.metrics" username = "${INFLUXDB_USER}" password = "${INFLUXDB_PASSWORD}" From 20aaea1b8e5402cd24fa36c426b8ff7aea8183c0 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Thu, 26 Sep 2024 
12:34:31 -0700 Subject: [PATCH 300/567] Convert manual ingresses to GafaelfawrIngress The next release of Gafaelfawr will drop support for manually configuring Kubernetes ingresses to use Gafaelfawr and will require the use of GafaelfawrIngress resources. Convert the three remaining relevant usages in Phalanx. These ingresses previously didn't require any specific scopes for access. I added exec:internal-tools for this purpose about a year ago so everyone's tokens should now have that scope. Add that scope restriction as part of this conversion if authentication is enabled. Currently, the authentication portion was disabled. Preserve that by configuring the ingreses to be anonymous by default, but add a configuration option that can be used to enable authentication when that is ready. --- applications/exposurelog/README.md | 2 +- .../exposurelog/templates/ingress.yaml | 56 ++++++++++--------- applications/exposurelog/values.yaml | 9 +-- applications/narrativelog/README.md | 2 +- .../narrativelog/templates/ingress.yaml | 55 +++++++++--------- applications/narrativelog/values.yaml | 9 +-- applications/nightreport/README.md | 2 +- .../nightreport/templates/ingress.yaml | 55 +++++++++--------- applications/nightreport/values.yaml | 9 +-- 9 files changed, 108 insertions(+), 91 deletions(-) diff --git a/applications/exposurelog/README.md b/applications/exposurelog/README.md index 927c35f2f7..d9037d6912 100644 --- a/applications/exposurelog/README.md +++ b/applications/exposurelog/README.md @@ -40,7 +40,7 @@ Log messages related to an exposure | image.pullPolicy | string | `"Always"` | Pull policy for the exposurelog image | | image.repository | string | `"lsstsqre/exposurelog"` | exposurelog image to use | | image.tag | string | The appVersion of the chart | Tag of exposure image to use | -| ingress.gafaelfawrAuthQuery | string | `""` | Gafaelfawr auth query string | +| ingress.auth.enabled | bool | `false` | Whether to require Gafaelfawr authentication for access | | 
nameOverride | string | `""` | Override the base name for resources | | nodeSelector | object | `{}` | Node selector rules for the exposurelog pod | | podAnnotations | object | `{}` | Annotations for the exposurelog pod | diff --git a/applications/exposurelog/templates/ingress.yaml b/applications/exposurelog/templates/ingress.yaml index aa26a054db..c929dfb9a8 100644 --- a/applications/exposurelog/templates/ingress.yaml +++ b/applications/exposurelog/templates/ingress.yaml @@ -1,30 +1,34 @@ -apiVersion: networking.k8s.io/v1 -kind: Ingress +apiVersion: gafaelfawr.lsst.io/v1alpha1 +kind: GafaelfawrIngress metadata: name: {{ template "exposurelog.fullname" . }} labels: {{- include "exposurelog.labels" . | nindent 4 }} - annotations: - {{- if .Values.ingress.gafaelfawrAuthQuery }} - nginx.ingress.kubernetes.io/auth-method: "GET" - nginx.ingress.kubernetes.io/auth-response-headers: "X-Auth-Request-User,X-Auth-Request-Email,X-Auth-Request-Token" - nginx.ingress.kubernetes.io/auth-signin: "{{ .Values.global.baseUrl }}/login" - nginx.ingress.kubernetes.io/auth-url: "{{ .Values.global.baseUrl }}/auth?{{ .Values.ingress.gafaelfawrAuthQuery }}" - {{- end }} - {{- with .Values.ingress.annotations }} - {{- toYaml . | nindent 4 }} - {{- end }} -spec: - ingressClassName: "nginx" - rules: - - host: {{ required "global.host must be set" .Values.global.host | quote }} - http: - paths: - - path: "/exposurelog" - pathType: "Prefix" - backend: - service: - name: {{ include "exposurelog.fullname" . }} - port: - number: 8080 - +config: + baseUrl: {{ .Values.global.baseUrl | quote }} + {{- if .Values.ingress.auth.enabled }} + loginRedirect: true + scopes: + all: + - "exec:internal-tools" + {{- else }} + scopes: + anonymous: true + {{- end }} +template: + metadata: + name: {{ template "exposurelog.fullname" . }} + labels: + {{- include "exposurelog.labels" . 
| nindent 4 }} + spec: + rules: + - host: {{ required "global.host must be set" .Values.global.host | quote }} + http: + paths: + - path: "/exposurelog" + pathType: "Prefix" + backend: + service: + name: {{ include "exposurelog.fullname" . }} + port: + number: 8080 diff --git a/applications/exposurelog/values.yaml b/applications/exposurelog/values.yaml index ece7625737..929ddd59ca 100644 --- a/applications/exposurelog/values.yaml +++ b/applications/exposurelog/values.yaml @@ -20,6 +20,11 @@ image: # @default -- The appVersion of the chart tag: "" +ingress: + auth: + # -- Whether to require Gafaelfawr authentication for access + enabled: false + db: # -- database host host: postgres.postgres @@ -30,10 +35,6 @@ db: # -- database name database: exposurelog -ingress: - # -- Gafaelfawr auth query string - gafaelfawrAuthQuery: "" - # -- Application-specific configuration config: # -- NFS path to butler registry 1 diff --git a/applications/narrativelog/README.md b/applications/narrativelog/README.md index 7fe1f08bf4..f7bd7464d6 100644 --- a/applications/narrativelog/README.md +++ b/applications/narrativelog/README.md @@ -30,7 +30,7 @@ Narrative log service | image.pullPolicy | string | `"Always"` | Pull policy for the narrativelog image | | image.repository | string | `"lsstsqre/narrativelog"` | narrativelog image to use | | image.tag | string | The appVersion of the chart | Tag of exposure image to use | -| ingress.gafaelfawrAuthQuery | string | `""` | Gafaelfawr auth query string | +| ingress.auth.enabled | bool | `false` | Whether to require Gafaelfawr authentication for access | | nameOverride | string | `""` | Override the base name for resources | | nodeSelector | object | `{}` | Node selector rules for the narrativelog pod | | podAnnotations | object | `{}` | Annotations for the narrativelog pod | diff --git a/applications/narrativelog/templates/ingress.yaml b/applications/narrativelog/templates/ingress.yaml index cdf8f56d85..f81cd57dd6 100644 --- 
a/applications/narrativelog/templates/ingress.yaml +++ b/applications/narrativelog/templates/ingress.yaml @@ -1,29 +1,34 @@ -apiVersion: networking.k8s.io/v1 -kind: Ingress +apiVersion: gafaelfawr.lsst.io/v1alpha1 +kind: GafaelfawrIngress metadata: name: {{ template "narrativelog.fullname" . }} labels: {{- include "narrativelog.labels" . | nindent 4 }} - annotations: - {{- if .Values.ingress.gafaelfawrAuthQuery }} - nginx.ingress.kubernetes.io/auth-method: "GET" - nginx.ingress.kubernetes.io/auth-response-headers: "X-Auth-Request-User,X-Auth-Request-Email,X-Auth-Request-Token" - nginx.ingress.kubernetes.io/auth-signin: "{{ .Values.global.baseUrl }}/login" - nginx.ingress.kubernetes.io/auth-url: "https://{{ .Values.global.baseUrl }}/auth?{{ .Values.ingress.gafaelfawrAuthQuery }}" - {{- end }} - {{- with .Values.ingress.annotations }} - {{- toYaml . | nindent 4 }} - {{- end }} -spec: - ingressClassName: "nginx" - rules: - - host: {{ required "global.host must be set" .Values.global.host | quote }} - http: - paths: - - path: /narrativelog - pathType: Prefix - backend: - service: - name: {{ include "narrativelog.fullname" . }} - port: - number: 8080 +config: + baseUrl: {{ .Values.global.baseUrl | quote }} + {{- if .Values.ingress.auth.enabled }} + loginRedirect: true + scopes: + all: + - "exec:internal-tools" + {{- else }} + scopes: + anonymous: true + {{- end }} +template: + metadata: + name: {{ template "narrativelog.fullname" . }} + labels: + {{- include "narrativelog.labels" . | nindent 4 }} + spec: + rules: + - host: {{ required "global.host must be set" .Values.global.host | quote }} + http: + paths: + - path: /narrativelog + pathType: Prefix + backend: + service: + name: {{ include "narrativelog.fullname" . 
}} + port: + number: 8080 diff --git a/applications/narrativelog/values.yaml b/applications/narrativelog/values.yaml index 87e629f34d..c85c032573 100644 --- a/applications/narrativelog/values.yaml +++ b/applications/narrativelog/values.yaml @@ -20,6 +20,11 @@ image: # @default -- The appVersion of the chart tag: "" +ingress: + auth: + # -- Whether to require Gafaelfawr authentication for access + enabled: false + db: # -- database host host: postgres.postgres @@ -30,10 +35,6 @@ db: # -- database name database: narrativelog -ingress: - # -- Gafaelfawr auth query string - gafaelfawrAuthQuery: "" - # -- Application-specific configuration config: # -- Site ID; a non-empty string of up to 16 characters. diff --git a/applications/nightreport/README.md b/applications/nightreport/README.md index 4a8df51c45..6adbda45d4 100644 --- a/applications/nightreport/README.md +++ b/applications/nightreport/README.md @@ -31,7 +31,7 @@ Night report log service | image.pullPolicy | string | `"Always"` | Pull policy for the nightreport image | | image.repository | string | `"lsstts/nightreport"` | nightreport image to use | | image.tag | string | The appVersion of the chart | Tag of exposure image to use | -| ingress.gafaelfawrAuthQuery | string | `""` | Gafaelfawr auth query string | +| ingress.auth.enabled | bool | `false` | Whether to require Gafaelfawr authentication for access | | nameOverride | string | `""` | Override the base name for resources | | nodeSelector | object | `{}` | Node selector rules for the nightreport pod | | podAnnotations | object | `{}` | Annotations for the nightreport pod | diff --git a/applications/nightreport/templates/ingress.yaml b/applications/nightreport/templates/ingress.yaml index 99768a13f2..4cdf367a87 100644 --- a/applications/nightreport/templates/ingress.yaml +++ b/applications/nightreport/templates/ingress.yaml @@ -1,29 +1,34 @@ -apiVersion: networking.k8s.io/v1 -kind: Ingress +apiVersion: gafaelfawr.lsst.io/v1alpha1 +kind: GafaelfawrIngress 
metadata: name: {{ template "nightreport.fullname" . }} labels: {{- include "nightreport.labels" . | nindent 4 }} - annotations: - {{- if .Values.ingress.gafaelfawrAuthQuery }} - nginx.ingress.kubernetes.io/auth-method: "GET" - nginx.ingress.kubernetes.io/auth-response-headers: "X-Auth-Request-User,X-Auth-Request-Email,X-Auth-Request-Token" - nginx.ingress.kubernetes.io/auth-signin: "{{ .Values.global.baseUrl }}/login" - nginx.ingress.kubernetes.io/auth-url: "https://{{ .Values.global.baseUrl }}/auth?{{ .Values.ingress.gafaelfawrAuthQuery }}" - {{- end }} - {{- with .Values.ingress.annotations }} - {{- toYaml . | nindent 4 }} - {{- end }} -spec: - ingressClassName: "nginx" - rules: - - host: {{ required "global.host must be set" .Values.global.host | quote }} - http: - paths: - - path: /nightreport - pathType: Prefix - backend: - service: - name: {{ include "nightreport.fullname" . }} - port: - number: 8080 +config: + baseUrl: {{ .Values.global.baseUrl | quote }} + {{- if .Values.ingress.auth.enabled }} + loginRedirect: true + scopes: + all: + - "exec:internal-tools" + {{- else }} + scopes: + anonymous: true + {{- end }} +template: + metadata: + name: {{ template "nightreport.fullname" . }} + labels: + {{- include "nightreport.labels" . | nindent 4 }} + spec: + rules: + - host: {{ required "global.host must be set" .Values.global.host | quote }} + http: + paths: + - path: /nightreport + pathType: Prefix + backend: + service: + name: {{ include "nightreport.fullname" . 
}} + port: + number: 8080 diff --git a/applications/nightreport/values.yaml b/applications/nightreport/values.yaml index 9471348e9f..87d725b6e0 100644 --- a/applications/nightreport/values.yaml +++ b/applications/nightreport/values.yaml @@ -20,6 +20,11 @@ image: # @default -- The appVersion of the chart tag: "" +ingress: + auth: + # -- Whether to require Gafaelfawr authentication for access + enabled: false + db: # -- database host host: postgres.postgres @@ -30,10 +35,6 @@ db: # -- database name database: nightreport -ingress: - # -- Gafaelfawr auth query string - gafaelfawrAuthQuery: "" - # -- Application-specific configuration config: # -- Site ID; a non-empty string of up to 16 characters. From a98415cb55679a845604a75347b83494658d5502 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Thu, 10 Oct 2024 14:56:01 -0700 Subject: [PATCH 301/567] Upgrade consdb tags to main --- applications/consdb/values-tucson-teststand.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/consdb/values-tucson-teststand.yaml b/applications/consdb/values-tucson-teststand.yaml index 2e4015013e..26f1065957 100644 --- a/applications/consdb/values-tucson-teststand.yaml +++ b/applications/consdb/values-tucson-teststand.yaml @@ -8,15 +8,15 @@ lfa: hinfo: latiss: enable: true - tag: "tickets-DM-44551" + tag: "main" logConfig: "consdb.hinfo=DEBUG" lsstcomcam: enable: true - tag: "tickets-DM-44551" + tag: "main" logConfig: "consdb.hinfo=DEBUG" lsstcam: enable: false - tag: "tickets-DM-44551" + tag: "main" pq: image: tag: "main" From 2b9c3bba24300017aa3764ec7663acaf5fc12a22 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Thu, 10 Oct 2024 15:48:59 -0700 Subject: [PATCH 302/567] Adding resource specifications --- applications/consdb/values-tucson-teststand.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/applications/consdb/values-tucson-teststand.yaml b/applications/consdb/values-tucson-teststand.yaml index 26f1065957..25b414a65f 100644 --- 
a/applications/consdb/values-tucson-teststand.yaml +++ b/applications/consdb/values-tucson-teststand.yaml @@ -17,6 +17,13 @@ hinfo: lsstcam: enable: false tag: "main" + resources: + requests: + cpu: 200m + memory: 80Gi + limits: + cpu: 500m + memory: 80Gi pq: image: tag: "main" From c17cfb448ecaa6cabac28c28b0f8fa962f99acd2 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Thu, 10 Oct 2024 16:00:25 -0700 Subject: [PATCH 303/567] Add more resources to the correct spots --- .../consdb/values-tucson-teststand.yaml | 28 ++++++++++++++----- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/applications/consdb/values-tucson-teststand.yaml b/applications/consdb/values-tucson-teststand.yaml index 25b414a65f..1b7ed15ec2 100644 --- a/applications/consdb/values-tucson-teststand.yaml +++ b/applications/consdb/values-tucson-teststand.yaml @@ -10,20 +10,34 @@ hinfo: enable: true tag: "main" logConfig: "consdb.hinfo=DEBUG" + resources: + requests: + cpu: 200m + memory: 80Gi + limits: + cpu: 500m + memory: 80Gi lsstcomcam: enable: true tag: "main" logConfig: "consdb.hinfo=DEBUG" + resources: + requests: + cpu: 200m + memory: 80Gi + limits: + cpu: 500m + memory: 80Gi lsstcam: enable: false tag: "main" - resources: - requests: - cpu: 200m - memory: 80Gi - limits: - cpu: 500m - memory: 80Gi + resources: + requests: + cpu: 200m + memory: 80Gi + limits: + cpu: 500m + memory: 80Gi pq: image: tag: "main" From a7bcf942101ea8f88f46ab663e1521b05f93d2ca Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Thu, 10 Oct 2024 16:17:55 -0700 Subject: [PATCH 304/567] Add Resources to whole file --- .../consdb/values-tucson-teststand.yaml | 30 ++++++------------- 1 file changed, 9 insertions(+), 21 deletions(-) diff --git a/applications/consdb/values-tucson-teststand.yaml b/applications/consdb/values-tucson-teststand.yaml index 1b7ed15ec2..054d139a55 100644 --- a/applications/consdb/values-tucson-teststand.yaml +++ b/applications/consdb/values-tucson-teststand.yaml @@ -10,34 +10,22 @@ 
hinfo: enable: true tag: "main" logConfig: "consdb.hinfo=DEBUG" - resources: - requests: - cpu: 200m - memory: 80Gi - limits: - cpu: 500m - memory: 80Gi lsstcomcam: enable: true tag: "main" logConfig: "consdb.hinfo=DEBUG" - resources: - requests: - cpu: 200m - memory: 80Gi - limits: - cpu: 500m - memory: 80Gi lsstcam: enable: false tag: "main" - resources: - requests: - cpu: 200m - memory: 80Gi - limits: - cpu: 500m - memory: 80Gi + pq: image: tag: "main" + +resources: + requests: + cpu: 200m + memory: 80Gi + limits: + cpu: 500m + memory: 80Gi From 72bc6eb819deee5a2db226d9c1740e0b27d8371b Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Thu, 10 Oct 2024 16:28:48 -0700 Subject: [PATCH 305/567] Specify less resources for consdb --- applications/consdb/values-tucson-teststand.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/consdb/values-tucson-teststand.yaml b/applications/consdb/values-tucson-teststand.yaml index 054d139a55..5a0f11d082 100644 --- a/applications/consdb/values-tucson-teststand.yaml +++ b/applications/consdb/values-tucson-teststand.yaml @@ -25,7 +25,7 @@ pq: resources: requests: cpu: 200m - memory: 80Gi + memory: 20Gi limits: cpu: 500m - memory: 80Gi + memory: 20Gi From db02ad3e131d4f7bfa1c2d925fae05441fb55f8d Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Fri, 11 Oct 2024 09:52:37 -0700 Subject: [PATCH 306/567] Add tmp in volumes to hinfo deployment --- applications/consdb/templates/hinfo-deployment.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index 3a9f749110..d7a733fd93 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -292,3 +292,10 @@ spec: {{- toYaml . 
| nindent 8 }} {{- end }} {{- end }} + +volumes: + - name: "tmp" + emptyDir: {} +volumeMounts: + - name: "tmp" + mountPath: "/tmp" \ No newline at end of file From c9780532cfb316c255cbd49ea7bc80b54862f2c2 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Fri, 11 Oct 2024 10:09:08 -0700 Subject: [PATCH 307/567] Add volumes in the correct spots --- .../consdb/templates/hinfo-deployment.yaml | 25 +++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index d7a733fd93..26d2384203 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -79,6 +79,12 @@ spec: value: "{{ .Values.kafka.group_id }}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" + volumes: + - name: "tmp" + emptyDir: {} + volumeMounts: + - name: "tmp" + mountPath: "/tmp" securityContext: runAsNonRoot: true runAsUser: 1000 @@ -177,6 +183,12 @@ spec: value: "{{ .Values.kafka.group_id }}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" + volumes: + - name: "tmp" + emptyDir: {} + volumeMounts: + - name: "tmp" + mountPath: "/tmp" securityContext: runAsNonRoot: true runAsUser: 1000 @@ -275,6 +287,12 @@ spec: value: "{{ .Values.kafka.group_id }}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" + volumes: + - name: "tmp" + emptyDir: {} + volumeMounts: + - name: "tmp" + mountPath: "/tmp" securityContext: runAsNonRoot: true runAsUser: 1000 @@ -292,10 +310,3 @@ spec: {{- toYaml . 
| nindent 8 }} {{- end }} {{- end }} - -volumes: - - name: "tmp" - emptyDir: {} -volumeMounts: - - name: "tmp" - mountPath: "/tmp" \ No newline at end of file From 232097f446690b0701094ee473bb0dd6666cf929 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Fri, 11 Oct 2024 10:11:18 -0700 Subject: [PATCH 308/567] Add size specification to volumes --- applications/consdb/templates/hinfo-deployment.yaml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index 26d2384203..51712e03ba 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -81,7 +81,8 @@ spec: value: "{{ .Values.kafka.schema_url }}" volumes: - name: "tmp" - emptyDir: {} + emptyDir: + sizeLimit: 100Mi volumeMounts: - name: "tmp" mountPath: "/tmp" @@ -185,7 +186,8 @@ spec: value: "{{ .Values.kafka.schema_url }}" volumes: - name: "tmp" - emptyDir: {} + emptyDir: + sizeLimit: 100Mi volumeMounts: - name: "tmp" mountPath: "/tmp" @@ -289,7 +291,8 @@ spec: value: "{{ .Values.kafka.schema_url }}" volumes: - name: "tmp" - emptyDir: {} + emptyDir: + sizeLimit: 100Mi volumeMounts: - name: "tmp" mountPath: "/tmp" From 1376421a696bb8c2b9c4dea0ecd59f2253f77b04 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Fri, 11 Oct 2024 10:18:54 -0700 Subject: [PATCH 309/567] Remove size limit from tmp --- applications/consdb/templates/hinfo-deployment.yaml | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index 51712e03ba..26d2384203 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -81,8 +81,7 @@ spec: value: "{{ .Values.kafka.schema_url }}" volumes: - name: "tmp" - emptyDir: - sizeLimit: 100Mi + emptyDir: {} volumeMounts: - 
name: "tmp" mountPath: "/tmp" @@ -186,8 +185,7 @@ spec: value: "{{ .Values.kafka.schema_url }}" volumes: - name: "tmp" - emptyDir: - sizeLimit: 100Mi + emptyDir: {} volumeMounts: - name: "tmp" mountPath: "/tmp" @@ -291,8 +289,7 @@ spec: value: "{{ .Values.kafka.schema_url }}" volumes: - name: "tmp" - emptyDir: - sizeLimit: 100Mi + emptyDir: {} volumeMounts: - name: "tmp" mountPath: "/tmp" From 5af53e24e8d0637093245a9718943d1bacc8066f Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Fri, 11 Oct 2024 12:39:33 -0700 Subject: [PATCH 310/567] Update volume and mounts in deployment --- .../consdb/templates/hinfo-deployment.yaml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index 26d2384203..4038d6c4ce 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -79,12 +79,12 @@ spec: value: "{{ .Values.kafka.group_id }}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" + - volumeMounts: + - name: "tmp" + mountPath: "/tmp" volumes: - name: "tmp" emptyDir: {} - volumeMounts: - - name: "tmp" - mountPath: "/tmp" securityContext: runAsNonRoot: true runAsUser: 1000 @@ -183,12 +183,12 @@ spec: value: "{{ .Values.kafka.group_id }}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" + - volumeMounts: + - name: "tmp" + mountPath: "/tmp" volumes: - name: "tmp" emptyDir: {} - volumeMounts: - - name: "tmp" - mountPath: "/tmp" securityContext: runAsNonRoot: true runAsUser: 1000 @@ -287,12 +287,12 @@ spec: value: "{{ .Values.kafka.group_id }}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" + - volumeMounts: + - name: "tmp" + mountPath: "/tmp" volumes: - name: "tmp" emptyDir: {} - volumeMounts: - - name: "tmp" - mountPath: "/tmp" securityContext: runAsNonRoot: true runAsUser: 1000 From 70fb870a936ba20d5bcf78ff1bb3cec85550c188 Mon Sep 17 
00:00:00 2001 From: Valerie Becker Date: Fri, 11 Oct 2024 13:25:54 -0700 Subject: [PATCH 311/567] Fix volume mounts syntax --- applications/consdb/templates/hinfo-deployment.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index 4038d6c4ce..9d7654e7ee 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -80,8 +80,8 @@ spec: - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" - volumeMounts: - - name: "tmp" - mountPath: "/tmp" + mountPath: "/tmp" + name: "tmp" volumes: - name: "tmp" emptyDir: {} @@ -184,8 +184,8 @@ spec: - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" - volumeMounts: - - name: "tmp" - mountPath: "/tmp" + mountPath: "/tmp" + name: "tmp" volumes: - name: "tmp" emptyDir: {} @@ -288,8 +288,8 @@ spec: - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" - volumeMounts: - - name: "tmp" - mountPath: "/tmp" + mountPath: "/tmp" + name: "tmp" volumes: - name: "tmp" emptyDir: {} From 10ecc5470a2ab9b9793afe7ec4b8983804ce5d8b Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Fri, 11 Oct 2024 13:35:47 -0700 Subject: [PATCH 312/567] Move VolumeMounts section --- .../consdb/templates/hinfo-deployment.yaml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index 9d7654e7ee..088be9d7d2 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -36,6 +36,9 @@ spec: imagePullPolicy: {{ .Values.hinfo.image.pullPolicy }} resources: {{- toYaml .Values.resources | nindent 12 }} + volumeMounts: + - name: "tmp" + - mountPath: "/tmp" env: - name: "INSTRUMENT" value: "LATISS" @@ -79,9 +82,6 @@ spec: value: "{{ .Values.kafka.group_id 
}}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" - - volumeMounts: - mountPath: "/tmp" - name: "tmp" volumes: - name: "tmp" emptyDir: {} @@ -137,6 +137,9 @@ spec: - "all" readOnlyRootFilesystem: true image: "{{ .Values.hinfo.image.repository }}:{{ .Values.hinfo.lsstcomcam.tag | default .Chart.AppVersion }}" + volumeMounts: + - name: "tmp" + - mountPath: "/tmp" imagePullPolicy: {{ .Values.hinfo.image.pullPolicy }} resources: {{- toYaml .Values.resources | nindent 12 }} @@ -183,9 +186,6 @@ spec: value: "{{ .Values.kafka.group_id }}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" - - volumeMounts: - mountPath: "/tmp" - name: "tmp" volumes: - name: "tmp" emptyDir: {} @@ -244,6 +244,9 @@ spec: imagePullPolicy: {{ .Values.hinfo.image.pullPolicy }} resources: {{- toYaml .Values.resources | nindent 12 }} + volumeMounts: + - name: "tmp" + - mountPath: "/tmp" env: - name: "INSTRUMENT" value: "LSSTCam" @@ -287,9 +290,6 @@ spec: value: "{{ .Values.kafka.group_id }}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" - - volumeMounts: - mountPath: "/tmp" - name: "tmp" volumes: - name: "tmp" emptyDir: {} From 7b2a24f4595088ad78550547db3e2b2e89da0e06 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Fri, 11 Oct 2024 13:40:34 -0700 Subject: [PATCH 313/567] Slide volume mounts below env --- .../consdb/templates/hinfo-deployment.yaml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index 088be9d7d2..c81aa50a5c 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -36,9 +36,6 @@ spec: imagePullPolicy: {{ .Values.hinfo.image.pullPolicy }} resources: {{- toYaml .Values.resources | nindent 12 }} - volumeMounts: - - name: "tmp" - - mountPath: "/tmp" env: - name: "INSTRUMENT" value: "LATISS" @@ -82,6 +79,9 @@ spec: value: "{{ 
.Values.kafka.group_id }}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" + volumeMounts: + - name: "tmp" + mountPath: "/tmp" volumes: - name: "tmp" emptyDir: {} @@ -137,9 +137,6 @@ spec: - "all" readOnlyRootFilesystem: true image: "{{ .Values.hinfo.image.repository }}:{{ .Values.hinfo.lsstcomcam.tag | default .Chart.AppVersion }}" - volumeMounts: - - name: "tmp" - - mountPath: "/tmp" imagePullPolicy: {{ .Values.hinfo.image.pullPolicy }} resources: {{- toYaml .Values.resources | nindent 12 }} @@ -186,6 +183,9 @@ spec: value: "{{ .Values.kafka.group_id }}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" + volumeMounts: + - name: "tmp" + mountPath: "/tmp" volumes: - name: "tmp" emptyDir: {} @@ -244,9 +244,6 @@ spec: imagePullPolicy: {{ .Values.hinfo.image.pullPolicy }} resources: {{- toYaml .Values.resources | nindent 12 }} - volumeMounts: - - name: "tmp" - - mountPath: "/tmp" env: - name: "INSTRUMENT" value: "LSSTCam" @@ -290,6 +287,9 @@ spec: value: "{{ .Values.kafka.group_id }}" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" + volumeMounts: + - name: "tmp" + mountPath: "/tmp" volumes: - name: "tmp" emptyDir: {} From 61e09fa1d527760ee9883f5df3fad046c30c5071 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Mon, 14 Oct 2024 14:08:47 -0700 Subject: [PATCH 314/567] Add Astropy cache dir --- applications/consdb/templates/hinfo-deployment.yaml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index c81aa50a5c..517c09875b 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -82,9 +82,13 @@ spec: volumeMounts: - name: "tmp" mountPath: "/tmp" + - name: ".astropy" + mountPath: "/home/lsst/.astropy" volumes: - name: "tmp" emptyDir: {} + - name: ".astropy" + emptyDir: {} securityContext: runAsNonRoot: true runAsUser: 1000 @@ -186,9 +190,13 @@ 
spec: volumeMounts: - name: "tmp" mountPath: "/tmp" + - name: ".astropy" + mountPath: "/home/lsst/.astropy" volumes: - name: "tmp" emptyDir: {} + - name: ".astropy" + emptyDir: {} securityContext: runAsNonRoot: true runAsUser: 1000 @@ -290,9 +298,13 @@ spec: volumeMounts: - name: "tmp" mountPath: "/tmp" + - name: ".astropy" + mountPath: "/home/lsst/.astropy" volumes: - name: "tmp" emptyDir: {} + - name: ".astropy" + emptyDir: {} securityContext: runAsNonRoot: true runAsUser: 1000 From eb5fed8f9f1a756965b7357df8e4d2b7e2024706 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Mon, 14 Oct 2024 19:57:59 -0700 Subject: [PATCH 315/567] Remove . from volume name --- applications/consdb/templates/hinfo-deployment.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index 517c09875b..332e038e61 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -82,12 +82,12 @@ spec: volumeMounts: - name: "tmp" mountPath: "/tmp" - - name: ".astropy" + - name: "astropy" mountPath: "/home/lsst/.astropy" volumes: - name: "tmp" emptyDir: {} - - name: ".astropy" + - name: "astropy" emptyDir: {} securityContext: runAsNonRoot: true @@ -190,12 +190,12 @@ spec: volumeMounts: - name: "tmp" mountPath: "/tmp" - - name: ".astropy" + - name: "astropy" mountPath: "/home/lsst/.astropy" volumes: - name: "tmp" emptyDir: {} - - name: ".astropy" + - name: "astropy" emptyDir: {} securityContext: runAsNonRoot: true @@ -298,12 +298,12 @@ spec: volumeMounts: - name: "tmp" mountPath: "/tmp" - - name: ".astropy" + - name: "astropy" mountPath: "/home/lsst/.astropy" volumes: - name: "tmp" emptyDir: {} - - name: ".astropy" + - name: "astropy" emptyDir: {} securityContext: runAsNonRoot: true From cdf722257040fb5ef78c4160eab2f18853ca59bf Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Tue, 15 Oct 2024 
11:58:30 -0700 Subject: [PATCH 316/567] Apply updates to consdb branch to values-summit --- applications/consdb/values-summit.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/consdb/values-summit.yaml b/applications/consdb/values-summit.yaml index 4b34180316..29650b6655 100644 --- a/applications/consdb/values-summit.yaml +++ b/applications/consdb/values-summit.yaml @@ -8,15 +8,15 @@ lfa: hinfo: latiss: enable: true - tag: "tickets-DM-44551" + tag: "main" logConfig: "consdb.hinfo=DEBUG" lsstcomcam: enable: true - tag: "tickets-DM-44551" + tag: "main" logConfig: "consdb.hinfo=DEBUG" lsstcam: enable: false - tag: "tickets-DM-44551" + tag: "main" pq: image: tag: "main" From 66f5fc3a00a9c514260c686f9390a47581bacde3 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Wed, 16 Oct 2024 17:24:20 -0700 Subject: [PATCH 317/567] Update kafka group id to include instrument name --- applications/consdb/templates/hinfo-deployment.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/consdb/templates/hinfo-deployment.yaml b/applications/consdb/templates/hinfo-deployment.yaml index 332e038e61..aeb9495e36 100644 --- a/applications/consdb/templates/hinfo-deployment.yaml +++ b/applications/consdb/templates/hinfo-deployment.yaml @@ -76,7 +76,7 @@ spec: name: consdb key: "consdb-password" - name: "KAFKA_GROUP_ID" - value: "{{ .Values.kafka.group_id }}" + value: "{{ .Values.kafka.group_id }}-latiss" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" volumeMounts: @@ -184,7 +184,7 @@ spec: name: consdb key: "consdb-password" - name: "KAFKA_GROUP_ID" - value: "{{ .Values.kafka.group_id }}" + value: "{{ .Values.kafka.group_id }}-lsstcomcam" - name: "SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" volumeMounts: @@ -292,7 +292,7 @@ spec: name: consdb key: "consdb-password" - name: "KAFKA_GROUP_ID" - value: "{{ .Values.kafka.group_id }}" + value: "{{ .Values.kafka.group_id }}-lsstcam" - name: 
"SCHEMA_URL" value: "{{ .Values.kafka.schema_url }}" volumeMounts: From 2f372e5bb577b422feecfeb3df5ba04c94d67183 Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Wed, 16 Oct 2024 17:25:26 -0700 Subject: [PATCH 318/567] Update to use tagged version --- applications/consdb/values-summit.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/consdb/values-summit.yaml b/applications/consdb/values-summit.yaml index 29650b6655..f8fc03e7b7 100644 --- a/applications/consdb/values-summit.yaml +++ b/applications/consdb/values-summit.yaml @@ -8,15 +8,15 @@ lfa: hinfo: latiss: enable: true - tag: "main" + tag: "24.10.1" logConfig: "consdb.hinfo=DEBUG" lsstcomcam: enable: true - tag: "main" + tag: "24.10.1" logConfig: "consdb.hinfo=DEBUG" lsstcam: enable: false - tag: "main" + tag: "24.10.1" pq: image: - tag: "main" + tag: "24.10.1" From 44ab65f67b51d5c1fd643c1eff267e3c878ad24f Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Wed, 16 Oct 2024 17:28:42 -0700 Subject: [PATCH 319/567] Update to use tagged version --- applications/consdb/values-tucson-teststand.yaml | 8 ++++---- applications/consdb/values-usdfdev.yaml | 8 ++++---- applications/consdb/values-usdfprod.yaml | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/applications/consdb/values-tucson-teststand.yaml b/applications/consdb/values-tucson-teststand.yaml index 5a0f11d082..c6810b178a 100644 --- a/applications/consdb/values-tucson-teststand.yaml +++ b/applications/consdb/values-tucson-teststand.yaml @@ -8,19 +8,19 @@ lfa: hinfo: latiss: enable: true - tag: "main" + tag: "24.10.1" logConfig: "consdb.hinfo=DEBUG" lsstcomcam: enable: true - tag: "main" + tag: "24.10.1" logConfig: "consdb.hinfo=DEBUG" lsstcam: enable: false - tag: "main" + tag: "24.10.1" pq: image: - tag: "main" + tag: "24.10.1" resources: requests: diff --git a/applications/consdb/values-usdfdev.yaml b/applications/consdb/values-usdfdev.yaml index 71174d1244..9447f015f6 100644 --- 
a/applications/consdb/values-usdfdev.yaml +++ b/applications/consdb/values-usdfdev.yaml @@ -6,13 +6,13 @@ db: hinfo: latiss: enable: false - tag: "tickets-DM-44551" + tag: "24.10.1" lsstcomcam: enable: false - tag: "tickets-DM-44551" + tag: "24.10.1" lsstcam: enable: false - tag: "tickets-DM-44551" + tag: "24.10.1" pq: image: - tag: "main" + tag: "24.10.1" diff --git a/applications/consdb/values-usdfprod.yaml b/applications/consdb/values-usdfprod.yaml index 71174d1244..9447f015f6 100644 --- a/applications/consdb/values-usdfprod.yaml +++ b/applications/consdb/values-usdfprod.yaml @@ -6,13 +6,13 @@ db: hinfo: latiss: enable: false - tag: "tickets-DM-44551" + tag: "24.10.1" lsstcomcam: enable: false - tag: "tickets-DM-44551" + tag: "24.10.1" lsstcam: enable: false - tag: "tickets-DM-44551" + tag: "24.10.1" pq: image: - tag: "main" + tag: "24.10.1" From c51161b86abb11b9bc1929ff1372a65c0be9ee31 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 15 Oct 2024 10:52:29 -0700 Subject: [PATCH 320/567] Increase liveness probe failureThreshold for InfluxDB Enterprise - Increase the failureThreshold to ensure that the pod can remain not ready for longer before it is killed by the liveness probe, and thus has enough time to start. 
--- .../charts/influxdb-enterprise/templates/data-statefulset.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/sasquatch/charts/influxdb-enterprise/templates/data-statefulset.yaml b/applications/sasquatch/charts/influxdb-enterprise/templates/data-statefulset.yaml index 1cc01f575a..87a82cc693 100644 --- a/applications/sasquatch/charts/influxdb-enterprise/templates/data-statefulset.yaml +++ b/applications/sasquatch/charts/influxdb-enterprise/templates/data-statefulset.yaml @@ -86,6 +86,7 @@ spec: containerPort: 25826 protocol: UDP livenessProbe: + failureThreshold: 6 httpGet: path: /ping port: http From c64b06f107fde962b8b77cf775a441889c274361 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Thu, 26 Sep 2024 10:52:01 -0700 Subject: [PATCH 321/567] Change simple workflow template. --- .../integration-testing/templates/simple-workflow.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/control-system-test/charts/integration-testing/templates/simple-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/simple-workflow.yaml index aeb703d910..149873d03e 100644 --- a/applications/control-system-test/charts/integration-testing/templates/simple-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/simple-workflow.yaml @@ -31,15 +31,15 @@ spec: templateRef: name: cleanup-reports-workflow template: cleanup-reports - - - name: standby + - - name: offline templateRef: name: integration-test-job-template template: inttest-template arguments: parameters: - name: integrationtest - value: "-A Test_Report_Standby.list" + value: "-A Test_Report_Offline.list" - name: jobname - value: simple-standby + value: simple-offline - name: reportname - value: standby.xml + value: offline.xml From 5d3b864438619e95a15cf5748d2614a0bd61ec43 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Thu, 26 Sep 2024 10:55:50 -0700 Subject: [PATCH 322/567] Add pull secrets to 
workflow templates that need it. --- .../charts/integration-testing/templates/imaging-workflow.yaml | 2 ++ .../templates/love-stress-test-workflow.yaml | 2 ++ .../charts/integration-testing/templates/restart-workflow.yaml | 2 ++ .../charts/integration-testing/templates/shutdown-workflow.yaml | 2 ++ .../charts/integration-testing/templates/testing-workflow.yaml | 2 ++ 5 files changed, 10 insertions(+) diff --git a/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml index f6b91b7fac..6b4b459d2f 100644 --- a/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml @@ -14,6 +14,8 @@ spec: - name: testreports persistentVolumeClaim: claimName: {{ .Values.persistentVolume.claimName }} + imagePullSecrets: + - name: pull-secret podMetadata: labels: argocd.argoproj.io/instance: {{ .Values.jobLabelName }} diff --git a/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml index c67033f844..8d64b4a0e9 100644 --- a/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml @@ -14,6 +14,8 @@ spec: - name: testreports persistentVolumeClaim: claimName: {{ .Values.persistentVolume.claimName }} + imagePullSecrets: + - name: pull-secret podMetadata: labels: argocd.argoproj.io/instance: {{ .Values.jobLabelName }} diff --git a/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml index 
61137b5a1d..2390c458d7 100644 --- a/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml @@ -14,6 +14,8 @@ spec: - name: testreports persistentVolumeClaim: claimName: {{ .Values.persistentVolume.claimName }} + imagePullSecrets: + - name: pull-secret podMetadata: labels: argocd.argoproj.io/instance: {{ .Values.jobLabelName }} diff --git a/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml index f300ca5feb..367d2fc377 100644 --- a/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml @@ -14,6 +14,8 @@ spec: - name: testreports persistentVolumeClaim: claimName: {{ .Values.persistentVolume.claimName }} + imagePullSecrets: + - name: pull-secret podMetadata: labels: argocd.argoproj.io/instance: {{ .Values.jobLabelName }} diff --git a/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml index e59c9db73c..4bb2b65f0e 100644 --- a/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml @@ -14,6 +14,8 @@ spec: - name: testreports persistentVolumeClaim: claimName: {{ .Values.persistentVolume.claimName }} + imagePullSecrets: + - name: pull-secret podMetadata: labels: argocd.argoproj.io/instance: {{ .Values.jobLabelName }} From 414c3a6f3dfdc711fa387cb28af75172f66b47a8 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Thu, 26 Sep 2024 12:41:54 -0700 Subject: [PATCH 323/567] Add report names to tests. 
--- .../templates/imaging-workflow.yaml | 40 ++++++++++++++ .../templates/love-stress-test-workflow.yaml | 2 + .../templates/restart-workflow.yaml | 18 +++++++ .../templates/shutdown-workflow.yaml | 2 + .../templates/testing-workflow.yaml | 52 +++++++++++++++++++ 5 files changed, 114 insertions(+) diff --git a/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml index 6b4b459d2f..350cb9b44f 100644 --- a/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml @@ -43,6 +43,8 @@ spec: value: "-A Test_Report_AuxTel_Housekeeping.list" - name: jobname value: auxtel-housekeeping + - name: reportname + value: athousekeeping.xml - name: maintel-housekeeping depends: call-cleanup-reports templateRef: @@ -54,6 +56,8 @@ spec: value: "-A Test_Report_MainTel_Housekeeping.list" - name: jobname value: maintel-housekeeping + - name: reportname + value: mthousekeeping.xml - name: auxtel-image-verification depends: auxtel-housekeeping templateRef: @@ -65,6 +69,8 @@ spec: value: "-A Test_Report_AuxTel_Image_Verification.list" - name: jobname value: auxtel-image-verification + - name: reportname + value: at_image_verify.xml - name: auxtel-latiss-daytime-checkout depends: auxtel-image-verification templateRef: @@ -76,6 +82,8 @@ spec: value: "-A Test_Report_AuxTel_LATISS_Checkout.list" - name: jobname value: auxtel-latiss-daytime-checkout + - name: reportname + value: at_latiss_checkout.xml - name: auxtel-telescope-dome-daytime-checkout depends: auxtel-latiss-daytime-checkout templateRef: @@ -87,6 +95,8 @@ spec: value: "-A Test_Report_AuxTel_Telescope_Dome_Checkout.list" - name: jobname value: auxtel-telescope-dome-daytime-checkout + - name: reportname + value: at_tel_dome_checkout.xml - name: 
auxtel-telescope-slew-take-image-daytime-checkout depends: auxtel-telescope-dome-daytime-checkout templateRef: @@ -98,6 +108,8 @@ spec: value: "-A Test_Report_AuxTel_Slew_and_Take_Image_Checkout.list" - name: jobname value: auxtel-telescope-slew-take-image-daytime-checkout + - name: reportname + value: at_slew_take_image_checkout.xml - name: auxtel-prep-flat depends: auxtel-telescope-slew-take-image-daytime-checkout templateRef: @@ -109,6 +121,8 @@ spec: value: "-A Test_Report_AuxTel_Prep_Flat.list" - name: jobname value: auxtel-prep-flat + - name: reportname + value: at_prep_flat.xml - name: auxtel-flat-calibrations depends: auxtel-prep-flat templateRef: @@ -120,6 +134,8 @@ spec: value: "-A Test_Report_AuxTel_Flat_Calibrations.list" - name: jobname value: auxtel-flat-calibrations + - name: reportname + value: at_flat_calib.xml - name: auxtel-ptc-calibrations depends: auxtel-flat-calibrations templateRef: @@ -131,6 +147,8 @@ spec: value: "-A Test_Report_AuxTel_PTC_Calibrations.list" - name: jobname value: auxtel-ptc-calibrations + - name: reportname + value: at_ptc_calib.xml - name: auxtel-prep-onsky depends: auxtel-ptc-calibrations templateRef: @@ -142,6 +160,8 @@ spec: value: "-A Test_Report_AuxTel_Prep_Onsky.list" - name: jobname value: auxtel-prep-onsky + - name: reportname + value: at_prep_onsky.xml - name: auxtel-wep-align depends: auxtel-prep-onsky templateRef: @@ -153,6 +173,8 @@ spec: value: "-A Test_Report_AuxTel_WEP_Align.list" - name: jobname value: auxtel-wep-align + - name: reportname + value: at_wep_align.xml - name: auxtel-acq-take-seq-pointing depends: auxtel-wep-align templateRef: @@ -164,6 +186,8 @@ spec: value: "-A Test_Report_AuxTel_Acq_and_Take_Seq_POINTING.list" - name: jobname value: auxtel-acq-take-seq-pointing + - name: reportname + value: at_acq_take_seq_pointing.xml - name: auxtel-acq-take-seq-verify depends: auxtel-acq-take-seq-pointing templateRef: @@ -175,6 +199,8 @@ spec: value: "-A Test_Report_AuxTel_Acq_Take_Seq_VERIFY.list" - 
name: jobname value: auxtel-acq-take-seq-verify + - name: reportname + value: at_acq_take_seq_verify.xml - name: auxtel-acq-take-seq-test depends: auxtel-acq-take-seq-verify templateRef: @@ -186,6 +212,8 @@ spec: value: "-A Test_Report_AuxTel_Acq_Take_Seq_TEST.list" - name: jobname value: auxtel-acq-take-seq-test + - name: reportname + value: at_acq_take_seq_test.xml - name: auxtel-acq-take-seq-nominal depends: auxtel-acq-take-seq-test templateRef: @@ -197,6 +225,8 @@ spec: value: "-A Test_Report_AuxTel_Acq_Take_Seq_NOMINAL.list" - name: jobname value: auxtel-acq-take-seq-nominal + - name: reportname + value: at_acq_take_seq_nominal.xml - name: auxtel-stop depends: auxtel-acq-take-seq-nominal templateRef: @@ -208,6 +238,8 @@ spec: value: "-A Test_Report_AuxTel_Stop.list" - name: jobname value: auxtel-stop + - name: reportname + value: at_stop.xml - name: auxtel-shutdown depends: auxtel-stop templateRef: @@ -219,6 +251,8 @@ spec: value: "-A Test_Report_AuxTel_Shutdown.list" - name: jobname value: auxtel-shutdown + - name: reportname + value: at_shutdown.xml - name: enable-atcs depends: auxtel-shutdown templateRef: @@ -230,6 +264,8 @@ spec: value: "-A Test_Report_Enable_ATCS.list" - name: jobname value: enable-atcs + - name: reportname + value: enable_atcs.xml - name: bigcam-image-verification depends: maintel-housekeeping templateRef: @@ -241,6 +277,8 @@ spec: value: "-A Test_Report_BigCamera_Image_Verification.list" - name: jobname value: bigcam-image-verification + - name: reportname + value: bigcam_image_verify.xml - name: bigcam-calibrations depends: bigcam-image-verification templateRef: @@ -252,6 +290,8 @@ spec: value: "-A Test_Report_BigCamera_Calibrations.list" - name: jobname value: bigcam-calibrations + - name: reportname + value: bigcam_calib.xml - name: call-save-reports depends: bigcam-calibrations && enable-atcs templateRef: diff --git a/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml 
b/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml index 8d64b4a0e9..11f753c23a 100644 --- a/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml @@ -43,6 +43,8 @@ spec: value: "-A Test_Report_LOVE_Stress_Test.list" - name: jobname value: love-stress-test + - name: reportname + value: love_stress_test.xml - name: call-save-reports depends: love-stress-test templateRef: diff --git a/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml index 2390c458d7..ec04131a53 100644 --- a/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml @@ -43,6 +43,8 @@ spec: value: "-A Test_Report_Offline.list" - name: jobname value: cameras-offline + - name: reportname + value: offline.xml - name: standby depends: cameras-offline templateRef: @@ -54,6 +56,8 @@ spec: value: "-A Test_Report_Standby.list" - name: jobname value: standby + - name: reportname + value: standby.xml - name: disabled depends: standby templateRef: @@ -65,6 +69,8 @@ spec: value: "-A Test_Report_Disabled.list" - name: jobname value: disabled + - name: reportname + value: disabled.xml - name: enabled depends: disabled templateRef: @@ -76,6 +82,8 @@ spec: value: "-A Test_Report_Enabled.list" - name: jobname value: enabled + - name: reportname + value: enabled.xml - name: auxtel-housekeeping depends: enabled templateRef: @@ -87,6 +95,8 @@ spec: value: "-A Test_Report_AuxTel_Housekeeping.list" - name: jobname value: auxtel-housekeeping + - name: reportname + value: athousekeeping.xml - name: maintel-housekeeping depends: enabled templateRef: @@ -98,6 
+108,8 @@ spec: value: "-A Test_Report_MainTel_Housekeeping.list" - name: jobname value: maintel-housekeeping + - name: reportname + value: mthousekeeping.xml - name: auxtel-image-verification depends: auxtel-housekeeping templateRef: @@ -109,6 +121,8 @@ spec: value: "-A Test_Report_AuxTel_Image_Verification.list" - name: jobname value: auxtel-image-verification + - name: reportname + value: at_image_verify.xml - name: bigcam-image-verification depends: maintel-housekeeping templateRef: @@ -120,6 +134,8 @@ spec: value: "-A Test_Report_BigCamera_Image_Verification.list" - name: jobname value: bigcam-image-verification + - name: reportname + value: bigcam_image_verify.xml - name: love-stress-test depends: auxtel-image-verification && bigcam-image-verification templateRef: @@ -131,6 +147,8 @@ spec: value: "-A Test_Report_LOVE_Stress_Test.list" - name: jobname value: love-stress-test + - name: reportname + value: love_stress_test.xml - name: call-save-reports depends: love-stress-test templateRef: diff --git a/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml index 367d2fc377..da6ab36d22 100644 --- a/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml @@ -43,6 +43,8 @@ spec: value: "-A Test_Report_Shutdown.list" - name: jobname value: shutdown + - name: reportname + value: shutdown.xml - name: call-save-reports depends: shutdown templateRef: diff --git a/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml index 4bb2b65f0e..5e732fcfdb 100644 --- a/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml +++ 
b/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml @@ -43,6 +43,8 @@ spec: value: "-A Test_Report_Offline.list" - name: jobname value: cameras-offline + - name: reportname + value: offline.xml - name: standby depends: cameras-offline templateRef: @@ -54,6 +56,8 @@ spec: value: "-A Test_Report_Standby.list" - name: jobname value: standby + - name: reportname + value: standby.xml - name: disabled depends: standby templateRef: @@ -65,6 +69,8 @@ spec: value: "-A Test_Report_Disabled.list" - name: jobname value: disabled + - name: reportname + value: disabled.xml - name: enabled depends: disabled templateRef: @@ -76,6 +82,8 @@ spec: value: "-A Test_Report_Enabled.list" - name: jobname value: enabled + - name: reportname + value: enabled.xml - name: auxtel-housekeeping depends: enabled templateRef: @@ -87,6 +95,8 @@ spec: value: "-A Test_Report_AuxTel_Housekeeping.list" - name: jobname value: auxtel-housekeeping + - name: reportname + value: athousekeeping.xml - name: maintel-housekeeping depends: enabled templateRef: @@ -98,6 +108,8 @@ spec: value: "-A Test_Report_MainTel_Housekeeping.list" - name: jobname value: maintel-housekeeping + - name: reportname + value: mthousekeeping.xml - name: auxtel-image-verification depends: auxtel-housekeeping templateRef: @@ -109,6 +121,8 @@ spec: value: "-A Test_Report_AuxTel_Image_Verification.list" - name: jobname value: auxtel-image-verification + - name: reportname + value: at_image_verify.xml - name: auxtel-latiss-daytime-checkout depends: auxtel-image-verification templateRef: @@ -120,6 +134,8 @@ spec: value: "-A Test_Report_AuxTel_LATISS_Checkout.list" - name: jobname value: auxtel-latiss-daytime-checkout + - name: reportname + value: at_latiss_checkout.xml - name: auxtel-telescope-dome-daytime-checkout depends: auxtel-latiss-daytime-checkout templateRef: @@ -131,6 +147,8 @@ spec: value: "-A Test_Report_AuxTel_Telescope_Dome_Checkout.list" - name: jobname value: 
auxtel-telescope-dome-daytime-checkout + - name: reportname + value: at_tel_dome_checkout.xml - name: auxtel-telescope-slew-take-image-daytime-checkout depends: auxtel-telescope-dome-daytime-checkout templateRef: @@ -142,6 +160,8 @@ spec: value: "-A Test_Report_AuxTel_Slew_and_Take_Image_Checkout.list" - name: jobname value: auxtel-telescope-slew-take-image-daytime-checkout + - name: reportname + value: at_slew_take_image_checkout.xml - name: auxtel-prep-flat depends: auxtel-telescope-slew-take-image-daytime-checkout templateRef: @@ -153,6 +173,8 @@ spec: value: "-A Test_Report_AuxTel_Prep_Flat.list" - name: jobname value: auxtel-prep-flat + - name: reportname + value: at_prep_flat.xml - name: auxtel-flat-calibrations depends: auxtel-prep-flat templateRef: @@ -164,6 +186,8 @@ spec: value: "-A Test_Report_AuxTel_Flat_Calibrations.list" - name: jobname value: auxtel-flat-calibrations + - name: reportname + value: at_flat_calib.xml - name: auxtel-ptc-calibrations depends: auxtel-flat-calibrations templateRef: @@ -175,6 +199,8 @@ spec: value: "-A Test_Report_AuxTel_PTC_Calibrations.list" - name: jobname value: auxtel-ptc-calibrations + - name: reportname + value: at_ptc_calib.xml - name: auxtel-prep-onsky depends: auxtel-ptc-calibrations templateRef: @@ -186,6 +212,8 @@ spec: value: "-A Test_Report_AuxTel_Prep_Onsky.list" - name: jobname value: auxtel-prep-onsky + - name: reportname + value: at_prep_onsky.xml - name: auxtel-wep-align depends: auxtel-prep-onsky templateRef: @@ -197,6 +225,8 @@ spec: value: "-A Test_Report_AuxTel_WEP_Align.list" - name: jobname value: auxtel-wep-align + - name: reportname + value: at_wep_align.xml - name: auxtel-acq-take-seq-pointing depends: auxtel-wep-align templateRef: @@ -208,6 +238,8 @@ spec: value: "-A Test_Report_AuxTel_Acq_and_Take_Seq_POINTING.list" - name: jobname value: auxtel-acq-take-seq-pointing + - name: reportname + value: at_acq_take_seq_pointing.xml - name: auxtel-acq-take-seq-verify depends: 
auxtel-acq-take-seq-pointing templateRef: @@ -219,6 +251,8 @@ spec: value: "-A Test_Report_AuxTel_Acq_Take_Seq_VERIFY.list" - name: jobname value: auxtel-acq-take-seq-verify + - name: reportname + value: at_acq_take_seq_verify.xml - name: auxtel-acq-take-seq-test depends: auxtel-acq-take-seq-verify templateRef: @@ -230,6 +264,8 @@ spec: value: "-A Test_Report_AuxTel_Acq_Take_Seq_TEST.list" - name: jobname value: auxtel-acq-take-seq-test + - name: reportname + value: at_acq_take_seq_test.xml - name: auxtel-acq-take-seq-nominal depends: auxtel-acq-take-seq-test templateRef: @@ -241,6 +277,8 @@ spec: value: "-A Test_Report_AuxTel_Acq_Take_Seq_NOMINAL.list" - name: jobname value: auxtel-acq-take-seq-nominal + - name: reportname + value: at_acq_take_seq_nominal.xml - name: auxtel-stop depends: auxtel-acq-take-seq-nominal templateRef: @@ -252,6 +290,8 @@ spec: value: "-A Test_Report_AuxTel_Stop.list" - name: jobname value: auxtel-stop + - name: reportname + value: at_stop.xml - name: auxtel-shutdown depends: auxtel-stop templateRef: @@ -263,6 +303,8 @@ spec: value: "-A Test_Report_AuxTel_Shutdown.list" - name: jobname value: auxtel-shutdown + - name: reportname + value: at_shutdown.xml - name: enable-atcs depends: auxtel-shutdown templateRef: @@ -274,6 +316,8 @@ spec: value: "-A Test_Report_Enable_ATCS.list" - name: jobname value: enable-atcs + - name: reportname + value: enable_atcs.xml - name: bigcam-image-verification depends: maintel-housekeeping templateRef: @@ -285,6 +329,8 @@ spec: value: "-A Test_Report_BigCamera_Image_Verification.list" - name: jobname value: bigcam-image-verification + - name: reportname + value: bigcam_image_verify.xml - name: bigcam-calibrations depends: bigcam-image-verification templateRef: @@ -296,6 +342,8 @@ spec: value: "-A Test_Report_BigCamera_Calibrations.list" - name: jobname value: bigcam-calibrations + - name: reportname + value: bigcam_calib.xml - name: love-stress-test depends: bigcam-calibrations && enable-atcs templateRef: @@ 
-307,6 +355,8 @@ spec: value: "-A Test_Report_LOVE_Stress_Test.list" - name: jobname value: love-stress-test + - name: reportname + value: love_stress_test.xml - name: shutdown depends: love-stress-test templateRef: @@ -318,6 +368,8 @@ spec: value: "-A Test_Report_Shutdown.list" - name: jobname value: shutdown + - name: reportname + value: shutdown.xml - name: call-save-reports depends: shutdown templateRef: From aa1338170b4a269ceb5d409fb25f958e13764517 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Thu, 26 Sep 2024 13:02:29 -0700 Subject: [PATCH 324/567] Add sensor CSC tests. --- .../templates/sensor-csc-workflow.yaml | 47 +++++++++++++++++++ .../templates/testing-workflow.yaml | 15 +++++- 2 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 applications/control-system-test/charts/integration-testing/templates/sensor-csc-workflow.yaml diff --git a/applications/control-system-test/charts/integration-testing/templates/sensor-csc-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/sensor-csc-workflow.yaml new file mode 100644 index 0000000000..44798c9998 --- /dev/null +++ b/applications/control-system-test/charts/integration-testing/templates/sensor-csc-workflow.yaml @@ -0,0 +1,47 @@ +apiVersion: argoproj.io/v1alpha1 +kind: WorkflowTemplate +metadata: + name: sensor-cscs-workflow + labels: + workflows.argoproj.io/type: "integration-test" + argocd.argoproj.io/instance: integration-testing +spec: + serviceAccountName: {{ .Values.serviceAccount }} + artifactRepositoryRef: + configMap: integration-test-controller-configmap + key: artifactRepository + volumes: + - name: testreports + persistentVolumeClaim: + claimName: {{ .Values.persistentVolume.claimName }} + imagePullSecrets: + - name: pull-secret + podMetadata: + labels: + argocd.argoproj.io/instance: integration-testing + arguments: + parameters: + - name: date-key + value: "20240725" + entrypoint: run-tests + templates: + - name: run-tests + dag: + tasks: + - 
name: call-cleanup-reports + templateRef: + name: cleanup-reports-workflow + template: cleanup-reports + - name: sensor-cscs + depends: call-cleanup-reports + templateRef: + name: integration-test-job-template + template: inttest-template + arguments: + parameters: + - name: integrationtest + value: "-A Test_Report_SensorCSCs.list" + - name: jobname + value: sensor-cscs + - name: reportname + value: sensor_cscs.xml diff --git a/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml index 5e732fcfdb..ff1e60118f 100644 --- a/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml @@ -84,6 +84,19 @@ spec: value: enabled - name: reportname value: enabled.xml + - name: sensor-cscs + depends: enabled + templateRef: + name: integration-test-job-template + template: inttest-template + arguments: + parameters: + - name: integrationtest + value: "-A Test_Report_SensorCSCs.list" + - name: jobname + value: sensor-cscs + - name: reportname + value: sensor_cscs.xml - name: auxtel-housekeeping depends: enabled templateRef: @@ -345,7 +358,7 @@ spec: - name: reportname value: bigcam_calib.xml - name: love-stress-test - depends: bigcam-calibrations && enable-atcs + depends: bigcam-calibrations && enable-atcs && sensor-cscs templateRef: name: integration-test-job-template template: inttest-template From a4f6b12a1fb849e0307ecc466e2044110c5c3bf1 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Thu, 26 Sep 2024 13:06:15 -0700 Subject: [PATCH 325/567] Remove save reports step as they are saved on the fly now. 
--- .../integration-testing/templates/imaging-workflow.yaml | 5 ----- .../templates/love-stress-test-workflow.yaml | 5 ----- .../integration-testing/templates/restart-workflow.yaml | 5 ----- .../integration-testing/templates/shutdown-workflow.yaml | 5 ----- .../integration-testing/templates/testing-workflow.yaml | 5 ----- 5 files changed, 25 deletions(-) diff --git a/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml index 350cb9b44f..7158c26804 100644 --- a/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/imaging-workflow.yaml @@ -292,8 +292,3 @@ spec: value: bigcam-calibrations - name: reportname value: bigcam_calib.xml - - name: call-save-reports - depends: bigcam-calibrations && enable-atcs - templateRef: - name: save-reports-workflow - template: save-reports diff --git a/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml index 11f753c23a..219df45e9d 100644 --- a/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/love-stress-test-workflow.yaml @@ -45,8 +45,3 @@ spec: value: love-stress-test - name: reportname value: love_stress_test.xml - - name: call-save-reports - depends: love-stress-test - templateRef: - name: save-reports-workflow - template: save-reports diff --git a/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml index ec04131a53..8fe44bbd4e 100644 --- 
a/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/restart-workflow.yaml @@ -149,8 +149,3 @@ spec: value: love-stress-test - name: reportname value: love_stress_test.xml - - name: call-save-reports - depends: love-stress-test - templateRef: - name: save-reports-workflow - template: save-reports diff --git a/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml index da6ab36d22..5ae3e86256 100644 --- a/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/shutdown-workflow.yaml @@ -45,8 +45,3 @@ spec: value: shutdown - name: reportname value: shutdown.xml - - name: call-save-reports - depends: shutdown - templateRef: - name: save-reports-workflow - template: save-reports diff --git a/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml b/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml index ff1e60118f..a1e0ca2323 100644 --- a/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/testing-workflow.yaml @@ -383,8 +383,3 @@ spec: value: shutdown - name: reportname value: shutdown.xml - - name: call-save-reports - depends: shutdown - templateRef: - name: save-reports-workflow - template: save-reports From 2ab5f250bdf148274e8aecc421fea6ee15263299 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Thu, 26 Sep 2024 13:51:23 -0700 Subject: [PATCH 326/567] Add resources to job pod. 
--- .../templates/job-workflow-template.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/applications/control-system-test/charts/integration-testing/templates/job-workflow-template.yaml b/applications/control-system-test/charts/integration-testing/templates/job-workflow-template.yaml index 7300085d61..3f02750a3a 100644 --- a/applications/control-system-test/charts/integration-testing/templates/job-workflow-template.yaml +++ b/applications/control-system-test/charts/integration-testing/templates/job-workflow-template.yaml @@ -60,6 +60,13 @@ spec: secretKeyRef: name: control-system-test key: ts-salkafka-password + resources: + limits: + cpu: 4 + memory: 4Gi + requests: + cpu: 1 + memory: 1Gi volumeMounts: - name: testreports mountPath: {{ .Values.reportLocation }} From de6d15efdcbf95111d1b062127d71bd6a2dce8b4 Mon Sep 17 00:00:00 2001 From: pav511 <38131208+pav511@users.noreply.github.com> Date: Thu, 17 Oct 2024 15:44:21 -0700 Subject: [PATCH 327/567] dan fuchs argocd rbac usdfprod --- applications/argocd/values-usdfprod.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/argocd/values-usdfprod.yaml b/applications/argocd/values-usdfprod.yaml index 9c5fdf1734..e97bea0f23 100644 --- a/applications/argocd/values-usdfprod.yaml +++ b/applications/argocd/values-usdfprod.yaml @@ -34,6 +34,7 @@ argo-cd: g, afausti@slac.stanford.edu, role:admin g, athor@slac.stanford.edu, role:admin + g, danfuchs@slac.stanford.edu, role:admin g, dspeck@slac.stanford.edu, role:admin g, frossie@slac.stanford.edu, role:admin g, jsick@slac.stanford.edu, role:admin From 06f77a95ead21ac8d658cdf10e904445de4c7183 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Thu, 17 Oct 2024 17:20:41 +0000 Subject: [PATCH 328/567] Set the TTL after finishing to 0. 
--- applications/uws/charts/uws-api-server/templates/configmap.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/uws/charts/uws-api-server/templates/configmap.yaml b/applications/uws/charts/uws-api-server/templates/configmap.yaml index 4956d00f2d..069bc9da23 100644 --- a/applications/uws/charts/uws-api-server/templates/configmap.yaml +++ b/applications/uws/charts/uws-api-server/templates/configmap.yaml @@ -4,6 +4,7 @@ metadata: name: {{ .Release.Name }}-configmap data: config: | + ttlSecondsAfterFinished: 0 workingVolume: {{- toYaml .Values.workingVolume | nindent 6 }} volumes: From 8dc07af4a353e7fe3e4b0990ac50d97e1d498dce Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Thu, 17 Oct 2024 15:57:15 -0700 Subject: [PATCH 329/567] Make TTL configurable. --- applications/uws/README.md | 1 + applications/uws/charts/uws-api-server/README.md | 1 + .../uws/charts/uws-api-server/templates/configmap.yaml | 2 +- applications/uws/charts/uws-api-server/values.yaml | 3 +++ 4 files changed, 6 insertions(+), 1 deletion(-) diff --git a/applications/uws/README.md b/applications/uws/README.md index 4f1cb161a0..4ec810c701 100644 --- a/applications/uws/README.md +++ b/applications/uws/README.md @@ -38,6 +38,7 @@ Deployment for the UWS and DM OCPS CSCs | uws-api-server.server.securityContext.runAsGroup | int | `202` | Set the GID for the UWS server container entrypoint | | uws-api-server.server.securityContext.runAsUser | int | `1000` | Set the UID for the UWS server container entrypoint | | uws-api-server.targetCluster | string | `""` | Target Kubernetes cluster | +| uws-api-server.ttlSecondsAfterFinished | int | `0` | Time to live (in seconds) for pod after it completes Allows logs to be inspected. | | uws-api-server.vaultPathPrefix | string | `""` | Site-specific Vault path for secrets. | | uws-api-server.volumes | list | `[]` | Central data volumes to be mounted in job containers. 
Each object listed can have the following attributes defined: _name_ (A label identifier for the data volume mount) _server_ (The hostname for the NFS server with the data volume mount) _claimName_ (The PVC claim name for the data volume mount) _mountPath_ (The mount path in the server container for the data volume mount) _exportPath_ (The export path on the NFS server for the data volume mount) _subPath_ (A possible sub path for the data volume mount) _readOnly_ (Flag to mark the data volume mount as read only or read/write) | | uws-api-server.workingVolume.claimName | string | `""` | The PVC claim name for the working volume | diff --git a/applications/uws/charts/uws-api-server/README.md b/applications/uws/charts/uws-api-server/README.md index d0a72c8090..92e3631a55 100644 --- a/applications/uws/charts/uws-api-server/README.md +++ b/applications/uws/charts/uws-api-server/README.md @@ -24,6 +24,7 @@ Helm chart for deploying the Universal Worker Service API Server | server.securityContext.runAsGroup | int | `202` | Set the GID for the UWS server container entrypoint | | server.securityContext.runAsUser | int | `1000` | Set the UID for the UWS server container entrypoint | | targetCluster | string | `""` | Target Kubernetes cluster | +| ttlSecondsAfterFinished | int | `0` | Time to live (in seconds) for pod after it completes Allows logs to be inspected. | | vaultPathPrefix | string | `""` | Site-specific Vault path for secrets. | | volumes | list | `[]` | Central data volumes to be mounted in job containers. 
Each object listed can have the following attributes defined: _name_ (A label identifier for the data volume mount) _server_ (The hostname for the NFS server with the data volume mount) _claimName_ (The PVC claim name for the data volume mount) _mountPath_ (The mount path in the server container for the data volume mount) _exportPath_ (The export path on the NFS server for the data volume mount) _subPath_ (A possible sub path for the data volume mount) _readOnly_ (Flag to mark the data volume mount as read only or read/write) | | workingVolume.claimName | string | `""` | The PVC claim name for the working volume | diff --git a/applications/uws/charts/uws-api-server/templates/configmap.yaml b/applications/uws/charts/uws-api-server/templates/configmap.yaml index 069bc9da23..c37a98cfc8 100644 --- a/applications/uws/charts/uws-api-server/templates/configmap.yaml +++ b/applications/uws/charts/uws-api-server/templates/configmap.yaml @@ -4,7 +4,7 @@ metadata: name: {{ .Release.Name }}-configmap data: config: | - ttlSecondsAfterFinished: 0 + ttlSecondsAfterFinished: "{{ .Values.ttlSecondsAfterFinished }}" workingVolume: {{- toYaml .Values.workingVolume | nindent 6 }} volumes: diff --git a/applications/uws/charts/uws-api-server/values.yaml b/applications/uws/charts/uws-api-server/values.yaml index 77d14684a0..3e37c2c88f 100644 --- a/applications/uws/charts/uws-api-server/values.yaml +++ b/applications/uws/charts/uws-api-server/values.yaml @@ -71,3 +71,6 @@ volumes: [] # -- Temporary flag to make service deploy own namespace. # Doing this to not disrupt other sites. createNamespace: false +# -- Time to live (in seconds) for pod after it completes +# Allows logs to be inspected. +ttlSecondsAfterFinished: 0 From b4f99d0fb8e2ec9f81eb601d4121f265d4e248d6 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Thu, 17 Oct 2024 15:59:39 -0700 Subject: [PATCH 330/567] Allow pods to stay at test stands. 
--- applications/uws/values-base.yaml | 1 + applications/uws/values-tucson-teststand.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/applications/uws/values-base.yaml b/applications/uws/values-base.yaml index c9ef439fe2..6b5b6bd37a 100644 --- a/applications/uws/values-base.yaml +++ b/applications/uws/values-base.yaml @@ -4,6 +4,7 @@ uws-api-server: image: tag: latest logLevel: INFO + ttlSecondsAfterFinished: 3600 butlerPg: secretName: uws containerPath: /home/lsst/.lsst diff --git a/applications/uws/values-tucson-teststand.yaml b/applications/uws/values-tucson-teststand.yaml index 9af166ba6e..869325aefc 100644 --- a/applications/uws/values-tucson-teststand.yaml +++ b/applications/uws/values-tucson-teststand.yaml @@ -6,6 +6,7 @@ uws-api-server: image: tag: latest logLevel: INFO + ttlSecondsAfterFinished: 3600 butlerPg: secretName: uws containerPath: /home/lsst/.lsst From 0acb1601b3538de86f814afda810a0d48a864e67 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 8 Oct 2024 14:52:54 -0300 Subject: [PATCH 331/567] rubintv: change workers image to use usdf version and match user permissions --- applications/rubintv/values-usdfdev.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/rubintv/values-usdfdev.yaml b/applications/rubintv/values-usdfdev.yaml index e165673019..ce990de409 100644 --- a/applications/rubintv/values-usdfdev.yaml +++ b/applications/rubintv/values-usdfdev.yaml @@ -24,11 +24,11 @@ rubintv: workers: replicas: 1 image: - repository: ts-dockerhub.lsst.org/rapid-analysis - tag: c0037 + repository: lsstts/rapid-analysis + tag: c0039_usdf pullPolicy: Always - uid: 73006 - gid: 73006 + uid: 15517 + gid: 4085 scriptsLocation: /repos/rubintv_analysis_service/scripts script: rubintv_worker.py -a rubintv -p 8080 -l usdf env: From 4d2b51df80c68e5eeacb121027fa9b15709392b3 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Date: Thu, 10 Oct 2024 13:43:10 -0300 Subject: [PATCH 332/567] rubintv: add PVC for 
usdfdev NFS mount for ddv configurations --- applications/rubintv/values-usdfdev.yaml | 8 ++++++++ charts/rubintv/templates/deployment-workers.yaml | 3 +++ 2 files changed, 11 insertions(+) diff --git a/applications/rubintv/values-usdfdev.yaml b/applications/rubintv/values-usdfdev.yaml index ce990de409..1d8ae5fb58 100644 --- a/applications/rubintv/values-usdfdev.yaml +++ b/applications/rubintv/values-usdfdev.yaml @@ -55,6 +55,14 @@ rubintv: capacity: 1Gi accessMode: ReadOnlyMany mountPath: /sdf/data/rubin + - name: sdf-data-rubin-rubintv-ddv-config + persistentVolumeClaim: + name: sdf-data-rubin-rubintv-ddv-config + storageClassName: sdf-data-rubin + capacity: 1Gi + accessMode: ReadWriteMany + mountPath: /etc/test + subPath: shared/rubintv-ddv-config resources: limits: cpu: 2.0 diff --git a/charts/rubintv/templates/deployment-workers.yaml b/charts/rubintv/templates/deployment-workers.yaml index f4bebf3e5a..212cf2f20f 100644 --- a/charts/rubintv/templates/deployment-workers.yaml +++ b/charts/rubintv/templates/deployment-workers.yaml @@ -88,6 +88,9 @@ spec: {{- range $vol := .Values.workers.volumes }} - name: {{ $vol.name }} mountPath: {{ $vol.mountPath }} + {{- if ($vol.subPath) }} + subPath: {{ $vol.subPath }} + {{- end }} {{- end }} initContainers: - name: "secret-perm-fixer" From 320710f397045daaadbdf79a0d05752355529986 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Date: Mon, 14 Oct 2024 13:49:44 -0300 Subject: [PATCH 333/567] rubintv: extend volume mount points to allow PVC on usdf and NFS on summit, for dev deployments --- applications/rubintv-dev/values-summit.yaml | 8 +++++++- applications/rubintv/README.md | 3 ++- applications/rubintv/values-usdfdev.yaml | 6 +++--- applications/rubintv/values.yaml | 19 ++++++++++++++----- charts/rubintv/README.md | 3 ++- .../rubintv/templates/deployment-workers.yaml | 16 ++++++++++++++-- charts/rubintv/templates/pvc.yaml | 4 ++-- charts/rubintv/values.yaml | 19 ++++++++++++++----- 8 files changed, 58 insertions(+), 20 
deletions(-) diff --git a/applications/rubintv-dev/values-summit.yaml b/applications/rubintv-dev/values-summit.yaml index d9ae6f096b..95fa42519f 100644 --- a/applications/rubintv-dev/values-summit.yaml +++ b/applications/rubintv-dev/values-summit.yaml @@ -28,7 +28,7 @@ rubintv: replicas: 1 image: repository: ts-dockerhub.lsst.org/rapid-analysis - tag: c0037 + tag: c0039 pullPolicy: Always uid: 73006 gid: 73006 @@ -43,6 +43,12 @@ rubintv: value: "/sdf/group/rubin/repo/ir2/butler.yaml" - name: DEPLOY_BRANCH value: *s-dbE + nfsMountpoint: + - name: project-rubintv-ddv-config + containerPath: /var/ddv-config + readOnly: false + server: nfs1.cp.lsst.org + serverPath: /project/rubintv/ddv-config resources: requests: cpu: 0.5 diff --git a/applications/rubintv/README.md b/applications/rubintv/README.md index 45e56b7ba2..9caeed997e 100644 --- a/applications/rubintv/README.md +++ b/applications/rubintv/README.md @@ -53,12 +53,13 @@ Real-time display front end | rubintv.workers.image.repository | string | `"ts-dockerhub.lsst.org/rubintv-broadcaster"` | The Docker registry name for the container image. | | rubintv.workers.image.tag | string | `"develop"` | The tag of the container image to use. | | rubintv.workers.imagePullSecrets | list | See `values.yaml` | Image pull secrets. 
| +| rubintv.workers.nfsMountpoint | list | See `values.yaml` | NFS mountpoints for the rubintv worker pods | | rubintv.workers.nodeSelector | object | `{}` | Node selector rules for the rubintv worker pods | | rubintv.workers.pathPrefix | string | `"/"` | Prefix for the (internal) worker API routes | | rubintv.workers.podAnnotations | object | `{}` | Annotations for the rubintv worker pods | +| rubintv.workers.pvcMountpoint | list | See `values.yaml` | PVC claims for the rubintv worker pods | | rubintv.workers.replicas | int | `0` | how many replicas to use | | rubintv.workers.resources | object | `{}` | Resource limits and requests for the rubintv worker pods | | rubintv.workers.script | string | `"slac/rubintv/workerPod1.py"` | Script that runs in RUN_ARG. This dynamic mechanism needs to be replaced with something less scary, but there is resistance to that, at least while iterating. | | rubintv.workers.tolerations | list | `[]` | Tolerations for the rubintv worker pods | | rubintv.workers.uid | string | `nil` | UID to run as (site-dependent because of filesystem access; must be specified) | -| rubintv.workers.volumes | list | See `values.yaml` | Volumes for the rubintv worker pods | diff --git a/applications/rubintv/values-usdfdev.yaml b/applications/rubintv/values-usdfdev.yaml index 1d8ae5fb58..1bc20932f8 100644 --- a/applications/rubintv/values-usdfdev.yaml +++ b/applications/rubintv/values-usdfdev.yaml @@ -27,7 +27,7 @@ rubintv: repository: lsstts/rapid-analysis tag: c0039_usdf pullPolicy: Always - uid: 15517 + uid: 17951 gid: 4085 scriptsLocation: /repos/rubintv_analysis_service/scripts script: rubintv_worker.py -a rubintv -p 8080 -l usdf @@ -40,7 +40,7 @@ rubintv: value: "/sdf/group/rubin/repo/ir2/butler.yaml" - name: DEPLOY_BRANCH value: *dbE - volumes: + pvcMountpoint: - name: sdf-group-rubin persistentVolumeClaim: name: sdf-group-rubin @@ -61,7 +61,7 @@ rubintv: storageClassName: sdf-data-rubin capacity: 1Gi accessMode: ReadWriteMany - mountPath: 
/etc/test + mountPath: /var/ddv-config subPath: shared/rubintv-ddv-config resources: limits: diff --git a/applications/rubintv/values.yaml b/applications/rubintv/values.yaml index 79342a1339..6d754d0153 100644 --- a/applications/rubintv/values.yaml +++ b/applications/rubintv/values.yaml @@ -104,11 +104,11 @@ rubintv: imagePullSecrets: [] # Each entry is of the form: { name: pull-secret-name } - # -- Volumes for the rubintv worker pods + # -- PVC claims for the rubintv worker pods # @default -- See `values.yaml` - volumes: [] + pvcMountpoint: [] # Each list item must have the following form: - # { name: volume-name, + # { name: pvc-name, # accessMode: one of "ReadOnly", "ReadWriteOnce", "ReadWriteMany", # mountPath: path-mounted-in-container, # persistentVolumeClaim: { @@ -117,8 +117,17 @@ rubintv: # capacity: size-as-string-of-pvc (e.g. "1Gi") # } # } - # It is planned to implement "nfs" as an alternative to - # "PersistentVolumeClaim" but that has not yet been done. + + # -- NFS mountpoints for the rubintv worker pods + # @default -- See `values.yaml` + nfsMountpoint: [] + # Each list item must have the following form: + # { name: nfs-name, + # containerPath: path-mounted-in-container, + # readOnly: boolean, + # server: nfs-server, + # serverPath: nfs-server-path + # } # -- Resource limits and requests for the rubintv worker pods resources: {} diff --git a/charts/rubintv/README.md b/charts/rubintv/README.md index 330b344e75..72573843c6 100644 --- a/charts/rubintv/README.md +++ b/charts/rubintv/README.md @@ -53,13 +53,14 @@ Real-time display front end | workers.image.repository | string | `"ts-dockerhub.lsst.org/rubintv-broadcaster"` | The Docker registry name for the container image. | | workers.image.tag | string | `"develop"` | The tag of the container image to use. | | workers.imagePullSecrets | list | See `values.yaml` | Image pull secrets. 
| +| workers.nfsMountpoint | list | See `values.yaml` | NFS mountpoints for the rubintv worker pods | | workers.nodeSelector | object | `{}` | Node selector rules for the rubintv worker pods | | workers.pathPrefix | string | `"/"` | Prefix for the (internal) worker API routes | | workers.podAnnotations | object | `{}` | Annotations for the rubintv worker pods | +| workers.pvcMountpoint | list | See `values.yaml` | PVC claims for the rubintv worker pods | | workers.replicas | int | `0` | how many replicas to use | | workers.resources | object | `{}` | Resource limits and requests for the rubintv worker pods | | workers.script | string | `"slac/rubintv/workerPod1.py"` | Script that runs in RUN_ARG. This dynamic mechanism needs to be replaced with something less scary, but there is resistance to that, at least while iterating. | | workers.scriptsLocation | string | `"/repos/rubintv_production/scripts"` | The location of the scripts folder where the worker pod will run specific scripts, set by RUN_ARG. 
| | workers.tolerations | list | `[]` | Tolerations for the rubintv worker pods | | workers.uid | string | `nil` | UID to run as (site-dependent because of filesystem access; must be specified) | -| workers.volumes | list | See `values.yaml` | Volumes for the rubintv worker pods | diff --git a/charts/rubintv/templates/deployment-workers.yaml b/charts/rubintv/templates/deployment-workers.yaml index 212cf2f20f..b4784e839e 100644 --- a/charts/rubintv/templates/deployment-workers.yaml +++ b/charts/rubintv/templates/deployment-workers.yaml @@ -85,13 +85,18 @@ spec: volumeMounts: - name: "user-secrets" mountPath: "/etc/secrets" - {{- range $vol := .Values.workers.volumes }} + {{- range $vol := .Values.workers.pvcMountpoint }} - name: {{ $vol.name }} mountPath: {{ $vol.mountPath }} {{- if ($vol.subPath) }} subPath: {{ $vol.subPath }} {{- end }} {{- end }} + {{- range $vol := .Values.workers.nfsMountpoint }} + - name: {{ $vol.name }} + mountPath: {{ $vol.containerPath }} + readOnly: {{ $vol.readOnly }} + {{- end }} initContainers: - name: "secret-perm-fixer" image: "busybox" @@ -126,13 +131,20 @@ spec: {{- end }} - name: "user-secrets" emptyDir: {} - {{- range $vol := .Values.workers.volumes }} + {{- range $vol := .Values.workers.pvcMountpoint }} - name: {{ $vol.name | quote }} {{ with $vol.persistentVolumeClaim }} persistentVolumeClaim: claimName: {{ .name | quote }} {{- end }} {{- end }} + {{- range $vol := .Values.workers.nfsMountpoint }} + - name: {{ $vol.name | quote }} + nfs: + path: {{ $vol.serverPath }} + readOnly: {{ $vol.readOnly }} + server: {{ $vol.server }} + {{- end }} securityContext: runAsNonRoot: true runAsUser: {{ .Values.workers.uid }} diff --git a/charts/rubintv/templates/pvc.yaml b/charts/rubintv/templates/pvc.yaml index 15be702e83..802f4830e6 100644 --- a/charts/rubintv/templates/pvc.yaml +++ b/charts/rubintv/templates/pvc.yaml @@ -1,5 +1,5 @@ -{{- if .Values.workers.volumes }} -{{- range $vol := .Values.workers.volumes }} +{{- if 
.Values.workers.pvcMountpoint }} +{{- range $vol := .Values.workers.pvcMountpoint }} {{- if $vol.persistentVolumeClaim }} --- kind: PersistentVolumeClaim diff --git a/charts/rubintv/values.yaml b/charts/rubintv/values.yaml index a1b3658c14..7d6dc9e185 100644 --- a/charts/rubintv/values.yaml +++ b/charts/rubintv/values.yaml @@ -109,11 +109,11 @@ workers: imagePullSecrets: [] # Each entry is of the form: { name: pull-secret-name } - # -- Volumes for the rubintv worker pods + # -- PVC claims for the rubintv worker pods # @default -- See `values.yaml` - volumes: [] + pvcMountpoint: [] # Each list item must have the following form: - # { name: volume-name, + # { name: pvc-name, # accessMode: one of "ReadOnly", "ReadWriteOnce", "ReadWriteMany", # mountPath: path-mounted-in-container, # persistentVolumeClaim: { @@ -122,8 +122,17 @@ workers: # capacity: size-as-string-of-pvc (e.g. "1Gi") # } # } - # It is planned to implement "nfs" as an alternative to - # "PersistentVolumeClaim" but that has not yet been done. + + # -- NFS mountpoints for the rubintv worker pods + # @default -- See `values.yaml` + nfsMountpoint: [] + # Each list item must have the following form: + # { name: nfs-name, + # containerPath: path-mounted-in-container, + # readOnly: boolean, + # server: nfs-server, + # serverPath: nfs-server-path + # } # -- Resource limits and requests for the rubintv worker pods resources: {} From 8ee84e9c117dc59a5bd30e224cbbe9cc76abc2c0 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 18 Oct 2024 08:41:33 -0700 Subject: [PATCH 334/567] Add ATBuilding and DREAM to sasquatch connectors/consumers. 
--- applications/sasquatch/values-summit.yaml | 8 ++++---- applications/sasquatch/values-tucson-teststand.yaml | 4 ++-- applications/sasquatch/values-usdfprod.yaml | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 628c4e52cf..3919490042 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -163,7 +163,7 @@ kafka-connect-manager: auxtel: enabled: true repairerConnector: false - topicsRegex: ".*ATAOS|.*ATDome|.*ATDomeTrajectory|.*ATHexapod|.*ATPneumatics|.*ATPtg|.*ATMCS" + topicsRegex: ".*ATAOS|.*ATBuilding|.*ATDome|.*ATDomeTrajectory|.*ATHexapod|.*ATPneumatics|.*ATPtg|.*ATMCS" maintel: enabled: true repairerConnector: false @@ -180,7 +180,7 @@ kafka-connect-manager: eas: enabled: true repairerConnector: false - topicsRegex: ".*DIMM|.*DSM|.*EPM|.*ESS|.*HVAC|.*WeatherForecast" + topicsRegex: ".*DIMM|.*DREAM|.*DSM|.*EPM|.*ESS|.*HVAC|.*WeatherForecast" latiss: enabled: true repairerConnector: false @@ -339,7 +339,7 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + [ "lsst.sal.DIMM", "lsst.sal.DREAM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] debug: true m1m3: enabled: true @@ -423,7 +423,7 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] + [ "lsst.sal.ATAOS", "lsst.sal.ATBuilding", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] debug: true latiss: enabled: true diff --git 
a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index 49832205e7..cba6baf46d 100644 --- a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -84,7 +84,7 @@ telegraf-kafka-consumer: enabled: true database: "efd" topicRegexps: | - [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] + [ "lsst.sal.ATAOS", "lsst.sal.ATBuilding", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] debug: true maintel: enabled: true @@ -104,7 +104,7 @@ telegraf-kafka-consumer: metric_batch_size: 100 flush_interval: 20s topicRegexps: | - [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + [ "lsst.sal.DIMM", "lsst.sal.DREAM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] debug: true latiss: enabled: true diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 75afdd9d0e..1ea6936014 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -165,7 +165,7 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.DIMM", "lsst.sal.ESS", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + [ "lsst.sal.DIMM", "lsst.sal.DREAM", "lsst.sal.ESS", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] debug: true m1m3: enabled: true @@ -261,7 +261,7 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.ATAOS", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] 
+ [ "lsst.sal.ATAOS", "lsst.sal.ATBuilding", "lsst.sal.ATDome", "lsst.sal.ATDomeTrajectory", "lsst.sal.ATHexapod", "lsst.sal.ATPneumatics", "lsst.sal.ATPtg", "lsst.sal.ATMCS" ] debug: true latiss: enabled: true From a1b16ead454d3112970724ad78232af4174023de Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 18 Oct 2024 14:42:05 -0700 Subject: [PATCH 335/567] Add RubinTV redis pw to UWS. --- applications/uws/secrets-tucson-teststand.yaml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 applications/uws/secrets-tucson-teststand.yaml diff --git a/applications/uws/secrets-tucson-teststand.yaml b/applications/uws/secrets-tucson-teststand.yaml new file mode 100644 index 0000000000..c6609ef2f1 --- /dev/null +++ b/applications/uws/secrets-tucson-teststand.yaml @@ -0,0 +1,6 @@ +redis-password: + description: >- + Password to the Rapid Analysis redis instance. + copy: + application: rubintv + key: redis-password From 948bcc324cb18eaf0ad774d076c6ded3b8f6b43c Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Thu, 26 Sep 2024 16:29:08 -0700 Subject: [PATCH 336/567] Update for Gafaelfawr 12.0.0 Pass the base URL and base internal URL into Gafaelfawr as environment variables and update the deployed version to 12.0.0. This does not yet add service values to every GafaelfawrIngress or remove the now-unneeded baseUrl parameters. Those changes will be made after Gafaelfawr is upgraded. 
--- applications/gafaelfawr/Chart.yaml | 2 +- applications/gafaelfawr/crds/ingress.yaml | 42 ++++++++++++------- .../gafaelfawr/templates/_helpers.tpl | 4 ++ 3 files changed, 31 insertions(+), 17 deletions(-) diff --git a/applications/gafaelfawr/Chart.yaml b/applications/gafaelfawr/Chart.yaml index 11a3345ab7..ca77327765 100644 --- a/applications/gafaelfawr/Chart.yaml +++ b/applications/gafaelfawr/Chart.yaml @@ -5,7 +5,7 @@ description: "Authentication and identity system" home: "https://gafaelfawr.lsst.io/" sources: - "https://github.com/lsst-sqre/gafaelfawr" -appVersion: 11.1.1 +appVersion: 12.0.0 dependencies: - name: "redis" diff --git a/applications/gafaelfawr/crds/ingress.yaml b/applications/gafaelfawr/crds/ingress.yaml index 4cedee74e0..faa50108de 100644 --- a/applications/gafaelfawr/crds/ingress.yaml +++ b/applications/gafaelfawr/crds/ingress.yaml @@ -56,8 +56,6 @@ spec: config: type: object description: "Configuration for the ingress to create." - required: - - baseUrl properties: authCacheDuration: type: string @@ -96,10 +94,10 @@ spec: scopes: type: array description: >- - Scopes to include in the delegated token if - they are available. These scopes are not - required to access the service; to make them - required, include them in spec.scopes as well. + Scopes to include in the delegated token if they + are available. These scopes are not required to + access the service; to make them required, include + them in spec.scopes as well. items: type: string service: @@ -114,9 +112,9 @@ spec: minimumLifetime: type: integer description: >- - Minimum lifetime of delegated token in seconds. If - the user's token has less than that time - remaining, force them to reauthenticate. + Minimum lifetime of delegated token in seconds. If the + user's token has less than that time remaining, force + them to reauthenticate. 
useAuthorization: type: boolean description: >- @@ -133,19 +131,24 @@ spec: description: >- Whether to redirect to the login flow if the user is not currently authenticated. + onlyServices: + type: array + description: >- + If set, access is restricted to tokens issued to one of + the listed services, in addition to any other access + constraints. Users will not be able to access the ingress + directly with their own tokens. + items: + type: string replace403: type: boolean description: >- - Whether to replace 403 responses with a custom 403 - response from Gafaelfawr that disables caching and - includes authorization-related errors in the - `WWW-Authenticate` header. + Obsolete setting. No longer has any effect. scopes: type: object description: >- - The token scope or scopes required to access this - service. May be omitted if the service allows - anonymous access. + The token scope or scopes required to access this service. + May be omitted if the service allows anonymous access. properties: any: type: array @@ -179,6 +182,13 @@ spec: - true required: - anonymous + service: + type: string + description: >- + The name of the service corresponding to this ingress, + used for metrics reporting. When delegating internal + tokens, this must match config.delegate.internal.service. + This attribute will be required in the future. 
username: type: string description: >- diff --git a/applications/gafaelfawr/templates/_helpers.tpl b/applications/gafaelfawr/templates/_helpers.tpl index 4b484a5aa5..0aee8daccd 100644 --- a/applications/gafaelfawr/templates/_helpers.tpl +++ b/applications/gafaelfawr/templates/_helpers.tpl @@ -34,6 +34,10 @@ Common environment variables - name: "GAFAELFAWR_AFTER_LOGOUT_URL" value: {{ required "global.baseUrl must be set" .Values.global.baseUrl | quote }} {{- end }} +- name: "GAFAELFAWR_BASE_URL" + value: {{ .Values.global.baseUrl | quote }} +- name: "GAFAELFAWR_BASE_INTERNAL_URL" + value: "http://gafaelfawr.{{ .Release.Namespace }}.svc.cluster.local:8080" - name: "GAFAELFAWR_BOOTSTRAP_TOKEN" valueFrom: secretKeyRef: From 837b7c88136c45bb1eff2c350e77a8ffe2d6e7df Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Tue, 15 Oct 2024 11:33:31 -0700 Subject: [PATCH 337/567] Add metrics configuration for Gafaelfawr Add Gafaelfawr to the Sasquatch app-metrics configuration and add Kafka configuration to Gafaelfawr so that it can push app metrics. Enable app-metrics for Gafaelfawr in idfdev in Sasquatch. 
--- applications/gafaelfawr/README.md | 5 ++++ .../gafaelfawr/templates/_helpers.tpl | 12 +++++++++ .../gafaelfawr/templates/cronjob-audit.yaml | 27 +++++++++++++++++++ .../templates/cronjob-maintenance.yaml | 27 +++++++++++++++++++ .../templates/deployment-operator.yaml | 27 +++++++++++++++++++ .../gafaelfawr/templates/deployment.yaml | 27 +++++++++++++++++++ .../gafaelfawr/templates/kafka-access.yaml | 16 +++++++++++ applications/gafaelfawr/values-idfdev.yaml | 5 ++++ applications/gafaelfawr/values.yaml | 23 ++++++++++++++++ .../sasquatch/charts/app-metrics/values.yaml | 4 +++ applications/sasquatch/values-idfdev.yaml | 1 + 11 files changed, 174 insertions(+) create mode 100644 applications/gafaelfawr/templates/kafka-access.yaml diff --git a/applications/gafaelfawr/README.md b/applications/gafaelfawr/README.md index 2798a697c9..97d95f761c 100644 --- a/applications/gafaelfawr/README.md +++ b/applications/gafaelfawr/README.md @@ -55,6 +55,11 @@ Authentication and identity system | config.ldap.userDn | string | Use anonymous binds | Bind DN for simple bind authentication. If set, `ldap-secret` must be set in the Gafaelfawr Vault secret. Set this or `kerberosConfig`, not both. | | config.ldap.userSearchAttr | string | `"uid"` | Search attribute containing the user's username | | config.logLevel | string | `"INFO"` | Choose from the text form of Python logging levels | +| config.metrics.metricsEvents.appName | string | `"gafaelfawr"` | Name under which to log metric events. Generally there is no reason to change this. | +| config.metrics.metricsEvents.disable | bool | `true` | Whether to disable sending metric events. If disabled, other settings must be present but are ignored. | +| config.metrics.metricsEvents.topicPrefix | string | `"lsst.square.app-metrics.events"` | Topic prefix for events. It may sometimes be useful to change this in development environments. 
| +| config.metrics.schemaManager.registryUrl | string | Sasquatch in the local cluster | URL of the Confluent-compatible schema registry server | +| config.metrics.schemaManager.suffix | string | `""` | Suffix to add to all registered subjects. This is sometimes useful for experimentation during development. | | config.oidc.audience | string | Same as `clientId` | Audience (`aud` claim) to expect in ID tokens. | | config.oidc.clientId | string | `nil` | Client ID for generic OpenID Connect support. One and only one of this, `config.cilogon.clientId`, or `config.github.clientId` must be set. | | config.oidc.enrollmentUrl | string | Login fails with an error | Where to send the user if their username cannot be found in LDAP | diff --git a/applications/gafaelfawr/templates/_helpers.tpl b/applications/gafaelfawr/templates/_helpers.tpl index 0aee8daccd..e6a957f170 100644 --- a/applications/gafaelfawr/templates/_helpers.tpl +++ b/applications/gafaelfawr/templates/_helpers.tpl @@ -127,4 +127,16 @@ Common environment variables name: {{ .secretName | quote }} key: "slack-webhook" {{- end }} +{{- if .Values.config.metrics.metricsEvents }} +- name: "KAFKA_BOOTSTRAP_SERVERS" + valueFrom: + secretKeyRef: + name: "gafaelfawr-kafka" + key: "bootstrapServers" +- name: "KAFKA_SECURITY_PROTOCOL" + valueFrom: + secretKeyRef: + name: "gafaelfawr-kafka" + key: "securityProtocol" +{{- end }} {{- end }} diff --git a/applications/gafaelfawr/templates/cronjob-audit.yaml b/applications/gafaelfawr/templates/cronjob-audit.yaml index a9cebf91c2..84cd3c07c3 100644 --- a/applications/gafaelfawr/templates/cronjob-audit.yaml +++ b/applications/gafaelfawr/templates/cronjob-audit.yaml @@ -38,6 +38,14 @@ spec: - "audit" env: {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret") | nindent 16 }} + {{- if .Values.config.metrics.metricsEvents }} + - name: "KAFKA_CLIENT_CERT_PATH" + value: "/etc/gafaelfawr-kafka/user.crt" + - name: 
"KAFKA_CLIENT_KEY_PATH" + value: "/etc/gafaelfawr-kafka/user.key" + - name: "KAFKA_CLUSTER_CA_PATH" + value: "/etc/gafaelfawr-kafka/ca.crt" + {{- end }} image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy | quote }} {{- with .Values.maintenance.resources }} @@ -54,6 +62,20 @@ spec: - name: "config" mountPath: "/etc/gafaelfawr" readOnly: true + {{- if .Values.config.metrics.metricsEvents }} + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/ca.crt" + readOnly: true + subPath: "ssl.truststore.crt" + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/user.crt" + readOnly: true + subPath: "ssl.keystore.crt" + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/user.key" + readOnly: true + subPath: "ssl.keystore.key" + {{- end }} {{- if .Values.config.ldap.kerberosConfig }} - name: "keytab" mountPath: "/etc/krb5.keytab" @@ -74,6 +96,11 @@ spec: - name: "config" configMap: name: "gafaelfawr-config" + {{- if .Values.config.metrics.metricsEvents }} + - name: "kafka" + secret: + secretName: "gafaelfawr-kafka" + {{- end }} {{- if .Values.config.ldap.kerberosConfig }} - name: "keytab" secret: diff --git a/applications/gafaelfawr/templates/cronjob-maintenance.yaml b/applications/gafaelfawr/templates/cronjob-maintenance.yaml index 85227e1d29..7b9cadc348 100644 --- a/applications/gafaelfawr/templates/cronjob-maintenance.yaml +++ b/applications/gafaelfawr/templates/cronjob-maintenance.yaml @@ -37,6 +37,14 @@ spec: - "maintenance" env: {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret") | nindent 16 }} + {{- if .Values.config.metrics.metricsEvents }} + - name: "KAFKA_CLIENT_CERT_PATH" + value: "/etc/gafaelfawr-kafka/user.crt" + - name: "KAFKA_CLIENT_KEY_PATH" + value: "/etc/gafaelfawr-kafka/user.key" + - name: "KAFKA_CLUSTER_CA_PATH" + value: "/etc/gafaelfawr-kafka/ca.crt" + {{- end }} image: "{{ .Values.image.repository }}:{{ 
.Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy | quote }} {{- with .Values.maintenance.resources }} @@ -53,6 +61,20 @@ spec: - name: "config" mountPath: "/etc/gafaelfawr" readOnly: true + {{- if .Values.config.metrics.metricsEvents }} + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/ca.crt" + readOnly: true + subPath: "ssl.truststore.crt" + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/user.crt" + readOnly: true + subPath: "ssl.keystore.crt" + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/user.key" + readOnly: true + subPath: "ssl.keystore.key" + {{- end }} {{- if .Values.config.ldap.kerberosConfig }} - name: "keytab" mountPath: "/etc/krb5.keytab" @@ -73,6 +95,11 @@ spec: - name: "config" configMap: name: "gafaelfawr-config" + {{- if .Values.config.metrics.metricsEvents }} + - name: "kafka" + secret: + secretName: "gafaelfawr-kafka" + {{- end }} {{- if .Values.config.ldap.kerberosConfig }} - name: "keytab" secret: diff --git a/applications/gafaelfawr/templates/deployment-operator.yaml b/applications/gafaelfawr/templates/deployment-operator.yaml index cc4786accf..53f3f2d62a 100644 --- a/applications/gafaelfawr/templates/deployment-operator.yaml +++ b/applications/gafaelfawr/templates/deployment-operator.yaml @@ -43,6 +43,14 @@ spec: - "gafaelfawr.operator" env: {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret") | nindent 12 }} + {{- if .Values.config.metrics.metricsEvents }} + - name: "KAFKA_CLIENT_CERT_PATH" + value: "/etc/gafaelfawr-kafka/user.crt" + - name: "KAFKA_CLIENT_KEY_PATH" + value: "/etc/gafaelfawr-kafka/user.key" + - name: "KAFKA_CLUSTER_CA_PATH" + value: "/etc/gafaelfawr-kafka/ca.crt" + {{- end }} image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy | quote }} livenessProbe: @@ -71,6 +79,20 @@ spec: - name: "config" mountPath: "/etc/gafaelfawr" 
readOnly: true + {{- if .Values.config.metrics.metricsEvents }} + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/ca.crt" + readOnly: true + subPath: "ssl.truststore.crt" + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/user.crt" + readOnly: true + subPath: "ssl.keystore.crt" + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/user.key" + readOnly: true + subPath: "ssl.keystore.key" + {{- end }} {{- if .Values.config.ldap.kerberosConfig }} - name: "keytab" mountPath: "/etc/krb5.keytab" @@ -91,6 +113,11 @@ spec: - name: "config" configMap: name: "gafaelfawr-config" + {{- if .Values.config.metrics.metricsEvents }} + - name: "kafka" + secret: + secretName: "gafaelfawr-kafka" + {{- end }} {{- if .Values.config.ldap.kerberosConfig }} - name: "keytab" secret: diff --git a/applications/gafaelfawr/templates/deployment.yaml b/applications/gafaelfawr/templates/deployment.yaml index 697aedba86..3690d094bd 100644 --- a/applications/gafaelfawr/templates/deployment.yaml +++ b/applications/gafaelfawr/templates/deployment.yaml @@ -55,6 +55,14 @@ spec: - name: "gafaelfawr" env: {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret" "sidecar" true) | nindent 12 }} + {{- if .Values.config.metrics.metricsEvents }} + - name: "KAFKA_CLIENT_CERT_PATH" + value: "/etc/gafaelfawr-kafka/user.crt" + - name: "KAFKA_CLIENT_KEY_PATH" + value: "/etc/gafaelfawr-kafka/user.key" + - name: "KAFKA_CLUSTER_CA_PATH" + value: "/etc/gafaelfawr-kafka/ca.crt" + {{- end }} image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy | quote }} livenessProbe: @@ -94,6 +102,20 @@ spec: - name: "config" mountPath: "/etc/gafaelfawr" readOnly: true + {{- if .Values.config.metrics.metricsEvents }} + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/ca.crt" + readOnly: true + subPath: "ssl.truststore.crt" + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/user.crt" + 
readOnly: true + subPath: "ssl.keystore.crt" + - name: "kafka" + mountPath: "/etc/gafaelfawr-kafka/user.key" + readOnly: true + subPath: "ssl.keystore.key" + {{- end }} {{- if .Values.config.ldap.kerberosConfig }} - name: "keytab" mountPath: "/etc/krb5.keytab" @@ -114,6 +136,11 @@ spec: - name: "config" configMap: name: "gafaelfawr-config" + {{- if .Values.config.metrics.metricsEvents }} + - name: "kafka" + secret: + secretName: "gafaelfawr-kafka" + {{- end }} {{- if .Values.config.ldap.kerberosConfig }} - name: "keytab" secret: diff --git a/applications/gafaelfawr/templates/kafka-access.yaml b/applications/gafaelfawr/templates/kafka-access.yaml new file mode 100644 index 0000000000..31a3759dd0 --- /dev/null +++ b/applications/gafaelfawr/templates/kafka-access.yaml @@ -0,0 +1,16 @@ +{{- if .Values.config.metrics.metricsEvents -}} +apiVersion: access.strimzi.io/v1alpha1 +kind: KafkaAccess +metadata: + name: "gafaelfawr-kafka" +spec: + kafka: + name: "sasquatch" + namespace: "sasquatch" + listener: "tls" + user: + kind: "KafkaUser" + apiGroup: "kafka.strimzi.io" + name: "app-metrics-gafaelfawr" + namespace: "sasquatch" +{{- end }} diff --git a/applications/gafaelfawr/values-idfdev.yaml b/applications/gafaelfawr/values-idfdev.yaml index 5166cdca19..a3e3b55aa9 100644 --- a/applications/gafaelfawr/values-idfdev.yaml +++ b/applications/gafaelfawr/values-idfdev.yaml @@ -35,6 +35,11 @@ config: # Support generating user metadata for CADC authentication code. cadcBaseUuid: "db8626e0-3b93-45c0-89ab-3058b0ed39fe" + # Enable metrics reporting. + metrics: + metricsEvents: + disable: false + # User quota settings for services. quota: default: diff --git a/applications/gafaelfawr/values.yaml b/applications/gafaelfawr/values.yaml index 3780cee7ea..091dfcea4b 100644 --- a/applications/gafaelfawr/values.yaml +++ b/applications/gafaelfawr/values.yaml @@ -230,6 +230,29 @@ config: # the `rubin` scope. 
dataRightsMapping: {} + metrics: + metricsEvents: + # -- Whether to disable sending metric events. If disabled, other + # settings must be present but are ignored. + disable: true + + # -- Name under which to log metric events. Generally there is no reason + # to change this. + appName: "gafaelfawr" + + # -- Topic prefix for events. It may sometimes be useful to change this + # in development environments. + topicPrefix: "lsst.square.app-metrics.events" + + schemaManager: + # -- URL of the Confluent-compatible schema registry server + # @default -- Sasquatch in the local cluster + registryUrl: "http://sasquatch-schema-registry.sasquatch.svc.cluster.local:8081" + + # -- Suffix to add to all registered subjects. This is sometimes useful + # for experimentation during development. + suffix: "" + # -- Quota settings (see # [Quotas](https://gafaelfawr.lsst.io/user-guide/helm.html#quotas)). quota: {} diff --git a/applications/sasquatch/charts/app-metrics/values.yaml b/applications/sasquatch/charts/app-metrics/values.yaml index 93329be191..ce140111b2 100644 --- a/applications/sasquatch/charts/app-metrics/values.yaml +++ b/applications/sasquatch/charts/app-metrics/values.yaml @@ -8,6 +8,10 @@ # # @default -- See `values.yaml` globalAppConfig: + gafaelfawr: + influxTags: + - "service" + - "username" mobu: influxTags: - "type" diff --git a/applications/sasquatch/values-idfdev.yaml b/applications/sasquatch/values-idfdev.yaml index 32d665c6be..5b9101ccb5 100644 --- a/applications/sasquatch/values-idfdev.yaml +++ b/applications/sasquatch/values-idfdev.yaml @@ -112,4 +112,5 @@ chronograf: app-metrics: enabled: true apps: + - gafaelfawr - mobu From b6d57572422268d3c5589d04e10261e238712e58 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Tue, 15 Oct 2024 14:26:00 -0700 Subject: [PATCH 338/567] Remove obsolete cadcBaseUuid Gafaelfawr setting This is no longer needed or accepted, since the CADC authentication code no longer requires UUIDs. 
--- applications/gafaelfawr/README.md | 1 - applications/gafaelfawr/values-ccin2p3.yaml | 2 -- applications/gafaelfawr/values-idfdev.yaml | 3 --- applications/gafaelfawr/values-idfint.yaml | 3 --- applications/gafaelfawr/values-idfprod.yaml | 3 --- applications/gafaelfawr/values-roe.yaml | 3 --- applications/gafaelfawr/values-usdfdev.yaml | 3 --- applications/gafaelfawr/values-usdfint.yaml | 3 --- applications/gafaelfawr/values-usdfprod.yaml | 3 --- applications/gafaelfawr/values.yaml | 6 ------ 10 files changed, 30 deletions(-) diff --git a/applications/gafaelfawr/README.md b/applications/gafaelfawr/README.md index 97d95f761c..7c0bea3b7d 100644 --- a/applications/gafaelfawr/README.md +++ b/applications/gafaelfawr/README.md @@ -26,7 +26,6 @@ Authentication and identity system | cloudsql.serviceAccount | string | None, must be set if Cloud SQL Auth Proxy is enabled | The Google service account that has an IAM binding to the `gafaelfawr` Kubernetes service account and has the `cloudsql.client` role | | cloudsql.tolerations | list | `[]` | Tolerations for the Cloud SQL Proxy pod | | config.afterLogoutUrl | string | Top-level page of this Phalanx environment | Where to send the user after they log out | -| config.cadcBaseUuid | string | Disabled | Whether to support the `/auth/cadc/userinfo` route. If set, this UUID is used as the namespace to generate UUID v5 `sub` claims returned by this route to meet the needs of CADC authentication code. | | config.cilogon.clientId | string | `nil` | CILogon client ID. One and only one of this, `config.github.clientId`, or `config.oidc.clientId` must be set. 
| | config.cilogon.enrollmentUrl | string | Login fails with an error | Where to send the user if their username cannot be found in LDAP | | config.cilogon.loginParams | object | `{"skin":"LSST"}` | Additional parameters to add | diff --git a/applications/gafaelfawr/values-ccin2p3.yaml b/applications/gafaelfawr/values-ccin2p3.yaml index da12e492e5..7d59a28426 100644 --- a/applications/gafaelfawr/values-ccin2p3.yaml +++ b/applications/gafaelfawr/values-ccin2p3.yaml @@ -68,8 +68,6 @@ config: oidcServer: enabled: false - cadcBaseUuid: "df534647-a1df-4608-b08e-3af8dc291e41" - # initialAdmins: # - "mainetti" diff --git a/applications/gafaelfawr/values-idfdev.yaml b/applications/gafaelfawr/values-idfdev.yaml index a3e3b55aa9..b29b2c25d9 100644 --- a/applications/gafaelfawr/values-idfdev.yaml +++ b/applications/gafaelfawr/values-idfdev.yaml @@ -32,9 +32,6 @@ config: oidcServer: enabled: true - # Support generating user metadata for CADC authentication code. - cadcBaseUuid: "db8626e0-3b93-45c0-89ab-3058b0ed39fe" - # Enable metrics reporting. metrics: metricsEvents: diff --git a/applications/gafaelfawr/values-idfint.yaml b/applications/gafaelfawr/values-idfint.yaml index 9cb90b1377..2da17e0e44 100644 --- a/applications/gafaelfawr/values-idfint.yaml +++ b/applications/gafaelfawr/values-idfint.yaml @@ -33,9 +33,6 @@ config: oidcServer: enabled: true - # Support generating user metadata for CADC authentication code. - cadcBaseUuid: "dd5cd3ee-4239-48e4-b0e3-282f2328b9d1" - # User quota settings for services. quota: default: diff --git a/applications/gafaelfawr/values-idfprod.yaml b/applications/gafaelfawr/values-idfprod.yaml index f9148ef05d..6e55cc5d89 100644 --- a/applications/gafaelfawr/values-idfprod.yaml +++ b/applications/gafaelfawr/values-idfprod.yaml @@ -36,9 +36,6 @@ config: - "dp0.2" - "dp0.3" - # Support generating user metadata for CADC authentication code. - cadcBaseUuid: "5f0eb655-0e72-4948-a6a5-a94c0be9019f" - # User quota settings for services. 
quota: default: diff --git a/applications/gafaelfawr/values-roe.yaml b/applications/gafaelfawr/values-roe.yaml index f3914a1d96..f53b9e0ead 100644 --- a/applications/gafaelfawr/values-roe.yaml +++ b/applications/gafaelfawr/values-roe.yaml @@ -8,9 +8,6 @@ config: github: clientId: "10172b4db1b67ee31620" - # Support generating user metadata for CADC authentication code. - cadcBaseUuid: "4cb5f948-aad9-466c-837b-5eae565b0a77" - # Allow access by GitHub team. groupMapping: "exec:admin": diff --git a/applications/gafaelfawr/values-usdfdev.yaml b/applications/gafaelfawr/values-usdfdev.yaml index dd17d804d8..52e5b584bf 100644 --- a/applications/gafaelfawr/values-usdfdev.yaml +++ b/applications/gafaelfawr/values-usdfdev.yaml @@ -18,9 +18,6 @@ config: oidcServer: enabled: true - # Support generating user metadata for CADC authentication code. - cadcBaseUuid: "efa0a347-b648-4948-a987-055efbf6802a" - oidc: clientId: "rubin-usdf-rsp-dev" audience: "rubin-usdf-rsp-dev" diff --git a/applications/gafaelfawr/values-usdfint.yaml b/applications/gafaelfawr/values-usdfint.yaml index 91a7a20b07..c29d1dd918 100644 --- a/applications/gafaelfawr/values-usdfint.yaml +++ b/applications/gafaelfawr/values-usdfint.yaml @@ -11,9 +11,6 @@ config: oidcServer: enabled: true - # Support generating user metadata for CADC authentication code. - cadcBaseUuid: "82c6fc76-b7d3-4368-92a9-6a468dfa23dc" - oidc: clientId: vcluster--usdf-rsp-int audience: "vcluster--usdf-rsp-int" diff --git a/applications/gafaelfawr/values-usdfprod.yaml b/applications/gafaelfawr/values-usdfprod.yaml index 0d7e8d1e35..cd2dda0b9a 100644 --- a/applications/gafaelfawr/values-usdfprod.yaml +++ b/applications/gafaelfawr/values-usdfprod.yaml @@ -11,9 +11,6 @@ config: oidcServer: enabled: true - # Support generating user metadata for CADC authentication code. 
- cadcBaseUuid: "595f5a03-bef4-473b-8e5a-588d87f13799" - oidc: clientId: rubin-usdf-rsp audience: "rubin-usdf-rsp" diff --git a/applications/gafaelfawr/values.yaml b/applications/gafaelfawr/values.yaml index 091dfcea4b..6e903e03a9 100644 --- a/applications/gafaelfawr/values.yaml +++ b/applications/gafaelfawr/values.yaml @@ -41,12 +41,6 @@ config: # @default -- Top-level page of this Phalanx environment afterLogoutUrl: null - # -- Whether to support the `/auth/cadc/userinfo` route. If set, this UUID - # is used as the namespace to generate UUID v5 `sub` claims returned by this - # route to meet the needs of CADC authentication code. - # @default -- Disabled - cadcBaseUuid: null - # -- URL for the PostgreSQL database # @default -- None, must be set if neither `cloudsql.enabled` nor # `config.internalDatabase` are true From 42b3a2e2b9fa94aa3db370439104d57474aaf73d Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Tue, 15 Oct 2024 14:35:03 -0700 Subject: [PATCH 339/567] Avoid app-metrics in Kafka topics Avro doesn't like the dash in the name of the topic, so change app-metrics to metrics in the app-metrics Sasquatch subchart and change Gafaelfawr to match. --- applications/gafaelfawr/README.md | 2 +- applications/gafaelfawr/values.yaml | 2 +- .../sasquatch/charts/app-metrics/templates/kafka-topics.yaml | 2 +- .../sasquatch/charts/app-metrics/templates/kafka-users.yaml | 2 +- .../charts/app-metrics/templates/telegraf-configmap.yaml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/applications/gafaelfawr/README.md b/applications/gafaelfawr/README.md index 7c0bea3b7d..c99b2223b8 100644 --- a/applications/gafaelfawr/README.md +++ b/applications/gafaelfawr/README.md @@ -56,7 +56,7 @@ Authentication and identity system | config.logLevel | string | `"INFO"` | Choose from the text form of Python logging levels | | config.metrics.metricsEvents.appName | string | `"gafaelfawr"` | Name under which to log metric events. 
Generally there is no reason to change this. | | config.metrics.metricsEvents.disable | bool | `true` | Whether to disable sending metric events. If disabled, other settings must be present but are ignored. | -| config.metrics.metricsEvents.topicPrefix | string | `"lsst.square.app-metrics.events"` | Topic prefix for events. It may sometimes be useful to change this in development environments. | +| config.metrics.metricsEvents.topicPrefix | string | `"lsst.square.metrics.events"` | Topic prefix for events. It may sometimes be useful to change this in development environments. | | config.metrics.schemaManager.registryUrl | string | Sasquatch in the local cluster | URL of the Confluent-compatible schema registry server | | config.metrics.schemaManager.suffix | string | `""` | Suffix to add to all registered subjects. This is sometimes useful for experimentation during development. | | config.oidc.audience | string | Same as `clientId` | Audience (`aud` claim) to expect in ID tokens. | diff --git a/applications/gafaelfawr/values.yaml b/applications/gafaelfawr/values.yaml index 6e903e03a9..2f01d66449 100644 --- a/applications/gafaelfawr/values.yaml +++ b/applications/gafaelfawr/values.yaml @@ -236,7 +236,7 @@ config: # -- Topic prefix for events. It may sometimes be useful to change this # in development environments. - topicPrefix: "lsst.square.app-metrics.events" + topicPrefix: "lsst.square.metrics.events" schemaManager: # -- URL of the Confluent-compatible schema registry server diff --git a/applications/sasquatch/charts/app-metrics/templates/kafka-topics.yaml b/applications/sasquatch/charts/app-metrics/templates/kafka-topics.yaml index 70db2590de..67a7cbe687 100644 --- a/applications/sasquatch/charts/app-metrics/templates/kafka-topics.yaml +++ b/applications/sasquatch/charts/app-metrics/templates/kafka-topics.yaml @@ -3,7 +3,7 @@ apiVersion: kafka.strimzi.io/v1beta2 kind: KafkaTopic metadata: - name: "lsst.square.app-metrics.events.{{ . 
}}" + name: "lsst.square.metrics.events.{{ . }}" labels: strimzi.io/cluster: {{ $.Values.cluster.name }} spec: diff --git a/applications/sasquatch/charts/app-metrics/templates/kafka-users.yaml b/applications/sasquatch/charts/app-metrics/templates/kafka-users.yaml index 9ddab60b5e..2cde67ecef 100644 --- a/applications/sasquatch/charts/app-metrics/templates/kafka-users.yaml +++ b/applications/sasquatch/charts/app-metrics/templates/kafka-users.yaml @@ -21,7 +21,7 @@ spec: host: "*" - resource: type: topic - name: "lsst.square.app-metrics.events.{{ . }}" + name: "lsst.square.metrics.events.{{ . }}" patternType: literal operations: - "Describe" diff --git a/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml b/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml index 8b04c7de4c..84f6e47fd8 100644 --- a/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml +++ b/applications/sasquatch/charts/app-metrics/templates/telegraf-configmap.yaml @@ -55,7 +55,7 @@ data: avro_union_mode = "nullable" avro_tags = {{ include "helpers.toTomlArray" $influxTags }} topics = [ - "lsst.square.app-metrics.events.{{ $app }}", + "lsst.square.metrics.events.{{ $app }}", ] max_processing_time = "5s" consumer_fetch_default = "5MB" From e7191c55bd2bdd10c33303f50148be67610722cb Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Thu, 17 Oct 2024 17:40:41 -0700 Subject: [PATCH 340/567] Update Gafaelfawr metrics configuration Update the Gafaelfawr metrics configuration for additional changes to the upcoming Gafaelfawr release and to the underlying Safir library. 
--- applications/gafaelfawr/README.md | 6 +++--- applications/gafaelfawr/templates/_helpers.tpl | 2 +- .../gafaelfawr/templates/cronjob-audit.yaml | 6 +++--- .../gafaelfawr/templates/cronjob-maintenance.yaml | 6 +++--- .../gafaelfawr/templates/deployment-operator.yaml | 6 +++--- applications/gafaelfawr/templates/deployment.yaml | 6 +++--- applications/gafaelfawr/templates/kafka-access.yaml | 2 +- applications/gafaelfawr/values-idfdev.yaml | 3 +-- applications/gafaelfawr/values.yaml | 13 ++++++------- 9 files changed, 24 insertions(+), 26 deletions(-) diff --git a/applications/gafaelfawr/README.md b/applications/gafaelfawr/README.md index c99b2223b8..12d11e3701 100644 --- a/applications/gafaelfawr/README.md +++ b/applications/gafaelfawr/README.md @@ -54,9 +54,9 @@ Authentication and identity system | config.ldap.userDn | string | Use anonymous binds | Bind DN for simple bind authentication. If set, `ldap-secret` must be set in the Gafaelfawr Vault secret. Set this or `kerberosConfig`, not both. | | config.ldap.userSearchAttr | string | `"uid"` | Search attribute containing the user's username | | config.logLevel | string | `"INFO"` | Choose from the text form of Python logging levels | -| config.metrics.metricsEvents.appName | string | `"gafaelfawr"` | Name under which to log metric events. Generally there is no reason to change this. | -| config.metrics.metricsEvents.disable | bool | `true` | Whether to disable sending metric events. If disabled, other settings must be present but are ignored. | -| config.metrics.metricsEvents.topicPrefix | string | `"lsst.square.metrics.events"` | Topic prefix for events. It may sometimes be useful to change this in development environments. | +| config.metrics.appName | string | `"gafaelfawr"` | Name under which to log metrics. Generally there is no reason to change this. 
| +| config.metrics.enabled | bool | `false` | Whether to enable sending metrics | +| config.metrics.events.topicPrefix | string | `"lsst.square.metrics.events"` | Topic prefix for events. It may sometimes be useful to change this in development environments. | | config.metrics.schemaManager.registryUrl | string | Sasquatch in the local cluster | URL of the Confluent-compatible schema registry server | | config.metrics.schemaManager.suffix | string | `""` | Suffix to add to all registered subjects. This is sometimes useful for experimentation during development. | | config.oidc.audience | string | Same as `clientId` | Audience (`aud` claim) to expect in ID tokens. | diff --git a/applications/gafaelfawr/templates/_helpers.tpl b/applications/gafaelfawr/templates/_helpers.tpl index e6a957f170..d43a58f2ef 100644 --- a/applications/gafaelfawr/templates/_helpers.tpl +++ b/applications/gafaelfawr/templates/_helpers.tpl @@ -127,7 +127,7 @@ Common environment variables name: {{ .secretName | quote }} key: "slack-webhook" {{- end }} -{{- if .Values.config.metrics.metricsEvents }} +{{- if .Values.config.metrics.enabled }} - name: "KAFKA_BOOTSTRAP_SERVERS" valueFrom: secretKeyRef: diff --git a/applications/gafaelfawr/templates/cronjob-audit.yaml b/applications/gafaelfawr/templates/cronjob-audit.yaml index 84cd3c07c3..0013a16262 100644 --- a/applications/gafaelfawr/templates/cronjob-audit.yaml +++ b/applications/gafaelfawr/templates/cronjob-audit.yaml @@ -38,7 +38,7 @@ spec: - "audit" env: {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret") | nindent 16 }} - {{- if .Values.config.metrics.metricsEvents }} + {{- if .Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" - name: "KAFKA_CLIENT_KEY_PATH" @@ -62,7 +62,7 @@ spec: - name: "config" mountPath: "/etc/gafaelfawr" readOnly: true - {{- if .Values.config.metrics.metricsEvents }} + {{- if .Values.config.metrics.enabled 
}} - name: "kafka" mountPath: "/etc/gafaelfawr-kafka/ca.crt" readOnly: true @@ -96,7 +96,7 @@ spec: - name: "config" configMap: name: "gafaelfawr-config" - {{- if .Values.config.metrics.metricsEvents }} + {{- if .Values.config.metrics.enabled }} - name: "kafka" secret: secretName: "gafaelfawr-kafka" diff --git a/applications/gafaelfawr/templates/cronjob-maintenance.yaml b/applications/gafaelfawr/templates/cronjob-maintenance.yaml index 7b9cadc348..bbefece8bd 100644 --- a/applications/gafaelfawr/templates/cronjob-maintenance.yaml +++ b/applications/gafaelfawr/templates/cronjob-maintenance.yaml @@ -37,7 +37,7 @@ spec: - "maintenance" env: {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret") | nindent 16 }} - {{- if .Values.config.metrics.metricsEvents }} + {{- if .Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" - name: "KAFKA_CLIENT_KEY_PATH" @@ -61,7 +61,7 @@ spec: - name: "config" mountPath: "/etc/gafaelfawr" readOnly: true - {{- if .Values.config.metrics.metricsEvents }} + {{- if .Values.config.metrics.enabled }} - name: "kafka" mountPath: "/etc/gafaelfawr-kafka/ca.crt" readOnly: true @@ -95,7 +95,7 @@ spec: - name: "config" configMap: name: "gafaelfawr-config" - {{- if .Values.config.metrics.metricsEvents }} + {{- if .Values.config.metrics.enabled }} - name: "kafka" secret: secretName: "gafaelfawr-kafka" diff --git a/applications/gafaelfawr/templates/deployment-operator.yaml b/applications/gafaelfawr/templates/deployment-operator.yaml index 53f3f2d62a..323a25bce6 100644 --- a/applications/gafaelfawr/templates/deployment-operator.yaml +++ b/applications/gafaelfawr/templates/deployment-operator.yaml @@ -43,7 +43,7 @@ spec: - "gafaelfawr.operator" env: {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret") | nindent 12 }} - {{- if .Values.config.metrics.metricsEvents }} + {{- if 
.Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" - name: "KAFKA_CLIENT_KEY_PATH" @@ -79,7 +79,7 @@ spec: - name: "config" mountPath: "/etc/gafaelfawr" readOnly: true - {{- if .Values.config.metrics.metricsEvents }} + {{- if .Values.config.metrics.enabled }} - name: "kafka" mountPath: "/etc/gafaelfawr-kafka/ca.crt" readOnly: true @@ -113,7 +113,7 @@ spec: - name: "config" configMap: name: "gafaelfawr-config" - {{- if .Values.config.metrics.metricsEvents }} + {{- if .Values.config.metrics.enabled }} - name: "kafka" secret: secretName: "gafaelfawr-kafka" diff --git a/applications/gafaelfawr/templates/deployment.yaml b/applications/gafaelfawr/templates/deployment.yaml index 3690d094bd..fed652313f 100644 --- a/applications/gafaelfawr/templates/deployment.yaml +++ b/applications/gafaelfawr/templates/deployment.yaml @@ -55,7 +55,7 @@ spec: - name: "gafaelfawr" env: {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret" "sidecar" true) | nindent 12 }} - {{- if .Values.config.metrics.metricsEvents }} + {{- if .Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" - name: "KAFKA_CLIENT_KEY_PATH" @@ -102,7 +102,7 @@ spec: - name: "config" mountPath: "/etc/gafaelfawr" readOnly: true - {{- if .Values.config.metrics.metricsEvents }} + {{- if .Values.config.metrics.enabled }} - name: "kafka" mountPath: "/etc/gafaelfawr-kafka/ca.crt" readOnly: true @@ -136,7 +136,7 @@ spec: - name: "config" configMap: name: "gafaelfawr-config" - {{- if .Values.config.metrics.metricsEvents }} + {{- if .Values.config.metrics.enabled }} - name: "kafka" secret: secretName: "gafaelfawr-kafka" diff --git a/applications/gafaelfawr/templates/kafka-access.yaml b/applications/gafaelfawr/templates/kafka-access.yaml index 31a3759dd0..4a13c53b68 100644 --- a/applications/gafaelfawr/templates/kafka-access.yaml +++ 
b/applications/gafaelfawr/templates/kafka-access.yaml @@ -1,4 +1,4 @@ -{{- if .Values.config.metrics.metricsEvents -}} +{{- if .Values.config.metrics.enabled -}} apiVersion: access.strimzi.io/v1alpha1 kind: KafkaAccess metadata: diff --git a/applications/gafaelfawr/values-idfdev.yaml b/applications/gafaelfawr/values-idfdev.yaml index b29b2c25d9..fc1d59d4f9 100644 --- a/applications/gafaelfawr/values-idfdev.yaml +++ b/applications/gafaelfawr/values-idfdev.yaml @@ -34,8 +34,7 @@ config: # Enable metrics reporting. metrics: - metricsEvents: - disable: false + enabled: true # User quota settings for services. quota: diff --git a/applications/gafaelfawr/values.yaml b/applications/gafaelfawr/values.yaml index 2f01d66449..8225f53e99 100644 --- a/applications/gafaelfawr/values.yaml +++ b/applications/gafaelfawr/values.yaml @@ -225,15 +225,14 @@ config: dataRightsMapping: {} metrics: - metricsEvents: - # -- Whether to disable sending metric events. If disabled, other - # settings must be present but are ignored. - disable: true + # -- Whether to enable sending metrics + enabled: false - # -- Name under which to log metric events. Generally there is no reason - # to change this. - appName: "gafaelfawr" + # -- Name under which to log metrics. Generally there is no reason to + # change this. + appName: "gafaelfawr" + events: # -- Topic prefix for events. It may sometimes be useful to change this # in development environments. topicPrefix: "lsst.square.metrics.events" From aa3843a01f608a98122f29e9ca7a3e17c7b52691 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Fri, 18 Oct 2024 09:37:02 -0700 Subject: [PATCH 341/567] Rename app_name to application in app-metrics Use application instead of app_name in app-metrics. Rewrap the comments in the Sasquatch app-metrics subchart to follow our normal width convention. 
--- applications/gafaelfawr/README.md | 2 +- applications/gafaelfawr/values.yaml | 2 +- applications/sasquatch/README.md | 12 ++++---- .../sasquatch/charts/app-metrics/README.md | 12 ++++---- .../sasquatch/charts/app-metrics/values.yaml | 28 +++++++++---------- 5 files changed, 27 insertions(+), 29 deletions(-) diff --git a/applications/gafaelfawr/README.md b/applications/gafaelfawr/README.md index 12d11e3701..1f6c642c1d 100644 --- a/applications/gafaelfawr/README.md +++ b/applications/gafaelfawr/README.md @@ -54,7 +54,7 @@ Authentication and identity system | config.ldap.userDn | string | Use anonymous binds | Bind DN for simple bind authentication. If set, `ldap-secret` must be set in the Gafaelfawr Vault secret. Set this or `kerberosConfig`, not both. | | config.ldap.userSearchAttr | string | `"uid"` | Search attribute containing the user's username | | config.logLevel | string | `"INFO"` | Choose from the text form of Python logging levels | -| config.metrics.appName | string | `"gafaelfawr"` | Name under which to log metrics. Generally there is no reason to change this. | +| config.metrics.application | string | `"gafaelfawr"` | Name under which to log metrics. Generally there is no reason to change this. | | config.metrics.enabled | bool | `false` | Whether to enable sending metrics | | config.metrics.events.topicPrefix | string | `"lsst.square.metrics.events"` | Topic prefix for events. It may sometimes be useful to change this in development environments. | | config.metrics.schemaManager.registryUrl | string | Sasquatch in the local cluster | URL of the Confluent-compatible schema registry server | diff --git a/applications/gafaelfawr/values.yaml b/applications/gafaelfawr/values.yaml index 8225f53e99..4e39060409 100644 --- a/applications/gafaelfawr/values.yaml +++ b/applications/gafaelfawr/values.yaml @@ -230,7 +230,7 @@ config: # -- Name under which to log metrics. Generally there is no reason to # change this. 
- appName: "gafaelfawr" + application: "gafaelfawr" events: # -- Topic prefix for events. It may sometimes be useful to change this diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 77b7ba8072..bc03b4ca9e 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -84,14 +84,14 @@ Rubin Observatory's telemetry service | strimzi-registry-operator.operatorNamespace | string | `"sasquatch"` | Namespace where the strimzi-registry-operator is deployed | | telegraf-kafka-consumer | object | `{}` | Overrides for telegraf-kafka-consumer configuration | | app-metrics.affinity | object | `{}` | Affinity for pod assignment | -| app-metrics.apps | list | `[]` | A list of applications that will publish metrics events, and the keys that should be ingested into InfluxDB as tags. The names should be the same as the app names in Phalanx. | +| app-metrics.apps | list | `[]` | A list of applications that will publish metrics events, and the keys that should be ingested into InfluxDB as tags. The names should be the same as the app names in Phalanx. | | app-metrics.args | list | `[]` | Arguments passed to the Telegraf agent containers | -| app-metrics.cluster.name | string | `"sasquatch"` | | +| app-metrics.cluster.name | string | `"sasquatch"` | Name of the Strimzi cluster. Synchronize this with the cluster name in the parent Sasquatch chart. | | app-metrics.debug | bool | false | Run Telegraf in debug mode. | | app-metrics.env | list | See `values.yaml` | Telegraf agent enviroment variables | -| app-metrics.envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | -| app-metrics.globalAppConfig | object | See `values.yaml` | app-metrics configuration in any environment in which the subchart is enabled. This should stay globally specified here, and it shouldn't be overridden. 
See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | -| app-metrics.globalInfluxTags | list | `["app_name"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). These will be concatenated with the `influxTags` from `globalAppConfig` | +| app-metrics.envFromSecret | string | `""` | Name of the secret with values to be added to the environment | +| app-metrics.globalAppConfig | object | See `values.yaml` | app-metrics configuration in any environment in which the subchart is enabled. This should stay globally specified here, and it shouldn't be overridden. See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | +| app-metrics.globalInfluxTags | list | `["application"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). These will be concatenated with the `influxTags` from `globalAppConfig` | | app-metrics.image.pullPolicy | string | `"Always"` | Image pull policy | | app-metrics.image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | | app-metrics.image.tag | string | `"1.30.2-alpine"` | Telegraf image tag | @@ -100,7 +100,7 @@ Rubin Observatory's telemetry service | app-metrics.nodeSelector | object | `{}` | Node labels for pod assignment | | app-metrics.podAnnotations | object | `{}` | Annotations for telegraf-kafka-consumers pods | | app-metrics.podLabels | object | `{}` | Labels for telegraf-kafka-consumer pods | -| app-metrics.replicaCount | int | `3` | Number of Telegraf replicas. Multiple replicas increase availability. | +| app-metrics.replicaCount | int | `3` | Number of Telegraf replicas. Multiple replicas increase availability. 
| | app-metrics.resources | object | See `values.yaml` | Kubernetes resources requests and limits | | app-metrics.tolerations | list | `[]` | Tolerations for pod assignment | | influxdb-enterprise.bootstrap.auth.secretName | string | `"sasquatch"` | Enable authentication of the data nodes using this secret, by creating a username and password for an admin account. The secret must contain keys `username` and `password`. | diff --git a/applications/sasquatch/charts/app-metrics/README.md b/applications/sasquatch/charts/app-metrics/README.md index df8737703a..3f75c1aa74 100644 --- a/applications/sasquatch/charts/app-metrics/README.md +++ b/applications/sasquatch/charts/app-metrics/README.md @@ -7,14 +7,14 @@ Kafka topics, users, and a telegraf connector for metrics events. | Key | Type | Default | Description | |-----|------|---------|-------------| | affinity | object | `{}` | Affinity for pod assignment | -| apps | list | `[]` | A list of applications that will publish metrics events, and the keys that should be ingested into InfluxDB as tags. The names should be the same as the app names in Phalanx. | +| apps | list | `[]` | A list of applications that will publish metrics events, and the keys that should be ingested into InfluxDB as tags. The names should be the same as the app names in Phalanx. | | args | list | `[]` | Arguments passed to the Telegraf agent containers | -| cluster.name | string | `"sasquatch"` | | +| cluster.name | string | `"sasquatch"` | Name of the Strimzi cluster. Synchronize this with the cluster name in the parent Sasquatch chart. | | debug | bool | false | Run Telegraf in debug mode. | | env | list | See `values.yaml` | Telegraf agent enviroment variables | -| envFromSecret | string | `""` | Name of the secret with values to be added to the environment. | -| globalAppConfig | object | See `values.yaml` | app-metrics configuration in any environment in which the subchart is enabled. 
This should stay globally specified here, and it shouldn't be overridden. See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | -| globalInfluxTags | list | `["app_name"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). These will be concatenated with the `influxTags` from `globalAppConfig` | +| envFromSecret | string | `""` | Name of the secret with values to be added to the environment | +| globalAppConfig | object | See `values.yaml` | app-metrics configuration in any environment in which the subchart is enabled. This should stay globally specified here, and it shouldn't be overridden. See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) for the structure of this value. | +| globalInfluxTags | list | `["application"]` | Keys in an every event sent by any app that should be recorded in InfluxDB as "tags" (vs. "fields"). These will be concatenated with the `influxTags` from `globalAppConfig` | | image.pullPolicy | string | `"Always"` | Image pull policy | | image.repo | string | `"docker.io/library/telegraf"` | Telegraf image repository | | image.tag | string | `"1.30.2-alpine"` | Telegraf image tag | @@ -23,6 +23,6 @@ Kafka topics, users, and a telegraf connector for metrics events. | nodeSelector | object | `{}` | Node labels for pod assignment | | podAnnotations | object | `{}` | Annotations for telegraf-kafka-consumers pods | | podLabels | object | `{}` | Labels for telegraf-kafka-consumer pods | -| replicaCount | int | `3` | Number of Telegraf replicas. Multiple replicas increase availability. | +| replicaCount | int | `3` | Number of Telegraf replicas. Multiple replicas increase availability. 
| | resources | object | See `values.yaml` | Kubernetes resources requests and limits | | tolerations | list | `[]` | Tolerations for pod assignment | diff --git a/applications/sasquatch/charts/app-metrics/values.yaml b/applications/sasquatch/charts/app-metrics/values.yaml index ce140111b2..9ddfbc4bb4 100644 --- a/applications/sasquatch/charts/app-metrics/values.yaml +++ b/applications/sasquatch/charts/app-metrics/values.yaml @@ -2,10 +2,9 @@ # -- app-metrics configuration in any environment in which the subchart is # enabled. This should stay globally specified here, and it shouldn't be -# overridden. -# See [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) +# overridden. See +# [here](https://sasquatch.lsst.io/user-guide/app-metrics.html#configuration) # for the structure of this value. -# # @default -- See `values.yaml` globalAppConfig: gafaelfawr: @@ -16,22 +15,22 @@ globalAppConfig: influxTags: - "type" -# -- A list of applications that will publish metrics events, and the keys that should be ingested into InfluxDB as tags. -# The names should be the same as the app names in Phalanx. +# -- A list of applications that will publish metrics events, and the keys +# that should be ingested into InfluxDB as tags. The names should be the same +# as the app names in Phalanx. apps: [] -# -- Keys in an every event sent by any app that should be recorded in InfluxDB -# as "tags" (vs. "fields"). These will be concatenated with the `influxTags` from -# `globalAppConfig` -globalInfluxTags: ["app_name"] +# -- Keys in an every event sent by any app that should be recorded in +# InfluxDB as "tags" (vs. "fields"). These will be concatenated with the +# `influxTags` from `globalAppConfig` +globalInfluxTags: ["application"] cluster: - # The name of the Strimzi cluster. Synchronize this with the cluster name in + # -- Name of the Strimzi cluster. Synchronize this with the cluster name in # the parent Sasquatch chart. 
name: sasquatch -# These values refer to the telegraf deployment and config - +# These values refer to the Telegraf deployment and config image: # -- Telegraf image repository repo: "docker.io/library/telegraf" @@ -76,7 +75,7 @@ env: # InfluxDB v1 password key: influxdb-password -# -- Name of the secret with values to be added to the environment. +# -- Name of the secret with values to be added to the environment envFromSecret: "" # -- Run Telegraf in debug mode. @@ -87,10 +86,9 @@ influxdb: # -- URL of the InfluxDB v1 instance to write to url: "http://sasquatch-influxdb.sasquatch:8086" -# -- Number of Telegraf replicas. Multiple replicas increase availability. +# -- Number of Telegraf replicas. Multiple replicas increase availability. replicaCount: 3 - # -- Kubernetes resources requests and limits # @default -- See `values.yaml` resources: From ff946bb4b2f000cf56815b70ecbcc709d20098aa Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 18 Oct 2024 15:53:51 -0700 Subject: [PATCH 342/567] Updates for changing versions. 
--- applications/obsenv-management/README.md | 1 + applications/obsenv-management/charts/obsenv-ui/README.md | 1 + .../charts/obsenv-ui/templates/configmap.yaml | 2 +- applications/obsenv-management/charts/obsenv-ui/values.yaml | 3 +++ applications/obsenv-management/values-tucson-teststand.yaml | 5 +++-- 5 files changed, 9 insertions(+), 3 deletions(-) diff --git a/applications/obsenv-management/README.md b/applications/obsenv-management/README.md index 7983fbc0b3..9055d587ca 100644 --- a/applications/obsenv-management/README.md +++ b/applications/obsenv-management/README.md @@ -34,6 +34,7 @@ Rubin Observatory Environment Management System | obsenv-api.securityContext.user | int | `72091` | User ID | | obsenv-api.tolerations | list | `[]` | Tolerations for the obsenv-api deployment pod | | obsenv-ui.affinity | object | `{}` | Affinity rules for the obsenv-ui deployment pod | +| obsenv-ui.config.authGroup | string | `"test-group"` | The group used to authorize users to change the package versions | | obsenv-ui.config.logLevel | string | `"INFO"` | Logging level | | obsenv-ui.config.logProfile | string | `"production"` | Logging profile (`production` for JSON, `development` for human-friendly) | | obsenv-ui.config.pathPrefix | string | `"/obsenv-ui"` | URL path prefix | diff --git a/applications/obsenv-management/charts/obsenv-ui/README.md b/applications/obsenv-management/charts/obsenv-ui/README.md index 1127616b1b..3fcb2ce527 100644 --- a/applications/obsenv-management/charts/obsenv-ui/README.md +++ b/applications/obsenv-management/charts/obsenv-ui/README.md @@ -7,6 +7,7 @@ Helm chart for the Observatory Environment Management UI. 
| Key | Type | Default | Description | |-----|------|---------|-------------| | affinity | object | `{}` | Affinity rules for the obsenv-ui deployment pod | +| config.authGroup | string | `"test-group"` | The group used to authorize users to change the package versions | | config.logLevel | string | `"INFO"` | Logging level | | config.logProfile | string | `"production"` | Logging profile (`production` for JSON, `development` for human-friendly) | | config.pathPrefix | string | `"/obsenv-ui"` | URL path prefix | diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml index bb38aed72f..80eabfd0cc 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/configmap.yaml @@ -7,4 +7,4 @@ metadata: data: BASE_URL: {{ .Values.global.baseUrl | quote }} OBSENV_API: "http://obsenv-api:8080/obsenv-api" - AUTH_GROUP: "lsst-ts-integration-testing-team" + AUTH_GROUP: {{ .Values.config.authGroup | quote }} diff --git a/applications/obsenv-management/charts/obsenv-ui/values.yaml b/applications/obsenv-management/charts/obsenv-ui/values.yaml index d4c92e1857..b8a325a3a6 100644 --- a/applications/obsenv-management/charts/obsenv-ui/values.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/values.yaml @@ -23,6 +23,9 @@ config: # -- URL path prefix pathPrefix: "/obsenv-ui" + # -- The group used to authorize users to change the package versions + authGroup: "test-group" + ingress: # -- Additional annotations for the ingress rule annotations: {} diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 6c0759ce10..091576856e 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -1,7 +1,7 @@ obsenv-api: image: 
repository: rubin-cr.lsst.org/obsenv-api - tag: 0.1.0 + tag: tickets/DM-46822 pullPolicy: Always config: logLevel: "DEBUG" @@ -11,7 +11,8 @@ obsenv-api: obsenv-ui: image: repository: rubin-cr.lsst.org/obsenv-ui - tag: 0.1.0 + tag: tickets/DM-46822 pullPolicy: Always config: pathPrefix: /obsenv-management + authGroup: obsenv-admin-group From 5d7e0d3151c4fb336a4e1130f4e4c74f06e97803 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Fri, 18 Oct 2024 16:15:27 -0700 Subject: [PATCH 343/567] Disable vo-cutouts schema updates These schema updates have been complete, so disable schema updates to return to the default. --- applications/vo-cutouts/values-idfint.yaml | 1 - applications/vo-cutouts/values-idfprod.yaml | 1 - 2 files changed, 2 deletions(-) diff --git a/applications/vo-cutouts/values-idfint.yaml b/applications/vo-cutouts/values-idfint.yaml index 9239f30c7d..b7e41291fd 100644 --- a/applications/vo-cutouts/values-idfint.yaml +++ b/applications/vo-cutouts/values-idfint.yaml @@ -1,7 +1,6 @@ config: serviceAccount: "vo-cutouts@science-platform-int-dc5d.iam.gserviceaccount.com" storageBucketUrl: "gs://rubin-cutouts-int-us-central1-output/" - updateSchema: true cloudsql: enabled: true diff --git a/applications/vo-cutouts/values-idfprod.yaml b/applications/vo-cutouts/values-idfprod.yaml index 53657a6e3c..461cb96fe5 100644 --- a/applications/vo-cutouts/values-idfprod.yaml +++ b/applications/vo-cutouts/values-idfprod.yaml @@ -1,7 +1,6 @@ config: serviceAccount: "vo-cutouts@science-platform-stable-6994.iam.gserviceaccount.com" storageBucketUrl: "gs://rubin-cutouts-stable-us-central1-output/" - updateSchema: true cloudsql: enabled: true From 71f74c30f342d218e97154b11e30a77c9439cd94 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 09:41:13 +0000 Subject: [PATCH 344/567] chore(deps): update helm release argo-cd to v7.6.12 --- applications/argocd/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/applications/argocd/Chart.yaml b/applications/argocd/Chart.yaml index 0c95280ff9..db65dd7abc 100644 --- a/applications/argocd/Chart.yaml +++ b/applications/argocd/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-cd - version: 7.6.8 + version: 7.6.12 repository: https://argoproj.github.io/argo-helm From a23094a5a6cdd4c932954d32a1a6bacac7fff15a Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 09:41:17 +0000 Subject: [PATCH 345/567] chore(deps): update helm release telegraf to v1.8.55 --- applications/telegraf/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/telegraf/Chart.yaml b/applications/telegraf/Chart.yaml index 33c097cea8..e0cd75bc86 100644 --- a/applications/telegraf/Chart.yaml +++ b/applications/telegraf/Chart.yaml @@ -8,7 +8,7 @@ sources: - https://github.com/influxdata/helm-charts dependencies: - name: telegraf - version: 1.8.54 + version: 1.8.55 repository: https://helm.influxdata.com/ annotations: phalanx.lsst.io/docs: | From 72ea3988b5942bfeb4e23c4d0da06e45ca295d8e Mon Sep 17 00:00:00 2001 From: MAINETTI Gabriele Date: Mon, 21 Oct 2024 16:35:34 +0200 Subject: [PATCH 346/567] trying workaround for Gafaelfawr kafka issue --- applications/gafaelfawr/values-ccin2p3.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/applications/gafaelfawr/values-ccin2p3.yaml b/applications/gafaelfawr/values-ccin2p3.yaml index 7d59a28426..a50bc8e976 100644 --- a/applications/gafaelfawr/values-ccin2p3.yaml +++ b/applications/gafaelfawr/values-ccin2p3.yaml @@ -14,6 +14,9 @@ config: # github: # clientId: ae314e45a6af43ea910a + metrics: + application: gafaelfawr + enabled: false oidc: clientId: "lsst_rsp" From 6484c5688fbe45532e625cf7d909ff121d644256 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 15:39:51 +0000 
Subject: [PATCH 347/567] chore(deps): update helm release telegraf-ds to v1.1.35 --- applications/telegraf-ds/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/telegraf-ds/Chart.yaml b/applications/telegraf-ds/Chart.yaml index 8cb53aec89..a2356af0ab 100644 --- a/applications/telegraf-ds/Chart.yaml +++ b/applications/telegraf-ds/Chart.yaml @@ -8,7 +8,7 @@ sources: - https://github.com/influxdata/helm-charts dependencies: - name: telegraf-ds - version: 1.1.34 + version: 1.1.35 repository: https://helm.influxdata.com/ annotations: phalanx.lsst.io/docs: | From 315ad8752c90bbf74a6c380e2f2f2c96d46f1397 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 15:39:54 +0000 Subject: [PATCH 348/567] chore(deps): update postgres docker tag to v17 --- applications/siav2/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/siav2/values.yaml b/applications/siav2/values.yaml index 6547f8f87a..f9ce8acc2f 100644 --- a/applications/siav2/values.yaml +++ b/applications/siav2/values.yaml @@ -79,7 +79,7 @@ uws: pullPolicy: "IfNotPresent" # -- Tag of UWS database image to use - tag: "16.4" + tag: "17.0" # -- Resource limits and requests for the UWS database pod resources: From 854a7a3af09c58493bf5ddb195bb2aef279538b2 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 21 Oct 2024 08:40:57 -0700 Subject: [PATCH 349/567] Update Helm docs --- applications/siav2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/siav2/README.md b/applications/siav2/README.md index 82aaee32ae..92993339b1 100644 --- a/applications/siav2/README.md +++ b/applications/siav2/README.md @@ -28,7 +28,7 @@ Simple Image Access v2 service | uws.affinity | object | `{}` | Affinity rules for the UWS database pod | | uws.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the UWS database image | | uws.image.repository | string | 
`"library/postgres"` | UWS database image to use | -| uws.image.tag | string | `"16.4"` | Tag of UWS database image to use | +| uws.image.tag | string | `"17.0"` | Tag of UWS database image to use | | uws.nodeSelector | object | `{}` | Node selection rules for the UWS database pod | | uws.podAnnotations | object | `{}` | Annotations for the UWS databse pod | | uws.resources | object | `{"limits":{"cpu":2,"memory":"4Gi"},"requests":{"cpu":0.25,"memory":"1Gi"}}` | Resource limits and requests for the UWS database pod | From 0422bed83bd39c56aad239c58d84d7b9b1b089f9 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 21 Oct 2024 11:36:04 -0700 Subject: [PATCH 350/567] Update to Gafaelfawr 12.0.1 Fixes configuration handling when metrics are disabled. --- applications/gafaelfawr/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/gafaelfawr/Chart.yaml b/applications/gafaelfawr/Chart.yaml index ca77327765..5a04e7190b 100644 --- a/applications/gafaelfawr/Chart.yaml +++ b/applications/gafaelfawr/Chart.yaml @@ -5,7 +5,7 @@ description: "Authentication and identity system" home: "https://gafaelfawr.lsst.io/" sources: - "https://github.com/lsst-sqre/gafaelfawr" -appVersion: 12.0.0 +appVersion: 12.0.1 dependencies: - name: "redis" From 6ae8a79399467ec684783de6006d10527261343a Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Mon, 14 Oct 2024 12:52:15 -0700 Subject: [PATCH 351/567] Configure Next Visit Fanout's supported instruments. These instruments are currently hard-coded into the fan-out service. This commit does not remove all the hard-coding, so the config should not be changed yet. 
--- applications/next-visit-fan-out/README.md | 1 + applications/next-visit-fan-out/templates/deployment.yaml | 2 ++ applications/next-visit-fan-out/values.yaml | 3 +++ 3 files changed, 6 insertions(+) diff --git a/applications/next-visit-fan-out/README.md b/applications/next-visit-fan-out/README.md index e9c699a48e..9623e3f4bf 100644 --- a/applications/next-visit-fan-out/README.md +++ b/applications/next-visit-fan-out/README.md @@ -15,6 +15,7 @@ Poll next visit events from Kafka, duplicate them, and send them to all applicat | image.pullPolicy | string | `"IfNotPresent"` | | | image.repository | string | `"us-central1-docker.pkg.dev/prompt-proto/prompt/nextvisit-fanout"` | | | image.tag | string | `""` | | +| instruments | string | `"LATISS LSSTCam LSSTComCam LSSTComCamSim HSC"` | The instruments that are initialized when the fan-out service starts up as a space-delimited string. | | kafka.expiration | float | `3600` | Maximum message age to consider, in seconds. | | kafka.offset | string | `"latest"` | | | kafka.saslMechamism | string | `"SCRAM-SHA-512"` | | diff --git a/applications/next-visit-fan-out/templates/deployment.yaml b/applications/next-visit-fan-out/templates/deployment.yaml index 115f8c38e8..bc949d461d 100644 --- a/applications/next-visit-fan-out/templates/deployment.yaml +++ b/applications/next-visit-fan-out/templates/deployment.yaml @@ -61,6 +61,8 @@ spec: secretKeyRef: key: kafka_pp_sasl_password name: {{ template "next-visit-fan-out.fullname" . 
}}-secret + - name: SUPPORTED_INSTRUMENTS + value: {{ .Values.instruments }} resources: {{- toYaml .Values.resources | nindent 12 }} diff --git a/applications/next-visit-fan-out/values.yaml b/applications/next-visit-fan-out/values.yaml index 4adeaa8669..7acd4bd3f5 100644 --- a/applications/next-visit-fan-out/values.yaml +++ b/applications/next-visit-fan-out/values.yaml @@ -56,3 +56,6 @@ global: # -- Base path for Vault secrets # @default -- Set by Argo CD vaultSecretsPath: "" + +# -- The instruments that are initialized when the fan-out service starts up as a space-delimited string. +instruments: "LATISS LSSTCam LSSTComCam LSSTComCamSim HSC" From 3a1aa294ae2e55cfd6fb59440204043c45cbf763 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Mon, 14 Oct 2024 11:25:28 -0700 Subject: [PATCH 352/567] Factor detector.yaml out of Docker image for Next Visit Fan Out. --- applications/next-visit-fan-out/README.md | 5 +- .../templates/deployment.yaml | 10 + .../templates/detectors.yaml | 8 + applications/next-visit-fan-out/values.yaml | 336 +++++++++++++++++- 4 files changed, 356 insertions(+), 3 deletions(-) create mode 100644 applications/next-visit-fan-out/templates/detectors.yaml diff --git a/applications/next-visit-fan-out/README.md b/applications/next-visit-fan-out/README.md index 9623e3f4bf..0e49da71b4 100644 --- a/applications/next-visit-fan-out/README.md +++ b/applications/next-visit-fan-out/README.md @@ -7,7 +7,8 @@ Poll next visit events from Kafka, duplicate them, and send them to all applicat | Key | Type | Default | Description | |-----|------|---------|-------------| | affinity | object | `{}` | Affinity rules for the next-visit-fan-out deployment pod | -| detectorConfigFile | string | `"detector.yaml"` | | +| detectorConfig | object | See `values.yaml`. | A mapping, for each instrument, of detector number to whether that detector is "active" (i.e., producing images). 
| +| detectorConfigFile | string | `"/etc/config/detector.yaml"` | | | fullnameOverride | string | `""` | | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | @@ -15,7 +16,7 @@ Poll next visit events from Kafka, duplicate them, and send them to all applicat | image.pullPolicy | string | `"IfNotPresent"` | | | image.repository | string | `"us-central1-docker.pkg.dev/prompt-proto/prompt/nextvisit-fanout"` | | | image.tag | string | `""` | | -| instruments | string | `"LATISS LSSTCam LSSTComCam LSSTComCamSim HSC"` | The instruments that are initialized when the fan-out service starts up as a space-delimited string. | +| instruments | string | `"LATISS LSSTCam LSSTComCam LSSTComCamSim HSC"` | The instruments that are initialized when the fan-out service starts up as a space-delimited string. This list is a subset of the keys of `detectorConfig` because the latter handles some special cases. | | kafka.expiration | float | `3600` | Maximum message age to consider, in seconds. | | kafka.offset | string | `"latest"` | | | kafka.saslMechamism | string | `"SCRAM-SHA-512"` | | diff --git a/applications/next-visit-fan-out/templates/deployment.yaml b/applications/next-visit-fan-out/templates/deployment.yaml index bc949d461d..7e002923e9 100644 --- a/applications/next-visit-fan-out/templates/deployment.yaml +++ b/applications/next-visit-fan-out/templates/deployment.yaml @@ -65,8 +65,18 @@ spec: value: {{ .Values.instruments }} resources: {{- toYaml .Values.resources | nindent 12 }} + volumeMounts: + - name: detector-config + mountPath: {{ .Values.detectorConfigFile | dir }} + readOnly: true volumes: - name: kafka-sasl-prompt-prompt-processing secret: secretName: {{ template "next-visit-fan-out.fullname" . 
}}-secret + - name: detector-config + configMap: + name: detector-map + items: + - key: "detectors.status" + path: {{ .Values.detectorConfigFile | base }} diff --git a/applications/next-visit-fan-out/templates/detectors.yaml b/applications/next-visit-fan-out/templates/detectors.yaml new file mode 100644 index 0000000000..d1609425f8 --- /dev/null +++ b/applications/next-visit-fan-out/templates/detectors.yaml @@ -0,0 +1,8 @@ +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: detector-map +data: + detectors.status: | + {{- .Values.detectorConfig | toYaml | nindent 4 }} diff --git a/applications/next-visit-fan-out/values.yaml b/applications/next-visit-fan-out/values.yaml index 7acd4bd3f5..e1ab9f1c86 100644 --- a/applications/next-visit-fan-out/values.yaml +++ b/applications/next-visit-fan-out/values.yaml @@ -5,7 +5,7 @@ knative: lsstcomcamsimUrl: http://prompt-proto-service-lsstcomcamsim.prompt-proto-service-lsstcomcamsim/next-visit lsstcamUrl: http://prompt-proto-service-lsstcam.prompt-proto-service-lsstcam/next-visit -detectorConfigFile: detector.yaml +detectorConfigFile: /etc/config/detector.yaml kafka: offset: latest @@ -58,4 +58,338 @@ global: vaultSecretsPath: "" # -- The instruments that are initialized when the fan-out service starts up as a space-delimited string. +# This list is a subset of the keys of `detectorConfig` because the latter handles some special cases. instruments: "LATISS LSSTCam LSSTComCam LSSTComCamSim HSC" + +# -- A mapping, for each instrument, of detector number to whether that detector is "active" (i.e., producing images). +# @default -- See `values.yaml`. 
+detectorConfig: + LATISS: + detectors: + 0: True + LSSTComCam: + detectors: + 0: True + 1: True + 2: True + 3: True + 4: True + 5: True + 6: True + 7: True + 8: True + LSSTCam: + detectors: + 0: False + 1: False + 2: False + 3: False + 4: False + 5: False + 6: False + 7: False + 8: False + 9: False + 10: False + 11: False + 12: False + 13: False + 14: False + 15: False + 16: False + 17: False + 18: False + 19: False + 20: False + 21: False + 22: False + 23: False + 24: False + 25: False + 26: False + 27: False + 28: False + 29: False + 30: False + 31: False + 32: False + 33: False + 34: False + 35: False + 36: False + 37: False + 38: False + 39: False + 40: False + 41: False + 42: False + 43: False + 44: False + 45: False + 46: False + 47: False + 48: False + 49: False + 50: False + 51: False + 52: False + 53: False + 54: False + 55: False + 56: False + 57: False + 58: False + 59: False + 60: False + 61: False + 62: False + 63: False + 64: False + 65: False + 66: False + 67: False + 68: False + 69: False + 70: False + 71: False + 72: False + 73: False + 74: False + 75: False + 76: False + 77: False + 78: False + 79: False + 80: False + 81: False + 82: False + 83: False + 84: False + 85: False + 86: False + 87: False + 88: False + 89: False + 90: False + 91: False + 92: False + 93: False + 94: False + 95: False + 96: False + 97: False + 98: False + 99: False + 100: False + 101: False + 102: False + 103: False + 104: False + 105: False + 106: False + 107: False + 108: False + 109: False + 110: False + 111: False + 112: False + 113: False + 114: False + 115: False + 116: False + 117: False + 118: False + 119: False + 120: False + 121: False + 122: False + 123: False + 124: False + 125: False + 126: False + 127: False + 128: False + 129: False + 130: False + 131: False + 132: False + 133: False + 134: False + 135: False + 136: False + 137: False + 138: False + 139: False + 140: False + 141: False + 142: False + 143: False + 144: False + 145: False + 146: False + 147: 
False + 148: False + 149: False + 150: False + 151: False + 152: False + 153: False + 154: False + 155: False + 156: False + 157: False + 158: False + 159: False + 160: False + 161: False + 162: False + 163: False + 164: False + 165: False + 166: False + 167: False + 168: False + 169: False + 170: False + 171: False + 172: False + 173: False + 174: False + 175: False + 176: False + 177: False + 178: False + 179: False + 180: False + 181: False + 182: False + 183: False + 184: False + 185: False + 186: False + 187: False + 188: False + HSC: + detectors: + 0: True + 1: True + 2: True + 3: True + 4: True + 5: True + 6: True + 7: True + 8: True + 9: False + 10: True + 11: True + 12: True + 13: True + 14: True + 15: True + 16: True + 17: True + 18: True + 19: True + 20: True + 21: True + 22: True + 23: True + 24: True + 25: True + 26: True + 27: True + 28: True + 29: True + 30: True + 31: True + 32: True + 33: True + 34: True + 35: True + 36: True + 37: True + 38: True + 39: True + 40: True + 41: True + 42: True + 43: True + 44: True + 45: True + 46: True + 47: True + 48: True + 49: True + 50: True + 51: True + 52: True + 53: True + 54: True + 55: True + 56: True + 57: True + 58: True + 59: True + 60: True + 61: True + 62: True + 63: True + 64: True + 65: True + 66: True + 67: True + 68: True + 69: True + 70: True + 71: True + 72: True + 73: True + 74: True + 75: True + 76: True + 77: True + 78: True + 79: True + 80: True + 81: True + 82: True + 83: True + 84: True + 85: True + 86: True + 87: True + 88: True + 89: True + 90: True + 91: True + 92: True + 93: True + 94: True + 95: True + 96: True + 97: True + 98: True + 99: True + 100: True + 101: True + 102: True + 103: True + HSC-TEST-59134: + detectors: + 0: True + 4: True + 5: True + HSC-TEST-59142: + detectors: + 0: True + 5: True + 11: True + HSC-TEST-59150: + detectors: + 50: True + 58: True + HSC-TEST-59160: + detectors: + 43: True + 51: True From 6747e0f55c456870a5b42c056e7dfec23b3265f5 Mon Sep 17 00:00:00 2001 
From: Krzysztof Findeisen Date: Mon, 14 Oct 2024 12:27:31 -0700 Subject: [PATCH 353/567] Remove configurable detector file for Next Visit Fan Out. Now that the detector config is provided through Helm, a configurable filename is redundant, and a poorly chosen mount point will break other parts of the app. A hardcoded path that's guaranteed not to clash is safer. --- applications/next-visit-fan-out/README.md | 1 - applications/next-visit-fan-out/templates/deployment.yaml | 8 ++++---- applications/next-visit-fan-out/values.yaml | 2 -- 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/applications/next-visit-fan-out/README.md b/applications/next-visit-fan-out/README.md index 0e49da71b4..f54f3d263c 100644 --- a/applications/next-visit-fan-out/README.md +++ b/applications/next-visit-fan-out/README.md @@ -8,7 +8,6 @@ Poll next visit events from Kafka, duplicate them, and send them to all applicat |-----|------|---------|-------------| | affinity | object | `{}` | Affinity rules for the next-visit-fan-out deployment pod | | detectorConfig | object | See `values.yaml`. | A mapping, for each instrument, of detector number to whether that detector is "active" (i.e., producing images). 
| -| detectorConfigFile | string | `"/etc/config/detector.yaml"` | | | fullnameOverride | string | `""` | | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | diff --git a/applications/next-visit-fan-out/templates/deployment.yaml b/applications/next-visit-fan-out/templates/deployment.yaml index 7e002923e9..9b1189876c 100644 --- a/applications/next-visit-fan-out/templates/deployment.yaml +++ b/applications/next-visit-fan-out/templates/deployment.yaml @@ -33,8 +33,6 @@ spec: value: {{ .Values.knative.lsstcomcamsimUrl }} - name: LSSTCAM_KNATIVE_SERVING_URL value: {{ .Values.knative.lsstcamUrl }} - - name: DETECTOR_CONFIG_FILE - value: {{ .Values.detectorConfigFile }} - name: KAFKA_SCHEMA_REGISTRY_URL value: {{ .Values.kafka.schemaRegistryUrl }} - name: KAFKA_CLUSTER @@ -63,11 +61,13 @@ spec: name: {{ template "next-visit-fan-out.fullname" . }}-secret - name: SUPPORTED_INSTRUMENTS value: {{ .Values.instruments }} + - name: DETECTOR_CONFIG_FILE + value: /etc/config/detector.yaml resources: {{- toYaml .Values.resources | nindent 12 }} volumeMounts: - name: detector-config - mountPath: {{ .Values.detectorConfigFile | dir }} + mountPath: /etc/config readOnly: true volumes: @@ -79,4 +79,4 @@ spec: name: detector-map items: - key: "detectors.status" - path: {{ .Values.detectorConfigFile | base }} + path: detector.yaml diff --git a/applications/next-visit-fan-out/values.yaml b/applications/next-visit-fan-out/values.yaml index e1ab9f1c86..27a4ae98cf 100644 --- a/applications/next-visit-fan-out/values.yaml +++ b/applications/next-visit-fan-out/values.yaml @@ -5,8 +5,6 @@ knative: lsstcomcamsimUrl: http://prompt-proto-service-lsstcomcamsim.prompt-proto-service-lsstcomcamsim/next-visit lsstcamUrl: http://prompt-proto-service-lsstcam.prompt-proto-service-lsstcam/next-visit -detectorConfigFile: /etc/config/detector.yaml - kafka: offset: latest saslMechamism: SCRAM-SHA-512 From 
ccd5701272a73d67134e8a3d31269e0affd758ee Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Wed, 16 Oct 2024 09:39:48 -0700 Subject: [PATCH 354/567] Rename Next Visit Fan Out's detector config to instrument config. The old format was called a "detector" config despite having room for more fields. Calling it an instrument config makes it more natural to add other instrument-specific information to the file. --- .../next-visit-fan-out/templates/deployment.yaml | 14 +++++++------- .../templates/{detectors.yaml => instruments.yaml} | 4 ++-- 2 files changed, 9 insertions(+), 9 deletions(-) rename applications/next-visit-fan-out/templates/{detectors.yaml => instruments.yaml} (71%) diff --git a/applications/next-visit-fan-out/templates/deployment.yaml b/applications/next-visit-fan-out/templates/deployment.yaml index 9b1189876c..9f586970ab 100644 --- a/applications/next-visit-fan-out/templates/deployment.yaml +++ b/applications/next-visit-fan-out/templates/deployment.yaml @@ -61,12 +61,12 @@ spec: name: {{ template "next-visit-fan-out.fullname" . }}-secret - name: SUPPORTED_INSTRUMENTS value: {{ .Values.instruments }} - - name: DETECTOR_CONFIG_FILE - value: /etc/config/detector.yaml + - name: INSTRUMENT_CONFIG_FILE + value: /etc/config/instrument.yaml resources: {{- toYaml .Values.resources | nindent 12 }} volumeMounts: - - name: detector-config + - name: instrument-config mountPath: /etc/config readOnly: true @@ -74,9 +74,9 @@ spec: - name: kafka-sasl-prompt-prompt-processing secret: secretName: {{ template "next-visit-fan-out.fullname" . 
}}-secret - - name: detector-config + - name: instrument-config configMap: - name: detector-map + name: instrument-map items: - - key: "detectors.status" - path: detector.yaml + - key: "instruments" + path: instrument.yaml diff --git a/applications/next-visit-fan-out/templates/detectors.yaml b/applications/next-visit-fan-out/templates/instruments.yaml similarity index 71% rename from applications/next-visit-fan-out/templates/detectors.yaml rename to applications/next-visit-fan-out/templates/instruments.yaml index d1609425f8..e90e0d95a3 100644 --- a/applications/next-visit-fan-out/templates/detectors.yaml +++ b/applications/next-visit-fan-out/templates/instruments.yaml @@ -2,7 +2,7 @@ apiVersion: v1 kind: ConfigMap metadata: - name: detector-map + name: instrument-map data: - detectors.status: | + instruments: | {{- .Values.detectorConfig | toYaml | nindent 4 }} From cd65c4ef63a8fdbe99307cf7b83d18ce94e9b245 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Wed, 16 Oct 2024 09:46:11 -0700 Subject: [PATCH 355/567] Transpose Fan Out service's detector config. The original config was organized instrument->detectors->detector, which required the entire config to be overridden if only one per-instrument property was environment-dependent. The new config is organized detectors->instrument->detector, which allows non-detector configs to be changed without touching or duplicating the detectors. 
--- .../templates/instruments.yaml | 3 +- applications/next-visit-fan-out/values.yaml | 634 +++++++++--------- 2 files changed, 315 insertions(+), 322 deletions(-) diff --git a/applications/next-visit-fan-out/templates/instruments.yaml b/applications/next-visit-fan-out/templates/instruments.yaml index e90e0d95a3..25ad15a92f 100644 --- a/applications/next-visit-fan-out/templates/instruments.yaml +++ b/applications/next-visit-fan-out/templates/instruments.yaml @@ -5,4 +5,5 @@ metadata: name: instrument-map data: instruments: | - {{- .Values.detectorConfig | toYaml | nindent 4 }} + detectors: + {{- .Values.detectorConfig | toYaml | nindent 6 }} diff --git a/applications/next-visit-fan-out/values.yaml b/applications/next-visit-fan-out/values.yaml index 27a4ae98cf..9a1e7301d4 100644 --- a/applications/next-visit-fan-out/values.yaml +++ b/applications/next-visit-fan-out/values.yaml @@ -63,331 +63,323 @@ instruments: "LATISS LSSTCam LSSTComCam LSSTComCamSim HSC" # @default -- See `values.yaml`. 
detectorConfig: LATISS: - detectors: - 0: True + 0: True LSSTComCam: - detectors: - 0: True - 1: True - 2: True - 3: True - 4: True - 5: True - 6: True - 7: True - 8: True + 0: True + 1: True + 2: True + 3: True + 4: True + 5: True + 6: True + 7: True + 8: True LSSTCam: - detectors: - 0: False - 1: False - 2: False - 3: False - 4: False - 5: False - 6: False - 7: False - 8: False - 9: False - 10: False - 11: False - 12: False - 13: False - 14: False - 15: False - 16: False - 17: False - 18: False - 19: False - 20: False - 21: False - 22: False - 23: False - 24: False - 25: False - 26: False - 27: False - 28: False - 29: False - 30: False - 31: False - 32: False - 33: False - 34: False - 35: False - 36: False - 37: False - 38: False - 39: False - 40: False - 41: False - 42: False - 43: False - 44: False - 45: False - 46: False - 47: False - 48: False - 49: False - 50: False - 51: False - 52: False - 53: False - 54: False - 55: False - 56: False - 57: False - 58: False - 59: False - 60: False - 61: False - 62: False - 63: False - 64: False - 65: False - 66: False - 67: False - 68: False - 69: False - 70: False - 71: False - 72: False - 73: False - 74: False - 75: False - 76: False - 77: False - 78: False - 79: False - 80: False - 81: False - 82: False - 83: False - 84: False - 85: False - 86: False - 87: False - 88: False - 89: False - 90: False - 91: False - 92: False - 93: False - 94: False - 95: False - 96: False - 97: False - 98: False - 99: False - 100: False - 101: False - 102: False - 103: False - 104: False - 105: False - 106: False - 107: False - 108: False - 109: False - 110: False - 111: False - 112: False - 113: False - 114: False - 115: False - 116: False - 117: False - 118: False - 119: False - 120: False - 121: False - 122: False - 123: False - 124: False - 125: False - 126: False - 127: False - 128: False - 129: False - 130: False - 131: False - 132: False - 133: False - 134: False - 135: False - 136: False - 137: False - 138: False - 139: False - 
140: False - 141: False - 142: False - 143: False - 144: False - 145: False - 146: False - 147: False - 148: False - 149: False - 150: False - 151: False - 152: False - 153: False - 154: False - 155: False - 156: False - 157: False - 158: False - 159: False - 160: False - 161: False - 162: False - 163: False - 164: False - 165: False - 166: False - 167: False - 168: False - 169: False - 170: False - 171: False - 172: False - 173: False - 174: False - 175: False - 176: False - 177: False - 178: False - 179: False - 180: False - 181: False - 182: False - 183: False - 184: False - 185: False - 186: False - 187: False - 188: False + 0: False + 1: False + 2: False + 3: False + 4: False + 5: False + 6: False + 7: False + 8: False + 9: False + 10: False + 11: False + 12: False + 13: False + 14: False + 15: False + 16: False + 17: False + 18: False + 19: False + 20: False + 21: False + 22: False + 23: False + 24: False + 25: False + 26: False + 27: False + 28: False + 29: False + 30: False + 31: False + 32: False + 33: False + 34: False + 35: False + 36: False + 37: False + 38: False + 39: False + 40: False + 41: False + 42: False + 43: False + 44: False + 45: False + 46: False + 47: False + 48: False + 49: False + 50: False + 51: False + 52: False + 53: False + 54: False + 55: False + 56: False + 57: False + 58: False + 59: False + 60: False + 61: False + 62: False + 63: False + 64: False + 65: False + 66: False + 67: False + 68: False + 69: False + 70: False + 71: False + 72: False + 73: False + 74: False + 75: False + 76: False + 77: False + 78: False + 79: False + 80: False + 81: False + 82: False + 83: False + 84: False + 85: False + 86: False + 87: False + 88: False + 89: False + 90: False + 91: False + 92: False + 93: False + 94: False + 95: False + 96: False + 97: False + 98: False + 99: False + 100: False + 101: False + 102: False + 103: False + 104: False + 105: False + 106: False + 107: False + 108: False + 109: False + 110: False + 111: False + 112: False + 
113: False + 114: False + 115: False + 116: False + 117: False + 118: False + 119: False + 120: False + 121: False + 122: False + 123: False + 124: False + 125: False + 126: False + 127: False + 128: False + 129: False + 130: False + 131: False + 132: False + 133: False + 134: False + 135: False + 136: False + 137: False + 138: False + 139: False + 140: False + 141: False + 142: False + 143: False + 144: False + 145: False + 146: False + 147: False + 148: False + 149: False + 150: False + 151: False + 152: False + 153: False + 154: False + 155: False + 156: False + 157: False + 158: False + 159: False + 160: False + 161: False + 162: False + 163: False + 164: False + 165: False + 166: False + 167: False + 168: False + 169: False + 170: False + 171: False + 172: False + 173: False + 174: False + 175: False + 176: False + 177: False + 178: False + 179: False + 180: False + 181: False + 182: False + 183: False + 184: False + 185: False + 186: False + 187: False + 188: False HSC: - detectors: - 0: True - 1: True - 2: True - 3: True - 4: True - 5: True - 6: True - 7: True - 8: True - 9: False - 10: True - 11: True - 12: True - 13: True - 14: True - 15: True - 16: True - 17: True - 18: True - 19: True - 20: True - 21: True - 22: True - 23: True - 24: True - 25: True - 26: True - 27: True - 28: True - 29: True - 30: True - 31: True - 32: True - 33: True - 34: True - 35: True - 36: True - 37: True - 38: True - 39: True - 40: True - 41: True - 42: True - 43: True - 44: True - 45: True - 46: True - 47: True - 48: True - 49: True - 50: True - 51: True - 52: True - 53: True - 54: True - 55: True - 56: True - 57: True - 58: True - 59: True - 60: True - 61: True - 62: True - 63: True - 64: True - 65: True - 66: True - 67: True - 68: True - 69: True - 70: True - 71: True - 72: True - 73: True - 74: True - 75: True - 76: True - 77: True - 78: True - 79: True - 80: True - 81: True - 82: True - 83: True - 84: True - 85: True - 86: True - 87: True - 88: True - 89: True - 90: True - 
91: True - 92: True - 93: True - 94: True - 95: True - 96: True - 97: True - 98: True - 99: True - 100: True - 101: True - 102: True - 103: True + 0: True + 1: True + 2: True + 3: True + 4: True + 5: True + 6: True + 7: True + 8: True + 9: False + 10: True + 11: True + 12: True + 13: True + 14: True + 15: True + 16: True + 17: True + 18: True + 19: True + 20: True + 21: True + 22: True + 23: True + 24: True + 25: True + 26: True + 27: True + 28: True + 29: True + 30: True + 31: True + 32: True + 33: True + 34: True + 35: True + 36: True + 37: True + 38: True + 39: True + 40: True + 41: True + 42: True + 43: True + 44: True + 45: True + 46: True + 47: True + 48: True + 49: True + 50: True + 51: True + 52: True + 53: True + 54: True + 55: True + 56: True + 57: True + 58: True + 59: True + 60: True + 61: True + 62: True + 63: True + 64: True + 65: True + 66: True + 67: True + 68: True + 69: True + 70: True + 71: True + 72: True + 73: True + 74: True + 75: True + 76: True + 77: True + 78: True + 79: True + 80: True + 81: True + 82: True + 83: True + 84: True + 85: True + 86: True + 87: True + 88: True + 89: True + 90: True + 91: True + 92: True + 93: True + 94: True + 95: True + 96: True + 97: True + 98: True + 99: True + 100: True + 101: True + 102: True + 103: True HSC-TEST-59134: - detectors: - 0: True - 4: True - 5: True + 0: True + 4: True + 5: True HSC-TEST-59142: - detectors: - 0: True - 5: True - 11: True + 0: True + 5: True + 11: True HSC-TEST-59150: - detectors: - 50: True - 58: True + 50: True + 58: True HSC-TEST-59160: - detectors: - 43: True - 51: True + 43: True + 51: True From 5a7b96462c3cc141b70b7b1783d929c401f6cb97 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Mon, 14 Oct 2024 13:23:25 -0700 Subject: [PATCH 356/567] Move Next Visit Fan Out URLs from envvars to instrument map. This consolidates the instrument-specific configs in one file. The URLs are still kept separate from the detector config so that they can be overridden independently. 
--- applications/next-visit-fan-out/README.md | 6 +----- .../next-visit-fan-out/templates/deployment.yaml | 10 ---------- .../next-visit-fan-out/templates/instruments.yaml | 2 ++ applications/next-visit-fan-out/values.yaml | 14 ++++++++------ 4 files changed, 11 insertions(+), 21 deletions(-) diff --git a/applications/next-visit-fan-out/README.md b/applications/next-visit-fan-out/README.md index f54f3d263c..7c7324ba2f 100644 --- a/applications/next-visit-fan-out/README.md +++ b/applications/next-visit-fan-out/README.md @@ -20,11 +20,7 @@ Poll next visit events from Kafka, duplicate them, and send them to all applicat | kafka.offset | string | `"latest"` | | | kafka.saslMechamism | string | `"SCRAM-SHA-512"` | | | kafka.securityProtocol | string | `"SASL_PLAINTEXT"` | | -| knative.hscUrl | string | `"http://prompt-proto-service-hsc.prompt-proto-service-hsc/next-visit"` | | -| knative.latissUrl | string | `"http://prompt-proto-service-latiss.prompt-proto-service-latiss/next-visit"` | | -| knative.lsstcamUrl | string | `"http://prompt-proto-service-lsstcam.prompt-proto-service-lsstcam/next-visit"` | | -| knative.lsstcomcamUrl | string | `"http://prompt-proto-service-lsstcomcam.prompt-proto-service-lsstcomcam/next-visit"` | | -| knative.lsstcomcamsimUrl | string | `"http://prompt-proto-service-lsstcomcamsim.prompt-proto-service-lsstcomcamsim/next-visit"` | | +| knativeUrls | object | See `values.yaml`. | A mapping of instrument to that instrument's Knative service. 
| | nameOverride | string | `""` | | | nodeSelector | object | `{}` | Node selection rules for the next-visit-fan-out deployment pod | | podAnnotations."prometheus.io/port" | string | `"8000"` | | diff --git a/applications/next-visit-fan-out/templates/deployment.yaml b/applications/next-visit-fan-out/templates/deployment.yaml index 9f586970ab..91778fa44b 100644 --- a/applications/next-visit-fan-out/templates/deployment.yaml +++ b/applications/next-visit-fan-out/templates/deployment.yaml @@ -23,16 +23,6 @@ spec: image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy }} env: - - name: LATISS_KNATIVE_SERVING_URL - value: {{ .Values.knative.latissUrl }} - - name: HSC_KNATIVE_SERVING_URL - value: {{ .Values.knative.hscUrl }} - - name: LSSTCOMCAM_KNATIVE_SERVING_URL - value: {{ .Values.knative.lsstcomcamUrl }} - - name: LSSTCOMCAMSIM_KNATIVE_SERVING_URL - value: {{ .Values.knative.lsstcomcamsimUrl }} - - name: LSSTCAM_KNATIVE_SERVING_URL - value: {{ .Values.knative.lsstcamUrl }} - name: KAFKA_SCHEMA_REGISTRY_URL value: {{ .Values.kafka.schemaRegistryUrl }} - name: KAFKA_CLUSTER diff --git a/applications/next-visit-fan-out/templates/instruments.yaml b/applications/next-visit-fan-out/templates/instruments.yaml index 25ad15a92f..ae0635611f 100644 --- a/applications/next-visit-fan-out/templates/instruments.yaml +++ b/applications/next-visit-fan-out/templates/instruments.yaml @@ -5,5 +5,7 @@ metadata: name: instrument-map data: instruments: | + knative-urls: + {{- .Values.knativeUrls | toYaml | nindent 6 }} detectors: {{- .Values.detectorConfig | toYaml | nindent 6 }} diff --git a/applications/next-visit-fan-out/values.yaml b/applications/next-visit-fan-out/values.yaml index 9a1e7301d4..cef532620d 100644 --- a/applications/next-visit-fan-out/values.yaml +++ b/applications/next-visit-fan-out/values.yaml @@ -1,9 +1,11 @@ -knative: - hscUrl: 
http://prompt-proto-service-hsc.prompt-proto-service-hsc/next-visit - latissUrl: http://prompt-proto-service-latiss.prompt-proto-service-latiss/next-visit - lsstcomcamUrl: http://prompt-proto-service-lsstcomcam.prompt-proto-service-lsstcomcam/next-visit - lsstcomcamsimUrl: http://prompt-proto-service-lsstcomcamsim.prompt-proto-service-lsstcomcamsim/next-visit - lsstcamUrl: http://prompt-proto-service-lsstcam.prompt-proto-service-lsstcam/next-visit +# -- A mapping of instrument to that instrument's Knative service. +# @default -- See `values.yaml`. +knativeUrls: + HSC: http://prompt-proto-service-hsc.prompt-proto-service-hsc/next-visit + LATISS: http://prompt-proto-service-latiss.prompt-proto-service-latiss/next-visit + LSSTComCam: http://prompt-proto-service-lsstcomcam.prompt-proto-service-lsstcomcam/next-visit + LSSTComCamSim: http://prompt-proto-service-lsstcomcamsim.prompt-proto-service-lsstcomcamsim/next-visit + LSSTCam: http://prompt-proto-service-lsstcam.prompt-proto-service-lsstcam/next-visit kafka: offset: latest From 12f5223e4a44b85e2de5f25285c37c342b13e382 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Wed, 16 Oct 2024 11:49:54 -0700 Subject: [PATCH 357/567] Add explicit ComCamSim to Fan Out detector configs. This change removes the need to special-case ComCamSim in the service code. 
--- applications/next-visit-fan-out/values.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/applications/next-visit-fan-out/values.yaml b/applications/next-visit-fan-out/values.yaml index cef532620d..9bcb79f8e4 100644 --- a/applications/next-visit-fan-out/values.yaml +++ b/applications/next-visit-fan-out/values.yaml @@ -66,7 +66,7 @@ instruments: "LATISS LSSTCam LSSTComCam LSSTComCamSim HSC" detectorConfig: LATISS: 0: True - LSSTComCam: + LSSTComCam: &ComCam 0: True 1: True 2: True @@ -76,6 +76,8 @@ detectorConfig: 6: True 7: True 8: True + LSSTComCamSim: + <<: *ComCam LSSTCam: 0: False 1: False From f728f97b188cf3af0c10d3177c38bba396f31bb6 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Tue, 15 Oct 2024 11:34:53 -0700 Subject: [PATCH 358/567] Configure active instruments for Next Visit Fan Out. Now that the hard-coded instruments (except HSC) have been removed from the fan-out code, we can configure fan out to ignore LSSTCam and LSSTComCam until they are deployed. --- applications/next-visit-fan-out/README.md | 2 +- .../next-visit-fan-out/values-usdfdev-prompt-processing.yaml | 2 ++ .../next-visit-fan-out/values-usdfprod-prompt-processing.yaml | 2 ++ applications/next-visit-fan-out/values.yaml | 3 ++- 4 files changed, 7 insertions(+), 2 deletions(-) diff --git a/applications/next-visit-fan-out/README.md b/applications/next-visit-fan-out/README.md index 7c7324ba2f..6ad1c073ed 100644 --- a/applications/next-visit-fan-out/README.md +++ b/applications/next-visit-fan-out/README.md @@ -15,7 +15,7 @@ Poll next visit events from Kafka, duplicate them, and send them to all applicat | image.pullPolicy | string | `"IfNotPresent"` | | | image.repository | string | `"us-central1-docker.pkg.dev/prompt-proto/prompt/nextvisit-fanout"` | | | image.tag | string | `""` | | -| instruments | string | `"LATISS LSSTCam LSSTComCam LSSTComCamSim HSC"` | The instruments that are initialized when the fan-out service starts up as a space-delimited string. 
This list is a subset of the keys of `detectorConfig` because the latter handles some special cases. | +| instruments | string | None, must be set. | The instruments that are initialized when the fan-out service starts up as a space-delimited string. This list is a subset of the keys of `detectorConfig` because the latter handles some special cases. | | kafka.expiration | float | `3600` | Maximum message age to consider, in seconds. | | kafka.offset | string | `"latest"` | | | kafka.saslMechamism | string | `"SCRAM-SHA-512"` | | diff --git a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml index 9718900be4..332a2fdbcd 100644 --- a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml @@ -11,3 +11,5 @@ image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. tag: 2.3.0 + +instruments: "LATISS LSSTComCamSim HSC" diff --git a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml index 6706fa1ff6..6d99661ba2 100644 --- a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml @@ -9,3 +9,5 @@ image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. tag: 2.3.0 + +instruments: "LATISS" diff --git a/applications/next-visit-fan-out/values.yaml b/applications/next-visit-fan-out/values.yaml index 9bcb79f8e4..fc76f30b3c 100644 --- a/applications/next-visit-fan-out/values.yaml +++ b/applications/next-visit-fan-out/values.yaml @@ -59,7 +59,8 @@ global: # -- The instruments that are initialized when the fan-out service starts up as a space-delimited string. 
# This list is a subset of the keys of `detectorConfig` because the latter handles some special cases. -instruments: "LATISS LSSTCam LSSTComCam LSSTComCamSim HSC" +# @default -- None, must be set. +instruments: "" # -- A mapping, for each instrument, of detector number to whether that detector is "active" (i.e., producing images). # @default -- See `values.yaml`. From f9ac0ccb08cf40ac106fbba7088e5df72c83b01d Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Wed, 16 Oct 2024 14:51:12 -0700 Subject: [PATCH 359/567] Add a debug logging flag to Next Visit Fan Out. --- applications/next-visit-fan-out/README.md | 1 + applications/next-visit-fan-out/templates/deployment.yaml | 2 ++ applications/next-visit-fan-out/values.yaml | 3 +++ 3 files changed, 6 insertions(+) diff --git a/applications/next-visit-fan-out/README.md b/applications/next-visit-fan-out/README.md index 6ad1c073ed..5d92c4e119 100644 --- a/applications/next-visit-fan-out/README.md +++ b/applications/next-visit-fan-out/README.md @@ -7,6 +7,7 @@ Poll next visit events from Kafka, duplicate them, and send them to all applicat | Key | Type | Default | Description | |-----|------|---------|-------------| | affinity | object | `{}` | Affinity rules for the next-visit-fan-out deployment pod | +| debug | bool | `false` | If set, enable debug logging. | | detectorConfig | object | See `values.yaml`. | A mapping, for each instrument, of detector number to whether that detector is "active" (i.e., producing images). 
| | fullnameOverride | string | `""` | | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | diff --git a/applications/next-visit-fan-out/templates/deployment.yaml b/applications/next-visit-fan-out/templates/deployment.yaml index 91778fa44b..8b288c6999 100644 --- a/applications/next-visit-fan-out/templates/deployment.yaml +++ b/applications/next-visit-fan-out/templates/deployment.yaml @@ -49,6 +49,8 @@ spec: secretKeyRef: key: kafka_pp_sasl_password name: {{ template "next-visit-fan-out.fullname" . }}-secret + - name: DEBUG_LOGS + value: {{ ternary "true" "false" .Values.debug | quote}} - name: SUPPORTED_INSTRUMENTS value: {{ .Values.instruments }} - name: INSTRUMENT_CONFIG_FILE diff --git a/applications/next-visit-fan-out/values.yaml b/applications/next-visit-fan-out/values.yaml index fc76f30b3c..0b168e753a 100644 --- a/applications/next-visit-fan-out/values.yaml +++ b/applications/next-visit-fan-out/values.yaml @@ -57,6 +57,9 @@ global: # @default -- Set by Argo CD vaultSecretsPath: "" +# -- If set, enable debug logging. +debug: false + # -- The instruments that are initialized when the fan-out service starts up as a space-delimited string. # This list is a subset of the keys of `detectorConfig` because the latter handles some special cases. # @default -- None, must be set. From 5976fda5ea14180aa2350f0aacf70371d90883ec Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Wed, 16 Oct 2024 17:35:18 -0700 Subject: [PATCH 360/567] Configure outgoing connections for Next Visit Fan Out. The number of outgoing connections sets a hard limit for how many pods we can use simultaneously in Prompt Processing. The dev environment requires only a modest limit, while the production environment will need a large one. 
--- applications/next-visit-fan-out/README.md | 3 ++- .../templates/deployment.yaml | 2 ++ .../templates/instruments.yaml | 2 +- .../values-usdfdev-prompt-processing.yaml | 3 +++ .../values-usdfprod-prompt-processing.yaml | 3 +++ applications/next-visit-fan-out/values.yaml | 20 +++++++++++-------- 6 files changed, 23 insertions(+), 10 deletions(-) diff --git a/applications/next-visit-fan-out/README.md b/applications/next-visit-fan-out/README.md index 5d92c4e119..c9d89654df 100644 --- a/applications/next-visit-fan-out/README.md +++ b/applications/next-visit-fan-out/README.md @@ -21,7 +21,8 @@ Poll next visit events from Kafka, duplicate them, and send them to all applicat | kafka.offset | string | `"latest"` | | | kafka.saslMechamism | string | `"SCRAM-SHA-512"` | | | kafka.securityProtocol | string | `"SASL_PLAINTEXT"` | | -| knativeUrls | object | See `values.yaml`. | A mapping of instrument to that instrument's Knative service. | +| knative.maxMessages | string | None, must be set. | The maximum number of messages that can be forwarded to all Knative instances combined. | +| knative.urls | object | See `values.yaml`. | A mapping of instrument to that instrument's Knative service. 
| | nameOverride | string | `""` | | | nodeSelector | object | `{}` | Node selection rules for the next-visit-fan-out deployment pod | | podAnnotations."prometheus.io/port" | string | `"8000"` | | diff --git a/applications/next-visit-fan-out/templates/deployment.yaml b/applications/next-visit-fan-out/templates/deployment.yaml index 8b288c6999..2ec0ec5307 100644 --- a/applications/next-visit-fan-out/templates/deployment.yaml +++ b/applications/next-visit-fan-out/templates/deployment.yaml @@ -39,6 +39,8 @@ spec: value: {{ .Values.kafka.saslMechamism }} - name: SECURITY_PROTOCOL value: {{ .Values.kafka.securityProtocol }} + - name: MAX_FAN_OUT_MESSAGES + value: {{ .Values.knative.maxMessages | toString | quote }} - name: SASL_USERNAME valueFrom: secretKeyRef: diff --git a/applications/next-visit-fan-out/templates/instruments.yaml b/applications/next-visit-fan-out/templates/instruments.yaml index ae0635611f..69c7e738b0 100644 --- a/applications/next-visit-fan-out/templates/instruments.yaml +++ b/applications/next-visit-fan-out/templates/instruments.yaml @@ -6,6 +6,6 @@ metadata: data: instruments: | knative-urls: - {{- .Values.knativeUrls | toYaml | nindent 6 }} + {{- .Values.knative.urls | toYaml | nindent 6 }} detectors: {{- .Values.detectorConfig | toYaml | nindent 6 }} diff --git a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml index 332a2fdbcd..f00e586997 100644 --- a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml @@ -1,3 +1,6 @@ +knative: + maxMessages: 150 + kafka: schemaRegistryUrl: http://10.96.181.159:8081 sasquatchAddress: 10.100.226.209:9094 diff --git a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml index 6d99661ba2..85adce95d5 100644 --- 
a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml @@ -1,3 +1,6 @@ +knative: + maxMessages: 1000 # Kubernetes can't support more pods yet + kafka: schemaRegistryUrl: http://10.110.90.252:8081 sasquatchAddress: 10.96.121.181:9094 diff --git a/applications/next-visit-fan-out/values.yaml b/applications/next-visit-fan-out/values.yaml index 0b168e753a..750b483c14 100644 --- a/applications/next-visit-fan-out/values.yaml +++ b/applications/next-visit-fan-out/values.yaml @@ -1,11 +1,15 @@ -# -- A mapping of instrument to that instrument's Knative service. -# @default -- See `values.yaml`. -knativeUrls: - HSC: http://prompt-proto-service-hsc.prompt-proto-service-hsc/next-visit - LATISS: http://prompt-proto-service-latiss.prompt-proto-service-latiss/next-visit - LSSTComCam: http://prompt-proto-service-lsstcomcam.prompt-proto-service-lsstcomcam/next-visit - LSSTComCamSim: http://prompt-proto-service-lsstcomcamsim.prompt-proto-service-lsstcomcamsim/next-visit - LSSTCam: http://prompt-proto-service-lsstcam.prompt-proto-service-lsstcam/next-visit +knative: + # -- A mapping of instrument to that instrument's Knative service. + # @default -- See `values.yaml`. + urls: + HSC: http://prompt-proto-service-hsc.prompt-proto-service-hsc/next-visit + LATISS: http://prompt-proto-service-latiss.prompt-proto-service-latiss/next-visit + LSSTComCam: http://prompt-proto-service-lsstcomcam.prompt-proto-service-lsstcomcam/next-visit + LSSTComCamSim: http://prompt-proto-service-lsstcomcamsim.prompt-proto-service-lsstcomcamsim/next-visit + LSSTCam: http://prompt-proto-service-lsstcam.prompt-proto-service-lsstcam/next-visit + # -- The maximum number of messages that can be forwarded to all Knative instances combined. + # @default -- None, must be set. 
+ maxMessages: "" kafka: offset: latest From c661e1c9581c11638135fdc21f35355bb987e0ed Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Thu, 17 Oct 2024 10:09:05 -0700 Subject: [PATCH 361/567] Scale up ComCamSim Prompt Processing to 150 pods. In a steady state, we need roughly 120 pods to process ComCamSim data. Now that we can handle more than 100 requests at a time, give ComCamSim enough capacity to handle the full load. --- applications/prompt-proto-service-lsstcomcamsim/values.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-lsstcomcamsim/values.yaml b/applications/prompt-proto-service-lsstcomcamsim/values.yaml index 99f8eea75b..4b5797b52d 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values.yaml @@ -4,7 +4,9 @@ prompt-proto-service: # @default -- See the `values.yaml` file. podAnnotations: autoscaling.knative.dev/min-scale: "3" - autoscaling.knative.dev/max-scale: "100" + # Expect to need roughly n_detector × request_latency / survey_cadence pods + # For a 30 s ComCam survey with 500 s latency, this is 150 + autoscaling.knative.dev/max-scale: "150" autoscaling.knative.dev/target-utilization-percentage: "60" autoscaling.knative.dev/target-burst-capacity: "-1" queue.sidecar.serving.knative.dev/cpu-resource-request: "1" From 383d7809db548af0248a08c3281bbe9cc76feef7 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Thu, 17 Oct 2024 10:16:32 -0700 Subject: [PATCH 362/567] Allow up to 200 pods for ComCam Prompt Processing. For ComCamSim, 120-130 pods were enough, but that was assuming "survey" cadence of roughly 30 seconds. Add a healthy buffer in case the summit team tries a more rapid cadence. 
--- applications/prompt-proto-service-lsstcomcam/values.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-lsstcomcam/values.yaml b/applications/prompt-proto-service-lsstcomcam/values.yaml index 7682298e07..6bd8388077 100644 --- a/applications/prompt-proto-service-lsstcomcam/values.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values.yaml @@ -4,7 +4,10 @@ prompt-proto-service: # @default -- See the `values.yaml` file. podAnnotations: autoscaling.knative.dev/min-scale: "3" - autoscaling.knative.dev/max-scale: "30" + # Expect to need roughly n_detector × request_latency / survey_cadence pods + # For a 30 s ComCam survey with 500 s latency, this is 150 + # Add some buffer for fast twilight survey + autoscaling.knative.dev/max-scale: "200" autoscaling.knative.dev/target-utilization-percentage: "60" autoscaling.knative.dev/target-burst-capacity: "-1" queue.sidecar.serving.knative.dev/cpu-resource-request: "1" From 8b82c14360fe644a088e1a6468ba8f1b2bcfd695 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 21 Oct 2024 12:28:05 -0700 Subject: [PATCH 363/567] Fix image tags. 
--- applications/obsenv-management/values-tucson-teststand.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 091576856e..8030aec25d 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -1,7 +1,7 @@ obsenv-api: image: repository: rubin-cr.lsst.org/obsenv-api - tag: tickets/DM-46822 + tag: tickets-DM-46822 pullPolicy: Always config: logLevel: "DEBUG" @@ -11,7 +11,7 @@ obsenv-api: obsenv-ui: image: repository: rubin-cr.lsst.org/obsenv-ui - tag: tickets/DM-46822 + tag: tickets-DM-46822 pullPolicy: Always config: pathPrefix: /obsenv-management From 1f816925e8b266de4960592b232fe9ea6a5abeed Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 21 Oct 2024 13:56:47 -0700 Subject: [PATCH 364/567] Fix auth group. --- applications/obsenv-management/values-tucson-teststand.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 8030aec25d..8fcd756c11 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -15,4 +15,4 @@ obsenv-ui: pullPolicy: Always config: pathPrefix: /obsenv-management - authGroup: obsenv-admin-group + authGroup: lsst-ts-obsenv-admin-group From 0b94e2c2a298ad44c87d10e01ddbe7e7f44b7516 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 21 Oct 2024 14:49:54 -0700 Subject: [PATCH 365/567] Allow configuring Gafaelfawr internal URL Just in case someone is using a cluster FQDN other than `svc.cluster.local`, allow configuring the Gafaelfawr internal URL. 
--- applications/gafaelfawr/README.md | 1 + applications/gafaelfawr/templates/_helpers.tpl | 2 ++ applications/gafaelfawr/values.yaml | 5 +++++ 3 files changed, 8 insertions(+) diff --git a/applications/gafaelfawr/README.md b/applications/gafaelfawr/README.md index 1f6c642c1d..8d3490131a 100644 --- a/applications/gafaelfawr/README.md +++ b/applications/gafaelfawr/README.md @@ -26,6 +26,7 @@ Authentication and identity system | cloudsql.serviceAccount | string | None, must be set if Cloud SQL Auth Proxy is enabled | The Google service account that has an IAM binding to the `gafaelfawr` Kubernetes service account and has the `cloudsql.client` role | | cloudsql.tolerations | list | `[]` | Tolerations for the Cloud SQL Proxy pod | | config.afterLogoutUrl | string | Top-level page of this Phalanx environment | Where to send the user after they log out | +| config.baseInternalUrl | string | FQDN under `svc.cluster.local` | URL for direct connections to the Gafaelfawr service, bypassing the Ingress. Must use a service name of `gafaelfawr` and port 8080. | | config.cilogon.clientId | string | `nil` | CILogon client ID. One and only one of this, `config.github.clientId`, or `config.oidc.clientId` must be set. 
| | config.cilogon.enrollmentUrl | string | Login fails with an error | Where to send the user if their username cannot be found in LDAP | | config.cilogon.loginParams | object | `{"skin":"LSST"}` | Additional parameters to add | diff --git a/applications/gafaelfawr/templates/_helpers.tpl b/applications/gafaelfawr/templates/_helpers.tpl index d43a58f2ef..812a70b587 100644 --- a/applications/gafaelfawr/templates/_helpers.tpl +++ b/applications/gafaelfawr/templates/_helpers.tpl @@ -36,8 +36,10 @@ Common environment variables {{- end }} - name: "GAFAELFAWR_BASE_URL" value: {{ .Values.global.baseUrl | quote }} +{{- if not .Values.config.baseInternalUrl }} - name: "GAFAELFAWR_BASE_INTERNAL_URL" value: "http://gafaelfawr.{{ .Release.Namespace }}.svc.cluster.local:8080" +{{- end }} - name: "GAFAELFAWR_BOOTSTRAP_TOKEN" valueFrom: secretKeyRef: diff --git a/applications/gafaelfawr/values.yaml b/applications/gafaelfawr/values.yaml index 4e39060409..034c692cb6 100644 --- a/applications/gafaelfawr/values.yaml +++ b/applications/gafaelfawr/values.yaml @@ -41,6 +41,11 @@ config: # @default -- Top-level page of this Phalanx environment afterLogoutUrl: null + # -- URL for direct connections to the Gafaelfawr service, bypassing the + # Ingress. Must use a service name of `gafaelfawr` and port 8080. + # @default -- FQDN under `svc.cluster.local` + baseInternalUrl: null + # -- URL for the PostgreSQL database # @default -- None, must be set if neither `cloudsql.enabled` nor # `config.internalDatabase` are true From e2a70e0dacaad5de252bac48c1389d2a851740e8 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 21 Oct 2024 16:34:59 -0700 Subject: [PATCH 366/567] Add rra to Argo CD for usdf-cm-dev So that I can help debug Gafaelfawr issues. 
--- applications/argocd/values-usdf-cm-dev.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/argocd/values-usdf-cm-dev.yaml b/applications/argocd/values-usdf-cm-dev.yaml index fd33f69cf3..cd292535bc 100644 --- a/applications/argocd/values-usdf-cm-dev.yaml +++ b/applications/argocd/values-usdf-cm-dev.yaml @@ -29,6 +29,7 @@ argo-cd: g, fritzm@slac.stanford.edu, role:admin g, cslater@slac.stanford.edu, role:admin g, yusra@slac.stanford.edu, role:admin + g, rra@slac.stanford.edu, role:admin scopes: "[email]" server: From 74b3ad14e75273ba28e47e354bcf67f4fd090260 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 21 Oct 2024 18:03:25 -0700 Subject: [PATCH 367/567] Change to release tags. --- applications/obsenv-management/values-tucson-teststand.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 8fcd756c11..672f70bf83 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -1,7 +1,7 @@ obsenv-api: image: repository: rubin-cr.lsst.org/obsenv-api - tag: tickets-DM-46822 + tag: 0.2.0 pullPolicy: Always config: logLevel: "DEBUG" @@ -11,7 +11,7 @@ obsenv-api: obsenv-ui: image: repository: rubin-cr.lsst.org/obsenv-ui - tag: tickets-DM-46822 + tag: 0.2.0 pullPolicy: Always config: pathPrefix: /obsenv-management From 147a4927695a7bddc188c7b05e231d770582b812 Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 22 Oct 2024 13:53:04 -0700 Subject: [PATCH 368/567] bump ghostwriter version --- applications/ghostwriter/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/ghostwriter/Chart.yaml b/applications/ghostwriter/Chart.yaml index 8d923876b0..c289ec9f6d 100644 --- a/applications/ghostwriter/Chart.yaml +++ b/applications/ghostwriter/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 
-appVersion: 0.1.1 +appVersion: 0.1.2 description: URL rewriter/personalizer name: ghostwriter sources: From 0a6905c739bf7f08d2364e903c32978af4e4b6fa Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Tue, 22 Oct 2024 13:54:46 -0700 Subject: [PATCH 369/567] Deploy Next Visit Fan Out 2.4.0. --- .../next-visit-fan-out/values-usdfdev-prompt-processing.yaml | 2 +- .../next-visit-fan-out/values-usdfprod-prompt-processing.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml index f00e586997..05625325c7 100644 --- a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml @@ -13,6 +13,6 @@ image: repository: ghcr.io/lsst-dm/next_visit_fan_out pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 2.3.0 + tag: 2.4.0 instruments: "LATISS LSSTComCamSim HSC" diff --git a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml index 85adce95d5..ad765e9be2 100644 --- a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml @@ -11,6 +11,6 @@ image: repository: ghcr.io/lsst-dm/next_visit_fan_out pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 2.3.0 + tag: 2.4.0 instruments: "LATISS" From 33df835edeaa16b9031c80cbba3d640a58f44e0f Mon Sep 17 00:00:00 2001 From: Colin Slater Date: Fri, 21 Jun 2024 17:09:33 -0700 Subject: [PATCH 370/567] Switch to new plot-navigator version. 
--- applications/plot-navigator/Chart.yaml | 6 +- .../plot-navigator/templates/deployment.yaml | 61 +++++-------------- .../plot-navigator/templates/service.yaml | 2 +- .../templates/vault-secrets.yaml | 4 -- .../plot-navigator/values-usdfdev.yaml | 32 ++-------- applications/plot-navigator/values.yaml | 2 +- 6 files changed, 27 insertions(+), 80 deletions(-) diff --git a/applications/plot-navigator/Chart.yaml b/applications/plot-navigator/Chart.yaml index 2d5ac17db9..b13dab9a70 100644 --- a/applications/plot-navigator/Chart.yaml +++ b/applications/plot-navigator/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v2 name: plot-navigator -description: Panel-based plot viewer +description: Plot-navigator version: 1.0.0 sources: - - https://github.com/lsst-dm/pipetask-plot-navigator -appVersion: "0.11.2" + - https://github.com/lsst-dm/plot-navigator +appVersion: "0.1.1" diff --git a/applications/plot-navigator/templates/deployment.yaml b/applications/plot-navigator/templates/deployment.yaml index 9f293a465b..3bec5b7670 100644 --- a/applications/plot-navigator/templates/deployment.yaml +++ b/applications/plot-navigator/templates/deployment.yaml @@ -15,39 +15,9 @@ spec: {{- include "plot-navigator.selectorLabels" . | nindent 8 }} spec: volumes: - # butler-secrets-raw is the secrets we get from vault - - name: "butler-secrets-raw" - secret: - secretName: {{ include "plot-navigator.fullname" . }} - # butler-secrets are the copied and chmoded versions - - name: "butler-secrets" - emptyDir: {} {{- with .Values.config.volumes }} {{- . | toYaml | nindent 8 }} {{- end }} - # Have to fix permissions on the pgpass file. - # init container pattern borrowed from vo-cutouts. 
- initContainers: - - name: fix-secret-permissions - image: {{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }} - imagePullPolicy: Always - command: - - "/bin/bash" - - "-c" - - | - cp -RL /home/worker/secrets-raw/* /home/worker/.lsst/ - chown worker:worker /home/worker/.lsst/* - chmod 0400 /home/worker/.lsst/* - securityContext: - runAsNonRoot: false - runAsUser: 0 - runAsGroup: 0 - volumeMounts: - - name: "butler-secrets" - mountPath: "/home/worker/.lsst/" - - name: "butler-secrets-raw" - mountPath: "/home/worker/secrets-raw/" - readOnly: true containers: - name: plot-navigator image: {{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion}} @@ -57,22 +27,23 @@ spec: - name: {{ $key | quote }} value: {{ $value | quote }} {{- end }} + - name: S3_KEY + valueFrom: + secretKeyRef: + name: {{ include "plot-navigator.fullname" . }} + key: S3_KEY + - name: S3_SECRET + valueFrom: + secretKeyRef: + name: {{ include "plot-navigator.fullname" . }} + key: S3_SECRET + {{- if .Values.config.envFromSecretPath }} + envFrom: + - secretRef: {{ include "plot-navigator.fullname" . }} + {{- end }} + {{- if .Values.config.volume_mounts }} volumeMounts: - - name: butler-secrets - mountPath: "/home/worker/.lsst/" {{- with .Values.config.volume_mounts }} {{- . 
| toYaml | nindent 10 }} {{- end }} - command: - - /bin/bash - - -c - - panel serve dashboard_gen3.py --port 8080 --prefix /plot-navigator --allow-websocket-origin {{ .Values.global.host }} --static-dirs assets=./assets - resources: - limits: - cpu: "2" - memory: "3Gi" - ephemeral-storage: "100Mi" - requests: - cpu: "1" - memory: "2Gi" - ephemeral-storage: "50Mi" + {{- end }} diff --git a/applications/plot-navigator/templates/service.yaml b/applications/plot-navigator/templates/service.yaml index ba648bdc01..9e8b953143 100644 --- a/applications/plot-navigator/templates/service.yaml +++ b/applications/plot-navigator/templates/service.yaml @@ -10,4 +10,4 @@ spec: ports: - port: 80 protocol: TCP - targetPort: 8080 + targetPort: 3000 diff --git a/applications/plot-navigator/templates/vault-secrets.yaml b/applications/plot-navigator/templates/vault-secrets.yaml index 3a22648e85..f2edfa27b4 100644 --- a/applications/plot-navigator/templates/vault-secrets.yaml +++ b/applications/plot-navigator/templates/vault-secrets.yaml @@ -5,9 +5,5 @@ metadata: labels: {{- include "plot-navigator.labels" . 
| nindent 4 }} spec: -{{- if .Values.config.separateSecrets }} path: "{{ .Values.global.vaultSecretsPath }}/plot-navigator" -{{- else }} - path: "{{ .Values.global.vaultSecretsPath }}/nublado-lab-secret" -{{- end }} type: Opaque diff --git a/applications/plot-navigator/values-usdfdev.yaml b/applications/plot-navigator/values-usdfdev.yaml index 505748ac2c..cc73afab94 100644 --- a/applications/plot-navigator/values-usdfdev.yaml +++ b/applications/plot-navigator/values-usdfdev.yaml @@ -1,28 +1,8 @@ environment: - DAF_BUTLER_REPOSITORY_INDEX: "/sdf/group/rubin/shared/data-repos.yaml" - PGPASSFILE: "/home/worker/.lsst/postgres-credentials.txt" - PGUSER: "rubin" - AWS_SHARED_CREDENTIALS_FILE: "/home/worker/.lsst/aws-credentials.ini" - S3_ENDPOINT_URL: "https://s3dfrgw.slac.stanford.edu" - BUTLER_DEFAULT_REPO: "/repo/main" - BUTLER_DEFAULT_COLLECTION: "HSC/runs/RC2/w_2024_02/DM-42454" - LSST_DISABLE_BUCKET_VALIDATION: "1" + REPOS: "embargo" + BUCKET_NAME: "rubin-plot-navigator" + BUCKET_URL: "https://s3dfrgw.slac.stanford.edu/" + BUTLER_URL: "http://butler.butler:8080" -config: - volumes: - - name: sdf-group-rubin - persistentVolumeClaim: - claimName: sdf-group-rubin - - name: sdf-data-rubin - persistentVolumeClaim: - claimName: sdf-data-rubin - volume_mounts: - - name: sdf-group-rubin - mountPath: /sdf/group/rubin - - name: sdf-data-rubin - mountPath: /sdf/data/rubin - persistentVolumeClaims: - - name: sdf-group-rubin - storageClassName: sdf-group-rubin - - name: sdf-data-rubin - storageClassName: sdf-data-rubin +image: + tag: DM-44957 diff --git a/applications/plot-navigator/values.yaml b/applications/plot-navigator/values.yaml index 3a808b27c6..bde40737b5 100644 --- a/applications/plot-navigator/values.yaml +++ b/applications/plot-navigator/values.yaml @@ -1,6 +1,6 @@ image: # -- plot-navigator image to use - repository: ghcr.io/lsst-dm/pipetask-plot-navigator + repository: ghcr.io/lsst-dm/plot-navigator # -- Tag of plot-navigator image to use # @default -- The 
appVersion of the chart From abbb530e4c8751e9234dc5bb62e80737328db384 Mon Sep 17 00:00:00 2001 From: Colin Slater Date: Mon, 29 Jul 2024 12:29:03 -0700 Subject: [PATCH 371/567] Use separate pod for the butler. Add volumes to nodejs deployment. Add /repo/dc2. --- .../templates/butler-deployment.yaml | 125 ++++++++++++++++++ .../templates/butler-service.yaml | 12 ++ .../plot-navigator/templates/deployment.yaml | 8 ++ .../plot-navigator/templates/service.yaml | 1 + .../plot-navigator/values-usdfdev.yaml | 50 ++++++- applications/plot-navigator/values.yaml | 22 ++- 6 files changed, 215 insertions(+), 3 deletions(-) create mode 100644 applications/plot-navigator/templates/butler-deployment.yaml create mode 100644 applications/plot-navigator/templates/butler-service.yaml diff --git a/applications/plot-navigator/templates/butler-deployment.yaml b/applications/plot-navigator/templates/butler-deployment.yaml new file mode 100644 index 0000000000..1d53204d70 --- /dev/null +++ b/applications/plot-navigator/templates/butler-deployment.yaml @@ -0,0 +1,125 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: "internal-butler" + labels: + {{- include "plot-navigator.labels" . | nindent 4 }} + app.kubernetes.io/component: butler +spec: + selector: + matchLabels: + {{- include "plot-navigator.selectorLabels" . | nindent 6 }} + app.kubernetes.io/component: butler + template: + metadata: + {{- with .Values.podAnnotations }} + annotations: + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "plot-navigator.selectorLabels" . 
| nindent 8 }} + app.kubernetes.io/component: butler + spec: + automountServiceAccountToken: false + containers: + - name: internal-butler + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - "all" + readOnlyRootFilesystem: true + image: ghcr.io/ctslater/daf_butler:test_cts + imagePullPolicy: Always + ports: + - name: "http" + containerPort: 8080 + protocol: "TCP" + readinessProbe: + httpGet: + path: "/" + port: "http" + resources: + {{- toYaml .Values.butlerResources | nindent 12 }} + env: + - name: AWS_SHARED_CREDENTIALS_FILE + value: "/opt/lsst/butler/secrets/aws-credentials.ini" + - name: PGPASSFILE + value: "/opt/lsst/butler/secrets/postgres-credentials.txt" + - name: GOOGLE_APPLICATION_CREDENTIALS + value: "/opt/lsst/butler/secrets/butler-gcs-creds.json" + - name: S3_ENDPOINT_URL + value: {{ .Values.butlerConfig.s3EndpointUrl | quote }} + - name: DAF_BUTLER_REPOSITORIES + value: {{ .Values.butlerConfig.repositories | toJson | quote }} + {{ if .Values.butlerConfig.pguser }} + - name: PGUSER + value: {{ .Values.butlerConfig.pguser | quote }} + {{ end }} + volumeMounts: + - name: "butler-secrets" + mountPath: "/opt/lsst/butler/secrets" + readOnly: true + {{- with .Values.butlerConfig.volume_mounts }} + {{- . | toYaml | nindent 12 }} + {{- end }} + volumes: + # butler-secrets-raw pulls in the secrets from the vault as files. + # These files are owned by root and group/world readable. + # This volume is not used directly by the container running the actual + # Butler application. + - name: "butler-secrets-raw" + secret: + secretName: {{ include "plot-navigator.fullname" . }} + # Postgres will not use a pgpass file (postgres-credentials.txt in the + # vault) if it is group/world writeable or owned by a different user. + # So the initContainers below copies the files from butler-secrets-raw + # to butlet-secrets, changing the owner and permissions. + # This volume is the one used by the container running the actual + # Butler application. 
+ - name: "butler-secrets" + emptyDir: {} + {{- with .Values.butlerConfig.volumes }} + {{- . | toYaml | nindent 8 }} + {{- end }} + initContainers: + # To deal with the Postgres file permission issued mentioned above, + # copy the secrets from butler-secrets-raw to butler-secrets. + # This initContainer definition is borrowed from obsloctap's + # deployment.yaml. + - name: fix-secret-permissions + image: ghcr.io/ctslater/daf_butler:test_cts + imagePullPolicy: Always + command: + - "/bin/sh" + - "-c" + - | + cp -RL /tmp/butler-secrets-raw/* /opt/lsst/butler/secrets/ + chmod 0400 /opt/lsst/butler/secrets/* + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - "all" + volumeMounts: + - name: "butler-secrets" + mountPath: "/opt/lsst/butler/secrets" + - name: "butler-secrets-raw" + mountPath: "/tmp/butler-secrets-raw" + readOnly: true + securityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 4085 + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} diff --git a/applications/plot-navigator/templates/butler-service.yaml b/applications/plot-navigator/templates/butler-service.yaml new file mode 100644 index 0000000000..7482986a49 --- /dev/null +++ b/applications/plot-navigator/templates/butler-service.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: Service +metadata: + name: internal-butler +spec: + selector: + {{- include "plot-navigator.selectorLabels" . 
| nindent 4 }} + app.kubernetes.io/component: butler + ports: + - port: 80 + protocol: TCP + targetPort: 8080 diff --git a/applications/plot-navigator/templates/deployment.yaml b/applications/plot-navigator/templates/deployment.yaml index 3bec5b7670..4bd69e1103 100644 --- a/applications/plot-navigator/templates/deployment.yaml +++ b/applications/plot-navigator/templates/deployment.yaml @@ -9,10 +9,12 @@ spec: selector: matchLabels: {{- include "plot-navigator.selectorLabels" . | nindent 6 }} + app.kubernetes.io/component: nodejs template: metadata: labels: {{- include "plot-navigator.selectorLabels" . | nindent 8 }} + app.kubernetes.io/component: nodejs spec: volumes: {{- with .Values.config.volumes }} @@ -37,6 +39,8 @@ spec: secretKeyRef: name: {{ include "plot-navigator.fullname" . }} key: S3_SECRET + resources: + {{- toYaml .Values.resources | nindent 10 }} {{- if .Values.config.envFromSecretPath }} envFrom: - secretRef: {{ include "plot-navigator.fullname" . }} @@ -47,3 +51,7 @@ spec: {{- . | toYaml | nindent 10 }} {{- end }} {{- end }} + securityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 4085 diff --git a/applications/plot-navigator/templates/service.yaml b/applications/plot-navigator/templates/service.yaml index 9e8b953143..97f81e5418 100644 --- a/applications/plot-navigator/templates/service.yaml +++ b/applications/plot-navigator/templates/service.yaml @@ -7,6 +7,7 @@ metadata: spec: selector: {{- include "plot-navigator.selectorLabels" . 
| nindent 4 }} + app.kubernetes.io/component: nodejs ports: - port: 80 protocol: TCP diff --git a/applications/plot-navigator/values-usdfdev.yaml b/applications/plot-navigator/values-usdfdev.yaml index cc73afab94..5076652c39 100644 --- a/applications/plot-navigator/values-usdfdev.yaml +++ b/applications/plot-navigator/values-usdfdev.yaml @@ -1,8 +1,54 @@ environment: - REPOS: "embargo" + BASE_URL: "/plot-navigator" + REPO_URLS: '{"embargo": "http://internal-butler/api/butler/repo/embargo", "/repo/main": "http://internal-butler/api/butler/repo/main", "/repo/dc2": "http://internal-butler/api/butler/repo/dc2"}' BUCKET_NAME: "rubin-plot-navigator" BUCKET_URL: "https://s3dfrgw.slac.stanford.edu/" - BUTLER_URL: "http://butler.butler:8080" + + +config: + persistentVolumeClaims: + - name: sdf-group-rubin + storageClassName: sdf-group-rubin + - name: sdf-data-rubin + storageClassName: sdf-data-rubin + volumes: + - name: sdf-group-rubin + persistentVolumeClaim: + claimName: sdf-group-rubin + - name: sdf-data-rubin + persistentVolumeClaim: + claimName: sdf-data-rubin + volume_mounts: + - name: sdf-group-rubin + mountPath: /sdf/group/rubin + readOnly: true + - name: sdf-data-rubin + mountPath: /sdf/data/rubin + readOnly: true + +butlerConfig: + pguser: "rubin" + s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu" + repositories: + embargo: "s3://rubin-summit-users/butler.yaml" + main: "/sdf/group/rubin/repo/main" + dc2: "/sdf/group/rubin/repo/dc2" + volumes: + - name: sdf-group-rubin + persistentVolumeClaim: + claimName: sdf-group-rubin + - name: sdf-data-rubin + persistentVolumeClaim: + claimName: sdf-data-rubin + volume_mounts: + - name: sdf-group-rubin + mountPath: /sdf/group/rubin + readOnly: true + - name: sdf-data-rubin + mountPath: /sdf/data/rubin + readOnly: true + image: tag: DM-44957 + diff --git a/applications/plot-navigator/values.yaml b/applications/plot-navigator/values.yaml index bde40737b5..ad2f278bbc 100644 --- a/applications/plot-navigator/values.yaml +++ 
b/applications/plot-navigator/values.yaml @@ -6,7 +6,7 @@ image: # @default -- The appVersion of the chart tag: "" -# -- Environment variables (e.g. butler configuration/auth parms) for panel +# -- Environment variables (e.g. butler configuration/auth parms) for the nextjs server environment: {} ingress: @@ -40,3 +40,23 @@ global: # -- Base path for Vault secrets # @default -- Set by Argo CD vaultSecretsPath: "" + +# -- Resource limits and requests for the nodejs pod +# @default -- see `values.yaml` +resources: + limits: + cpu: "1" + memory: "512Mi" + requests: + cpu: "50m" + memory: "256Mi" + +# -- Resource limits and requests for the butler pod +# @default -- see `values.yaml` +butlerResources: + limits: + cpu: "1" + memory: "324Mi" + requests: + cpu: "15m" + memory: "150Mi" From 606f06463b0fe01b3d8ae35f11213f257dee2950 Mon Sep 17 00:00:00 2001 From: Colin Slater Date: Wed, 16 Oct 2024 22:10:46 -0700 Subject: [PATCH 372/567] Add production-tools image to plot-navigator. --- applications/plot-navigator/README.md | 15 +++- .../plot-navigator/templates/ingress.yaml | 14 +++ .../templates/production-tools-service.yaml | 14 +++ .../templates/production-tools.yaml | 89 +++++++++++++++++++ .../plot-navigator/values-usdfdev.yaml | 34 ++++++- 5 files changed, 159 insertions(+), 7 deletions(-) create mode 100644 applications/plot-navigator/templates/production-tools-service.yaml create mode 100644 applications/plot-navigator/templates/production-tools.yaml diff --git a/applications/plot-navigator/README.md b/applications/plot-navigator/README.md index 87a645d585..8c1af8496b 100644 --- a/applications/plot-navigator/README.md +++ b/applications/plot-navigator/README.md @@ -1,23 +1,30 @@ # plot-navigator -Panel-based plot viewer +![Version: 1.0.0](https://img.shields.io/badge/Version-1.0.0-informational?style=flat-square) ![AppVersion: 0.1.1](https://img.shields.io/badge/AppVersion-0.1.1-informational?style=flat-square) + +Plot-navigator ## Source Code -* +* ## Values | Key | 
Type | Default | Description | |-----|------|---------|-------------| +| butlerResources | object | see `values.yaml` | Resource limits and requests for the butler pod | | config.persistentVolumeClaims | list | `[]` | PersistentVolumeClaims to create. | | config.separateSecrets | bool | `false` | Whether to use the new secrets management scheme | | config.volume_mounts | list | `[]` | Mount points for additional volumes | | config.volumes | list | `[]` | Additional volumes to attach | -| environment | object | `{}` | Environment variables (e.g. butler configuration/auth parms) for panel | +| environment | object | `{}` | Environment variables (e.g. butler configuration/auth parms) for the nextjs server | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | -| image.repository | string | `"ghcr.io/lsst-dm/pipetask-plot-navigator"` | plot-navigator image to use | +| image.repository | string | `"ghcr.io/lsst-dm/plot-navigator"` | plot-navigator image to use | | image.tag | string | The appVersion of the chart | Tag of plot-navigator image to use | | ingress.annotations | object | `{}` | Additional annotations to add to the ingress | +| resources | object | see `values.yaml` | Resource limits and requests for the nodejs pod | + +---------------------------------------------- +Autogenerated from chart metadata using [helm-docs v1.8.1](https://github.com/norwoodj/helm-docs/releases/v1.8.1) diff --git a/applications/plot-navigator/templates/ingress.yaml b/applications/plot-navigator/templates/ingress.yaml index 081d11c983..f431ffabc1 100644 --- a/applications/plot-navigator/templates/ingress.yaml +++ b/applications/plot-navigator/templates/ingress.yaml @@ -33,3 +33,17 @@ template: name: "plot-navigator" port: number: 80 + - path: "/plot-navigator/metrics" + pathType: "Prefix" + backend: + service: + 
name: "production-tools" + port: + number: 8080 + - path: "/plot-navigator/bokeh" + pathType: "Prefix" + backend: + service: + name: "production-tools" + port: + number: 8080 diff --git a/applications/plot-navigator/templates/production-tools-service.yaml b/applications/plot-navigator/templates/production-tools-service.yaml new file mode 100644 index 0000000000..78b9531c53 --- /dev/null +++ b/applications/plot-navigator/templates/production-tools-service.yaml @@ -0,0 +1,14 @@ +apiVersion: v1 +kind: Service +metadata: + name: production-tools + labels: + {{- include "plot-navigator.labels" . | nindent 4 }} +spec: + selector: + {{- include "plot-navigator.selectorLabels" . | nindent 4 }} + app.kubernetes.io/component: production-tools + ports: + - port: 80 + protocol: TCP + targetPort: 8080 diff --git a/applications/plot-navigator/templates/production-tools.yaml b/applications/plot-navigator/templates/production-tools.yaml new file mode 100644 index 0000000000..0d7760fd7b --- /dev/null +++ b/applications/plot-navigator/templates/production-tools.yaml @@ -0,0 +1,89 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: production-tools + labels: + {{- include "plot-navigator.labels" . | nindent 4 }} +spec: + replicas: 1 + selector: + matchLabels: + {{- include "plot-navigator.selectorLabels" . | nindent 6 }} + app.kubernetes.io/component: production-tools + template: + metadata: + labels: + {{- include "plot-navigator.selectorLabels" . | nindent 8 }} + app.kubernetes.io/component: production-tools + spec: + volumes: + - name: "butler-secrets-raw" + secret: + secretName: {{ include "plot-navigator.fullname" . }} + - name: "butler-secrets" + emptyDir: + sizeLimit: 50Mi + {{- with .Values.productionTools.volumes }} + {{- . 
| toYaml | nindent 8 }} + {{- end }} + containers: + - name: plot-navigator + image: {{ .Values.productionTools.image.repository }}:{{ .Values.productionTools.image.tag | default .Chart.AppVersion}} + imagePullPolicy: Always + env: +{{- range $key, $value := .Values.productionTools.env }} + - name: {{ $key | quote }} + value: {{ $value | quote }} +{{- end }} + - name: S3_KEY + valueFrom: + secretKeyRef: + name: {{ include "plot-navigator.fullname" . }} + key: S3_KEY + - name: S3_SECRET + valueFrom: + secretKeyRef: + name: {{ include "plot-navigator.fullname" . }} + key: S3_SECRET + resources: + {{- toYaml .Values.resources | nindent 10 }} + {{- if .Values.productionTools.envFromSecretPath }} + envFrom: + - secretRef: {{ include "plot-navigator.fullname" . }} + {{- end }} + {{- if .Values.productionTools.volume_mounts }} + volumeMounts: + - name: "butler-secrets" + mountPath: "/opt/lsst/butler/secrets" + {{- with .Values.productionTools.volume_mounts }} + {{- . | toYaml | nindent 10 }} + {{- end }} + {{- end }} + initContainers: + # To deal with the Postgres file permission issues, + # copy the secrets from butler-secrets-raw to butler-secrets. 
+ - name: fix-secret-permissions + image: "alpine:latest" + imagePullPolicy: IfNotPresent + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /opt/lsst/butler/secrets/ + chown 1000:4085 /opt/lsst/butler/secrets/* + chmod 0400 /opt/lsst/butler/secrets/* + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - "all" + volumeMounts: + - name: "butler-secrets" + mountPath: "/opt/lsst/butler/secrets" + - name: "butler-secrets-raw" + mountPath: "/secrets-raw" + readOnly: true + securityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 4085 diff --git a/applications/plot-navigator/values-usdfdev.yaml b/applications/plot-navigator/values-usdfdev.yaml index 5076652c39..92672e1b3d 100644 --- a/applications/plot-navigator/values-usdfdev.yaml +++ b/applications/plot-navigator/values-usdfdev.yaml @@ -4,7 +4,6 @@ environment: BUCKET_NAME: "rubin-plot-navigator" BUCKET_URL: "https://s3dfrgw.slac.stanford.edu/" - config: persistentVolumeClaims: - name: sdf-group-rubin @@ -26,6 +25,37 @@ config: mountPath: /sdf/data/rubin readOnly: true +productionTools: + image: + repository: ghcr.io/lsst-dm/production_tools + tag: SP-883 + env: + DAF_BUTLER_REPOSITORY_INDEX: "/sdf/group/rubin/shared/data-repos.yaml" + PGPASSFILE: "/opt/lsst/butler/secrets/postgres-credentials.txt" + PGUSER: "rubin" + AWS_SHARED_CREDENTIALS_FILE: "/opt/lsst/butler/secrets/aws-credentials.ini" + S3_ENDPOINT_URL: "https://s3dfrgw.slac.stanford.edu" + LSST_DISABLE_BUCKET_VALIDATION: "1" + persistentVolumeClaims: + - name: sdf-group-rubin + storageClassName: sdf-group-rubin + - name: sdf-data-rubin + storageClassName: sdf-data-rubin + volumes: + - name: sdf-group-rubin + persistentVolumeClaim: + claimName: sdf-group-rubin + - name: sdf-data-rubin + persistentVolumeClaim: + claimName: sdf-data-rubin + volume_mounts: + - name: sdf-group-rubin + mountPath: /sdf/group/rubin + readOnly: true + - name: sdf-data-rubin + mountPath: /sdf/data/rubin + readOnly: true + 
butlerConfig: pguser: "rubin" s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu" @@ -48,7 +78,5 @@ butlerConfig: mountPath: /sdf/data/rubin readOnly: true - image: tag: DM-44957 - From 247ce0912194cba67ade5a33195190dbf18b823d Mon Sep 17 00:00:00 2001 From: Colin Slater Date: Sat, 19 Oct 2024 22:28:09 -0700 Subject: [PATCH 373/567] Add Redis for arq queue. --- applications/plot-navigator/Chart.yaml | 5 + applications/plot-navigator/README.md | 7 +- applications/plot-navigator/secrets.yaml | 5 + .../templates/production-tools-worker.yaml | 97 +++++++++++++++++++ .../templates/production-tools.yaml | 5 + .../templates/vault-secrets.yaml | 13 +++ .../plot-navigator/values-usdfdev.yaml | 6 +- applications/plot-navigator/values.yaml | 8 ++ 8 files changed, 139 insertions(+), 7 deletions(-) create mode 100644 applications/plot-navigator/templates/production-tools-worker.yaml diff --git a/applications/plot-navigator/Chart.yaml b/applications/plot-navigator/Chart.yaml index b13dab9a70..a7a031592b 100644 --- a/applications/plot-navigator/Chart.yaml +++ b/applications/plot-navigator/Chart.yaml @@ -5,3 +5,8 @@ version: 1.0.0 sources: - https://github.com/lsst-dm/plot-navigator appVersion: "0.1.1" + +dependencies: + - name: redis + version: 1.0.13 + repository: https://lsst-sqre.github.io/charts/ diff --git a/applications/plot-navigator/README.md b/applications/plot-navigator/README.md index 8c1af8496b..0f4f7590c0 100644 --- a/applications/plot-navigator/README.md +++ b/applications/plot-navigator/README.md @@ -1,7 +1,5 @@ # plot-navigator -![Version: 1.0.0](https://img.shields.io/badge/Version-1.0.0-informational?style=flat-square) ![AppVersion: 0.1.1](https://img.shields.io/badge/AppVersion-0.1.1-informational?style=flat-square) - Plot-navigator ## Source Code @@ -24,7 +22,6 @@ Plot-navigator | image.repository | string | `"ghcr.io/lsst-dm/plot-navigator"` | plot-navigator image to use | | image.tag | string | The appVersion of the chart | Tag of plot-navigator image to use | 
| ingress.annotations | object | `{}` | Additional annotations to add to the ingress | +| redis.config.secretKey | string | `"password"` | Key inside secret from which to get the Redis password (do not change) | +| redis.config.secretName | string | `"redis-secret"` | Name of secret containing Redis password | | resources | object | see `values.yaml` | Resource limits and requests for the nodejs pod | - ----------------------------------------------- -Autogenerated from chart metadata using [helm-docs v1.8.1](https://github.com/norwoodj/helm-docs/releases/v1.8.1) diff --git a/applications/plot-navigator/secrets.yaml b/applications/plot-navigator/secrets.yaml index 3f830741d4..3b763e3aa1 100644 --- a/applications/plot-navigator/secrets.yaml +++ b/applications/plot-navigator/secrets.yaml @@ -18,3 +18,8 @@ copy: application: nublado key: "postgres-credentials.txt" +redis-password: + description: >- + Password used to authenticate production-tools to the arq redis server. + generate: + type: password diff --git a/applications/plot-navigator/templates/production-tools-worker.yaml b/applications/plot-navigator/templates/production-tools-worker.yaml new file mode 100644 index 0000000000..1b233ffeee --- /dev/null +++ b/applications/plot-navigator/templates/production-tools-worker.yaml @@ -0,0 +1,97 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: production-tools-worker + labels: + {{- include "plot-navigator.labels" . | nindent 4 }} +spec: + replicas: 1 + selector: + matchLabels: + {{- include "plot-navigator.selectorLabels" . | nindent 6 }} + app.kubernetes.io/component: production-tools-worker + template: + metadata: + labels: + {{- include "plot-navigator.selectorLabels" . | nindent 8 }} + app.kubernetes.io/component: production-tools-worker + spec: + volumes: + - name: "butler-secrets-raw" + secret: + secretName: {{ include "plot-navigator.fullname" . 
}} + - name: "butler-secrets" + emptyDir: + sizeLimit: 50Mi + {{- with .Values.productionTools.volumes }} + {{- . | toYaml | nindent 8 }} + {{- end }} + containers: + - name: plot-navigator + image: {{ .Values.productionTools.image.repository }}:{{ .Values.productionTools.image.tag | default .Chart.AppVersion}} + imagePullPolicy: Always + env: +{{- range $key, $value := .Values.productionTools.env }} + - name: {{ $key | quote }} + value: {{ $value | quote }} +{{- end }} + - name: S3_KEY + valueFrom: + secretKeyRef: + name: {{ include "plot-navigator.fullname" . }} + key: S3_KEY + - name: S3_SECRET + valueFrom: + secretKeyRef: + name: {{ include "plot-navigator.fullname" . }} + key: S3_SECRET + - name: CM_ARQ_REDIS_PASSWORD + valueFrom: + secretKeyRef: + name: redis-secret + key: password + resources: + {{- toYaml .Values.resources | nindent 10 }} + {{- if .Values.productionTools.envFromSecretPath }} + envFrom: + - secretRef: {{ include "plot-navigator.fullname" . }} + {{- end }} + {{- if .Values.productionTools.volume_mounts }} + volumeMounts: + - name: "butler-secrets" + mountPath: "/opt/lsst/butler/secrets" + {{- with .Values.productionTools.volume_mounts }} + {{- . | toYaml | nindent 10 }} + {{- end }} + {{- end }} + command: + - arq + - lsst.production.tools.cache.Worker + initContainers: + # To deal with the Postgres file permission issues, + # copy the secrets from butler-secrets-raw to butler-secrets. 
+ - name: fix-secret-permissions + image: "alpine:latest" + imagePullPolicy: IfNotPresent + command: + - "/bin/ash" + - "-c" + - | + cp -RL /secrets-raw/* /opt/lsst/butler/secrets/ + chown 1000:4085 /opt/lsst/butler/secrets/* + chmod 0400 /opt/lsst/butler/secrets/* + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - "all" + volumeMounts: + - name: "butler-secrets" + mountPath: "/opt/lsst/butler/secrets" + - name: "butler-secrets-raw" + mountPath: "/secrets-raw" + readOnly: true + securityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 4085 diff --git a/applications/plot-navigator/templates/production-tools.yaml b/applications/plot-navigator/templates/production-tools.yaml index 0d7760fd7b..093a4e705f 100644 --- a/applications/plot-navigator/templates/production-tools.yaml +++ b/applications/plot-navigator/templates/production-tools.yaml @@ -45,6 +45,11 @@ spec: secretKeyRef: name: {{ include "plot-navigator.fullname" . }} key: S3_SECRET + - name: CM_ARQ_REDIS_PASSWORD + valueFrom: + secretKeyRef: + name: redis-secret + key: password resources: {{- toYaml .Values.resources | nindent 10 }} {{- if .Values.productionTools.envFromSecretPath }} diff --git a/applications/plot-navigator/templates/vault-secrets.yaml b/applications/plot-navigator/templates/vault-secrets.yaml index f2edfa27b4..5c1659da52 100644 --- a/applications/plot-navigator/templates/vault-secrets.yaml +++ b/applications/plot-navigator/templates/vault-secrets.yaml @@ -7,3 +7,16 @@ metadata: spec: path: "{{ .Values.global.vaultSecretsPath }}/plot-navigator" type: Opaque +--- +apiVersion: ricoberger.de/v1alpha1 +kind: VaultSecret +metadata: + name: redis-secret + labels: + {{- include "plot-navigator.labels" . 
| nindent 4 }} +spec: + path: "{{ .Values.global.vaultSecretsPath }}/plot-navigator" + templates: + password: >- + {% index .Secrets "redis-password" %} + type: Opaque diff --git a/applications/plot-navigator/values-usdfdev.yaml b/applications/plot-navigator/values-usdfdev.yaml index 92672e1b3d..7412d951f0 100644 --- a/applications/plot-navigator/values-usdfdev.yaml +++ b/applications/plot-navigator/values-usdfdev.yaml @@ -28,7 +28,7 @@ config: productionTools: image: repository: ghcr.io/lsst-dm/production_tools - tag: SP-883 + tag: 0.1 env: DAF_BUTLER_REPOSITORY_INDEX: "/sdf/group/rubin/shared/data-repos.yaml" PGPASSFILE: "/opt/lsst/butler/secrets/postgres-credentials.txt" @@ -36,6 +36,8 @@ productionTools: AWS_SHARED_CREDENTIALS_FILE: "/opt/lsst/butler/secrets/aws-credentials.ini" S3_ENDPOINT_URL: "https://s3dfrgw.slac.stanford.edu" LSST_DISABLE_BUCKET_VALIDATION: "1" + REDIS_HOST: "plot-navigator-redis" + REDIS_PORT: "6379" persistentVolumeClaims: - name: sdf-group-rubin storageClassName: sdf-group-rubin @@ -79,4 +81,4 @@ butlerConfig: readOnly: true image: - tag: DM-44957 + tag: v0.2.1 diff --git a/applications/plot-navigator/values.yaml b/applications/plot-navigator/values.yaml index ad2f278bbc..a18145869f 100644 --- a/applications/plot-navigator/values.yaml +++ b/applications/plot-navigator/values.yaml @@ -60,3 +60,11 @@ butlerResources: requests: cpu: "15m" memory: "150Mi" + +redis: + config: + # -- Name of secret containing Redis password + secretName: "redis-secret" + # -- Key inside secret from which to get the Redis password (do not + # change) + secretKey: "password" From 8a2d6cd69da70d6643f652ebb903d502e2747b39 Mon Sep 17 00:00:00 2001 From: Colin Slater Date: Tue, 22 Oct 2024 09:15:59 -0700 Subject: [PATCH 374/567] Deploy new plot-navigator to USDF prod and int. 
--- .../plot-navigator/values-usdfint.yaml | 72 ++++++++++++++++-- .../plot-navigator/values-usdfprod.yaml | 73 ++++++++++++++++--- 2 files changed, 128 insertions(+), 17 deletions(-) diff --git a/applications/plot-navigator/values-usdfint.yaml b/applications/plot-navigator/values-usdfint.yaml index c8cffe48b9..7412d951f0 100644 --- a/applications/plot-navigator/values-usdfint.yaml +++ b/applications/plot-navigator/values-usdfint.yaml @@ -1,13 +1,15 @@ environment: - DAF_BUTLER_REPOSITORY_INDEX: "/sdf/group/rubin/shared/data-repos.yaml" - PGPASSFILE: "/home/worker/.lsst/postgres-credentials.txt" - PGUSER: "rubin" - AWS_SHARED_CREDENTIALS_FILE: "/home/worker/.lsst/aws-credentials.ini" - S3_ENDPOINT_URL: "https://s3dfrgw.slac.stanford.edu" - BUTLER_DEFAULT_REPO: "/repo/main" - BUTLER_DEFAULT_COLLECTION: "HSC/runs/RC2/w_2024_02/DM-42454" + BASE_URL: "/plot-navigator" + REPO_URLS: '{"embargo": "http://internal-butler/api/butler/repo/embargo", "/repo/main": "http://internal-butler/api/butler/repo/main", "/repo/dc2": "http://internal-butler/api/butler/repo/dc2"}' + BUCKET_NAME: "rubin-plot-navigator" + BUCKET_URL: "https://s3dfrgw.slac.stanford.edu/" config: + persistentVolumeClaims: + - name: sdf-group-rubin + storageClassName: sdf-group-rubin + - name: sdf-data-rubin + storageClassName: sdf-data-rubin volumes: - name: sdf-group-rubin persistentVolumeClaim: @@ -18,11 +20,65 @@ config: volume_mounts: - name: sdf-group-rubin mountPath: /sdf/group/rubin + readOnly: true - name: sdf-data-rubin mountPath: /sdf/data/rubin + readOnly: true + +productionTools: + image: + repository: ghcr.io/lsst-dm/production_tools + tag: 0.1 + env: + DAF_BUTLER_REPOSITORY_INDEX: "/sdf/group/rubin/shared/data-repos.yaml" + PGPASSFILE: "/opt/lsst/butler/secrets/postgres-credentials.txt" + PGUSER: "rubin" + AWS_SHARED_CREDENTIALS_FILE: "/opt/lsst/butler/secrets/aws-credentials.ini" + S3_ENDPOINT_URL: "https://s3dfrgw.slac.stanford.edu" + LSST_DISABLE_BUCKET_VALIDATION: "1" + REDIS_HOST: 
"plot-navigator-redis" + REDIS_PORT: "6379" persistentVolumeClaims: - name: sdf-group-rubin storageClassName: sdf-group-rubin - name: sdf-data-rubin storageClassName: sdf-data-rubin - separateSecrets: true + volumes: + - name: sdf-group-rubin + persistentVolumeClaim: + claimName: sdf-group-rubin + - name: sdf-data-rubin + persistentVolumeClaim: + claimName: sdf-data-rubin + volume_mounts: + - name: sdf-group-rubin + mountPath: /sdf/group/rubin + readOnly: true + - name: sdf-data-rubin + mountPath: /sdf/data/rubin + readOnly: true + +butlerConfig: + pguser: "rubin" + s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu" + repositories: + embargo: "s3://rubin-summit-users/butler.yaml" + main: "/sdf/group/rubin/repo/main" + dc2: "/sdf/group/rubin/repo/dc2" + volumes: + - name: sdf-group-rubin + persistentVolumeClaim: + claimName: sdf-group-rubin + - name: sdf-data-rubin + persistentVolumeClaim: + claimName: sdf-data-rubin + volume_mounts: + - name: sdf-group-rubin + mountPath: /sdf/group/rubin + readOnly: true + - name: sdf-data-rubin + mountPath: /sdf/data/rubin + readOnly: true + +image: + tag: v0.2.1 diff --git a/applications/plot-navigator/values-usdfprod.yaml b/applications/plot-navigator/values-usdfprod.yaml index 60ac287325..7412d951f0 100644 --- a/applications/plot-navigator/values-usdfprod.yaml +++ b/applications/plot-navigator/values-usdfprod.yaml @@ -1,15 +1,15 @@ environment: - DAF_BUTLER_REPOSITORY_INDEX: "/sdf/group/rubin/shared/data-repos.yaml" - PGPASSFILE: "/home/worker/.lsst/postgres-credentials.txt" - PGUSER: "rubin" - AWS_SHARED_CREDENTIALS_FILE: "/home/worker/.lsst/aws-credentials.ini" - S3_ENDPOINT_URL: "https://s3dfrgw.slac.stanford.edu" - BUTLER_DEFAULT_REPO: "/repo/main" - BUTLER_DEFAULT_COLLECTION: "HSC/runs/RC2/w_2024_02/DM-42454" - LSST_DISABLE_BUCKET_VALIDATION: "1" - LSST_RESOURCES_S3_PROFILE_embargo: "https://sdfembs3.sdf.slac.stanford.edu" + BASE_URL: "/plot-navigator" + REPO_URLS: '{"embargo": 
"http://internal-butler/api/butler/repo/embargo", "/repo/main": "http://internal-butler/api/butler/repo/main", "/repo/dc2": "http://internal-butler/api/butler/repo/dc2"}' + BUCKET_NAME: "rubin-plot-navigator" + BUCKET_URL: "https://s3dfrgw.slac.stanford.edu/" config: + persistentVolumeClaims: + - name: sdf-group-rubin + storageClassName: sdf-group-rubin + - name: sdf-data-rubin + storageClassName: sdf-data-rubin volumes: - name: sdf-group-rubin persistentVolumeClaim: @@ -20,10 +20,65 @@ config: volume_mounts: - name: sdf-group-rubin mountPath: /sdf/group/rubin + readOnly: true - name: sdf-data-rubin mountPath: /sdf/data/rubin + readOnly: true + +productionTools: + image: + repository: ghcr.io/lsst-dm/production_tools + tag: 0.1 + env: + DAF_BUTLER_REPOSITORY_INDEX: "/sdf/group/rubin/shared/data-repos.yaml" + PGPASSFILE: "/opt/lsst/butler/secrets/postgres-credentials.txt" + PGUSER: "rubin" + AWS_SHARED_CREDENTIALS_FILE: "/opt/lsst/butler/secrets/aws-credentials.ini" + S3_ENDPOINT_URL: "https://s3dfrgw.slac.stanford.edu" + LSST_DISABLE_BUCKET_VALIDATION: "1" + REDIS_HOST: "plot-navigator-redis" + REDIS_PORT: "6379" persistentVolumeClaims: - name: sdf-group-rubin storageClassName: sdf-group-rubin - name: sdf-data-rubin storageClassName: sdf-data-rubin + volumes: + - name: sdf-group-rubin + persistentVolumeClaim: + claimName: sdf-group-rubin + - name: sdf-data-rubin + persistentVolumeClaim: + claimName: sdf-data-rubin + volume_mounts: + - name: sdf-group-rubin + mountPath: /sdf/group/rubin + readOnly: true + - name: sdf-data-rubin + mountPath: /sdf/data/rubin + readOnly: true + +butlerConfig: + pguser: "rubin" + s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu" + repositories: + embargo: "s3://rubin-summit-users/butler.yaml" + main: "/sdf/group/rubin/repo/main" + dc2: "/sdf/group/rubin/repo/dc2" + volumes: + - name: sdf-group-rubin + persistentVolumeClaim: + claimName: sdf-group-rubin + - name: sdf-data-rubin + persistentVolumeClaim: + claimName: sdf-data-rubin + 
volume_mounts: + - name: sdf-group-rubin + mountPath: /sdf/group/rubin + readOnly: true + - name: sdf-data-rubin + mountPath: /sdf/data/rubin + readOnly: true + +image: + tag: v0.2.1 From 2a3f66895ec0a07a853d3c3d936353efb1b90663 Mon Sep 17 00:00:00 2001 From: Colin Slater Date: Tue, 22 Oct 2024 10:22:35 -0700 Subject: [PATCH 375/567] Remove plot-navigator from idfint. --- applications/plot-navigator/values-idfint.yaml | 7 ------- environments/values-idfint.yaml | 1 - 2 files changed, 8 deletions(-) delete mode 100644 applications/plot-navigator/values-idfint.yaml diff --git a/applications/plot-navigator/values-idfint.yaml b/applications/plot-navigator/values-idfint.yaml deleted file mode 100644 index 2a8515e988..0000000000 --- a/applications/plot-navigator/values-idfint.yaml +++ /dev/null @@ -1,7 +0,0 @@ -environment: - BUTLER_URI: "s3://butler-us-central1-panda-dev/dc2/butler-external.yaml" - PGPASSFILE: "/home/worker/.lsst/postgres-credentials.txt" - AWS_SHARED_CREDENTIALS_FILE: "/home/worker/.lsst/aws-credentials.ini" - S3_ENDPOINT_URL: "https://storage.googleapis.com" -config: - separateSecrets: true diff --git a/environments/values-idfint.yaml b/environments/values-idfint.yaml index 34696fc711..2a6b2fee82 100644 --- a/environments/values-idfint.yaml +++ b/environments/values-idfint.yaml @@ -20,7 +20,6 @@ applications: hips: true mobu: true nublado: true - plot-navigator: true portal: true sasquatch: true siav2: false From c5bde713278a4343c41ac695af75f4d5ca3c9ecd Mon Sep 17 00:00:00 2001 From: Valerie Becker Date: Mon, 21 Oct 2024 12:38:52 -0700 Subject: [PATCH 376/567] Update consdb tag to point to 24.10.3 --- applications/consdb/values-summit.yaml | 8 ++++---- applications/consdb/values-tucson-teststand.yaml | 8 ++++---- applications/consdb/values-usdfdev.yaml | 8 ++++---- applications/consdb/values-usdfprod.yaml | 8 ++++---- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/applications/consdb/values-summit.yaml 
b/applications/consdb/values-summit.yaml index f8fc03e7b7..438f1fbb22 100644 --- a/applications/consdb/values-summit.yaml +++ b/applications/consdb/values-summit.yaml @@ -8,15 +8,15 @@ lfa: hinfo: latiss: enable: true - tag: "24.10.1" + tag: "24.10.3" logConfig: "consdb.hinfo=DEBUG" lsstcomcam: enable: true - tag: "24.10.1" + tag: "24.10.3" logConfig: "consdb.hinfo=DEBUG" lsstcam: enable: false - tag: "24.10.1" + tag: "24.10.3" pq: image: - tag: "24.10.1" + tag: "24.10.3" diff --git a/applications/consdb/values-tucson-teststand.yaml b/applications/consdb/values-tucson-teststand.yaml index c6810b178a..956b148fce 100644 --- a/applications/consdb/values-tucson-teststand.yaml +++ b/applications/consdb/values-tucson-teststand.yaml @@ -8,19 +8,19 @@ lfa: hinfo: latiss: enable: true - tag: "24.10.1" + tag: "24.10.3" logConfig: "consdb.hinfo=DEBUG" lsstcomcam: enable: true - tag: "24.10.1" + tag: "24.10.3" logConfig: "consdb.hinfo=DEBUG" lsstcam: enable: false - tag: "24.10.1" + tag: "24.10.3" pq: image: - tag: "24.10.1" + tag: "24.10.3" resources: requests: diff --git a/applications/consdb/values-usdfdev.yaml b/applications/consdb/values-usdfdev.yaml index 9447f015f6..203e10f75b 100644 --- a/applications/consdb/values-usdfdev.yaml +++ b/applications/consdb/values-usdfdev.yaml @@ -6,13 +6,13 @@ db: hinfo: latiss: enable: false - tag: "24.10.1" + tag: "24.10.3" lsstcomcam: enable: false - tag: "24.10.1" + tag: "24.10.3" lsstcam: enable: false - tag: "24.10.1" + tag: "24.10.3" pq: image: - tag: "24.10.1" + tag: "24.10.3" diff --git a/applications/consdb/values-usdfprod.yaml b/applications/consdb/values-usdfprod.yaml index 9447f015f6..203e10f75b 100644 --- a/applications/consdb/values-usdfprod.yaml +++ b/applications/consdb/values-usdfprod.yaml @@ -6,13 +6,13 @@ db: hinfo: latiss: enable: false - tag: "24.10.1" + tag: "24.10.3" lsstcomcam: enable: false - tag: "24.10.1" + tag: "24.10.3" lsstcam: enable: false - tag: "24.10.1" + tag: "24.10.3" pq: image: - tag: "24.10.1" + 
tag: "24.10.3" From 4c8af14d6d94e6cd7dd3edbf26618a74f53751a4 Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Wed, 23 Oct 2024 09:36:19 -0700 Subject: [PATCH 377/567] Use client-server butler for default dp02 in prod We are ready to switch over to client-server Butler as the default in production. --- applications/butler/values-idfprod.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/butler/values-idfprod.yaml b/applications/butler/values-idfprod.yaml index 4e65f438d3..128891095e 100644 --- a/applications/butler/values-idfprod.yaml +++ b/applications/butler/values-idfprod.yaml @@ -1,4 +1,5 @@ config: + dp02ClientServerIsDefault: true dp02PostgresUri: postgresql://postgres@10.163.0.3/idfdp02 s3EndpointUrl: "https://storage.googleapis.com" repositories: From 512ed19a99cc53826d5da4f7738fc94cee2d35f0 Mon Sep 17 00:00:00 2001 From: Erin Howard Date: Wed, 23 Oct 2024 17:00:58 -0700 Subject: [PATCH 378/567] Deploy Prompt Processing 4.6.0 for LATISS. --- .../values-usdfprod-prompt-processing.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index ec4b9cc3f7..e350a646eb 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.5.1 + tag: 4.6.0 instrument: pipelines: From 2ae74962576e713ab88219c27f31361edb1286b3 Mon Sep 17 00:00:00 2001 From: Erin Howard Date: Thu, 17 Oct 2024 16:51:16 -0700 Subject: [PATCH 379/567] Add/update values for LSSTComCam. 
--- .../prompt-proto-service-lsstcomcam/README.md | 14 ++-- .../values-usdfdev-prompt-processing.yaml | 23 ++++-- .../values-usdfprod-prompt-processing.yaml | 70 +++++++++++++++++++ .../values.yaml | 20 ++---- .../values-usdfprod-prompt-processing.yaml | 2 +- 5 files changed, 101 insertions(+), 28 deletions(-) create mode 100644 applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml diff --git a/applications/prompt-proto-service-lsstcomcam/README.md b/applications/prompt-proto-service-lsstcomcam/README.md index 9e9b55654b..eb34082609 100644 --- a/applications/prompt-proto-service-lsstcomcam/README.md +++ b/applications/prompt-proto-service-lsstcomcam/README.md @@ -15,12 +15,12 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.additionalVolumeMounts | list | `[]` | Kubernetes YAML configs for extra container volume(s). Any volumes required by other config options are automatically handled by the Helm chart. | | prompt-proto-service.affinity | object | `{}` | Affinity rules for the prompt processing pods | | prompt-proto-service.alerts.server | string | `"usdf-alert-stream-dev-broker-0.lsst.cloud:9094"` | Server address for the alert stream | -| prompt-proto-service.alerts.topic | string | None, must be set | Topic name where alerts will be sent | +| prompt-proto-service.alerts.topic | string | `""` | Topic name where alerts will be sent | | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. 
| -| prompt-proto-service.cache.patchesPerImage | int | `4` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | -| prompt-proto-service.cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. | +| prompt-proto-service.cache.patchesPerImage | int | `16` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | +| prompt-proto-service.cache.refcatsPerImage | int | `6` | A factor by which to multiply `baseSize` for refcat datasets. | | prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | | prompt-proto-service.image.pullPolicy | string | `IfNotPresent` in prod, `Always` in dev | Pull policy for the PP image | | prompt-proto-service.image.repository | string | `"ghcr.io/lsst-dm/prompt-service"` | Image to use in the PP deployment | @@ -32,21 +32,19 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.name | string | `""` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | -| prompt-proto-service.instrument.skymap | string | `""` | Skymap to use with the instrument | +| prompt-proto-service.instrument.skymap | string | `"ops_rehersal_prep_2k_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). 
| | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | | prompt-proto-service.knative.ephemeralStorageRequest | string | `"5Gi"` | The storage space reserved for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | -| prompt-proto-service.knative.extraTimeout | int | `10` | To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. This parameter adds extra time to that minimum (seconds). | | prompt-proto-service.knative.gpu | bool | `false` | GPUs enabled. | | prompt-proto-service.knative.gpuRequest | int | `0` | The number of GPUs to request for the full pod (see `containerConcurrency`). | -| prompt-proto-service.knative.idleTimeout | int | `0` | Maximum time that a container can send nothing to Knative (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | +| prompt-proto-service.knative.idleTimeout | int | `900` | Maximum time that a container can send nothing to Knative (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.knative.memoryLimit | string | `"8Gi"` | The maximum memory limit for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.memoryRequest | string | `"2Gi"` | The minimum memory to request for the full pod (see `containerConcurrency`). | -| prompt-proto-service.knative.responseStartTimeout | int | `0` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. 
| +| prompt-proto-service.knative.responseStartTimeout | int | `900` | Maximum time that a container can send nothing to Knative after initial submission (seconds). This is only useful if the container runs async workers. If 0, idle timeout is ignored. | | prompt-proto-service.logLevel | string | log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. | Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). | | prompt-proto-service.podAnnotations | object | See the `values.yaml` file. | Annotations for the prompt-proto-service pod | -| prompt-proto-service.raw_microservice | string | `""` | The URI to a microservice that maps image metadata to a file location. If empty, Prompt Processing does not use a microservice. | | prompt-proto-service.registry.centralRepoFile | bool | `false` | If set, this application's Vault secret must contain a `central_repo_file` key containing a remote Butler configuration, and `instrument.calibRepo` is the local path where this file is mounted. | | prompt-proto-service.s3.auth_env | bool | `true` | If set, get S3 credentials from this application's Vault secret. | | prompt-proto-service.s3.disableBucketValidation | int | `0` | Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. 
| diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml index 45667dadc3..4b8736f8ab 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml @@ -5,24 +5,37 @@ prompt-proto-service: revision: "1" image: - repository: ghcr.io/lsst-dm/prompt-service pullPolicy: Always # Overrides the image tag whose default is the chart appVersion. tag: latest instrument: - calibRepo: s3://rubin-summit-users/ + pipelines: + main: >- + (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml, + ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr.yaml] + preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml] + calibRepo: s3://rubin-pp-dev-users/central_repo_2 s3: - imageBucket: rubin:rubin-pp + imageBucket: rubin-pp-dev endpointUrl: https://s3dfrgw.slac.stanford.edu - disableBucketValidation: 1 imageNotifications: kafkaClusterAddress: prompt-processing-kafka-bootstrap.kafka:9092 - topic: rubin-prompt-processing + topic: prompt-processing-dev + + apdb: + config: s3://rubin-pp-dev-users/apdb_config/cassandra/pp_apdb_lsstcomcam-dev.py alerts: topic: "alert-stream-test" + sasquatch: + endpointUrl: https://usdf-rsp-dev.slac.stanford.edu/sasquatch-rest-proxy + auth_env: false + + # A cache efficiency workaround breaks when mixing observing dates; see DM-43205, DM-43913. 
+ cacheCalibs: false + fullnameOverride: "prompt-proto-service-lsstcomcam" diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml new file mode 100644 index 0000000000..c39a312b2f --- /dev/null +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -0,0 +1,70 @@ +prompt-proto-service: + + podAnnotations: + # HACK: disable autoscaling as workaround for DM-41829 + autoscaling.knative.dev/min-scale: "20" + autoscaling.knative.dev/max-scale: "20" + # Update this field if using latest or static image tag in dev + revision: "1" + + # HACK: disable autoscaling as workaround for DM-41829 + worker: + restart: 15 + + image: + pullPolicy: IfNotPresent + # Overrides the image tag whose default is the chart appVersion. + tag: 4.6.0 + + instrument: + pipelines: + main: >- + (survey="comcam-ap")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml, + ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/SingleFrame.yaml, + ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr.yaml] + (survey="comcam-isr-cal")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] + (survey="cwfs")=[] + (survey="cwfs-focus-sweep")=[] + (survey="spec-survey")=[] + (survey="photon-transfer")=[] + (survey="daily-calibration-seq")=[] + (survey="")=[] + preprocessing: >- + (survey="comcam-preprocessing")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml] + (survey="daytime-checkout")=[] + (survey="cwfs")=[] + (survey="cwfs-focus-sweep")=[] + (survey="spec-survey")=[] + (survey="photon-transfer")=[] + (survey="daily-calibration-seq")=[] + (survey="")=[] + calibRepo: s3://rubin-summit-users + + s3: + imageBucket: rubin-summit + endpointUrl: https://sdfembs3.sdf.slac.stanford.edu + + raw_microservice: http://172.24.5.158:8080/presence + + imageNotifications: + kafkaClusterAddress: prompt-processing-2-kafka-bootstrap.kafka:9092 
+ topic: rubin-summit-notification + # Scheduler adds an extra 60-80-second delay for first visit in a sequence, + # and files can take up to 20 seconds to arrive. Scheduler delay associated + # with CWFS engineering data, should not apply to other cameras. + imageTimeout: 110 + + apdb: + config: s3://rubin-summit-users/apdb_config/cassandra/pp_apdb_lsstcomcam.py + + alerts: + topic: "lsstcomcam-alerts" + + sasquatch: + endpointUrl: https://usdf-rsp-dev.slac.stanford.edu/sasquatch-rest-proxy + namespace: lsst.prompt.prod + auth_env: false + + logLevel: timer.lsst.activator=DEBUG lsst.diaPipe=VERBOSE lsst.rbClassify=VERBOSE lsst.resources=DEBUG + + fullnameOverride: "prompt-proto-service-lsstcomcam" diff --git a/applications/prompt-proto-service-lsstcomcam/values.yaml b/applications/prompt-proto-service-lsstcomcam/values.yaml index 6bd8388077..e015b69c44 100644 --- a/applications/prompt-proto-service-lsstcomcam/values.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values.yaml @@ -49,7 +49,7 @@ prompt-proto-service: # @default -- None, must be set preprocessing: "" # -- Skymap to use with the instrument - skymap: "" + skymap: "ops_rehersal_prep_2k_v1" # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. # @default -- None, must be set @@ -60,9 +60,9 @@ prompt-proto-service: # The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. baseSize: 3 # -- A factor by which to multiply `baseSize` for refcat datasets. - refcatsPerImage: 4 + refcatsPerImage: 6 # -- A factor by which to multiply `baseSize` for templates and other patch-based datasets. 
- patchesPerImage: 4 + patchesPerImage: 16 s3: # -- Bucket containing the incoming raw images @@ -76,10 +76,6 @@ prompt-proto-service: # -- Set this to disable validation of S3 bucket names, allowing Ceph multi-tenant colon-separated names to be used. disableBucketValidation: 0 - # -- The URI to a microservice that maps image metadata to a file location. - # If empty, Prompt Processing does not use a microservice. - raw_microservice: "" - imageNotifications: # -- Hostname and port of the Kafka provider # @default -- None, must be set @@ -102,7 +98,6 @@ prompt-proto-service: # -- Server address for the alert stream server: "usdf-alert-stream-dev-broker-0.lsst.cloud:9094" # -- Topic name where alerts will be sent - # @default -- None, must be set topic: "" registry: @@ -111,7 +106,7 @@ prompt-proto-service: # -- Requested logging levels in the format of [Middleware's \-\-log-level argument](https://pipelines.lsst.io/v/daily/modules/lsst.daf.butler/scripts/butler.html#cmdoption-butler-log-level). # @default -- log prompt_processing at DEBUG, other LSST code at INFO, and third-party code at WARNING. - logLevel: "" + logLevel: "timer.lsst.activator=DEBUG" sasquatch: # -- Url of the Sasquatch proxy server to upload metrics to. Leave blank to disable upload. @@ -143,17 +138,14 @@ prompt-proto-service: memoryRequest: "2Gi" # -- The maximum memory limit for the full pod (see `containerConcurrency`). memoryLimit: "8Gi" - # -- To acommodate scheduling problems, Knative waits for a request for twice `worker.timeout`. - # This parameter adds extra time to that minimum (seconds). - extraTimeout: 10 # -- Maximum time that a container can send nothing to Knative (seconds). # This is only useful if the container runs async workers. # If 0, idle timeout is ignored. - idleTimeout: 0 + idleTimeout: 900 # -- Maximum time that a container can send nothing to Knative after initial submission (seconds). # This is only useful if the container runs async workers. 
# If 0, idle timeout is ignored. - responseStartTimeout: 0 + responseStartTimeout: 900 # -- The number of Knative requests that can be handled simultaneously by one container containerConcurrency: 1 diff --git a/environments/values-usdfprod-prompt-processing.yaml b/environments/values-usdfprod-prompt-processing.yaml index b1c1ce92d9..79c9897f4a 100644 --- a/environments/values-usdfprod-prompt-processing.yaml +++ b/environments/values-usdfprod-prompt-processing.yaml @@ -11,6 +11,6 @@ applications: prompt-proto-service-hsc: false prompt-proto-service-latiss: true prompt-proto-service-lsstcam: false - prompt-proto-service-lsstcomcam: false + prompt-proto-service-lsstcomcam: true prompt-proto-service-lsstcomcamsim: false vault-secrets-operator: false From 28f2c09a80b380adae40717503bb9fddf1b7ba48 Mon Sep 17 00:00:00 2001 From: Brianna Smart Date: Fri, 18 Oct 2024 11:07:03 -0700 Subject: [PATCH 380/567] Add lsstcomcam-alerts as a topic to the alert broker --- .../templates/kafka-topics.yaml | 22 ++++++++++++++++++- .../values-usdfdev-alert-stream-broker.yaml | 4 ++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka-topics.yaml b/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka-topics.yaml index 26c74abe19..74b3e6ea15 100644 --- a/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka-topics.yaml +++ b/applications/alert-stream-broker/charts/alert-stream-broker/templates/kafka-topics.yaml @@ -93,4 +93,24 @@ spec: # timestamp instead message.timestamp.type: 'LogAppendTime' partitions: {{ .Values.latissTopicPartitions }} - replicas: {{ .Values.latissTopicReplicas }} \ No newline at end of file + replicas: {{ .Values.latissTopicReplicas }} +--- +apiVersion: "kafka.strimzi.io/{{ .Values.strimziAPIVersion }}" +kind: KafkaTopic +metadata: + labels: + strimzi.io/cluster: "{{ .Values.clusterName }}" + name: "{{ .Values.comcamTopicName}}" +spec: 
+ config: + cleanup.policy: "delete" + retention.ms: {{ .Values.maxMillisecondsRetained }} # 7 days + retention.bytes: {{ .Values.maxBytesRetained }} + compression.type: {{ .Values.topicCompression }} + # The default timestamp is the creation time of the alert. + # To get the ingestion rate, we need this to be the log + # append time, and the header will contain the producer + # timestamp instead + message.timestamp.type: 'LogAppendTime' + partitions: {{ .Values.comcamTopicPartitions }} + replicas: {{ .Values.comcamTopicReplicas }} diff --git a/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml b/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml index 0f56055671..c80f69d767 100644 --- a/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml +++ b/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml @@ -121,6 +121,10 @@ alert-stream-broker: latissTopicPartitions: 45 latissTopicReplicas: 1 + comcamTopicName: lsstcomcam-alerts + comcamTopicPartitions: 45 + comcamTopicReplicas: 1 + # Compression set to snappy to balance alert packet compression speed and size. topicCompression: snappy From 786b9b8b37decb45e8cdcb228ba3a13eb2f1d349 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 25 Oct 2024 10:07:33 -0700 Subject: [PATCH 381/567] Add MTM1M3TS to summit and USDF prod sasquatch configs. 
--- applications/sasquatch/values-summit.yaml | 12 ++++++++++++ applications/sasquatch/values-usdfprod.yaml | 8 ++++++++ 2 files changed, 20 insertions(+) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 3919490042..d6830e21f7 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -190,6 +190,10 @@ kafka-connect-manager: repairerConnector: false topicsRegex: ".*MTM1M3" tasksMax: "8" + m1m3ts: + enabled: true + repairerConnector: false + topicsRegex: ".*MTM1M3TS" m2: enabled: true repairerConnector: false @@ -348,6 +352,14 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.sal.MTM1M3" ] debug: true + m1m3ts: + enabled: true + repair: false + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.MTM1M3TS" ] + debug: true m2: enabled: true database: "efd" diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 1ea6936014..e3efbedf7b 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -175,6 +175,14 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.sal.MTM1M3" ] debug: true + m1m3ts: + enabled: true + repair: false + database: "efd" + timestamp_field: "private_efdStamp" + topicRegexps: | + [ "lsst.sal.MTM1M3TS" ] + debug: true m2: enabled: true repair: false From 7f4c12242f2a220c427defb067f3289b09083f4b Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 25 Oct 2024 11:44:32 -0700 Subject: [PATCH 382/567] Remove MTM1M3TS. It's picked up elsewhere. 
--- applications/sasquatch/values-summit.yaml | 12 ------------ applications/sasquatch/values-usdfprod.yaml | 8 -------- 2 files changed, 20 deletions(-) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index d6830e21f7..3919490042 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -190,10 +190,6 @@ kafka-connect-manager: repairerConnector: false topicsRegex: ".*MTM1M3" tasksMax: "8" - m1m3ts: - enabled: true - repairerConnector: false - topicsRegex: ".*MTM1M3TS" m2: enabled: true repairerConnector: false @@ -352,14 +348,6 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.sal.MTM1M3" ] debug: true - m1m3ts: - enabled: true - repair: false - database: "efd" - timestamp_field: "private_efdStamp" - topicRegexps: | - [ "lsst.sal.MTM1M3TS" ] - debug: true m2: enabled: true database: "efd" diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index e3efbedf7b..1ea6936014 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -175,14 +175,6 @@ telegraf-kafka-consumer: topicRegexps: | [ "lsst.sal.MTM1M3" ] debug: true - m1m3ts: - enabled: true - repair: false - database: "efd" - timestamp_field: "private_efdStamp" - topicRegexps: | - [ "lsst.sal.MTM1M3TS" ] - debug: true m2: enabled: true repair: false From 1feae4a05f522bc5fcb7ac0cb236fee853228209 Mon Sep 17 00:00:00 2001 From: Erin Howard Date: Fri, 25 Oct 2024 12:17:15 -0700 Subject: [PATCH 383/567] Add LSSTComCam to next-visit-fan-out. 
--- .../next-visit-fan-out/values-usdfdev-prompt-processing.yaml | 2 +- .../next-visit-fan-out/values-usdfprod-prompt-processing.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml index 05625325c7..e2ac0f59a6 100644 --- a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml @@ -15,4 +15,4 @@ image: # Overrides the image tag whose default is the chart appVersion. tag: 2.4.0 -instruments: "LATISS LSSTComCamSim HSC" +instruments: "LATISS LSSTComCam LSSTComCamSim HSC" diff --git a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml index ad765e9be2..73bcb8144d 100644 --- a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml @@ -13,4 +13,4 @@ image: # Overrides the image tag whose default is the chart appVersion. tag: 2.4.0 -instruments: "LATISS" +instruments: "LATISS LSSTComCam" From 583967ccee1676ed9e0bd7095991816e1768e5e7 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Fri, 25 Oct 2024 15:32:15 -0700 Subject: [PATCH 384/567] Clean up Gafaelfawr Helm chart Use a different method for handling Gafaelfawr schema updates: add the hook metadata to the required supporting resources rather than creating new ones. We tested this approach with vo-cutouts and it worked well, so adopt it here as well. This results in replacing instead of updating the resources on the next sync, but it avoids having duplicate resources that have to be manually deleted. Remove the -config and -secret endings on the names of the Gafaelfawr VaultSecret and ConfigMap resources, adopting our current conventions. 
The endings duplicate information already available in Kubernetes in other ways. --- applications/gafaelfawr/README.md | 2 +- .../gafaelfawr/templates/_helpers.tpl | 22 +++++++++---------- .../templates/configmap-kerberos.yaml | 2 +- .../gafaelfawr/templates/configmap.yaml | 16 ++------------ .../gafaelfawr/templates/cronjob-audit.yaml | 6 ++--- .../templates/cronjob-maintenance.yaml | 6 ++--- .../templates/deployment-operator.yaml | 6 ++--- .../gafaelfawr/templates/deployment.yaml | 6 ++--- .../templates/job-schema-update.yaml | 6 ++--- .../gafaelfawr/templates/serviceaccount.yaml | 15 +++---------- .../gafaelfawr/templates/vault-secrets.yaml | 19 +++++----------- applications/gafaelfawr/values-ccin2p3.yaml | 1 - applications/gafaelfawr/values.yaml | 5 ++--- 13 files changed, 40 insertions(+), 72 deletions(-) diff --git a/applications/gafaelfawr/README.md b/applications/gafaelfawr/README.md index 8d3490131a..ae4eb82caf 100644 --- a/applications/gafaelfawr/README.md +++ b/applications/gafaelfawr/README.md @@ -104,7 +104,7 @@ Authentication and identity system | podAnnotations | object | `{}` | Annotations for the Gafaelfawr frontend pod | | redis.affinity | object | `{}` | Affinity rules for the Redis pod | | redis.config.secretKey | string | `"redis-password"` | Key inside secret from which to get the Redis password (do not change) | -| redis.config.secretName | string | `"gafaelfawr-secret"` | Name of secret containing Redis password (may require changing if fullnameOverride is set) | +| redis.config.secretName | string | `"gafaelfawr"` | Name of secret containing Redis password (do not change) | | redis.nodeSelector | object | `{}` | Node selection rules for the Redis pod | | redis.persistence.accessMode | string | `"ReadWriteOnce"` | Access mode of storage to request | | redis.persistence.enabled | bool | `true` | Whether to persist Redis storage and thus tokens. Setting this to false will use `emptyDir` and reset all tokens on every restart. 
Only use this for a test deployment. | diff --git a/applications/gafaelfawr/templates/_helpers.tpl b/applications/gafaelfawr/templates/_helpers.tpl index 812a70b587..eb32f96cd7 100644 --- a/applications/gafaelfawr/templates/_helpers.tpl +++ b/applications/gafaelfawr/templates/_helpers.tpl @@ -43,19 +43,19 @@ Common environment variables - name: "GAFAELFAWR_BOOTSTRAP_TOKEN" valueFrom: secretKeyRef: - name: {{ .secretName | quote }} + name: "gafaelfawr" key: "bootstrap-token" {{- if .Values.config.cilogon.clientId }} - name: "GAFAELFAWR_CILOGON_CLIENT_SECRET" valueFrom: secretKeyRef: - name: {{ .secretName | quote }} + name: "gafaelfawr" key: "cilogon-client-secret" {{- end }} - name: "GAFAELFAWR_DATABASE_PASSWORD" valueFrom: secretKeyRef: - name: {{ .secretName | quote }} + name: "gafaelfawr" key: "database-password" {{- if (or .Values.cloudsql.enabled .Values.config.internalDatabase) }} - name: "GAFAELFAWR_DATABASE_URL" @@ -71,28 +71,28 @@ Common environment variables - name: "GAFAELFAWR_GITHUB_CLIENT_SECRET" valueFrom: secretKeyRef: - name: {{ .secretName | quote }} + name: "gafaelfawr" key: "github-client-secret" {{- end }} {{- if .Values.config.ldap.userDn }} - name: "GAFAELFAWR_LDAP_PASSWORD" valueFrom: secretKeyRef: - name: {{ .secretName | quote }} + name: "gafaelfawr" key: "ldap-password" {{- end }} {{- if .Values.config.oidc.clientId }} - name: "GAFAELFAWR_OIDC_CLIENT_SECRET" valueFrom: secretKeyRef: - name: {{ .secretName | quote }} + name: "gafaelfawr" key: "oidc-client-secret" {{- end }} {{- if .Values.config.oidcServer.enabled }} - name: "GAFAELFAWR_OIDC_SERVER_CLIENTS" valueFrom: secretKeyRef: - name: {{ .secretName | quote }} + name: "gafaelfawr" key: "oidc-server-secrets" {{- if (not .Values.config.oidcServer.issuer) }} - name: "GAFAELFAWR_OIDC_SERVER_ISSUER" @@ -101,7 +101,7 @@ Common environment variables - name: "GAFAELFAWR_OIDC_SERVER_KEY" valueFrom: secretKeyRef: - name: {{ .secretName | quote }} + name: "gafaelfawr" key: "signing-key" {{- end 
}} {{- if (not .Values.config.realm) }} @@ -113,20 +113,20 @@ Common environment variables - name: "GAFAELFAWR_REDIS_PASSWORD" valueFrom: secretKeyRef: - name: {{ .secretName | quote }} + name: "gafaelfawr" key: "redis-password" - name: "GAFAELFAWR_REDIS_URL" value: "redis://gafaelfawr-redis.{{ .Release.Namespace }}:6379/0" - name: "GAFAELFAWR_SESSION_SECRET" valueFrom: secretKeyRef: - name: {{ .secretName | quote }} + name: "gafaelfawr" key: "session-secret" {{- if .Values.config.slackAlerts }} - name: "GAFAELFAWR_SLACK_WEBHOOK" valueFrom: secretKeyRef: - name: {{ .secretName | quote }} + name: "gafaelfawr" key: "slack-webhook" {{- end }} {{- if .Values.config.metrics.enabled }} diff --git a/applications/gafaelfawr/templates/configmap-kerberos.yaml b/applications/gafaelfawr/templates/configmap-kerberos.yaml index d21ed51b45..ab7b960106 100644 --- a/applications/gafaelfawr/templates/configmap-kerberos.yaml +++ b/applications/gafaelfawr/templates/configmap-kerberos.yaml @@ -2,7 +2,7 @@ apiVersion: v1 kind: ConfigMap metadata: - name: "gafaelfawr-config-kerberos" + name: "gafaelfawr-kerberos" labels: {{- include "gafaelfawr.labels" . | nindent 4 }} data: diff --git a/applications/gafaelfawr/templates/configmap.yaml b/applications/gafaelfawr/templates/configmap.yaml index 86b72672ba..b9e0efe1ba 100644 --- a/applications/gafaelfawr/templates/configmap.yaml +++ b/applications/gafaelfawr/templates/configmap.yaml @@ -1,27 +1,15 @@ apiVersion: v1 kind: ConfigMap metadata: - name: "gafaelfawr-config" + name: "gafaelfawr" labels: {{- include "gafaelfawr.labels" . 
| nindent 4 }} -data: - gafaelfawr.yaml: | - {{- toYaml .Values.config | nindent 4 }} -{{- if .Values.config.updateSchema }} ---- -apiVersion: v1 -kind: ConfigMap -metadata: - name: "gafaelfawr-config-schema-update" {{- if .Values.config.updateSchema }} annotations: helm.sh/hook: "pre-install,pre-upgrade" - helm.sh/hook-delete-policy: "hook-succeeded" + helm.sh/hook-delete-policy: "before-hook-creation" helm.sh/hook-weight: "0" {{- end }} - labels: - {{- include "gafaelfawr.labels" . | nindent 4 }} data: gafaelfawr.yaml: | {{- toYaml .Values.config | nindent 4 }} -{{- end }} diff --git a/applications/gafaelfawr/templates/cronjob-audit.yaml b/applications/gafaelfawr/templates/cronjob-audit.yaml index 0013a16262..df5bbd3453 100644 --- a/applications/gafaelfawr/templates/cronjob-audit.yaml +++ b/applications/gafaelfawr/templates/cronjob-audit.yaml @@ -37,7 +37,7 @@ spec: - "gafaelfawr" - "audit" env: - {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret") | nindent 16 }} + {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values) | nindent 16 }} {{- if .Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" @@ -95,7 +95,7 @@ spec: volumes: - name: "config" configMap: - name: "gafaelfawr-config" + name: "gafaelfawr" {{- if .Values.config.metrics.enabled }} - name: "kafka" secret: @@ -107,7 +107,7 @@ spec: secretName: "gafaelfawr-keytab" - name: "kerberos-config" configMap: - name: "gafaelfawr-config-kerberos" + name: "gafaelfawr-kerberos" - name: "tmp" emptyDir: {} {{- end }} diff --git a/applications/gafaelfawr/templates/cronjob-maintenance.yaml b/applications/gafaelfawr/templates/cronjob-maintenance.yaml index bbefece8bd..7108a75266 100644 --- a/applications/gafaelfawr/templates/cronjob-maintenance.yaml +++ b/applications/gafaelfawr/templates/cronjob-maintenance.yaml @@ -36,7 +36,7 @@ spec: - "gafaelfawr" - "maintenance" env: - {{- include 
"gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret") | nindent 16 }} + {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values) | nindent 16 }} {{- if .Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" @@ -94,7 +94,7 @@ spec: volumes: - name: "config" configMap: - name: "gafaelfawr-config" + name: "gafaelfawr" {{- if .Values.config.metrics.enabled }} - name: "kafka" secret: @@ -106,7 +106,7 @@ spec: secretName: "gafaelfawr-keytab" - name: "kerberos-config" configMap: - name: "gafaelfawr-config-kerberos" + name: "gafaelfawr-kerberos" - name: "tmp" emptyDir: {} {{- end }} diff --git a/applications/gafaelfawr/templates/deployment-operator.yaml b/applications/gafaelfawr/templates/deployment-operator.yaml index 323a25bce6..821ca0fabc 100644 --- a/applications/gafaelfawr/templates/deployment-operator.yaml +++ b/applications/gafaelfawr/templates/deployment-operator.yaml @@ -42,7 +42,7 @@ spec: - "-m" - "gafaelfawr.operator" env: - {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret") | nindent 12 }} + {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values) | nindent 12 }} {{- if .Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" @@ -112,7 +112,7 @@ spec: volumes: - name: "config" configMap: - name: "gafaelfawr-config" + name: "gafaelfawr" {{- if .Values.config.metrics.enabled }} - name: "kafka" secret: @@ -124,7 +124,7 @@ spec: secretName: "gafaelfawr-keytab" - name: "kerberos-config" configMap: - name: "gafaelfawr-config-kerberos" + name: "gafaelfawr-kerberos" - name: "tmp" emptyDir: {} {{- end }} diff --git a/applications/gafaelfawr/templates/deployment.yaml b/applications/gafaelfawr/templates/deployment.yaml index fed652313f..22dc810318 100644 --- a/applications/gafaelfawr/templates/deployment.yaml +++ 
b/applications/gafaelfawr/templates/deployment.yaml @@ -54,7 +54,7 @@ spec: {{- end }} - name: "gafaelfawr" env: - {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret" "sidecar" true) | nindent 12 }} + {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "sidecar" true) | nindent 12 }} {{- if .Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" @@ -135,7 +135,7 @@ spec: volumes: - name: "config" configMap: - name: "gafaelfawr-config" + name: "gafaelfawr" {{- if .Values.config.metrics.enabled }} - name: "kafka" secret: @@ -147,7 +147,7 @@ spec: secretName: "gafaelfawr-keytab" - name: "kerberos-config" configMap: - name: "gafaelfawr-config-kerberos" + name: "gafaelfawr-kerberos" - name: "tmp" emptyDir: {} {{- end }} diff --git a/applications/gafaelfawr/templates/job-schema-update.yaml b/applications/gafaelfawr/templates/job-schema-update.yaml index 87cc71815a..65bc2b52a0 100644 --- a/applications/gafaelfawr/templates/job-schema-update.yaml +++ b/applications/gafaelfawr/templates/job-schema-update.yaml @@ -23,7 +23,7 @@ spec: gafaelfawr-redis-client: "true" spec: {{- if .Values.cloudsql.enabled }} - serviceAccountName: "gafaelfawr-schema-update" + serviceAccountName: "gafaelfawr" {{- else }} automountServiceAccountToken: false {{- end }} @@ -79,7 +79,7 @@ spec: gafaelfawr update-schema touch /lifecycle/main-terminated env: - {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "secretName" "gafaelfawr-secret-schema-update" "sidecar" true) | nindent 12 }} + {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "sidecar" true) | nindent 12 }} image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy | quote }} {{- with .Values.resources }} @@ -106,7 +106,7 @@ spec: volumes: - name: "config" configMap: - name: 
"gafaelfawr-config-schema-update" + name: "gafaelfawr" - name: "lifecycle" emptyDir: {} {{- with .Values.nodeSelector }} diff --git a/applications/gafaelfawr/templates/serviceaccount.yaml b/applications/gafaelfawr/templates/serviceaccount.yaml index acf07b2ed2..42aea8bf2e 100644 --- a/applications/gafaelfawr/templates/serviceaccount.yaml +++ b/applications/gafaelfawr/templates/serviceaccount.yaml @@ -6,19 +6,10 @@ metadata: labels: {{- include "gafaelfawr.labels" . | nindent 4 }} annotations: - iam.gke.io/gcp-service-account: {{ required "cloudsql.serviceAccount must be set to a valid Google service account" .Values.cloudsql.serviceAccount | quote }} -{{- if .Values.config.updateSchema }} ---- -apiVersion: v1 -kind: ServiceAccount -metadata: - name: "gafaelfawr-schema-update" - labels: - {{- include "gafaelfawr.labels" . | nindent 4 }} - annotations: + {{- if .Values.config.updateSchema }} helm.sh/hook: "pre-install,pre-upgrade" - helm.sh/hook-delete-policy: "hook-succeeded" + helm.sh/hook-delete-policy: "before-hook-creation" helm.sh/hook-weight: "0" + {{- end }} iam.gke.io/gcp-service-account: {{ required "cloudsql.serviceAccount must be set to a valid Google service account" .Values.cloudsql.serviceAccount | quote }} {{- end }} -{{- end }} diff --git a/applications/gafaelfawr/templates/vault-secrets.yaml b/applications/gafaelfawr/templates/vault-secrets.yaml index 558598febe..29563d2ebd 100644 --- a/applications/gafaelfawr/templates/vault-secrets.yaml +++ b/applications/gafaelfawr/templates/vault-secrets.yaml @@ -1,29 +1,20 @@ apiVersion: ricoberger.de/v1alpha1 kind: VaultSecret metadata: - name: "gafaelfawr-secret" + name: "gafaelfawr" labels: {{- include "gafaelfawr.labels" . 
| nindent 4 }} -spec: - path: "{{ .Values.global.vaultSecretsPath }}/gafaelfawr" - type: Opaque -{{- if .Values.config.updateSchema }} ---- -apiVersion: ricoberger.de/v1alpha1 -kind: VaultSecret -metadata: - name: "gafaelfawr-secret-schema-update" + {{- if .Values.config.updateSchema }} annotations: helm.sh/hook: "pre-install,pre-upgrade" + helm.sh/hook-delete-policy: "before-hook-creation" helm.sh/hook-weight: "0" - labels: - {{- include "gafaelfawr.labels" . | nindent 4 }} + {{- end }} spec: path: "{{ .Values.global.vaultSecretsPath }}/gafaelfawr" type: Opaque -{{- end }} ---- {{- if .Values.config.ldap.kerberosConfig }} +--- apiVersion: ricoberger.de/v1alpha1 kind: VaultSecret metadata: diff --git a/applications/gafaelfawr/values-ccin2p3.yaml b/applications/gafaelfawr/values-ccin2p3.yaml index 7d59a28426..3d99fada3c 100644 --- a/applications/gafaelfawr/values-ccin2p3.yaml +++ b/applications/gafaelfawr/values-ccin2p3.yaml @@ -6,7 +6,6 @@ redis: config: logLevel: "DEBUG" internalDatabase: true - updateSchema: False # Session length and token expiration (in minutes). 
#issuer: diff --git a/applications/gafaelfawr/values.yaml b/applications/gafaelfawr/values.yaml index 034c692cb6..687c900024 100644 --- a/applications/gafaelfawr/values.yaml +++ b/applications/gafaelfawr/values.yaml @@ -415,9 +415,8 @@ operator: redis: config: - # -- Name of secret containing Redis password (may require changing if - # fullnameOverride is set) - secretName: "gafaelfawr-secret" + # -- Name of secret containing Redis password (do not change) + secretName: "gafaelfawr" # -- Key inside secret from which to get the Redis password (do not # change) From 2e3f565fd70e439e50961da0a4e6fd944d8c7fdc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 09:04:01 +0000 Subject: [PATCH 385/567] Update Helm release redis to v1.0.14 --- applications/plot-navigator/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/plot-navigator/Chart.yaml b/applications/plot-navigator/Chart.yaml index a7a031592b..2c8ad3c2f8 100644 --- a/applications/plot-navigator/Chart.yaml +++ b/applications/plot-navigator/Chart.yaml @@ -8,5 +8,5 @@ appVersion: "0.1.1" dependencies: - name: redis - version: 1.0.13 + version: 1.0.14 repository: https://lsst-sqre.github.io/charts/ From b94aef1f8fbde11ab9236420edb393dc82299767 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 09:04:08 +0000 Subject: [PATCH 386/567] Update gcr.io/cloudsql-docker/gce-proxy Docker tag to v1.37.1 --- applications/gafaelfawr/values.yaml | 2 +- applications/nublado/values.yaml | 2 +- applications/sqlproxy-cross-project/values.yaml | 2 +- applications/times-square/values.yaml | 2 +- applications/vo-cutouts/values.yaml | 2 +- charts/cadc-tap/values.yaml | 2 +- starters/fastapi-safir-uws/values.yaml | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/applications/gafaelfawr/values.yaml b/applications/gafaelfawr/values.yaml index 
687c900024..95820eabb6 100644 --- a/applications/gafaelfawr/values.yaml +++ b/applications/gafaelfawr/values.yaml @@ -311,7 +311,7 @@ cloudsql: repository: "gcr.io/cloudsql-docker/gce-proxy" # -- Cloud SQL Auth Proxy tag to use - tag: "1.37.0" + tag: "1.37.1" # -- Tag suffix to use for the proxy for schema updates schemaUpdateTagSuffix: "-alpine" diff --git a/applications/nublado/values.yaml b/applications/nublado/values.yaml index eae9fb5461..433bd6c695 100644 --- a/applications/nublado/values.yaml +++ b/applications/nublado/values.yaml @@ -585,7 +585,7 @@ cloudsql: pullPolicy: "IfNotPresent" # -- Cloud SQL Auth Proxy tag to use - tag: "1.37.0" + tag: "1.37.1" # -- Instance connection name for a Cloud SQL PostgreSQL instance # @default -- None, must be set if Cloud SQL Auth Proxy is enabled diff --git a/applications/sqlproxy-cross-project/values.yaml b/applications/sqlproxy-cross-project/values.yaml index 637c381d81..ac677e8060 100644 --- a/applications/sqlproxy-cross-project/values.yaml +++ b/applications/sqlproxy-cross-project/values.yaml @@ -14,7 +14,7 @@ image: repository: "gcr.io/cloudsql-docker/gce-proxy" # -- Tag of Cloud SQL Proxy image to use - tag: "1.37.0" + tag: "1.37.1" # -- Pull policy for the Cloud SQL Proxy image pullPolicy: "IfNotPresent" diff --git a/applications/times-square/values.yaml b/applications/times-square/values.yaml index e6cdc61f51..f5509e689f 100644 --- a/applications/times-square/values.yaml +++ b/applications/times-square/values.yaml @@ -156,7 +156,7 @@ cloudsql: repository: "gcr.io/cloudsql-docker/gce-proxy" # -- Cloud SQL Auth Proxy tag to use - tag: "1.37.0" + tag: "1.37.1" # -- Pull policy for Cloud SQL Auth Proxy images pullPolicy: "IfNotPresent" diff --git a/applications/vo-cutouts/values.yaml b/applications/vo-cutouts/values.yaml index 17e9ad9ba5..199c0ce730 100644 --- a/applications/vo-cutouts/values.yaml +++ b/applications/vo-cutouts/values.yaml @@ -94,7 +94,7 @@ cloudsql: repository: "gcr.io/cloudsql-docker/gce-proxy" # 
-- Cloud SQL Auth Proxy tag to use - tag: "1.37.0" + tag: "1.37.1" # -- Tag suffix to use for the proxy for schema updates schemaUpdateTagSuffix: "-alpine" diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index 33fd500746..a755e17f89 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -229,7 +229,7 @@ cloudsql: repository: "gcr.io/cloudsql-docker/gce-proxy" # -- Cloud SQL Auth Proxy tag to use - tag: "1.37.0" + tag: "1.37.1" # -- Pull policy for Cloud SQL Auth Proxy images pullPolicy: "IfNotPresent" diff --git a/starters/fastapi-safir-uws/values.yaml b/starters/fastapi-safir-uws/values.yaml index cd15e7b79e..44d1783c3b 100644 --- a/starters/fastapi-safir-uws/values.yaml +++ b/starters/fastapi-safir-uws/values.yaml @@ -86,7 +86,7 @@ cloudsql: repository: "gcr.io/cloudsql-docker/gce-proxy" # -- Cloud SQL Auth Proxy tag to use - tag: "1.37.0" + tag: "1.37.1" # -- Pull policy for Cloud SQL Auth Proxy images pullPolicy: "IfNotPresent" From fdaa9941f4f63e0f1a93234614bf3e85887b91b2 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 28 Oct 2024 08:43:50 -0700 Subject: [PATCH 387/567] Update Helm docs --- applications/gafaelfawr/README.md | 2 +- applications/nublado/README.md | 2 +- applications/sqlproxy-cross-project/README.md | 2 +- applications/times-square/README.md | 2 +- applications/vo-cutouts/README.md | 2 +- charts/cadc-tap/README.md | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/applications/gafaelfawr/README.md b/applications/gafaelfawr/README.md index ae4eb82caf..51df4c45c1 100644 --- a/applications/gafaelfawr/README.md +++ b/applications/gafaelfawr/README.md @@ -18,7 +18,7 @@ Authentication and identity system | cloudsql.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for Cloud SQL Auth Proxy images | | cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Auth Proxy image to use | | cloudsql.image.schemaUpdateTagSuffix | string | `"-alpine"` | 
Tag suffix to use for the proxy for schema updates | -| cloudsql.image.tag | string | `"1.37.0"` | Cloud SQL Auth Proxy tag to use | +| cloudsql.image.tag | string | `"1.37.1"` | Cloud SQL Auth Proxy tag to use | | cloudsql.instanceConnectionName | string | None, must be set if Cloud SQL Auth Proxy is enabled | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.nodeSelector | object | `{}` | Node selection rules for the Cloud SQL Proxy pod | | cloudsql.podAnnotations | object | `{}` | Annotations for the Cloud SQL Proxy pod | diff --git a/applications/nublado/README.md b/applications/nublado/README.md index 57ae6213c6..69b6fe4279 100644 --- a/applications/nublado/README.md +++ b/applications/nublado/README.md @@ -17,7 +17,7 @@ JupyterHub and custom spawner for the Rubin Science Platform | cloudsql.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for Cloud SQL Auth Proxy images | | cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Auth Proxy image to use | | cloudsql.image.resources | object | See `values.yaml` | Resource requests and limits for Cloud SQL pod | -| cloudsql.image.tag | string | `"1.37.0"` | Cloud SQL Auth Proxy tag to use | +| cloudsql.image.tag | string | `"1.37.1"` | Cloud SQL Auth Proxy tag to use | | cloudsql.instanceConnectionName | string | None, must be set if Cloud SQL Auth Proxy is enabled | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.nodeSelector | object | `{}` | Node selection rules for the Cloud SQL Auth Proxy pod | | cloudsql.podAnnotations | object | `{}` | Annotations for the Cloud SQL Auth Proxy pod | diff --git a/applications/sqlproxy-cross-project/README.md b/applications/sqlproxy-cross-project/README.md index 6b4e441b3c..74b079d5d9 100644 --- a/applications/sqlproxy-cross-project/README.md +++ b/applications/sqlproxy-cross-project/README.md @@ -19,7 +19,7 @@ GCP SQL Proxy as a service | fullnameOverride | string | `""` | Override the 
full name for resources (includes the release name) | | image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the Cloud SQL Proxy image | | image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Proxy image to use | -| image.tag | string | `"1.37.0"` | Tag of Cloud SQL Proxy image to use | +| image.tag | string | `"1.37.1"` | Tag of Cloud SQL Proxy image to use | | nameOverride | string | `""` | Override the base name for resources | | nodeSelector | object | `{}` | Node selector rules for the Cloud SQL Proxy pod | | podAnnotations | object | `{}` | Annotations for the Cloud SQL Proxy pod | diff --git a/applications/times-square/README.md b/applications/times-square/README.md index 7385571ed7..366950ca28 100644 --- a/applications/times-square/README.md +++ b/applications/times-square/README.md @@ -19,7 +19,7 @@ An API service for managing and rendering parameterized Jupyter notebooks. | cloudsql.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for Cloud SQL Auth Proxy images | | cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Auth Proxy image to use | | cloudsql.image.resources | object | see `values.yaml` | Resource requests and limits for Cloud SQL pod | -| cloudsql.image.tag | string | `"1.37.0"` | Cloud SQL Auth Proxy tag to use | +| cloudsql.image.tag | string | `"1.37.1"` | Cloud SQL Auth Proxy tag to use | | cloudsql.instanceConnectionName | string | `""` | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.serviceAccount | string | `""` | The Google service account that has an IAM binding to the `times-square` Kubernetes service accounts and has the `cloudsql.client` role | | config.databaseUrl | string | None, must be set | URL for the PostgreSQL database | diff --git a/applications/vo-cutouts/README.md b/applications/vo-cutouts/README.md index ad78a23274..97213c69f0 100644 --- a/applications/vo-cutouts/README.md +++ b/applications/vo-cutouts/README.md @@ 
-14,7 +14,7 @@ Image cutout service complying with IVOA SODA | cloudsql.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for Cloud SQL Auth Proxy images | | cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Auth Proxy image to use | | cloudsql.image.schemaUpdateTagSuffix | string | `"-alpine"` | Tag suffix to use for the proxy for schema updates | -| cloudsql.image.tag | string | `"1.37.0"` | Cloud SQL Auth Proxy tag to use | +| cloudsql.image.tag | string | `"1.37.1"` | Cloud SQL Auth Proxy tag to use | | cloudsql.instanceConnectionName | string | None, must be set if Cloud SQL is used | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.resources | object | See `values.yaml` | Resource limits and requests for the Cloud SQL Proxy container | | config.databaseUrl | string | None, must be set if `cloudsql.enabled` is false | URL for the PostgreSQL database if Cloud SQL is not in use | diff --git a/charts/cadc-tap/README.md b/charts/cadc-tap/README.md index a6b4bbf3a9..8adaa4f4fd 100644 --- a/charts/cadc-tap/README.md +++ b/charts/cadc-tap/README.md @@ -17,7 +17,7 @@ IVOA TAP service | cloudsql.enabled | bool | `false` | Enable the Cloud SQL Auth Proxy sidecar, used with Cloud SQL databases on Google Cloud | | cloudsql.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for Cloud SQL Auth Proxy images | | cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Auth Proxy image to use | -| cloudsql.image.tag | string | `"1.37.0"` | Cloud SQL Auth Proxy tag to use | +| cloudsql.image.tag | string | `"1.37.1"` | Cloud SQL Auth Proxy tag to use | | cloudsql.instanceConnectionName | string | `""` | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.resources | object | See `values.yaml` | Resource limits and requests for the Cloud SQL Proxy container | | cloudsql.serviceAccount | string | None, must be set | The Google service account that has 
an IAM binding to the `cadc-tap` Kubernetes service accounts and has the `cloudsql.client` role, access | From 40e2c7a366b0ca328f299473ef50a723d0b58810 Mon Sep 17 00:00:00 2001 From: dspeck1 Date: Mon, 28 Oct 2024 12:39:02 -0500 Subject: [PATCH 388/567] Update conditional naming to lsstcomcam --- .../applications/prompt/prompt-proto-service-lsstcomcam.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environments/templates/applications/prompt/prompt-proto-service-lsstcomcam.yaml b/environments/templates/applications/prompt/prompt-proto-service-lsstcomcam.yaml index 96017e4d65..842abfdef9 100644 --- a/environments/templates/applications/prompt/prompt-proto-service-lsstcomcam.yaml +++ b/environments/templates/applications/prompt/prompt-proto-service-lsstcomcam.yaml @@ -1,4 +1,4 @@ -{{- if (index .Values "applications" "prompt-proto-service-lsstcam") -}} +{{- if (index .Values "applications" "prompt-proto-service-lsstcomcam") -}} apiVersion: argoproj.io/v1alpha1 kind: Application metadata: From 046378536164a11d815f524cee999353dc9afc6c Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 28 Oct 2024 11:05:19 -0700 Subject: [PATCH 389/567] Update Python and pre-commit dependencies --- .pre-commit-config.yaml | 4 +- requirements/dev.txt | 399 +++++++++++++++++++++------------------- requirements/main.txt | 198 ++++++++++---------- requirements/tox.txt | 50 ++--- 4 files changed, 339 insertions(+), 312 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4149a1f72f..4c56ca5f21 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,14 +46,14 @@ repos: - --template-files=../helm-docs.md.gotmpl - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.9 + rev: v0.7.1 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] - id: ruff-format - repo: https://github.com/adamchainz/blacken-docs - rev: 1.19.0 + rev: 1.19.1 hooks: - id: blacken-docs additional_dependencies: [black==23.7.0] diff --git 
a/requirements/dev.txt b/requirements/dev.txt index a341c2e6d3..6611f5e909 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -244,69 +244,69 @@ comm==0.2.2 \ --hash=sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e \ --hash=sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3 # via ipykernel -coverage==7.6.3 \ - --hash=sha256:04f2189716e85ec9192df307f7c255f90e78b6e9863a03223c3b998d24a3c6c6 \ - --hash=sha256:0c6c0f4d53ef603397fc894a895b960ecd7d44c727df42a8d500031716d4e8d2 \ - --hash=sha256:0ca37993206402c6c35dc717f90d4c8f53568a8b80f0bf1a1b2b334f4d488fba \ - --hash=sha256:12f9515d875859faedb4144fd38694a761cd2a61ef9603bf887b13956d0bbfbb \ - --hash=sha256:1990b1f4e2c402beb317840030bb9f1b6a363f86e14e21b4212e618acdfce7f6 \ - --hash=sha256:2341a78ae3a5ed454d524206a3fcb3cec408c2a0c7c2752cd78b606a2ff15af4 \ - --hash=sha256:23bb63ae3f4c645d2d82fa22697364b0046fbafb6261b258a58587441c5f7bd0 \ - --hash=sha256:27bd5f18d8f2879e45724b0ce74f61811639a846ff0e5c0395b7818fae87aec6 \ - --hash=sha256:2dc7d6b380ca76f5e817ac9eef0c3686e7834c8346bef30b041a4ad286449990 \ - --hash=sha256:331b200ad03dbaa44151d74daeb7da2cf382db424ab923574f6ecca7d3b30de3 \ - --hash=sha256:365defc257c687ce3e7d275f39738dcd230777424117a6c76043459db131dd43 \ - --hash=sha256:37be7b5ea3ff5b7c4a9db16074dc94523b5f10dd1f3b362a827af66a55198175 \ - --hash=sha256:3c2e6fa98032fec8282f6b27e3f3986c6e05702828380618776ad794e938f53a \ - --hash=sha256:40e8b1983080439d4802d80b951f4a93d991ef3261f69e81095a66f86cf3c3c6 \ - --hash=sha256:43517e1f6b19f610a93d8227e47790722c8bf7422e46b365e0469fc3d3563d97 \ - --hash=sha256:43b32a06c47539fe275106b376658638b418c7cfdfff0e0259fbf877e845f14b \ - --hash=sha256:43d6a66e33b1455b98fc7312b124296dad97a2e191c80320587234a77b1b736e \ - --hash=sha256:4c59d6a4a4633fad297f943c03d0d2569867bd5372eb5684befdff8df8522e39 \ - --hash=sha256:52ac29cc72ee7e25ace7807249638f94c9b6a862c56b1df015d2b2e388e51dbd \ - 
--hash=sha256:54356a76b67cf8a3085818026bb556545ebb8353951923b88292556dfa9f812d \ - --hash=sha256:583049c63106c0555e3ae3931edab5669668bbef84c15861421b94e121878d3f \ - --hash=sha256:6d99198203f0b9cb0b5d1c0393859555bc26b548223a769baf7e321a627ed4fc \ - --hash=sha256:6da42bbcec130b188169107ecb6ee7bd7b4c849d24c9370a0c884cf728d8e976 \ - --hash=sha256:6e484e479860e00da1f005cd19d1c5d4a813324e5951319ac3f3eefb497cc549 \ - --hash=sha256:70a6756ce66cd6fe8486c775b30889f0dc4cb20c157aa8c35b45fd7868255c5c \ - --hash=sha256:70d24936ca6c15a3bbc91ee9c7fc661132c6f4c9d42a23b31b6686c05073bde5 \ - --hash=sha256:71967c35828c9ff94e8c7d405469a1fb68257f686bca7c1ed85ed34e7c2529c4 \ - --hash=sha256:79644f68a6ff23b251cae1c82b01a0b51bc40c8468ca9585c6c4b1aeee570e0b \ - --hash=sha256:87cd2e29067ea397a47e352efb13f976eb1b03e18c999270bb50589323294c6e \ - --hash=sha256:8d4c6ea0f498c7c79111033a290d060c517853a7bcb2f46516f591dab628ddd3 \ - --hash=sha256:9134032f5aa445ae591c2ba6991d10136a1f533b1d2fa8f8c21126468c5025c6 \ - --hash=sha256:921fbe13492caf6a69528f09d5d7c7d518c8d0e7b9f6701b7719715f29a71e6e \ - --hash=sha256:99670790f21a96665a35849990b1df447993880bb6463a0a1d757897f30da929 \ - --hash=sha256:9975442f2e7a5cfcf87299c26b5a45266ab0696348420049b9b94b2ad3d40234 \ - --hash=sha256:99ded130555c021d99729fabd4ddb91a6f4cc0707df4b1daf912c7850c373b13 \ - --hash=sha256:a3328c3e64ea4ab12b85999eb0779e6139295bbf5485f69d42cf794309e3d007 \ - --hash=sha256:a4fb91d5f72b7e06a14ff4ae5be625a81cd7e5f869d7a54578fc271d08d58ae3 \ - --hash=sha256:aa23ce39661a3e90eea5f99ec59b763b7d655c2cada10729ed920a38bfc2b167 \ - --hash=sha256:aac7501ae73d4a02f4b7ac8fcb9dc55342ca98ffb9ed9f2dfb8a25d53eda0e4d \ - --hash=sha256:ab84a8b698ad5a6c365b08061920138e7a7dd9a04b6feb09ba1bfae68346ce6d \ - --hash=sha256:b4adeb878a374126f1e5cf03b87f66279f479e01af0e9a654cf6d1509af46c40 \ - --hash=sha256:b9853509b4bf57ba7b1f99b9d866c422c9c5248799ab20e652bbb8a184a38181 \ - --hash=sha256:bb7d5fe92bd0dc235f63ebe9f8c6e0884f7360f88f3411bfed1350c872ef2054 \ - 
--hash=sha256:bca4c8abc50d38f9773c1ec80d43f3768df2e8576807d1656016b9d3eeaa96fd \ - --hash=sha256:c222958f59b0ae091f4535851cbb24eb57fc0baea07ba675af718fb5302dddb2 \ - --hash=sha256:c30e42ea11badb147f0d2e387115b15e2bd8205a5ad70d6ad79cf37f6ac08c91 \ - --hash=sha256:c3a79f56dee9136084cf84a6c7c4341427ef36e05ae6415bf7d787c96ff5eaa3 \ - --hash=sha256:c51ef82302386d686feea1c44dbeef744585da16fcf97deea2a8d6c1556f519b \ - --hash=sha256:c77326300b839c44c3e5a8fe26c15b7e87b2f32dfd2fc9fee1d13604347c9b38 \ - --hash=sha256:d33a785ea8354c480515e781554d3be582a86297e41ccbea627a5c632647f2cd \ - --hash=sha256:d546cfa78844b8b9c1c0533de1851569a13f87449897bbc95d698d1d3cb2a30f \ - --hash=sha256:da29ceabe3025a1e5a5aeeb331c5b1af686daab4ff0fb4f83df18b1180ea83e2 \ - --hash=sha256:df8c05a0f574d480947cba11b947dc41b1265d721c3777881da2fb8d3a1ddfba \ - --hash=sha256:e266af4da2c1a4cbc6135a570c64577fd3e6eb204607eaff99d8e9b710003c6f \ - --hash=sha256:e279f3db904e3b55f520f11f983cc8dc8a4ce9b65f11692d4718ed021ec58b83 \ - --hash=sha256:ea52bd218d4ba260399a8ae4bb6b577d82adfc4518b93566ce1fddd4a49d1dce \ - --hash=sha256:ebec65f5068e7df2d49466aab9128510c4867e532e07cb6960075b27658dca38 \ - --hash=sha256:ec1e3b40b82236d100d259854840555469fad4db64f669ab817279eb95cd535c \ - --hash=sha256:ee77c7bef0724165e795b6b7bf9c4c22a9b8468a6bdb9c6b4281293c6b22a90f \ - --hash=sha256:f263b18692f8ed52c8de7f40a0751e79015983dbd77b16906e5b310a39d3ca21 \ - --hash=sha256:f7b26757b22faf88fcf232f5f0e62f6e0fd9e22a8a5d0d5016888cdfe1f6c1c4 \ - --hash=sha256:f7ddb920106bbbbcaf2a274d56f46956bf56ecbde210d88061824a95bdd94e92 +coverage==7.6.4 \ + --hash=sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376 \ + --hash=sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9 \ + --hash=sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111 \ + --hash=sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172 \ + 
--hash=sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491 \ + --hash=sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546 \ + --hash=sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2 \ + --hash=sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11 \ + --hash=sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08 \ + --hash=sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c \ + --hash=sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2 \ + --hash=sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963 \ + --hash=sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613 \ + --hash=sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0 \ + --hash=sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db \ + --hash=sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf \ + --hash=sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73 \ + --hash=sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117 \ + --hash=sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1 \ + --hash=sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e \ + --hash=sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522 \ + --hash=sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25 \ + --hash=sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc \ + --hash=sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea \ + --hash=sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52 \ + --hash=sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a \ + --hash=sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07 \ + --hash=sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06 \ + 
--hash=sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa \ + --hash=sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901 \ + --hash=sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b \ + --hash=sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17 \ + --hash=sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0 \ + --hash=sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21 \ + --hash=sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19 \ + --hash=sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5 \ + --hash=sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51 \ + --hash=sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3 \ + --hash=sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3 \ + --hash=sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f \ + --hash=sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076 \ + --hash=sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a \ + --hash=sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718 \ + --hash=sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba \ + --hash=sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e \ + --hash=sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27 \ + --hash=sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e \ + --hash=sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09 \ + --hash=sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e \ + --hash=sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70 \ + --hash=sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f \ + --hash=sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72 \ + 
--hash=sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a \ + --hash=sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef \ + --hash=sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b \ + --hash=sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b \ + --hash=sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f \ + --hash=sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806 \ + --hash=sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b \ + --hash=sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1 \ + --hash=sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c \ + --hash=sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858 # via # -r requirements/dev.in # pytest-cov @@ -487,9 +487,9 @@ ipykernel==6.29.5 \ --hash=sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5 \ --hash=sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215 # via myst-nb -ipython==8.28.0 \ - --hash=sha256:0d0d15ca1e01faeb868ef56bc7ee5a0de5bd66885735682e8a322ae289a13d1a \ - --hash=sha256:530ef1e7bb693724d3cdc37287c80b07ad9b25986c007a53aa1857272dac3f35 +ipython==8.29.0 \ + --hash=sha256:0188a1bd83267192123ccea7f4a8ed0a78910535dbaa3f37671dca76ebd429c8 \ + --hash=sha256:40b60e15b22591450eef73e40a027cf77bd652e757523eebc5bd7c7c498290eb # via # ipykernel # myst-nb @@ -550,68 +550,68 @@ markdown-it-py==3.0.0 \ # documenteer # mdit-py-plugins # myst-parser -markupsafe==3.0.1 \ - --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ - --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ - --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ - --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ - 
--hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ - --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ - --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ - --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ - --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ - --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ - --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ - --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ - --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ - --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ - --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ - --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ - --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ - --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ - --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ - --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ - --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ - --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ - --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ - --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ - --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ - --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ - --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ - --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ - 
--hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ - --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ - --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ - --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ - --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ - --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ - --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ - --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ - --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ - --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ - --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ - --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ - --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ - --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ - --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ - --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ - --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ - --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ - --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ - --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ - --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ - --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ - --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ - --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ - 
--hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ - --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ - --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ - --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ - --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ - --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ - --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ - --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ - --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f +markupsafe==3.0.2 \ + --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ + --hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \ + --hash=sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0 \ + --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ + --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ + --hash=sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13 \ + --hash=sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028 \ + --hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \ + --hash=sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557 \ + --hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \ + --hash=sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0 \ + --hash=sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b \ + --hash=sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579 \ + --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ + 
--hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ + --hash=sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff \ + --hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ + --hash=sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22 \ + --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ + --hash=sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb \ + --hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \ + --hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ + --hash=sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a \ + --hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \ + --hash=sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a \ + --hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \ + --hash=sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8 \ + --hash=sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225 \ + --hash=sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c \ + --hash=sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144 \ + --hash=sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f \ + --hash=sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87 \ + --hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \ + --hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \ + --hash=sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf \ + --hash=sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158 \ + --hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \ + --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ + 
--hash=sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48 \ + --hash=sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171 \ + --hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ + --hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ + --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ + --hash=sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d \ + --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ + --hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ + --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ + --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ + --hash=sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29 \ + --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ + --hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \ + --hash=sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c \ + --hash=sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8 \ + --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ + --hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ + --hash=sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a \ + --hash=sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178 \ + --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \ + --hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ + --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 \ + --hash=sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50 # via # -c requirements/main.txt # jinja2 @@ -629,39 +629,39 @@ mdurl==0.1.2 \ 
--hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -mypy==1.12.0 \ - --hash=sha256:060a07b10e999ac9e7fa249ce2bdcfa9183ca2b70756f3bce9df7a92f78a3c0a \ - --hash=sha256:06de0498798527451ffb60f68db0d368bd2bae2bbfb5237eae616d4330cc87aa \ - --hash=sha256:0eff042d7257f39ba4ca06641d110ca7d2ad98c9c1fb52200fe6b1c865d360ff \ - --hash=sha256:1ebf9e796521f99d61864ed89d1fb2926d9ab6a5fab421e457cd9c7e4dd65aa9 \ - --hash=sha256:20c7c5ce0c1be0b0aea628374e6cf68b420bcc772d85c3c974f675b88e3e6e57 \ - --hash=sha256:233e11b3f73ee1f10efada2e6da0f555b2f3a5316e9d8a4a1224acc10e7181d3 \ - --hash=sha256:2c40658d4fa1ab27cb53d9e2f1066345596af2f8fe4827defc398a09c7c9519b \ - --hash=sha256:2f106db5ccb60681b622ac768455743ee0e6a857724d648c9629a9bd2ac3f721 \ - --hash=sha256:4397081e620dc4dc18e2f124d5e1d2c288194c2c08df6bdb1db31c38cd1fe1ed \ - --hash=sha256:48d3e37dd7d9403e38fa86c46191de72705166d40b8c9f91a3de77350daa0893 \ - --hash=sha256:4ae8959c21abcf9d73aa6c74a313c45c0b5a188752bf37dace564e29f06e9c1b \ - --hash=sha256:4b86de37a0da945f6d48cf110d5206c5ed514b1ca2614d7ad652d4bf099c7de7 \ - --hash=sha256:52b9e1492e47e1790360a43755fa04101a7ac72287b1a53ce817f35899ba0521 \ - --hash=sha256:5bc81701d52cc8767005fdd2a08c19980de9ec61a25dbd2a937dfb1338a826f9 \ - --hash=sha256:5feee5c74eb9749e91b77f60b30771563327329e29218d95bedbe1257e2fe4b0 \ - --hash=sha256:65a22d87e757ccd95cbbf6f7e181e6caa87128255eb2b6be901bb71b26d8a99d \ - --hash=sha256:684a9c508a283f324804fea3f0effeb7858eb03f85c4402a967d187f64562469 \ - --hash=sha256:6b5df6c8a8224f6b86746bda716bbe4dbe0ce89fd67b1fa4661e11bfe38e8ec8 \ - --hash=sha256:6cabe4cda2fa5eca7ac94854c6c37039324baaa428ecbf4de4567279e9810f9e \ - --hash=sha256:77278e8c6ffe2abfba6db4125de55f1024de9a323be13d20e4f73b8ed3402bd1 \ - --hash=sha256:8462655b6694feb1c99e433ea905d46c478041a8b8f0c33f1dab00ae881b2164 \ - 
--hash=sha256:923ea66d282d8af9e0f9c21ffc6653643abb95b658c3a8a32dca1eff09c06475 \ - --hash=sha256:9b9ce1ad8daeb049c0b55fdb753d7414260bad8952645367e70ac91aec90e07e \ - --hash=sha256:a64ee25f05fc2d3d8474985c58042b6759100a475f8237da1f4faf7fcd7e6309 \ - --hash=sha256:bfe012b50e1491d439172c43ccb50db66d23fab714d500b57ed52526a1020bb7 \ - --hash=sha256:c72861b7139a4f738344faa0e150834467521a3fba42dc98264e5aa9507dd601 \ - --hash=sha256:dcfb754dea911039ac12434d1950d69a2f05acd4d56f7935ed402be09fad145e \ - --hash=sha256:dee78a8b9746c30c1e617ccb1307b351ded57f0de0d287ca6276378d770006c0 \ - --hash=sha256:e478601cc3e3fa9d6734d255a59c7a2e5c2934da4378f3dd1e3411ea8a248642 \ - --hash=sha256:eafc1b7319b40ddabdc3db8d7d48e76cfc65bbeeafaa525a4e0fa6b76175467f \ - --hash=sha256:faca7ab947c9f457a08dcb8d9a8664fd438080e002b0fa3e41b0535335edcf7f \ - --hash=sha256:fd313226af375d52e1e36c383f39bf3836e1f192801116b31b090dfcd3ec5266 +mypy==1.13.0 \ + --hash=sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc \ + --hash=sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e \ + --hash=sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f \ + --hash=sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74 \ + --hash=sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a \ + --hash=sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2 \ + --hash=sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b \ + --hash=sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73 \ + --hash=sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e \ + --hash=sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d \ + --hash=sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d \ + --hash=sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6 \ + 
--hash=sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca \ + --hash=sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d \ + --hash=sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5 \ + --hash=sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62 \ + --hash=sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a \ + --hash=sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc \ + --hash=sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7 \ + --hash=sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb \ + --hash=sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7 \ + --hash=sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732 \ + --hash=sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80 \ + --hash=sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a \ + --hash=sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc \ + --hash=sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2 \ + --hash=sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0 \ + --hash=sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24 \ + --hash=sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7 \ + --hash=sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b \ + --hash=sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372 \ + --hash=sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8 # via -r requirements/dev.in mypy-extensions==1.0.0 \ --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ @@ -723,24 +723,24 @@ prompt-toolkit==3.0.48 \ --hash=sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90 \ --hash=sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e # 
via ipython -psutil==6.0.0 \ - --hash=sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35 \ - --hash=sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0 \ - --hash=sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c \ - --hash=sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1 \ - --hash=sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3 \ - --hash=sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c \ - --hash=sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd \ - --hash=sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3 \ - --hash=sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0 \ - --hash=sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2 \ - --hash=sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6 \ - --hash=sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d \ - --hash=sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c \ - --hash=sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0 \ - --hash=sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132 \ - --hash=sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14 \ - --hash=sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0 +psutil==6.1.0 \ + --hash=sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047 \ + --hash=sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc \ + --hash=sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e \ + --hash=sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747 \ + --hash=sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e \ + --hash=sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a \ + 
--hash=sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b \ + --hash=sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76 \ + --hash=sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca \ + --hash=sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688 \ + --hash=sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e \ + --hash=sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38 \ + --hash=sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85 \ + --hash=sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be \ + --hash=sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942 \ + --hash=sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a \ + --hash=sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0 # via ipykernel ptyprocess==0.7.0 ; sys_platform != 'emscripten' and sys_platform != 'win32' \ --hash=sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35 \ @@ -867,9 +867,9 @@ pydantic-core==2.23.4 \ # via # -c requirements/main.txt # pydantic -pydantic-settings==2.5.2 \ - --hash=sha256:2c912e55fd5794a59bf8c832b9de832dcfdf4778d79ff79b708744eed499a907 \ - --hash=sha256:f90b139682bee4d2065273d5185d71d37ea46cfe57e1b5ae184fc6a0b2484ca0 +pydantic-settings==2.6.0 \ + --hash=sha256:44a1804abffac9e6a30372bb45f6cafab945ef5af25e66b1c634c01dd39e0188 \ + --hash=sha256:4a819166f119b74d7f8c765196b165f95cc7487ce58ea27dec8a5a26be0970e0 # via autodoc-pydantic pydata-sphinx-theme==0.12.0 \ --hash=sha256:7a07c3ac1fb1cfbb5f7d1e147a9500fb120e329d610e0fa2caac4a645141bdd9 \ @@ -1228,9 +1228,9 @@ rpds-py==0.20.0 \ # via # jsonschema # referencing -setuptools==75.1.0 \ - --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ - --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 +setuptools==75.2.0 \ + 
--hash=sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec \ + --hash=sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8 # via # documenteer # sphinxcontrib-bibtex @@ -1366,36 +1366,63 @@ sphinxext-rediraffe==0.2.7 \ --hash=sha256:9e430a52d4403847f4ffb3a8dd6dfc34a9fe43525305131f52ed899743a5fd8c # via documenteer sqlalchemy==2.0.36 \ + --hash=sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763 \ --hash=sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436 \ + --hash=sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2 \ --hash=sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588 \ --hash=sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e \ --hash=sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959 \ --hash=sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d \ --hash=sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575 \ + --hash=sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908 \ --hash=sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8 \ + --hash=sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8 \ + --hash=sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545 \ --hash=sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7 \ --hash=sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971 \ + --hash=sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855 \ --hash=sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c \ + --hash=sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71 \ + --hash=sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d \ + --hash=sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb \ + 
--hash=sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72 \ --hash=sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f \ + --hash=sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5 \ + --hash=sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346 \ + --hash=sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24 \ --hash=sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e \ --hash=sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5 \ + --hash=sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08 \ --hash=sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793 \ --hash=sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88 \ + --hash=sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686 \ + --hash=sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b \ --hash=sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2 \ --hash=sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28 \ + --hash=sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d \ --hash=sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5 \ --hash=sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a \ --hash=sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a \ + --hash=sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3 \ + --hash=sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf \ --hash=sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5 \ + --hash=sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef \ + --hash=sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689 \ --hash=sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c \ + 
--hash=sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b \ --hash=sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07 \ --hash=sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa \ --hash=sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06 \ --hash=sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1 \ + --hash=sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff \ + --hash=sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa \ --hash=sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687 \ + --hash=sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4 \ --hash=sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb \ --hash=sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44 \ - --hash=sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e + --hash=sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c \ + --hash=sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e \ + --hash=sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53 # via jupyter-cache stack-data==0.6.3 \ --hash=sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9 \ diff --git a/requirements/main.txt b/requirements/main.txt index 9e4fbbc2e6..4aba020f7c 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -232,41 +232,41 @@ colorama==0.4.6 ; platform_system == 'Windows' \ --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 # via click -cryptography==43.0.1 \ - --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ - --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ - 
--hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ - --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ - --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ - --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ - --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ - --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ - --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ - --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ - --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ - --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ - --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ - --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ - --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ - --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ - --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ - --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ - --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ - --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ - --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ - --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ - --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ - --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ - --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ - --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ - 
--hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 +cryptography==43.0.3 \ + --hash=sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362 \ + --hash=sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4 \ + --hash=sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa \ + --hash=sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83 \ + --hash=sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff \ + --hash=sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805 \ + --hash=sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6 \ + --hash=sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664 \ + --hash=sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08 \ + --hash=sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e \ + --hash=sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18 \ + --hash=sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f \ + --hash=sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73 \ + --hash=sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5 \ + --hash=sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984 \ + --hash=sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd \ + --hash=sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3 \ + --hash=sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e \ + --hash=sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405 \ + --hash=sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2 \ + --hash=sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c \ + --hash=sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995 \ + 
--hash=sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73 \ + --hash=sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16 \ + --hash=sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7 \ + --hash=sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd \ + --hash=sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7 # via # phalanx (pyproject.toml) # pyjwt # safir -fastapi==0.115.2 \ - --hash=sha256:3995739e0b09fa12f984bce8fa9ae197b35d433750d3d312422d846e283697ee \ - --hash=sha256:61704c71286579cc5a598763905928f24ee98bfcc07aabe84cfefb98812bbc86 +fastapi==0.115.4 \ + --hash=sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742 \ + --hash=sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349 # via safir gidgethub==5.3.0 \ --hash=sha256:4dd92f2252d12756b13f9dd15cde322bfb0d625b6fb5d680da1567ec74b462c0 \ @@ -309,68 +309,68 @@ jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via phalanx (pyproject.toml) -markupsafe==3.0.1 \ - --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ - --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ - --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ - --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ - --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ - --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ - --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ - --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ - --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ - 
--hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ - --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ - --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ - --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ - --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ - --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ - --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ - --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ - --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ - --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ - --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ - --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ - --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ - --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ - --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ - --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ - --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ - --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ - --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ - --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ - --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ - --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ - --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ - --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ - 
--hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ - --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ - --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ - --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ - --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ - --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ - --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ - --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ - --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ - --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ - --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ - --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ - --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ - --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ - --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ - --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ - --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ - --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ - --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ - --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ - --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ - --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ - --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ - --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ - 
--hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ - --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ - --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ - --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f +markupsafe==3.0.2 \ + --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ + --hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \ + --hash=sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0 \ + --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ + --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ + --hash=sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13 \ + --hash=sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028 \ + --hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \ + --hash=sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557 \ + --hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \ + --hash=sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0 \ + --hash=sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b \ + --hash=sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579 \ + --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ + --hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ + --hash=sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff \ + --hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ + --hash=sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22 \ + --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ + 
--hash=sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb \ + --hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \ + --hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ + --hash=sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a \ + --hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \ + --hash=sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a \ + --hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \ + --hash=sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8 \ + --hash=sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225 \ + --hash=sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c \ + --hash=sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144 \ + --hash=sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f \ + --hash=sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87 \ + --hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \ + --hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \ + --hash=sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf \ + --hash=sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158 \ + --hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \ + --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ + --hash=sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48 \ + --hash=sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171 \ + --hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ + --hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ + --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ + 
--hash=sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d \ + --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ + --hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ + --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ + --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ + --hash=sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29 \ + --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ + --hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \ + --hash=sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c \ + --hash=sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8 \ + --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ + --hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ + --hash=sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a \ + --hash=sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178 \ + --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \ + --hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ + --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 \ + --hash=sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50 # via jinja2 onepasswordconnectsdk==1.5.1 \ --hash=sha256:8924c614ffed98f29faada03dba940dc0bc47851b1f5f4ef7e312e43c10ec25b \ @@ -555,9 +555,9 @@ safir==6.4.0 \ --hash=sha256:ba7af071eab0d198e6e15a2117028566f3f4237e02e2278e8bfc2633a7c68228 \ --hash=sha256:f38c3f1d7d76d304984b572288826510e5c7a0e1f965b2eabdd7f3bace07c48a # via phalanx (pyproject.toml) -safir-logging==6.4.0 \ - --hash=sha256:4031a430d738b8fe5bfd29125dce6cbf4e4949879307ba4146648afa3d24cd0a \ - 
--hash=sha256:e2dbf0b5d9dabecd70c27bff9bf01629bf0724b05b0f0087a1fe4f45c702215f +safir-logging==6.5.1 \ + --hash=sha256:b056306de26627e29bd6a6d04b1144456a1319ec0e15a67ebbc12b43362a27cd \ + --hash=sha256:ff591f0247fda10842835e714a6dbf601a894432d33d6d98e20fe035a5ad952c # via safir six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ @@ -574,9 +574,9 @@ sniffio==1.3.1 \ # anyio # httpcore # httpx -starlette==0.40.0 \ - --hash=sha256:1a3139688fb298ce5e2d661d37046a66ad996ce94be4d4983be019a23a04ea35 \ - --hash=sha256:c494a22fae73805376ea6bf88439783ecfba9aac88a43911b48c653437e784c4 +starlette==0.41.2 \ + --hash=sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62 \ + --hash=sha256:fbc189474b4731cf30fcef52f18a8d070e3f3b46c6a04c97579e85e6ffca942d # via # fastapi # safir diff --git a/requirements/tox.txt b/requirements/tox.txt index 0ff0174f02..5feefbdbb3 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -50,9 +50,9 @@ pyproject-api==1.8.0 \ --hash=sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228 \ --hash=sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496 # via tox -tox==4.22.0 \ - --hash=sha256:03734d9a9ac138cd1a898a372fb1b8079e2728618ae06dc37cbf3686cfb56eea \ - --hash=sha256:acc6c627cb3316585238d55d2b633e132fea1bdb01b9d93b56bce7caea6ae73d +tox==4.23.2 \ + --hash=sha256:452bc32bb031f2282881a2118923176445bac783ab97c874b8770ab4c3b76c38 \ + --hash=sha256:86075e00e555df6e82e74cfc333917f91ecb47ffbc868dcafbd2672e332f4a2c # via # -r requirements/tox.in # tox-uv @@ -60,27 +60,27 @@ tox-uv==1.15.0 \ --hash=sha256:a5f08c80a3eabc47881e378700e5044b67ba94b03181ae38246627127f6a9183 \ --hash=sha256:dfe7e48274248458349b47780da7db815c0156bd3751b6486152bbf01d7672fb # via -r requirements/tox.in -uv==0.4.22 \ - --hash=sha256:062a57ac3aab9a7d41e1b6a66948d563bf47478c719894661ea2c5ed6485a146 \ - --hash=sha256:0904c141f9fd7088d7837fb7ac5e43191236ed9cf8edf824ed838bdc77da7406 \ - 
--hash=sha256:0ff4ff91a25ed633f4d2556777e1b317262c01f71e8f72dfbc540e97e7eb5392 \ - --hash=sha256:455538b910db65f20a70cf806c5e65cc1d80ea7f40a116ba1c3d4bd1dab933d9 \ - --hash=sha256:48232daa35ebd3e963eea236cf33915a8b0c8a3673d5da35d764f8b1fec0b1b2 \ - --hash=sha256:52605e291f7ab1daca682b7a92b926c2f70e1fc86caaa37cbd56b64587730ea2 \ - --hash=sha256:527d785dafa5bf8fa4aba42188787a4b25c11d005a5f4bd8afda6e8c2c231e1b \ - --hash=sha256:63156e306f860d9fa2bb1d7c9af30053b88276004b2790cd9bbf20cc83ce988b \ - --hash=sha256:7041bf9d2d5d391cebca7778207eb88a96537ff2e93df2ff9f41d6c4057252c3 \ - --hash=sha256:71f3faaa94f60d362a6984fdf7675d6d2d244139de91a7d46e2367caf950951e \ - --hash=sha256:765dac79e5c8e2924efbd4663d4e03f5d7689f1baa98223b298fe4292610a25a \ - --hash=sha256:7be7adf47158c456031b2b78742a432260b5c22e9a86784fa57e7a208b0c3206 \ - --hash=sha256:956c4f0a9eddb8e18003bc39d114c78f6d6b4ba2683a262af043770abee44f2e \ - --hash=sha256:9cf96ddcb6ea2743e4c44fa22b08a4f2fd09cc9c5e228e8ab04b0cd08371c868 \ - --hash=sha256:af70ea49389397d0f6ff43827f73e0e71db0fc45cdf50c7dcff8318d726c8224 \ - --hash=sha256:c96eb12d1bdb1a826cba3c38273604629ac51e723d705aed17ae282650d030f0 \ - --hash=sha256:d9a242b3360c3a62e248053b3a6f618dc59cb5c56f4e30748433a19a002e4bf5 \ - --hash=sha256:e18c42cc99bc2a3f91d43aeb2df61a6d259114fca50dd3818879e9ee12064f7f +uv==0.4.27 \ + --hash=sha256:07d693092ad1f2536fec59e1ad5170fab10a214e9d2e39f9cf385cccbf426aa7 \ + --hash=sha256:0a7d8041f80bf59fac1d3a630ad5ed9d91008c85edc03e318e3016122235c568 \ + --hash=sha256:0bae39264d575d16d5bb3b40699396afb2b27f987d7d7cfe8f336c24d26eda87 \ + --hash=sha256:2035efeb39d8d86355d9002e129a76a032a54b47b1332c6952225f48aa9b583c \ + --hash=sha256:3dd79e9392af6f41c470f9a95a2f3f8e73cde585eecb2df721f0716cd6134893 \ + --hash=sha256:4d249ca5e5444de4dd4984627bef6f077ffdb45c3ad6b27413ddfb1146daf79b \ + --hash=sha256:6c5782274a8d3075f4bf82e90c90b0a960abc11424ab353dc559e9329b479681 \ + --hash=sha256:6d335e40658a6c23554683410e710e5f54374fec20642e459771f50c8736d600 
\ + --hash=sha256:ae4f45a0640de23c880bd5bdb27b1d3a059b45c9f73c2f7d53e392664efeca10 \ + --hash=sha256:b05165b0b24573c509286b87825c619658162079e2d3b20fea01d0dd9f444238 \ + --hash=sha256:b7a858209dfaab2527c547836cf823aef5cc1e051c5b15df4ba445a71b252df8 \ + --hash=sha256:b92728ba102ac7284f560c144507961be5aca5263d7a0d70a6896bba7660271c \ + --hash=sha256:b9e9b8b4062388df4c7a5d1e6c692dc8929242f883e1509010efb2b766ac4edd \ + --hash=sha256:bb5ced184be4e7611d983462a9f31a24a2e66de60f688ded6a8c36dc701a58ef \ + --hash=sha256:c0a5a40f23b61b2c693f6fa6f08b920c7d8b9058ce7ca20f18856844d2f11b2c \ + --hash=sha256:c13eea45257362ecfa2a2b31de9b62fbd0542e211a573562d98ab7c8fc50d8fc \ + --hash=sha256:d1731252da1a71a9f38e5864eb037401340a17eab519ad32e9a9f8fd54b7ada9 \ + --hash=sha256:f552967f4b392f880a1a50d3f57b9372a9666da274ea7826ee14e024ba035f4e # via tox-uv -virtualenv==20.26.6 \ - --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ - --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 +virtualenv==20.27.1 \ + --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ + --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 # via tox From f6701dc81a04649bd3c133660ee136a6e59f4533 Mon Sep 17 00:00:00 2001 From: Hsin-Fang Chiang Date: Fri, 25 Oct 2024 15:35:20 -0700 Subject: [PATCH 390/567] Fix instrument field for LSSTComCam and simplify survey placeholder --- .../prompt-proto-service-lsstcomcam/README.md | 2 +- .../values-usdfprod-prompt-processing.yaml | 18 +++++------------- .../values.yaml | 2 +- 3 files changed, 7 insertions(+), 15 deletions(-) diff --git a/applications/prompt-proto-service-lsstcomcam/README.md b/applications/prompt-proto-service-lsstcomcam/README.md index eb34082609..59907947a7 100644 --- a/applications/prompt-proto-service-lsstcomcam/README.md +++ b/applications/prompt-proto-service-lsstcomcam/README.md @@ -29,7 +29,7 @@ Prompt Proto Service is an event driven service 
for processing camera images. Th | prompt-proto-service.imageNotifications.kafkaClusterAddress | string | None, must be set | Hostname and port of the Kafka provider | | prompt-proto-service.imageNotifications.topic | string | None, must be set | Topic where raw image arrival notifications appear | | prompt-proto-service.instrument.calibRepo | string | None, must be set | URI to the shared repo used for calibrations, templates, and pipeline outputs. If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. | -| prompt-proto-service.instrument.name | string | `""` | The "short" name of the instrument | +| prompt-proto-service.instrument.name | string | `"LSSTComCam"` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. 
| | prompt-proto-service.instrument.skymap | string | `"ops_rehersal_prep_2k_v1"` | Skymap to use with the instrument | diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index c39a312b2f..d6ae66afcd 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -22,21 +22,13 @@ prompt-proto-service: (survey="comcam-ap")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/SingleFrame.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr.yaml] - (survey="comcam-isr-cal")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] - (survey="cwfs")=[] - (survey="cwfs-focus-sweep")=[] - (survey="spec-survey")=[] - (survey="photon-transfer")=[] - (survey="daily-calibration-seq")=[] + (survey="BLOCK-T208")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] + (survey="BLOCK-T60")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] (survey="")=[] preprocessing: >- - (survey="comcam-preprocessing")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml] - (survey="daytime-checkout")=[] - (survey="cwfs")=[] - (survey="cwfs-focus-sweep")=[] - (survey="spec-survey")=[] - (survey="photon-transfer")=[] - (survey="daily-calibration-seq")=[] + (survey="comcam-ap")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml] + (survey="BLOCK-T208")=[] + (survey="BLOCK-T60")=[] (survey="")=[] calibRepo: s3://rubin-summit-users diff --git a/applications/prompt-proto-service-lsstcomcam/values.yaml b/applications/prompt-proto-service-lsstcomcam/values.yaml index e015b69c44..fd880bdaca 100644 --- a/applications/prompt-proto-service-lsstcomcam/values.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values.yaml @@ -39,7 +39,7 @@ 
prompt-proto-service: instrument: # -- The "short" name of the instrument - name: "" + name: LSSTComCam pipelines: # -- Machine-readable string describing which pipeline(s) should be run for which visits. # Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. From 146d4e33d7c654fee7dca869006b21e83844c8ea Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 28 Oct 2024 13:58:44 -0700 Subject: [PATCH 391/567] Enable Gafaelfawr metrics on idfint Enable app-metrics for Sasquatch on idfint, and enable metrics reporting for Gafaelfawr. --- applications/gafaelfawr/values-idfint.yaml | 4 ++++ applications/sasquatch/values-idfint.yaml | 5 +++++ 2 files changed, 9 insertions(+) diff --git a/applications/gafaelfawr/values-idfint.yaml b/applications/gafaelfawr/values-idfint.yaml index 2da17e0e44..b782a763d4 100644 --- a/applications/gafaelfawr/values-idfint.yaml +++ b/applications/gafaelfawr/values-idfint.yaml @@ -33,6 +33,10 @@ config: oidcServer: enabled: true + # Enable metrics reporting. + metrics: + enabled: true + # User quota settings for services. quota: default: diff --git a/applications/sasquatch/values-idfint.yaml b/applications/sasquatch/values-idfint.yaml index 1eb64499c1..139a8cfd14 100644 --- a/applications/sasquatch/values-idfint.yaml +++ b/applications/sasquatch/values-idfint.yaml @@ -159,3 +159,8 @@ chronograf: GENERIC_API_KEY: sub PUBLIC_URL: https://data-int.lsst.cloud/ STATUS_FEED_URL: https://raw.githubusercontent.com/lsst-sqre/rsp_broadcast/main/jsonfeeds/idfint.json + +app-metrics: + enabled: true + apps: + - gafaelfawr From a33813419b943d8380db1068d4971305cd6017f3 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 28 Oct 2024 14:24:39 -0700 Subject: [PATCH 392/567] Enable strimzi-access-operator on idfint This is required for `KafkaAccess` resources to work. 
--- applications/strimzi-access-operator/values-idfint.yaml | 0 environments/values-idfint.yaml | 1 + 2 files changed, 1 insertion(+) create mode 100644 applications/strimzi-access-operator/values-idfint.yaml diff --git a/applications/strimzi-access-operator/values-idfint.yaml b/applications/strimzi-access-operator/values-idfint.yaml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/environments/values-idfint.yaml b/environments/values-idfint.yaml index 2a6b2fee82..64873171d9 100644 --- a/environments/values-idfint.yaml +++ b/environments/values-idfint.yaml @@ -30,6 +30,7 @@ applications: sqlproxy-cross-project: true squareone: true strimzi: true + strimzi-access-operator: true tap: true telegraf: true telegraf-ds: true From 4195c66b548ba07a2983bf1a436531530fe79cd0 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 28 Oct 2024 15:43:49 -0700 Subject: [PATCH 393/567] Update Gafaelfawr to 12.1.0 New features for ingress labeling, better authentication metrics, and more OpenID Connect client support. 
--- applications/gafaelfawr/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/gafaelfawr/Chart.yaml b/applications/gafaelfawr/Chart.yaml index 5a04e7190b..0b6a7b9af0 100644 --- a/applications/gafaelfawr/Chart.yaml +++ b/applications/gafaelfawr/Chart.yaml @@ -5,7 +5,7 @@ description: "Authentication and identity system" home: "https://gafaelfawr.lsst.io/" sources: - "https://github.com/lsst-sqre/gafaelfawr" -appVersion: 12.0.1 +appVersion: 12.1.0 dependencies: - name: "redis" From ae9f8e2fc91dff01f77a6887a0858b4d42f67b71 Mon Sep 17 00:00:00 2001 From: adam Date: Fri, 11 Oct 2024 15:38:24 -0700 Subject: [PATCH 394/567] Parameterize slack profile field in checkerboard --- applications/checkerboard/README.md | 3 ++- applications/checkerboard/templates/deployment.yaml | 2 ++ applications/checkerboard/values-roundtable-dev.yaml | 2 -- applications/checkerboard/values.yaml | 4 ++++ environments/values-roundtable-dev.yaml | 1 - 5 files changed, 8 insertions(+), 4 deletions(-) diff --git a/applications/checkerboard/README.md b/applications/checkerboard/README.md index 3a3a245f1f..95c9e31cc3 100644 --- a/applications/checkerboard/README.md +++ b/applications/checkerboard/README.md @@ -11,9 +11,10 @@ Identity mapping service | Key | Type | Default | Description | |-----|------|---------|-------------| | affinity | object | `{}` | Affinity rules for the checkerboard frontend pod | -| config | object | `{"logLevel":"INFO","profile":"production"}` | Configuration for checkerboard server | +| config | object | See `values.yaml` | Configuration for checkerboard server | | config.logLevel | string | `"INFO"` | Choose from the text form of Python logging levels | | config.profile | string | `"production"` | application Safir profile ("production" or "development") | +| config.slackProfileField | string | `"GitHub username"` | name of Slack profile field for GitHub username (case-sensitive) | | fullnameOverride | string | `""` | Override the 
full name for resources (includes the release name) | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | diff --git a/applications/checkerboard/templates/deployment.yaml b/applications/checkerboard/templates/deployment.yaml index 113a430ef3..66a083a1fa 100644 --- a/applications/checkerboard/templates/deployment.yaml +++ b/applications/checkerboard/templates/deployment.yaml @@ -25,6 +25,8 @@ spec: containers: - name: "checkerboard" env: + - name: "CHECKERBOARD_PROFILE_FIELD" + value: {{ .Values.config.slackProfileField | quote }} - name: "CHECKERBOARD_REDIS_PASSWORD" valueFrom: secretKeyRef: diff --git a/applications/checkerboard/values-roundtable-dev.yaml b/applications/checkerboard/values-roundtable-dev.yaml index 4bea0d3028..96752b4003 100644 --- a/applications/checkerboard/values-roundtable-dev.yaml +++ b/applications/checkerboard/values-roundtable-dev.yaml @@ -3,5 +3,3 @@ redis: storageClass: "standard-rwo" config: logLevel: "DEBUG" -image: - pullPolicy: "Always" diff --git a/applications/checkerboard/values.yaml b/applications/checkerboard/values.yaml index 8488bfab11..844ab9c8b4 100644 --- a/applications/checkerboard/values.yaml +++ b/applications/checkerboard/values.yaml @@ -101,6 +101,7 @@ redis: # -- Configuration for checkerboard server +# @default -- See `values.yaml` config: # -- Choose from the text form of Python logging levels logLevel: "INFO" @@ -108,6 +109,9 @@ config: # -- application Safir profile ("production" or "development") profile: "production" + # -- name of Slack profile field for GitHub username (case-sensitive) + slackProfileField: "GitHub username" + global: # -- Base URL for the environment # @default -- Set by Argo CD diff --git a/environments/values-roundtable-dev.yaml b/environments/values-roundtable-dev.yaml index a11686b579..a447b4b181 100644 --- a/environments/values-roundtable-dev.yaml +++ b/environments/values-roundtable-dev.yaml @@ 
-11,7 +11,6 @@ onepassword: vaultPathPrefix: "secret/phalanx/roundtable-dev" applications: - checkerboard: false giftless: true jira-data-proxy: true kubernetes-replicator: true From 5469414cc41c9a622fa5160cc056e92bbe43cfd0 Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 15 Oct 2024 10:28:28 -0700 Subject: [PATCH 395/567] pin mermaid version --- docs/conf.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/conf.py b/docs/conf.py index 41e6428493..dfa6d9b86a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -33,3 +33,6 @@ linkcheck_exclude_documents = [ r"applications/.*/values", ] + +# Remove this later after we fix documenteer +mermaid_version = "11.2.0" From b04eebbd44615fdc865b69970e1512b90ecd4c10 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 28 Oct 2024 16:56:57 -0700 Subject: [PATCH 396/567] Reclassify some applications Add a new project and category for support services, for services that are generic (not Rubin- or Roundtable-specific) but are also not required and aren't cluster infrastructure. Move things from several other categories into that project. 
--- docs/applications/index.rst | 1 + docs/applications/infrastructure.rst | 4 ---- docs/applications/roundtable.rst | 1 - docs/applications/rsp.rst | 2 -- docs/applications/support.rst | 18 ++++++++++++++++++ .../ghostwriter.yaml | 4 ++-- .../kubernetes-replicator.yaml | 2 +- .../{infrastructure => support}/postgres.yaml | 2 +- .../sqlproxy-cross-project.yaml | 2 +- .../strimzi-access-operator.yaml | 2 +- .../{infrastructure => support}/strimzi.yaml | 2 +- environments/templates/projects/support.yaml | 19 +++++++++++++++++++ src/phalanx/models/applications.py | 1 + 13 files changed, 46 insertions(+), 14 deletions(-) create mode 100644 docs/applications/support.rst rename environments/templates/applications/{infrastructure => support}/ghostwriter.yaml (95%) rename environments/templates/applications/{roundtable => support}/kubernetes-replicator.yaml (97%) rename environments/templates/applications/{infrastructure => support}/postgres.yaml (96%) rename environments/templates/applications/{rsp => support}/sqlproxy-cross-project.yaml (97%) rename environments/templates/applications/{infrastructure => support}/strimzi-access-operator.yaml (96%) rename environments/templates/applications/{infrastructure => support}/strimzi.yaml (95%) create mode 100644 environments/templates/projects/support.yaml diff --git a/docs/applications/index.rst b/docs/applications/index.rst index c0bd61dc7b..a3697b97db 100644 --- a/docs/applications/index.rst +++ b/docs/applications/index.rst @@ -20,5 +20,6 @@ To learn how to develop applications for Phalanx, see the :doc:`/developers/inde rubin roundtable monitoring + support prompt telescope diff --git a/docs/applications/infrastructure.rst b/docs/applications/infrastructure.rst index bb614908d1..8399df03e8 100644 --- a/docs/applications/infrastructure.rst +++ b/docs/applications/infrastructure.rst @@ -12,11 +12,7 @@ Argo CD project: ``infrastructure`` argocd/index cert-manager/index - ghostwriter/index ingress-nginx/index gafaelfawr/index 
mobu/index - postgres/index - strimzi/index - strimzi-access-operator/index vault-secrets-operator/index diff --git a/docs/applications/roundtable.rst b/docs/applications/roundtable.rst index 8d3ecce818..f2e7c02373 100644 --- a/docs/applications/roundtable.rst +++ b/docs/applications/roundtable.rst @@ -14,7 +14,6 @@ Argo CD project: ``roundtable`` checkerboard/index giftless/index - kubernetes-replicator/index onepassword-connect/index ook/index sqrbot-sr/index diff --git a/docs/applications/rsp.rst b/docs/applications/rsp.rst index b395276738..e866ff94af 100644 --- a/docs/applications/rsp.rst +++ b/docs/applications/rsp.rst @@ -18,10 +18,8 @@ Argo CD project: ``rsp`` noteburst/index nublado/index portal/index - ppdb-replication/index semaphore/index siav2/index - sqlproxy-cross-project/index squareone/index ssotap/index tap/index diff --git a/docs/applications/support.rst b/docs/applications/support.rst new file mode 100644 index 0000000000..14c67ff93d --- /dev/null +++ b/docs/applications/support.rst @@ -0,0 +1,18 @@ +################ +Support services +################ + +Additional Argo CD services that are not required cluster infrastructure but are also not Rubin- or Roundtable-specific +These may be of use in a variety of different clusters, but do not need to be enabled in all clusters. + +Argo CD project: ``support`` + +.. 
toctree:: + :maxdepth: 1 + + ghostwriter/index + kubernetes-replicator/index + postgres/index + sqlproxy-cross-project/index + strimzi/index + strimzi-access-operator/index diff --git a/environments/templates/applications/infrastructure/ghostwriter.yaml b/environments/templates/applications/support/ghostwriter.yaml similarity index 95% rename from environments/templates/applications/infrastructure/ghostwriter.yaml rename to environments/templates/applications/support/ghostwriter.yaml index 5d993e0b88..3ba3e01ed4 100644 --- a/environments/templates/applications/infrastructure/ghostwriter.yaml +++ b/environments/templates/applications/support/ghostwriter.yaml @@ -15,7 +15,7 @@ spec: destination: namespace: "ghostwriter" server: "https://kubernetes.default.svc" - project: "infrastructure" + project: "support" source: path: "applications/ghostwriter" repoURL: {{ .Values.repoUrl | quote }} @@ -31,4 +31,4 @@ spec: valueFiles: - "values.yaml" - "values-{{ .Values.name }}.yaml" -{{- end -}} \ No newline at end of file +{{- end -}} diff --git a/environments/templates/applications/roundtable/kubernetes-replicator.yaml b/environments/templates/applications/support/kubernetes-replicator.yaml similarity index 97% rename from environments/templates/applications/roundtable/kubernetes-replicator.yaml rename to environments/templates/applications/support/kubernetes-replicator.yaml index ea241fc3e9..8cdc7bd1a6 100644 --- a/environments/templates/applications/roundtable/kubernetes-replicator.yaml +++ b/environments/templates/applications/support/kubernetes-replicator.yaml @@ -15,7 +15,7 @@ spec: destination: namespace: "kubernetes-replicator" server: "https://kubernetes.default.svc" - project: "roundtable" + project: "support" source: path: "applications/kubernetes-replicator" repoURL: {{ .Values.repoUrl | quote }} diff --git a/environments/templates/applications/infrastructure/postgres.yaml b/environments/templates/applications/support/postgres.yaml similarity index 96% rename from 
environments/templates/applications/infrastructure/postgres.yaml rename to environments/templates/applications/support/postgres.yaml index 4517078e85..1b1cee1451 100644 --- a/environments/templates/applications/infrastructure/postgres.yaml +++ b/environments/templates/applications/support/postgres.yaml @@ -15,7 +15,7 @@ spec: destination: namespace: "postgres" server: "https://kubernetes.default.svc" - project: "infrastructure" + project: "support" source: path: "applications/postgres" repoURL: {{ .Values.repoUrl | quote }} diff --git a/environments/templates/applications/rsp/sqlproxy-cross-project.yaml b/environments/templates/applications/support/sqlproxy-cross-project.yaml similarity index 97% rename from environments/templates/applications/rsp/sqlproxy-cross-project.yaml rename to environments/templates/applications/support/sqlproxy-cross-project.yaml index d8197acebe..7483dd3792 100644 --- a/environments/templates/applications/rsp/sqlproxy-cross-project.yaml +++ b/environments/templates/applications/support/sqlproxy-cross-project.yaml @@ -15,7 +15,7 @@ spec: destination: namespace: "sqlproxy-cross-project" server: "https://kubernetes.default.svc" - project: "rsp" + project: "support" source: path: "applications/sqlproxy-cross-project" repoURL: {{ .Values.repoUrl | quote }} diff --git a/environments/templates/applications/infrastructure/strimzi-access-operator.yaml b/environments/templates/applications/support/strimzi-access-operator.yaml similarity index 96% rename from environments/templates/applications/infrastructure/strimzi-access-operator.yaml rename to environments/templates/applications/support/strimzi-access-operator.yaml index 4038ee1493..c5933f228d 100644 --- a/environments/templates/applications/infrastructure/strimzi-access-operator.yaml +++ b/environments/templates/applications/support/strimzi-access-operator.yaml @@ -15,7 +15,7 @@ spec: destination: namespace: "strimzi-access-operator" server: "https://kubernetes.default.svc" - project: 
"infrastructure" + project: "support" source: path: "applications/strimzi-access-operator" repoURL: {{ .Values.repoUrl | quote }} diff --git a/environments/templates/applications/infrastructure/strimzi.yaml b/environments/templates/applications/support/strimzi.yaml similarity index 95% rename from environments/templates/applications/infrastructure/strimzi.yaml rename to environments/templates/applications/support/strimzi.yaml index 4b48562dbf..cdce21fe43 100644 --- a/environments/templates/applications/infrastructure/strimzi.yaml +++ b/environments/templates/applications/support/strimzi.yaml @@ -15,7 +15,7 @@ spec: destination: namespace: "strimzi" server: "https://kubernetes.default.svc" - project: "infrastructure" + project: "support" source: path: "applications/strimzi" repoURL: {{ .Values.repoUrl | quote }} diff --git a/environments/templates/projects/support.yaml b/environments/templates/projects/support.yaml new file mode 100644 index 0000000000..d1c4206e1e --- /dev/null +++ b/environments/templates/projects/support.yaml @@ -0,0 +1,19 @@ +apiVersion: argoproj.io/v1alpha1 +kind: AppProject +metadata: + name: "support" + namespace: "argocd" +spec: + clusterResourceWhitelist: + - group: "*" + kind: "*" + destinations: + - namespace: "!kube-system" + server: "*" + - namespace: "*" + server: "*" + namespaceResourceWhitelist: + - group: "*" + kind: "*" + sourceRepos: + - "*" diff --git a/src/phalanx/models/applications.py b/src/phalanx/models/applications.py index f490b5776b..33e6020c40 100644 --- a/src/phalanx/models/applications.py +++ b/src/phalanx/models/applications.py @@ -48,6 +48,7 @@ class Project(Enum): rubin = "rubin" roundtable = "roundtable" monitoring = "monitoring" + support = "support" prompt = "prompt" telescope = "telescope" From 911f3e4d35686b00f251363e5b861f6538b0b97a Mon Sep 17 00:00:00 2001 From: "David H. 
Irving" Date: Mon, 28 Oct 2024 17:02:24 -0700 Subject: [PATCH 397/567] Update exposurelog to point to new embargo rack --- applications/exposurelog/values-usdfdev.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/exposurelog/values-usdfdev.yaml b/applications/exposurelog/values-usdfdev.yaml index e914a0e17f..c2ad32a60a 100644 --- a/applications/exposurelog/values-usdfdev.yaml +++ b/applications/exposurelog/values-usdfdev.yaml @@ -10,7 +10,7 @@ env: - name: DAF_BUTLER_REPOSITORY_INDEX value: "/project/data-repos.yaml" - name: S3_ENDPOINT_URL - value: "https://s3dfrgw.slac.stanford.edu" + value: "https://sdfembs3.sdf.slac.stanford.edu" - name: PGPASSFILE value: "/var/secrets/butler/postgres-credentials.txt" - name: PGUSER From c91b2d40258234c242b9951c15d66c3885106013 Mon Sep 17 00:00:00 2001 From: Stelios Voutsinas Date: Mon, 23 Sep 2024 12:29:22 -0700 Subject: [PATCH 398/567] Enable sia application in phalanx --- applications/sia/Chart.yaml | 13 +++ applications/sia/README.md | 34 ++++++ applications/sia/secrets.yaml | 20 ++++ applications/sia/templates/_helpers.tpl | 52 +++++++++ applications/sia/templates/configmap.yaml | 10 ++ applications/sia/templates/deployment.yaml | 109 ++++++++++++++++++ .../sia/templates/ingress-anonymous.yaml | 43 +++++++ applications/sia/templates/ingress.yaml | 35 ++++++ applications/sia/templates/networkpolicy.yaml | 21 ++++ applications/sia/templates/service.yaml | 15 +++ applications/sia/templates/vault-secrets.yaml | 9 ++ applications/sia/values-idfdev.yaml | 11 ++ applications/sia/values-idfint.yaml | 11 ++ applications/sia/values.yaml | 84 ++++++++++++++ docs/applications/rsp.rst | 1 + docs/applications/sia/index.rst | 28 +++++ docs/applications/sia/values.md | 12 ++ environments/README.md | 1 + .../templates/applications/rsp/sia.yaml | 34 ++++++ environments/values-idfdev.yaml | 1 + environments/values-idfint.yaml | 1 + environments/values-idfprod.yaml | 1 + environments/values.yaml | 3 + 
tests/data/input/docs/applications/rsp.rst | 1 + tests/data/output/docs/rsp.rst | 1 + 25 files changed, 551 insertions(+) create mode 100644 applications/sia/Chart.yaml create mode 100644 applications/sia/README.md create mode 100644 applications/sia/secrets.yaml create mode 100644 applications/sia/templates/_helpers.tpl create mode 100644 applications/sia/templates/configmap.yaml create mode 100644 applications/sia/templates/deployment.yaml create mode 100644 applications/sia/templates/ingress-anonymous.yaml create mode 100644 applications/sia/templates/ingress.yaml create mode 100644 applications/sia/templates/networkpolicy.yaml create mode 100644 applications/sia/templates/service.yaml create mode 100644 applications/sia/templates/vault-secrets.yaml create mode 100644 applications/sia/values-idfdev.yaml create mode 100644 applications/sia/values-idfint.yaml create mode 100644 applications/sia/values.yaml create mode 100644 docs/applications/sia/index.rst create mode 100644 docs/applications/sia/values.md create mode 100644 environments/templates/applications/rsp/sia.yaml diff --git a/applications/sia/Chart.yaml b/applications/sia/Chart.yaml new file mode 100644 index 0000000000..21df0de4e5 --- /dev/null +++ b/applications/sia/Chart.yaml @@ -0,0 +1,13 @@ +apiVersion: v2 +appVersion: 0.1.1 +description: Simple Image Access (SIA) IVOA Service using Butler +name: sia +sources: +- https://github.com/lsst-sqre/sia +type: application +version: 1.0.0 +annotations: + phalanx.lsst.io/docs: | + - id: "SQR-095" + title: "SIAv2 over Butler FastAPI service" + url: "https://sqr-095.lsst.io" diff --git a/applications/sia/README.md b/applications/sia/README.md new file mode 100644 index 0000000000..47ffeac2d5 --- /dev/null +++ b/applications/sia/README.md @@ -0,0 +1,34 @@ +# sia + +Simple Image Access (SIA) IVOA Service using Butler + +## Source Code + +* + +## Values + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| affinity | object | `{}` | 
Affinity rules for the sia deployment pod | +| config.butlerDataCollections | list | `[]` | List of data (Butler) Collections Expected attributes: `config`, `label`, `name`, `butler_type`, `repository`, `datalink_url` & `default_instrument` | +| config.logLevel | string | `"INFO"` | Logging level | +| config.logProfile | string | `"production"` | Logging profile (`production` for JSON, `development` for human-friendly) | +| config.pathPrefix | string | `"/api/sia"` | URL path prefix | +| config.pgUser | string | `"rubin"` | User to use from the PGPASSFILE if sia is using a direct Butler connection | +| config.slackAlerts | bool | `false` | Whether to send alerts and status to Slack. | +| fullnameOverride | string | `""` | Override the full name for resources (includes the release name) | +| global.baseUrl | string | Set by Argo CD | Base URL for the environment | +| global.host | string | Set by Argo CD | Host name for ingress | +| global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | +| image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the sia image | +| image.repository | string | `"ghcr.io/lsst-sqre/sia"` | Image to use in the sia deployment | +| image.tag | string | The appVersion of the chart | Tag of image to use | +| ingress.annotations | object | `{}` | Additional annotations for the ingress rule | +| ingress.path | string | `"/api/sia"` | Path prefix where app is hosted | +| nameOverride | string | `""` | Override the base name for resources | +| nodeSelector | object | `{}` | Node selection rules for the sia deployment pod | +| podAnnotations | object | `{}` | Annotations for the sia deployment pod | +| replicaCount | int | `1` | Number of web deployment pods to start | +| resources | object | See `values.yaml` | Resource limits and requests for the sia deployment pod | +| tolerations | list | `[]` | Tolerations for the sia deployment pod | diff --git a/applications/sia/secrets.yaml b/applications/sia/secrets.yaml 
new file mode 100644 index 0000000000..ada6b88933 --- /dev/null +++ b/applications/sia/secrets.yaml @@ -0,0 +1,20 @@ +"aws-credentials.ini": + copy: + application: nublado + key: "aws-credentials.ini" +"butler-gcs-idf-creds.json": + copy: + application: nublado + key: "butler-gcs-idf-creds.json" +"postgres-credentials.txt": + copy: + application: nublado + key: "postgres-credentials.txt" +slack-webhook: + description: >- + Slack web hook used to report internal errors to Slack. This secret may be + changed at any time. + if: config.slackAlerts + copy: + application: mobu + key: app-alert-webhook diff --git a/applications/sia/templates/_helpers.tpl b/applications/sia/templates/_helpers.tpl new file mode 100644 index 0000000000..92bdc6ea01 --- /dev/null +++ b/applications/sia/templates/_helpers.tpl @@ -0,0 +1,52 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "sia.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "sia.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "sia.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "sia.labels" -}} +helm.sh/chart: {{ include "sia.chart" . }} +{{ include "sia.selectorLabels" . 
}} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "sia.selectorLabels" -}} +app.kubernetes.io/name: "sia" +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + diff --git a/applications/sia/templates/configmap.yaml b/applications/sia/templates/configmap.yaml new file mode 100644 index 0000000000..f594419d9f --- /dev/null +++ b/applications/sia/templates/configmap.yaml @@ -0,0 +1,10 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: "sia" + labels: + {{- include "sia.labels" . | nindent 4 }} +data: + SIA_LOG_LEVEL: {{ .Values.config.logLevel | quote }} + SIA_PATH_PREFIX: {{ .Values.config.pathPrefix | quote }} + SIA_PROFILE: {{ .Values.config.logProfile | quote }} diff --git a/applications/sia/templates/deployment.yaml b/applications/sia/templates/deployment.yaml new file mode 100644 index 0000000000..3cf3c23b7e --- /dev/null +++ b/applications/sia/templates/deployment.yaml @@ -0,0 +1,109 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: "sia" + labels: + {{- include "sia.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: + {{- include "sia.selectorLabels" . | nindent 6 }} + template: + metadata: + annotations: + checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} + {{- with .Values.podAnnotations }} + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "sia.selectorLabels" . | nindent 8 }} + spec: + {{- with .Values.affinity }} + affinity: + {{- toYaml . 
| nindent 8 }} + {{- end }} + automountServiceAccountToken: false + initContainers: + - name: fix-secret-permissions + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy | quote }} + command: + - "/bin/sh" + - "-c" + - | + cp -RL /tmp/secrets-raw/* /etc/butler/secrets/ + chmod 0400 /etc/butler/secrets/* + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - "all" + volumeMounts: + - name: "secrets" + mountPath: "/etc/butler/secrets" + - name: "secrets-raw" + mountPath: "/tmp/secrets-raw" + containers: + - name: {{ .Chart.Name }} + envFrom: + - configMapRef: + name: "sia" + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + ports: + - name: "http" + containerPort: 8080 + protocol: "TCP" + readinessProbe: + httpGet: + path: {{ .Values.config.pathPrefix }} + port: "http" + resources: + {{- toYaml .Values.resources | nindent 12 }} + env: + - name: "SIA_BUTLER_DATA_COLLECTIONS" + value: {{ .Values.config.butlerDataCollections | toJson | quote }} + {{- if .Values.config.slackAlerts }} + - name: "SIA_SLACK_WEBHOOK" + valueFrom: + secretKeyRef: + name: "sia" + key: "slack-webhook" + {{- end }} + - name: "AWS_SHARED_CREDENTIALS_FILE" + value: "/tmp/secrets/aws-credentials.ini" + - name: "PGUSER" + value: {{ .Values.config.pgUser }} + - name: "PGPASSFILE" + value: "/etc/butler/secrets/postgres-credentials.txt" + - name: "GOOGLE_APPLICATION_CREDENTIALS" + value: "/tmp/secrets/butler-gcs-idf-creds.json" + volumeMounts: + - name: "secrets" + mountPath: "/etc/butler/secrets" + readOnly: true + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - "all" + readOnlyRootFilesystem: false + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . 
| nindent 8 }} + {{- end }} + volumes: + - name: "secrets-raw" + secret: + secretName: "sia" + - name: "secrets" + emptyDir: {} + securityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 1000 diff --git a/applications/sia/templates/ingress-anonymous.yaml b/applications/sia/templates/ingress-anonymous.yaml new file mode 100644 index 0000000000..3587f2bc5d --- /dev/null +++ b/applications/sia/templates/ingress-anonymous.yaml @@ -0,0 +1,43 @@ +apiVersion: gafaelfawr.lsst.io/v1alpha1 +kind: GafaelfawrIngress +metadata: + name: {{ template "sia.fullname" . }}-anonymous + labels: + {{- include "sia.labels" . | nindent 4 }} +config: + baseUrl: {{ .Values.global.baseUrl | quote }} + scopes: + anonymous: true +template: + metadata: + name: {{ template "sia.fullname" . }}-anonymous + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . | nindent 6 }} + {{- end }} + spec: + rules: + - host: {{ .Values.global.host | quote }} + http: + paths: + - path: "{{ .Values.ingress.path }}/openapi.json" + pathType: "Exact" + backend: + service: + name: {{ template "sia.fullname" . }} + port: + number: 8080 + - path: "{{ .Values.ingress.path }}/.+/capabilities" + pathType: "Exact" + backend: + service: + name: {{ template "sia.fullname" . }} + port: + number: 8080 + - path: "{{ .Values.ingress.path }}/.+/availability" + pathType: "Exact" + backend: + service: + name: {{ template "sia.fullname" . }} + port: + number: 8080 diff --git a/applications/sia/templates/ingress.yaml b/applications/sia/templates/ingress.yaml new file mode 100644 index 0000000000..bb9638b596 --- /dev/null +++ b/applications/sia/templates/ingress.yaml @@ -0,0 +1,35 @@ +apiVersion: gafaelfawr.lsst.io/v1alpha1 +kind: GafaelfawrIngress +metadata: + name: {{ template "sia.fullname" . }} + labels: + {{- include "sia.labels" . 
| nindent 4 }} +config: + baseUrl: {{ .Values.global.baseUrl | quote }} + scopes: + all: + - "read:image" + delegate: + internal: + service: "sia" + scopes: + - "read:image" +template: + metadata: + name: {{ template "sia.fullname" . }} + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . | nindent 6 }} + {{- end }} + spec: + rules: + - host: {{ required "global.host must be set" .Values.global.host | quote }} + http: + paths: + - path: {{ .Values.config.pathPrefix | quote }} + pathType: "Prefix" + backend: + service: + name: "sia" + port: + number: 8080 diff --git a/applications/sia/templates/networkpolicy.yaml b/applications/sia/templates/networkpolicy.yaml new file mode 100644 index 0000000000..4edbb84b29 --- /dev/null +++ b/applications/sia/templates/networkpolicy.yaml @@ -0,0 +1,21 @@ +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: "sia" +spec: + podSelector: + matchLabels: + {{- include "sia.selectorLabels" . | nindent 6 }} + policyTypes: + - "Ingress" + ingress: + # Allow inbound access from pods (in any namespace) labeled + # gafaelfawr.lsst.io/ingress: true. + - from: + - namespaceSelector: {} + podSelector: + matchLabels: + gafaelfawr.lsst.io/ingress: "true" + ports: + - protocol: "TCP" + port: 8080 diff --git a/applications/sia/templates/service.yaml b/applications/sia/templates/service.yaml new file mode 100644 index 0000000000..679e84dffa --- /dev/null +++ b/applications/sia/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: "sia" + labels: + {{- include "sia.labels" . | nindent 4 }} +spec: + type: "ClusterIP" + ports: + - port: 8080 + targetPort: "http" + protocol: "TCP" + name: "http" + selector: + {{- include "sia.selectorLabels" . 
| nindent 4 }} diff --git a/applications/sia/templates/vault-secrets.yaml b/applications/sia/templates/vault-secrets.yaml new file mode 100644 index 0000000000..3b1ebc978a --- /dev/null +++ b/applications/sia/templates/vault-secrets.yaml @@ -0,0 +1,9 @@ +apiVersion: ricoberger.de/v1alpha1 +kind: VaultSecret +metadata: + name: "sia" + labels: + {{- include "sia.labels" . | nindent 4 }} +spec: + path: "{{ .Values.global.vaultSecretsPath }}/sia" + type: Opaque diff --git a/applications/sia/values-idfdev.yaml b/applications/sia/values-idfdev.yaml new file mode 100644 index 0000000000..92ae953404 --- /dev/null +++ b/applications/sia/values-idfdev.yaml @@ -0,0 +1,11 @@ +config: + + # Data (Butler) Collections + butlerDataCollections: + - config: "https://raw.githubusercontent.com/lsst-dm/dax_obscore/refs/heads/main/configs/dp02.yaml" + label: "LSST.DP02" + name: "dp02" + butler_type: "REMOTE" + repository: "https://data-dev.lsst.cloud/api/butler/repo/dp02/butler.yaml" + datalink_url: "https://data-dev.lsst.cloud/api/datalink/links?ID=butler%3A//dp02/{id}" + default_instrument: "LSSTCam-imSim" diff --git a/applications/sia/values-idfint.yaml b/applications/sia/values-idfint.yaml new file mode 100644 index 0000000000..687799b059 --- /dev/null +++ b/applications/sia/values-idfint.yaml @@ -0,0 +1,11 @@ +config: + + # Data (Butler) Collections + butlerDataCollections: + - config: "https://raw.githubusercontent.com/lsst-dm/dax_obscore/refs/heads/main/configs/dp02.yaml" + label: "LSST.DP02" + name: "dp02" + butler_type: "REMOTE" + repository: "https://data-int.lsst.cloud/api/butler/repo/dp02/butler.yaml" + datalink_url: "https://data-int.lsst.cloud/api/datalink/links?ID=butler%3A//dp02/{id}" + default_instrument: "LSSTCam-imSim" diff --git a/applications/sia/values.yaml b/applications/sia/values.yaml new file mode 100644 index 0000000000..ee0df61f5c --- /dev/null +++ b/applications/sia/values.yaml @@ -0,0 +1,84 @@ +# Default values for sia. +# This is a YAML-formatted file. 
+# Declare variables to be passed into your templates. + +# -- Override the base name for resources +nameOverride: "" + +# -- Override the full name for resources (includes the release name) +fullnameOverride: "" + +# -- Number of web deployment pods to start +replicaCount: 1 + +image: + # -- Image to use in the sia deployment + repository: "ghcr.io/lsst-sqre/sia" + + # -- Pull policy for the sia image + pullPolicy: "IfNotPresent" + + # -- Tag of image to use + # @default -- The appVersion of the chart + tag: "" + +config: + # -- Whether to send alerts and status to Slack. + slackAlerts: false + + # -- Logging level + logLevel: "INFO" + + # -- Logging profile (`production` for JSON, `development` for + # human-friendly) + logProfile: "production" + + # -- URL path prefix + pathPrefix: "/api/sia" + + # -- List of data (Butler) Collections + # Expected attributes: `config`, `label`, `name`, `butler_type`, `repository`, `datalink_url` & `default_instrument` + butlerDataCollections: [] + + # -- User to use from the PGPASSFILE if sia is using a direct Butler + # connection + pgUser: "rubin" + + +ingress: + # -- Additional annotations for the ingress rule + annotations: {} + + # -- Path prefix where app is hosted + path: "/api/sia" + +# -- Affinity rules for the sia deployment pod +affinity: {} + +# -- Node selection rules for the sia deployment pod +nodeSelector: {} + +# -- Annotations for the sia deployment pod +podAnnotations: {} + +# -- Resource limits and requests for the sia deployment pod +# @default -- See `values.yaml` +resources: {} + +# -- Tolerations for the sia deployment pod +tolerations: [] + +# The following will be set by parameters injected by Argo CD and should not +# be set in the individual environment values files. 
+global: + # -- Base URL for the environment + # @default -- Set by Argo CD + baseUrl: null + + # -- Host name for ingress + # @default -- Set by Argo CD + host: null + + # -- Base path for Vault secrets + # @default -- Set by Argo CD + vaultSecretsPath: null diff --git a/docs/applications/rsp.rst b/docs/applications/rsp.rst index b395276738..ae76cd8f21 100644 --- a/docs/applications/rsp.rst +++ b/docs/applications/rsp.rst @@ -20,6 +20,7 @@ Argo CD project: ``rsp`` portal/index ppdb-replication/index semaphore/index + sia/index siav2/index sqlproxy-cross-project/index squareone/index diff --git a/docs/applications/sia/index.rst b/docs/applications/sia/index.rst new file mode 100644 index 0000000000..ec4a177b15 --- /dev/null +++ b/docs/applications/sia/index.rst @@ -0,0 +1,28 @@ +.. px-app:: sia + +###################################### +sia — Simple Image Access (v2) service +###################################### + +``sia`` is an image-access API complying with the IVOA SIA (v2) specification. +This application is designed to interact with Butler repositories, through the dax_obscore package https://github.com/lsst-dm/dax_obscore and allows users to find image links for objects that match one or more filter criteria, listed in the IVOA SIA specification https://www.ivoa.net/documents/SIA/. + +Results of an SIAv2 query will contain either a datalink if the images are stored behind an authenticated store, or a direct link to the images. + +The SIA service will have as client the RSP Portal Aspect but can also be accessed by other IVOA-compatible clients. + +If the SIA application does not appear under a VO Registry, use of it by IVOA-compatible clients will require users to input the SIA service URL manually. + +Both POST & GET methods are implemented for the /query API, as well as the VOSI-availability and VOSI-capabilities endpoints. + + +.. jinja:: sia + :file: applications/_summary.rst.jinja + +Guides +====== + +.. 
toctree:: + :maxdepth: 1 + + values diff --git a/docs/applications/sia/values.md b/docs/applications/sia/values.md new file mode 100644 index 0000000000..88ea24dbc4 --- /dev/null +++ b/docs/applications/sia/values.md @@ -0,0 +1,12 @@ +```{px-app-values} sia +``` + +# sia Helm values reference + +Helm values reference table for the {px-app}`sia` application. + +```{include} ../../../applications/sia/README.md +--- +start-after: "## Values" +--- +``` \ No newline at end of file diff --git a/environments/README.md b/environments/README.md index d44f6c7e2d..e92de4f65a 100644 --- a/environments/README.md +++ b/environments/README.md @@ -57,6 +57,7 @@ | applications.sasquatch-backpack | bool | `false` | Enable the sasquatch-backpack application | | applications.schedview-snapshot | bool | `false` | Enable the schedview-snapshot application | | applications.semaphore | bool | `false` | Enable the semaphore application | +| applications.sia | bool | `false` | Enable the sia over butler application | | applications.siav2 | bool | `false` | Enable the siav2 application | | applications.simonyitel | bool | `false` | Enable the simonyitel control system application | | applications.sqlproxy-cross-project | bool | `false` | Enable the sqlproxy-cross-project application | diff --git a/environments/templates/applications/rsp/sia.yaml b/environments/templates/applications/rsp/sia.yaml new file mode 100644 index 0000000000..ace746138e --- /dev/null +++ b/environments/templates/applications/rsp/sia.yaml @@ -0,0 +1,34 @@ +{{- if (index .Values "applications" "sia") -}} +apiVersion: v1 +kind: Namespace +metadata: + name: "sia" +--- +apiVersion: argoproj.io/v1alpha1 +kind: Application +metadata: + name: "sia" + namespace: "argocd" + finalizers: + - "resources-finalizer.argocd.argoproj.io" +spec: + destination: + namespace: "sia" + server: "https://kubernetes.default.svc" + project: "rsp" + source: + path: "applications/sia" + repoURL: {{ .Values.repoUrl | quote }} + targetRevision: {{ 
.Values.targetRevision | quote }} + helm: + parameters: + - name: "global.host" + value: {{ .Values.fqdn | quote }} + - name: "global.baseUrl" + value: "https://{{ .Values.fqdn }}" + - name: "global.vaultSecretsPath" + value: {{ .Values.vaultPathPrefix | quote }} + valueFiles: + - "values.yaml" + - "values-{{ .Values.name }}.yaml" +{{- end -}} diff --git a/environments/values-idfdev.yaml b/environments/values-idfdev.yaml index 6283dddfbd..4f56c10fba 100644 --- a/environments/values-idfdev.yaml +++ b/environments/values-idfdev.yaml @@ -25,6 +25,7 @@ applications: portal: true sasquatch: true semaphore: true + sia: true siav2: false ssotap: true squareone: true diff --git a/environments/values-idfint.yaml b/environments/values-idfint.yaml index 64873171d9..804062f9b1 100644 --- a/environments/values-idfint.yaml +++ b/environments/values-idfint.yaml @@ -22,6 +22,7 @@ applications: nublado: true portal: true sasquatch: true + sia: true siav2: false ssotap: true production-tools: true diff --git a/environments/values-idfprod.yaml b/environments/values-idfprod.yaml index 0a6a26cc37..f0b77f439c 100644 --- a/environments/values-idfprod.yaml +++ b/environments/values-idfprod.yaml @@ -23,6 +23,7 @@ applications: nublado: true portal: true semaphore: true + sia: false siav2: false squareone: true ssotap: true diff --git a/environments/values.yaml b/environments/values.yaml index cd11a31959..b65965df03 100644 --- a/environments/values.yaml +++ b/environments/values.yaml @@ -171,6 +171,9 @@ applications: # -- Enable the schedview-snapshot application schedview-snapshot: false + # -- Enable the sia over butler application + sia: false + # -- Enable the siav2 application siav2: false diff --git a/tests/data/input/docs/applications/rsp.rst b/tests/data/input/docs/applications/rsp.rst index 234524c178..02c38e2fec 100644 --- a/tests/data/input/docs/applications/rsp.rst +++ b/tests/data/input/docs/applications/rsp.rst @@ -18,6 +18,7 @@ Argo CD project: ``rsp`` nublado/index 
portal/index semaphore/index + sia/index siav2/index sqlproxy-cross-project/index squareone/index diff --git a/tests/data/output/docs/rsp.rst b/tests/data/output/docs/rsp.rst index a1a7d3ea67..23462b5e28 100644 --- a/tests/data/output/docs/rsp.rst +++ b/tests/data/output/docs/rsp.rst @@ -19,6 +19,7 @@ Argo CD project: ``rsp`` nublado/index portal/index semaphore/index + sia/index siav2/index sqlproxy-cross-project/index squareone/index From 5bd073f84e52e69d8a25ec7d77aeee79796c40a9 Mon Sep 17 00:00:00 2001 From: Stelios Voutsinas Date: Fri, 25 Oct 2024 16:42:04 -0700 Subject: [PATCH 399/567] Add flag to annotate direct butler support & make direct butler secrets conditional --- applications/sia/Chart.yaml | 2 +- applications/sia/README.md | 1 + applications/sia/secrets.yaml | 3 +++ applications/sia/templates/deployment.yaml | 8 ++++++++ applications/sia/templates/ingress-anonymous.yaml | 5 +++-- applications/sia/values.yaml | 3 +++ 6 files changed, 19 insertions(+), 3 deletions(-) diff --git a/applications/sia/Chart.yaml b/applications/sia/Chart.yaml index 21df0de4e5..8ddf1e2c6d 100644 --- a/applications/sia/Chart.yaml +++ b/applications/sia/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: 0.1.1 +appVersion: 0.1.2 description: Simple Image Access (SIA) IVOA Service using Butler name: sia sources: diff --git a/applications/sia/README.md b/applications/sia/README.md index 47ffeac2d5..e030a859fa 100644 --- a/applications/sia/README.md +++ b/applications/sia/README.md @@ -12,6 +12,7 @@ Simple Image Access (SIA) IVOA Service using Butler |-----|------|---------|-------------| | affinity | object | `{}` | Affinity rules for the sia deployment pod | | config.butlerDataCollections | list | `[]` | List of data (Butler) Collections Expected attributes: `config`, `label`, `name`, `butler_type`, `repository`, `datalink_url` & `default_instrument` | +| config.directButlerEnabled | bool | `false` | Whether direct butler access is enabled | | config.logLevel | string | 
`"INFO"` | Logging level | | config.logProfile | string | `"production"` | Logging profile (`production` for JSON, `development` for human-friendly) | | config.pathPrefix | string | `"/api/sia"` | URL path prefix | diff --git a/applications/sia/secrets.yaml b/applications/sia/secrets.yaml index ada6b88933..05d15deeea 100644 --- a/applications/sia/secrets.yaml +++ b/applications/sia/secrets.yaml @@ -1,12 +1,15 @@ "aws-credentials.ini": + if: config.directButlerEnabled copy: application: nublado key: "aws-credentials.ini" "butler-gcs-idf-creds.json": + if: config.directButlerEnabled copy: application: nublado key: "butler-gcs-idf-creds.json" "postgres-credentials.txt": + if: config.directButlerEnabled copy: application: nublado key: "postgres-credentials.txt" diff --git a/applications/sia/templates/deployment.yaml b/applications/sia/templates/deployment.yaml index 3cf3c23b7e..e7da6b12f4 100644 --- a/applications/sia/templates/deployment.yaml +++ b/applications/sia/templates/deployment.yaml @@ -24,6 +24,7 @@ spec: {{- toYaml . 
| nindent 8 }} {{- end }} automountServiceAccountToken: false + {{- if .Values.config.directButlerEnabled }} initContainers: - name: fix-secret-permissions image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" @@ -44,6 +45,7 @@ spec: mountPath: "/etc/butler/secrets" - name: "secrets-raw" mountPath: "/tmp/secrets-raw" + {{- end }} containers: - name: {{ .Chart.Name }} envFrom: @@ -71,6 +73,7 @@ spec: name: "sia" key: "slack-webhook" {{- end }} + {{- if .Values.config.directButlerEnabled }} - name: "AWS_SHARED_CREDENTIALS_FILE" value: "/tmp/secrets/aws-credentials.ini" - name: "PGUSER" @@ -79,10 +82,13 @@ spec: value: "/etc/butler/secrets/postgres-credentials.txt" - name: "GOOGLE_APPLICATION_CREDENTIALS" value: "/tmp/secrets/butler-gcs-idf-creds.json" + {{- end }} + {{- if .Values.config.directButlerEnabled }} volumeMounts: - name: "secrets" mountPath: "/etc/butler/secrets" readOnly: true + {{- end }} securityContext: allowPrivilegeEscalation: false capabilities: @@ -97,12 +103,14 @@ spec: tolerations: {{- toYaml . | nindent 8 }} {{- end }} + {{- if .Values.config.directButlerEnabled }} volumes: - name: "secrets-raw" secret: secretName: "sia" - name: "secrets" emptyDir: {} + {{- end }} securityContext: runAsNonRoot: true runAsUser: 1000 diff --git a/applications/sia/templates/ingress-anonymous.yaml b/applications/sia/templates/ingress-anonymous.yaml index 3587f2bc5d..c683c4e2d1 100644 --- a/applications/sia/templates/ingress-anonymous.yaml +++ b/applications/sia/templates/ingress-anonymous.yaml @@ -13,6 +13,7 @@ template: name: {{ template "sia.fullname" . }}-anonymous {{- with .Values.ingress.annotations }} annotations: + nginx.ingress.kubernetes.io/use-regex: "true" {{- toYaml . | nindent 6 }} {{- end }} spec: @@ -28,14 +29,14 @@ template: port: number: 8080 - path: "{{ .Values.ingress.path }}/.+/capabilities" - pathType: "Exact" + pathType: "ImplementationSpecific" backend: service: name: {{ template "sia.fullname" . 
}} port: number: 8080 - path: "{{ .Values.ingress.path }}/.+/availability" - pathType: "Exact" + pathType: "ImplementationSpecific" backend: service: name: {{ template "sia.fullname" . }} diff --git a/applications/sia/values.yaml b/applications/sia/values.yaml index ee0df61f5c..fe76b45dd4 100644 --- a/applications/sia/values.yaml +++ b/applications/sia/values.yaml @@ -36,6 +36,9 @@ config: # -- URL path prefix pathPrefix: "/api/sia" + # -- Whether direct butler access is enabled + directButlerEnabled: false + # -- List of data (Butler) Collections # Expected attributes: `config`, `label`, `name`, `butler_type`, `repository`, `datalink_url` & `default_instrument` butlerDataCollections: [] From 8a871246af98aa697fed6f3ec18e62ab4ca0a344 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 28 Oct 2024 17:32:18 -0700 Subject: [PATCH 400/567] Fix project on application summary page Print out the value of the enum rather than the name of the enum value to avoid adding a spurious `Project.` prefix. --- docs/applications/_summary.rst.jinja | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/applications/_summary.rst.jinja b/docs/applications/_summary.rst.jinja index 2f836ed853..5f0012bc07 100644 --- a/docs/applications/_summary.rst.jinja +++ b/docs/applications/_summary.rst.jinja @@ -34,7 +34,7 @@ * - Namespace - {{ app.namespace }} * - Argo CD Project - - {{ app.project }} + - {{ app.project.value }} * - Environments {%- if app.active_environments %} - .. 
list-table:: From 2ad439a6625bbcd762a0bfa615b18046d5dd89d5 Mon Sep 17 00:00:00 2001 From: ugyballoons Date: Tue, 29 Oct 2024 16:40:23 +0000 Subject: [PATCH 401/567] Update RubinTV to v2.5.1 --- applications/rubintv/values-summit.yaml | 2 +- applications/rubintv/values-usdfprod.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index c733551b29..74aadee21c 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -20,7 +20,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.5.0 + tag: v2.5.1 pullPolicy: Always workers: diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index a569e70d35..35b51e3fe4 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -16,7 +16,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.5.0 + tag: v2.5.1 pullPolicy: Always workers: From 27f0d6a745e7153796add4a279205549f936e014 Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Tue, 29 Oct 2024 10:17:28 -0700 Subject: [PATCH 402/567] Upgrade butler server Upgrade butler for a bugfix needed by Stelios' sia service. --- applications/butler/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/butler/Chart.yaml b/applications/butler/Chart.yaml index 9d3b40a094..b0a92da48b 100644 --- a/applications/butler/Chart.yaml +++ b/applications/butler/Chart.yaml @@ -4,4 +4,4 @@ version: 1.0.0 description: Server for Butler data abstraction service sources: - https://github.com/lsst/daf_butler -appVersion: server-2.1.0 +appVersion: w.2024.43 From 1bfc34dd6b72e5b5788cb95ab567b4e5c6c76749 Mon Sep 17 00:00:00 2001 From: ac6y micha Date: Tue, 29 Oct 2024 19:42:21 -0700 Subject: [PATCH 403/567] Update sync-secrets.rst This *appears* to be a typo. 
Assuming that by default, obsolete secrets are *not* deleted, and to delete them the user must explicitly add the `--delete` flag to the `phalanx sync secrets`, then the word "not" should be added here. The code indeed appears to require the `--delete` flag in order to trigger the `self._clean_vault_secrets()` method call --- docs/admin/sync-secrets.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/admin/sync-secrets.rst b/docs/admin/sync-secrets.rst index ede6114600..0a9e9740a1 100644 --- a/docs/admin/sync-secrets.rst +++ b/docs/admin/sync-secrets.rst @@ -23,7 +23,7 @@ It can then be run again whenever the secrets for that environment change. Deleting secrets ================ -By default old secrets that are no longer required are deleted out of Vault. +By default old secrets that are no longer required are not deleted out of Vault. To delete obsolete secrets, pass the ``--delete`` flag to :command:`phalanx secrets sync`. This will keep your Vault tidy, but you should use this flag with caution if you have applications temporarily disabled or if you store static secrets directly in Vault and nowhere else. From 4192839a6d6dd56ea835e6b7d54843844a91819d Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Wed, 30 Oct 2024 07:59:22 -0700 Subject: [PATCH 404/567] Improve the secrets sync documentation Add an introduction to secrets syncing, fix a couple of errors, and emphasize that secrets audit should be run before secrets sync. 
--- docs/admin/audit-secrets.rst | 2 +- docs/admin/sync-secrets.rst | 28 +++++++++++++++++++++------- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/docs/admin/audit-secrets.rst b/docs/admin/audit-secrets.rst index a04193a727..c764e24f1d 100644 --- a/docs/admin/audit-secrets.rst +++ b/docs/admin/audit-secrets.rst @@ -2,7 +2,7 @@ Audit secrets for an environment ################################ -To check that all of the necessary secrets for an environment named ```` are in Vault and appear to have the proper form, run: +To check that all of the necessary secrets for an environment named ```` are in Vault and appear to have the proper form, and to see exactly what a :doc:`syncing secrets for that environment ` would do, run: .. prompt:: bash diff --git a/docs/admin/sync-secrets.rst b/docs/admin/sync-secrets.rst index 0a9e9740a1..4998b506d1 100644 --- a/docs/admin/sync-secrets.rst +++ b/docs/admin/sync-secrets.rst @@ -2,8 +2,21 @@ Sync secrets for an environment ############################### -Before syncing secrets for an environment, you should normally audit the secrets so that you know what will change. -See :doc:`audit-secrets`. +Phalanx uses :px-app:`vault-secrets-operator` to create Kubernetes ``Secret`` resources from ``VaultSecret`` resources and entries in Vault. +It requires every Phalanx application with secrets have its own entry in Vault whose keys and values collect all secrets used by that application. +Some secrets therefore have to be duplicated between applications, and others can be automatically generated if missing. +This process of copying and generating secrets as needed is called syncing secrets. + +Syncing secrets must be done before installing a Phalanx environment for the first time, and then every time the secrets for that environment change. +Even if the environment stores static secrets in Vault directly, secrets will still need to be synced periodically to handle the copied and generated secrets also stored in Vault. 
+ +Syncing secrets +=============== + +.. warning:: + + Before syncing secrets for an environment, you should normally audit the secrets so that you know what will change. + See :doc:`audit-secrets`. To populate Vault with all of the necessary secrets for an environment named ````, run: @@ -17,13 +30,13 @@ For SQuaRE-managed deployments, the 1Password token for ``OP_CONNECT_TOKEN`` com If you did not store the Vault write token for your environment with the static secrets, the ``VAULT_TOKEN`` environment variable must be set to the Vault write token for this environment. For SQuaRE-managed environments, you can get the write token from the ``Phalanx Vault write tokens`` item in the SQuaRE 1Password vault. -This must be done before installing a Phalanx environment for the first time. -It can then be run again whenever the secrets for that environment change. +Only secrets for the named environment will be affected. +No changes will be made outside of the configured secrets path for that environment. Deleting secrets ================ -By default old secrets that are no longer required are not deleted out of Vault. +By default, old secrets that are no longer required are not deleted out of Vault. To delete obsolete secrets, pass the ``--delete`` flag to :command:`phalanx secrets sync`. This will keep your Vault tidy, but you should use this flag with caution if you have applications temporarily disabled or if you store static secrets directly in Vault and nowhere else. @@ -34,11 +47,12 @@ Regenerating secrets By default, :command:`phalanx secrets sync` will leave any existing generated secrets set to their current values. This is almost always what you want. -In the rare case where you are completely reinstalling an environment and want to invalidate all existing secrets (such as after a security breach), you can add the ``--regenerate`` flag to regenerate all static secrets. 
+ +In the rare case where you are completely reinstalling an environment and want to invalidate all existing secrets (such as after a security breach), you can add the ``--regenerate`` flag to regenerate all non-static secrets. .. warning:: Using ``--regenerate`` will invalidate all user sessions, all user tokens, and other, possibly unanticipated, interactions with the existing cluster. - It will also break most running Phalanx applications until their secrets have been recreated and they have been restarted. + It will also break most running Phalanx applications until their Kubernetes ``Secret`` resources have been recreated and they have been restarted. This should only be used when you also plan to empty the Gafaelfawr database and otherwise reset the environment to start fresh. From 5a5de13ee69d1a4eec8598a3ed87610987ae9eaa Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Wed, 30 Oct 2024 10:10:43 -0700 Subject: [PATCH 405/567] Make LFA secrets per-environment. --- applications/consdb/secrets-base.yaml | 12 ++++++++++++ applications/consdb/secrets-summit.yaml | 12 ++++++++++++ applications/consdb/secrets.yaml | 18 +++--------------- 3 files changed, 27 insertions(+), 15 deletions(-) create mode 100644 applications/consdb/secrets-base.yaml create mode 100644 applications/consdb/secrets-summit.yaml diff --git a/applications/consdb/secrets-base.yaml b/applications/consdb/secrets-base.yaml new file mode 100644 index 0000000000..2695612b18 --- /dev/null +++ b/applications/consdb/secrets-base.yaml @@ -0,0 +1,12 @@ +lfa-password: + description: >- + LFA password, used for retrieving Header Service objects. + copy: + application: auxtel + key: aws-secret-access-key +lfa-key: + description: >- + LFA key, used for retrieving Header Service objects. 
+ copy: + application: auxtel + key: aws-access-key-id diff --git a/applications/consdb/secrets-summit.yaml b/applications/consdb/secrets-summit.yaml new file mode 100644 index 0000000000..2695612b18 --- /dev/null +++ b/applications/consdb/secrets-summit.yaml @@ -0,0 +1,12 @@ +lfa-password: + description: >- + LFA password, used for retrieving Header Service objects. + copy: + application: auxtel + key: aws-secret-access-key +lfa-key: + description: >- + LFA key, used for retrieving Header Service objects. + copy: + application: auxtel + key: aws-access-key-id diff --git a/applications/consdb/secrets.yaml b/applications/consdb/secrets.yaml index 3799b8f010..f8c76e5445 100644 --- a/applications/consdb/secrets.yaml +++ b/applications/consdb/secrets.yaml @@ -1,27 +1,15 @@ consdb-password: description: >- - Kafka password for consdb user + Kafka password for consdb user, used to get EFD data. copy: application: sasquatch key: consdb-password exposurelog-password: description: >- - PostgreSQL password for the exposurelog user exposurelog database. + PostgreSQL password for the exposurelog user exposurelog database, used to write to ConsDB schemas. copy: application: exposurelog key: exposurelog_password oods-password: description: >- - PostgreSQL password for the OODS user Butler database. -lfa-password: - description: >- - LFA password - copy: - application: auxtel - key: aws-secret-access-key -lfa-key: - description: >- - LFA key - copy: - application: auxtel - key: aws-access-key-id + PostgreSQL password for the OODS user in the Butler database. From 6b803d650753b1b17f2cfa0e9a8e57f46d717371 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Wed, 30 Oct 2024 10:39:39 -0700 Subject: [PATCH 406/567] Add TTS. 
--- applications/consdb/secrets-tucson-teststand.yaml | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 applications/consdb/secrets-tucson-teststand.yaml diff --git a/applications/consdb/secrets-tucson-teststand.yaml b/applications/consdb/secrets-tucson-teststand.yaml new file mode 100644 index 0000000000..2695612b18 --- /dev/null +++ b/applications/consdb/secrets-tucson-teststand.yaml @@ -0,0 +1,12 @@ +lfa-password: + description: >- + LFA password, used for retrieving Header Service objects. + copy: + application: auxtel + key: aws-secret-access-key +lfa-key: + description: >- + LFA key, used for retrieving Header Service objects. + copy: + application: auxtel + key: aws-access-key-id From 74e50891983808eb11c86ea497691c3a1c1d8d03 Mon Sep 17 00:00:00 2001 From: Dan Fuchs Date: Mon, 28 Oct 2024 12:38:05 -0500 Subject: [PATCH 407/567] DM-47199: `idfprod` Sasquatch Provision Sasquatch services in `idfprod` to support application metrics. Services that are not needed for application metrics are not provisioned, like `mirrormaker` and the REST proxy. 
* Secrets are in 1Password and have been synced * IP addresses were created ephemerally when the Strimzi Kafka resource was provisioned, then I promted them to static IPs in GCP * Angelo provisioned all of the necessary DNS and TLS ACME records in Route53 --- applications/sasquatch/values-idfprod.yaml | 97 +++++++++++++++++++ .../values-idfprod.yaml | 0 applications/strimzi/values-idfprod.yaml | 9 ++ environments/values-idfprod.yaml | 3 + 4 files changed, 109 insertions(+) create mode 100644 applications/sasquatch/values-idfprod.yaml create mode 100644 applications/strimzi-access-operator/values-idfprod.yaml create mode 100644 applications/strimzi/values-idfprod.yaml diff --git a/applications/sasquatch/values-idfprod.yaml b/applications/sasquatch/values-idfprod.yaml new file mode 100644 index 0000000000..fce7457c18 --- /dev/null +++ b/applications/sasquatch/values-idfprod.yaml @@ -0,0 +1,97 @@ +strimzi-kafka: + kafka: + externalListener: + tls: + enabled: true + bootstrap: + loadBalancerIP: "34.55.132.0" + host: sasquatch-kafka-bootstrap.lsst.cloud + + brokers: + - broker: 3 + loadBalancerIP: "34.122.37.250" + host: sasquatch-kafka-3.lsst.cloud + - broker: 4 + loadBalancerIP: "34.72.131.177" + host: sasquatch-kafka-4.lsst.cloud + - broker: 5 + loadBalancerIP: "34.72.103.157" + host: sasquatch-kafka-5.lsst.cloud + users: + kafdrop: + enabled: true + telegraf: + enabled: true + kraft: + enabled: true + kafkaController: + enabled: true + resources: + requests: + memory: 8Gi + cpu: "1" + limits: + memory: 8Gi + cpu: "1" + registry: + ingress: + enabled: true + annotations: + nginx.ingress.kubernetes.io/rewrite-target: /$2 + hostname: data.lsst.cloud + path: /schema-registry(/|$)(.*) + connect: + enabled: false + +influxdb: + ingress: + enabled: true + hostname: data.lsst.cloud + resources: + requests: + memory: 8Gi + cpu: 1 + limits: + memory: 8Gi + cpu: 1 + +telegraf-kafka-consumer: + enabled: true + kafkaConsumers: + example: + enabled: true + replicaCount: 1 + 
database: "lsst.example" + tags: | + [ "band", "instrument" ] + timestamp_format: "unix_ms" + timestamp_field: "timestamp" + topicRegexps: | + [ "lsst.example" ] +kafdrop: + ingress: + enabled: true + hostname: data.lsst.cloud + +chronograf: + ingress: + enabled: true + hostname: data.lsst.cloud + + env: + GENERIC_NAME: "OIDC" + GENERIC_AUTH_URL: https://data.lsst.cloud/auth/openid/login + GENERIC_TOKEN_URL: https://data.lsst.cloud/auth/openid/token + USE_ID_TOKEN: 1 + JWKS_URL: https://data.lsst.cloud/.well-known/jwks.json + GENERIC_API_URL: https://data.lsst.cloud/auth/openid/userinfo + GENERIC_SCOPES: openid + GENERIC_API_KEY: sub + PUBLIC_URL: https://data.lsst.cloud/ + STATUS_FEED_URL: https://raw.githubusercontent.com/lsst-sqre/rsp_broadcast/main/jsonfeeds/idfprod.json + +app-metrics: + enabled: true + apps: + - gafaelfawr + - mobu diff --git a/applications/strimzi-access-operator/values-idfprod.yaml b/applications/strimzi-access-operator/values-idfprod.yaml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/applications/strimzi/values-idfprod.yaml b/applications/strimzi/values-idfprod.yaml new file mode 100644 index 0000000000..1abe0d7c86 --- /dev/null +++ b/applications/strimzi/values-idfprod.yaml @@ -0,0 +1,9 @@ +strimzi-kafka-operator: + resources: + limits: + memory: "1Gi" + requests: + memory: "512Mi" + watchNamespaces: + - "sasquatch" + logLevel: "INFO" diff --git a/environments/values-idfprod.yaml b/environments/values-idfprod.yaml index f0b77f439c..fcc11b5370 100644 --- a/environments/values-idfprod.yaml +++ b/environments/values-idfprod.yaml @@ -22,10 +22,13 @@ applications: mobu: true nublado: true portal: true + sasquatch: true semaphore: true sia: false siav2: false squareone: true + strimzi: true + strimzi-access-operator: true ssotap: true tap: true telegraf: true From b7db9b127b40e91882467fb009f879e6b91fefd0 Mon Sep 17 00:00:00 2001 From: Stelios Voutsinas Date: Tue, 29 Oct 2024 11:05:35 -0700 Subject: [PATCH 408/567] Remove 
default_instrument from sia values --- applications/sia/Chart.yaml | 2 +- applications/sia/README.md | 2 +- applications/sia/values-idfdev.yaml | 1 - applications/sia/values-idfint.yaml | 1 - applications/sia/values.yaml | 2 +- 5 files changed, 3 insertions(+), 5 deletions(-) diff --git a/applications/sia/Chart.yaml b/applications/sia/Chart.yaml index 8ddf1e2c6d..0a5388c001 100644 --- a/applications/sia/Chart.yaml +++ b/applications/sia/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: 0.1.2 +appVersion: 0.1.3 description: Simple Image Access (SIA) IVOA Service using Butler name: sia sources: diff --git a/applications/sia/README.md b/applications/sia/README.md index e030a859fa..b105a3f178 100644 --- a/applications/sia/README.md +++ b/applications/sia/README.md @@ -11,7 +11,7 @@ Simple Image Access (SIA) IVOA Service using Butler | Key | Type | Default | Description | |-----|------|---------|-------------| | affinity | object | `{}` | Affinity rules for the sia deployment pod | -| config.butlerDataCollections | list | `[]` | List of data (Butler) Collections Expected attributes: `config`, `label`, `name`, `butler_type`, `repository`, `datalink_url` & `default_instrument` | +| config.butlerDataCollections | list | `[]` | List of data (Butler) Collections Expected attributes: `config`, `label`, `name`, `butler_type`, `repository` & `datalink_url` | | config.directButlerEnabled | bool | `false` | Whether direct butler access is enabled | | config.logLevel | string | `"INFO"` | Logging level | | config.logProfile | string | `"production"` | Logging profile (`production` for JSON, `development` for human-friendly) | diff --git a/applications/sia/values-idfdev.yaml b/applications/sia/values-idfdev.yaml index 92ae953404..857cf69f15 100644 --- a/applications/sia/values-idfdev.yaml +++ b/applications/sia/values-idfdev.yaml @@ -8,4 +8,3 @@ config: butler_type: "REMOTE" repository: "https://data-dev.lsst.cloud/api/butler/repo/dp02/butler.yaml" datalink_url: 
"https://data-dev.lsst.cloud/api/datalink/links?ID=butler%3A//dp02/{id}" - default_instrument: "LSSTCam-imSim" diff --git a/applications/sia/values-idfint.yaml b/applications/sia/values-idfint.yaml index 687799b059..610338ae61 100644 --- a/applications/sia/values-idfint.yaml +++ b/applications/sia/values-idfint.yaml @@ -8,4 +8,3 @@ config: butler_type: "REMOTE" repository: "https://data-int.lsst.cloud/api/butler/repo/dp02/butler.yaml" datalink_url: "https://data-int.lsst.cloud/api/datalink/links?ID=butler%3A//dp02/{id}" - default_instrument: "LSSTCam-imSim" diff --git a/applications/sia/values.yaml b/applications/sia/values.yaml index fe76b45dd4..241790f837 100644 --- a/applications/sia/values.yaml +++ b/applications/sia/values.yaml @@ -40,7 +40,7 @@ config: directButlerEnabled: false # -- List of data (Butler) Collections - # Expected attributes: `config`, `label`, `name`, `butler_type`, `repository`, `datalink_url` & `default_instrument` + # Expected attributes: `config`, `label`, `name`, `butler_type`, `repository` & `datalink_url` butlerDataCollections: [] # -- User to use from the PGPASSFILE if sia is using a direct Butler From fd4c894741d71310d24ddf1830b6f1a55eef43dd Mon Sep 17 00:00:00 2001 From: Stelios Voutsinas Date: Wed, 30 Oct 2024 12:02:27 -0700 Subject: [PATCH 409/567] Enable SIA on idfprod --- applications/sia/values-idfprod.yaml | 10 ++++++++++ environments/values-idfprod.yaml | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 applications/sia/values-idfprod.yaml diff --git a/applications/sia/values-idfprod.yaml b/applications/sia/values-idfprod.yaml new file mode 100644 index 0000000000..32d3d5f76d --- /dev/null +++ b/applications/sia/values-idfprod.yaml @@ -0,0 +1,10 @@ +config: + + # Data (Butler) Collections + butlerDataCollections: + - config: "https://raw.githubusercontent.com/lsst-dm/dax_obscore/refs/heads/main/configs/dp02.yaml" + label: "LSST.DP02" + name: "dp02" + butler_type: "REMOTE" + repository: 
"https://data.lsst.cloud/api/butler/repo/dp02/butler.yaml" + datalink_url: "https://data.lsst.cloud/api/datalink/links?ID=butler%3A//dp02/{id}" diff --git a/environments/values-idfprod.yaml b/environments/values-idfprod.yaml index fcc11b5370..0a9eee8a8d 100644 --- a/environments/values-idfprod.yaml +++ b/environments/values-idfprod.yaml @@ -24,7 +24,7 @@ applications: portal: true sasquatch: true semaphore: true - sia: false + sia: true siav2: false squareone: true strimzi: true From bff5d47caae7f432191d1cf96a50cc1aac6997d0 Mon Sep 17 00:00:00 2001 From: roby Date: Tue, 1 Oct 2024 12:55:22 -0600 Subject: [PATCH 410/567] appVersion: "portal-2024.3.1-final" --- applications/portal/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/portal/Chart.yaml b/applications/portal/Chart.yaml index 6e3cee19bf..a21819c4ab 100644 --- a/applications/portal/Chart.yaml +++ b/applications/portal/Chart.yaml @@ -5,7 +5,7 @@ description: Rubin Science Platform Portal Aspect sources: - https://github.com/lsst/suit - https://github.com/Caltech-IPAC/firefly -appVersion: "portal-2024.2.3" +appVersion: "portal-2024.3.1-final" dependencies: - name: redis From 9d6ba154cc94813aab44edb3dca7292b859956da Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Thu, 31 Oct 2024 10:06:05 -0700 Subject: [PATCH 411/567] Add obsenv-management to BTS and summit. 
--- .../obsenv-management/values-base.yaml | 18 ++++++++++++++++++ .../obsenv-management/values-summit.yaml | 18 ++++++++++++++++++ environments/values-base.yaml | 1 + environments/values-summit.yaml | 1 + 4 files changed, 38 insertions(+) create mode 100644 applications/obsenv-management/values-base.yaml create mode 100644 applications/obsenv-management/values-summit.yaml diff --git a/applications/obsenv-management/values-base.yaml b/applications/obsenv-management/values-base.yaml new file mode 100644 index 0000000000..fc08eb1355 --- /dev/null +++ b/applications/obsenv-management/values-base.yaml @@ -0,0 +1,18 @@ +obsenv-api: + image: + repository: rubin-cr.lsst.org/obsenv-api + tag: 0.2.0 + pullPolicy: Always + config: + logLevel: "DEBUG" + nfsMount: + server: nfs-obsenv.ls.lsst.org + +obsenv-ui: + image: + repository: rubin-cr.lsst.org/obsenv-ui + tag: 0.2.0 + pullPolicy: Always + config: + pathPrefix: /obsenv-management + authGroup: obsenv-admins diff --git a/applications/obsenv-management/values-summit.yaml b/applications/obsenv-management/values-summit.yaml new file mode 100644 index 0000000000..8a2906974d --- /dev/null +++ b/applications/obsenv-management/values-summit.yaml @@ -0,0 +1,18 @@ +obsenv-api: + image: + repository: rubin-cr.lsst.org/obsenv-api + tag: 0.2.0 + pullPolicy: Always + config: + logLevel: "DEBUG" + nfsMount: + server: nfs-obsenv.cp.lsst.org + +obsenv-ui: + image: + repository: rubin-cr.lsst.org/obsenv-ui + tag: 0.2.0 + pullPolicy: Always + config: + pathPrefix: /obsenv-management + authGroup: obsenv-admins diff --git a/environments/values-base.yaml b/environments/values-base.yaml index e0a262f932..03455ce0a7 100644 --- a/environments/values-base.yaml +++ b/environments/values-base.yaml @@ -22,6 +22,7 @@ applications: narrativelog: true nightreport: true nublado: true + obsenv-management: true obssys: true portal: true rubintv: true diff --git a/environments/values-summit.yaml b/environments/values-summit.yaml index ce63e3bd3f..838471061d 
100644 --- a/environments/values-summit.yaml +++ b/environments/values-summit.yaml @@ -13,6 +13,7 @@ applications: narrativelog: true nightreport: true nublado: true + obsenv-management: true portal: true rapid-analysis: true rubintv: true From 51ebeae3bfef3bed4165aef247e8eda82b4ee05c Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 29 Oct 2024 09:53:08 -0700 Subject: [PATCH 412/567] Bump mobu version --- applications/mobu/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/mobu/Chart.yaml b/applications/mobu/Chart.yaml index aa975f02a2..453ec7eb7f 100644 --- a/applications/mobu/Chart.yaml +++ b/applications/mobu/Chart.yaml @@ -5,4 +5,4 @@ description: "Continuous integration testing" home: https://mobu.lsst.io/ sources: - "https://github.com/lsst-sqre/mobu" -appVersion: 11.0.0 +appVersion: 12.0.2 From cfd486bdba7af500c39753579cf66f8140122cae Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Fri, 1 Nov 2024 12:57:15 -0700 Subject: [PATCH 413/567] Upgrade butler server to fix conflict with weekly 44 Upgrade Butler server to fix an issue where dataset queries were failing with a weekly 44 client, due to extra properties added in the new version that were being rejected by the old server. 
--- applications/butler/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/butler/Chart.yaml b/applications/butler/Chart.yaml index b0a92da48b..5216bf15ad 100644 --- a/applications/butler/Chart.yaml +++ b/applications/butler/Chart.yaml @@ -4,4 +4,4 @@ version: 1.0.0 description: Server for Butler data abstraction service sources: - https://github.com/lsst/daf_butler -appVersion: w.2024.43 +appVersion: server-2.2.1 From a27debc14c8e9d01b24e484ffa1da0db42a73561 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 10:24:41 +0000 Subject: [PATCH 414/567] chore(deps): update helm release argo-workflows to v0.42.7 --- applications/argo-workflows/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argo-workflows/Chart.yaml b/applications/argo-workflows/Chart.yaml index ba374e241b..01ae908e43 100644 --- a/applications/argo-workflows/Chart.yaml +++ b/applications/argo-workflows/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-workflows - version: 0.42.5 + version: 0.42.7 repository: https://argoproj.github.io/argo-helm From 159507768ca273d0c42b3c6e49ec8271ff999d28 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 10:24:44 +0000 Subject: [PATCH 415/567] chore(deps): update helm release vault-secrets-operator to v2.6.1 --- applications/vault-secrets-operator/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/vault-secrets-operator/Chart.yaml b/applications/vault-secrets-operator/Chart.yaml index 8556856914..b366e67f4e 100644 --- a/applications/vault-secrets-operator/Chart.yaml +++ b/applications/vault-secrets-operator/Chart.yaml @@ -5,7 +5,7 @@ sources: - https://github.com/ricoberger/vault-secrets-operator dependencies: - name: vault-secrets-operator - version: 2.6.0 + 
version: 2.6.1 repository: https://ricoberger.github.io/helm-charts/ annotations: phalanx.lsst.io/docs: | From 79dbe50664841adfd21cfe7e6f265b8dc7d196d9 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 12:17:54 +0000 Subject: [PATCH 416/567] chore(deps): update helm release strimzi-kafka-operator to v0.44.0 --- applications/strimzi/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/strimzi/Chart.yaml b/applications/strimzi/Chart.yaml index fc8ddc5460..d3d40d3d35 100644 --- a/applications/strimzi/Chart.yaml +++ b/applications/strimzi/Chart.yaml @@ -7,5 +7,5 @@ home: https://strimzi.io appVersion: "0.39.0" dependencies: - name: strimzi-kafka-operator - version: "0.43.0" + version: "0.44.0" repository: https://strimzi.io/charts/ From a7f440d616f3a2a8f7c330f22af244a9a6fff0ba Mon Sep 17 00:00:00 2001 From: Dave McKay Date: Mon, 4 Nov 2024 12:21:24 +0000 Subject: [PATCH 417/567] Update loadbalancerip --- applications/ingress-nginx/values-roe.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/ingress-nginx/values-roe.yaml b/applications/ingress-nginx/values-roe.yaml index 3fcae8f034..e2e549ef68 100644 --- a/applications/ingress-nginx/values-roe.yaml +++ b/applications/ingress-nginx/values-roe.yaml @@ -7,7 +7,7 @@ ingress-nginx: use-proxy-protocol: "false" enable-health-monitor: "false" service: - loadBalancerIP: "192.41.122.130" + loadBalancerIP: "192.41.122.16" annotations: kubernetes.io/ingress.class: "openstack" loadbalancer.openstack.org/enable-health-monitor: "false" From 789cf68976af7d01fbd39f44af2c3fae962d8625 Mon Sep 17 00:00:00 2001 From: Dave McKay Date: Mon, 4 Nov 2024 12:22:17 +0000 Subject: [PATCH 418/567] Update values-roe.yaml updateSchema: true --- applications/gafaelfawr/values-roe.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/gafaelfawr/values-roe.yaml 
b/applications/gafaelfawr/values-roe.yaml index 8bfe88e382..0cc138ca67 100644 --- a/applications/gafaelfawr/values-roe.yaml +++ b/applications/gafaelfawr/values-roe.yaml @@ -4,7 +4,7 @@ redis: config: internalDatabase: true - updateSchema: false + updateSchema: true github: clientId: "10172b4db1b67ee31620" From 52f34ad3a598b6c6f8031ed4fbc45a3cccc03466 Mon Sep 17 00:00:00 2001 From: Dave McKay Date: Mon, 4 Nov 2024 18:16:52 +0000 Subject: [PATCH 419/567] Update values-roe.yaml --- applications/gafaelfawr/values-roe.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/gafaelfawr/values-roe.yaml b/applications/gafaelfawr/values-roe.yaml index 0cc138ca67..8bfe88e382 100644 --- a/applications/gafaelfawr/values-roe.yaml +++ b/applications/gafaelfawr/values-roe.yaml @@ -4,7 +4,7 @@ redis: config: internalDatabase: true - updateSchema: true + updateSchema: false github: clientId: "10172b4db1b67ee31620" From 744a161761a1373ec810e29e41a5d849d0462ef6 Mon Sep 17 00:00:00 2001 From: Erin Howard Date: Mon, 4 Nov 2024 14:01:01 -0800 Subject: [PATCH 420/567] Deploy Prompt Processing 4.7.0 for LSSTComCam. --- .../values-usdfprod-prompt-processing.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index d6ae66afcd..3e0de99ff0 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. 
- tag: 4.6.0 + tag: 4.7.0 instrument: pipelines: From 89ad262515678ff1d2e043e25d2df4fba3a88c2c Mon Sep 17 00:00:00 2001 From: Erin Howard Date: Mon, 4 Nov 2024 14:01:36 -0800 Subject: [PATCH 421/567] Deploy Prompt Processing 4.7.0 for LATISS. --- .../values-usdfprod-prompt-processing.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index e350a646eb..844c69f0a3 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.6.0 + tag: 4.7.0 instrument: pipelines: From 1e5f3ae8d68ef466d01845d862d79dbe87b565fa Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 31 Oct 2024 16:14:51 -0700 Subject: [PATCH 422/567] Disable AE service - Need to disable AE to avoid competition with the manual shard restore procedure --- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/influxdb-enterprise/README.md | 2 +- applications/sasquatch/charts/influxdb-enterprise/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index bc03b4ca9e..98998fb592 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -107,7 +107,7 @@ Rubin Observatory's telemetry service | influxdb-enterprise.bootstrap.ddldml.configMap | string | Do not run DDL or DML | A config map containing DDL and DML that define databases, retention policies, and inject some data. The keys `ddl` and `dml` must exist, even if one of them is empty. DDL is executed before DML to ensure databases and retention policies exist. 
| | influxdb-enterprise.bootstrap.ddldml.resources | object | `{}` | Kubernetes resources and limits for the bootstrap job | | influxdb-enterprise.data.affinity | object | See `values.yaml` | Affinity rules for data pods | -| influxdb-enterprise.data.config.antiEntropy.enabled | bool | `true` | Enable the anti-entropy service, which copies and repairs shards | +| influxdb-enterprise.data.config.antiEntropy.enabled | bool | `false` | Enable the anti-entropy service, which copies and repairs shards | | influxdb-enterprise.data.config.cluster.log-queries-after | string | `"15s"` | Maximum duration a query can run before InfluxDB logs it as a slow query | | influxdb-enterprise.data.config.cluster.max-concurrent-queries | int | `1000` | Maximum number of running queries allowed on the instance (0 is unlimited) | | influxdb-enterprise.data.config.cluster.query-timeout | string | `"300s"` | Maximum duration a query is allowed to run before it is killed | diff --git a/applications/sasquatch/charts/influxdb-enterprise/README.md b/applications/sasquatch/charts/influxdb-enterprise/README.md index 12233edf75..1f95a590c9 100644 --- a/applications/sasquatch/charts/influxdb-enterprise/README.md +++ b/applications/sasquatch/charts/influxdb-enterprise/README.md @@ -14,7 +14,7 @@ Run InfluxDB Enterprise on Kubernetes | bootstrap.ddldml.configMap | string | Do not run DDL or DML | A config map containing DDL and DML that define databases, retention policies, and inject some data. The keys `ddl` and `dml` must exist, even if one of them is empty. DDL is executed before DML to ensure databases and retention policies exist. 
| | bootstrap.ddldml.resources | object | `{}` | Kubernetes resources and limits for the bootstrap job | | data.affinity | object | See `values.yaml` | Affinity rules for data pods | -| data.config.antiEntropy.enabled | bool | `true` | Enable the anti-entropy service, which copies and repairs shards | +| data.config.antiEntropy.enabled | bool | `false` | Enable the anti-entropy service, which copies and repairs shards | | data.config.cluster.log-queries-after | string | `"15s"` | Maximum duration a query can run before InfluxDB logs it as a slow query | | data.config.cluster.max-concurrent-queries | int | `1000` | Maximum number of running queries allowed on the instance (0 is unlimited) | | data.config.cluster.query-timeout | string | `"300s"` | Maximum duration a query is allowed to run before it is killed | diff --git a/applications/sasquatch/charts/influxdb-enterprise/values.yaml b/applications/sasquatch/charts/influxdb-enterprise/values.yaml index 0709b449c6..5df8482d84 100644 --- a/applications/sasquatch/charts/influxdb-enterprise/values.yaml +++ b/applications/sasquatch/charts/influxdb-enterprise/values.yaml @@ -364,7 +364,7 @@ data: antiEntropy: # -- Enable the anti-entropy service, which copies and repairs shards - enabled: true + enabled: false http: # -- Whether to enable the Flux query endpoint From 0d0f5976ec8f6c9c5cdedea0b6b0406aa00299f3 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 31 Oct 2024 20:44:40 -0700 Subject: [PATCH 423/567] Enable AE service --- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/influxdb-enterprise/README.md | 2 +- applications/sasquatch/charts/influxdb-enterprise/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 98998fb592..bc03b4ca9e 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -107,7 +107,7 @@ Rubin Observatory's telemetry service | 
influxdb-enterprise.bootstrap.ddldml.configMap | string | Do not run DDL or DML | A config map containing DDL and DML that define databases, retention policies, and inject some data. The keys `ddl` and `dml` must exist, even if one of them is empty. DDL is executed before DML to ensure databases and retention policies exist. | | influxdb-enterprise.bootstrap.ddldml.resources | object | `{}` | Kubernetes resources and limits for the bootstrap job | | influxdb-enterprise.data.affinity | object | See `values.yaml` | Affinity rules for data pods | -| influxdb-enterprise.data.config.antiEntropy.enabled | bool | `false` | Enable the anti-entropy service, which copies and repairs shards | +| influxdb-enterprise.data.config.antiEntropy.enabled | bool | `true` | Enable the anti-entropy service, which copies and repairs shards | | influxdb-enterprise.data.config.cluster.log-queries-after | string | `"15s"` | Maximum duration a query can run before InfluxDB logs it as a slow query | | influxdb-enterprise.data.config.cluster.max-concurrent-queries | int | `1000` | Maximum number of running queries allowed on the instance (0 is unlimited) | | influxdb-enterprise.data.config.cluster.query-timeout | string | `"300s"` | Maximum duration a query is allowed to run before it is killed | diff --git a/applications/sasquatch/charts/influxdb-enterprise/README.md b/applications/sasquatch/charts/influxdb-enterprise/README.md index 1f95a590c9..12233edf75 100644 --- a/applications/sasquatch/charts/influxdb-enterprise/README.md +++ b/applications/sasquatch/charts/influxdb-enterprise/README.md @@ -14,7 +14,7 @@ Run InfluxDB Enterprise on Kubernetes | bootstrap.ddldml.configMap | string | Do not run DDL or DML | A config map containing DDL and DML that define databases, retention policies, and inject some data. The keys `ddl` and `dml` must exist, even if one of them is empty. DDL is executed before DML to ensure databases and retention policies exist. 
| | bootstrap.ddldml.resources | object | `{}` | Kubernetes resources and limits for the bootstrap job | | data.affinity | object | See `values.yaml` | Affinity rules for data pods | -| data.config.antiEntropy.enabled | bool | `false` | Enable the anti-entropy service, which copies and repairs shards | +| data.config.antiEntropy.enabled | bool | `true` | Enable the anti-entropy service, which copies and repairs shards | | data.config.cluster.log-queries-after | string | `"15s"` | Maximum duration a query can run before InfluxDB logs it as a slow query | | data.config.cluster.max-concurrent-queries | int | `1000` | Maximum number of running queries allowed on the instance (0 is unlimited) | | data.config.cluster.query-timeout | string | `"300s"` | Maximum duration a query is allowed to run before it is killed | diff --git a/applications/sasquatch/charts/influxdb-enterprise/values.yaml b/applications/sasquatch/charts/influxdb-enterprise/values.yaml index 5df8482d84..0709b449c6 100644 --- a/applications/sasquatch/charts/influxdb-enterprise/values.yaml +++ b/applications/sasquatch/charts/influxdb-enterprise/values.yaml @@ -364,7 +364,7 @@ data: antiEntropy: # -- Enable the anti-entropy service, which copies and repairs shards - enabled: false + enabled: true http: # -- Whether to enable the Flux query endpoint From 83ad08de76a097156d6d0741c31b478684fda34b Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 4 Nov 2024 16:57:42 -0700 Subject: [PATCH 424/567] Change readiness probe path for obsenv-ui. 
--- .../charts/obsenv-ui/templates/deployment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml index 8967cde053..61f100a8b2 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/deployment.yaml @@ -37,7 +37,7 @@ spec: protocol: "TCP" readinessProbe: httpGet: - path: "/obsenv-management" + path: "/obsenv-management/status" port: "http" resources: {{- toYaml .Values.resources | nindent 12 }} From 9cb8c8691bd898a9249c95a2e263f3c576938796 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 4 Nov 2024 16:58:15 -0700 Subject: [PATCH 425/567] Change chart app versions. --- applications/obsenv-management/charts/obsenv-api/Chart.yaml | 2 +- applications/obsenv-management/charts/obsenv-ui/Chart.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/obsenv-management/charts/obsenv-api/Chart.yaml b/applications/obsenv-management/charts/obsenv-api/Chart.yaml index a4779452b3..304e694acb 100644 --- a/applications/obsenv-management/charts/obsenv-api/Chart.yaml +++ b/applications/obsenv-management/charts/obsenv-api/Chart.yaml @@ -2,4 +2,4 @@ name: obsenv-api apiVersion: v2 version: 1.0.0 description: Helm chart for the Observatory Environment Management API. -appVersion: "0.1.0" +appVersion: "0.2.0" diff --git a/applications/obsenv-management/charts/obsenv-ui/Chart.yaml b/applications/obsenv-management/charts/obsenv-ui/Chart.yaml index b1ec63afb3..2692bbac6b 100644 --- a/applications/obsenv-management/charts/obsenv-ui/Chart.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/Chart.yaml @@ -2,4 +2,4 @@ name: obsenv-ui apiVersion: v2 version: 1.0.0 description: Helm chart for the Observatory Environment Management UI. 
-appVersion: "0.1.0" +appVersion: "0.2.1" From 19bfcbfd47d50fca1c6bcd4aec9648fcba663b1b Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 4 Nov 2024 16:59:09 -0700 Subject: [PATCH 426/567] Update obsenv-ui versions at all sites. --- applications/obsenv-management/values-base.yaml | 2 +- applications/obsenv-management/values-summit.yaml | 2 +- applications/obsenv-management/values-tucson-teststand.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/obsenv-management/values-base.yaml b/applications/obsenv-management/values-base.yaml index fc08eb1355..cf94687ec3 100644 --- a/applications/obsenv-management/values-base.yaml +++ b/applications/obsenv-management/values-base.yaml @@ -11,7 +11,7 @@ obsenv-api: obsenv-ui: image: repository: rubin-cr.lsst.org/obsenv-ui - tag: 0.2.0 + tag: 0.2.1 pullPolicy: Always config: pathPrefix: /obsenv-management diff --git a/applications/obsenv-management/values-summit.yaml b/applications/obsenv-management/values-summit.yaml index 8a2906974d..78b60b8048 100644 --- a/applications/obsenv-management/values-summit.yaml +++ b/applications/obsenv-management/values-summit.yaml @@ -11,7 +11,7 @@ obsenv-api: obsenv-ui: image: repository: rubin-cr.lsst.org/obsenv-ui - tag: 0.2.0 + tag: 0.2.1 pullPolicy: Always config: pathPrefix: /obsenv-management diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 672f70bf83..7fc3005cf9 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -11,7 +11,7 @@ obsenv-api: obsenv-ui: image: repository: rubin-cr.lsst.org/obsenv-ui - tag: 0.2.0 + tag: 0.2.1 pullPolicy: Always config: pathPrefix: /obsenv-management From 8db44ac84a0b5c33b21d72ffd60bb83ac2dbb3d1 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 5 Nov 2024 12:12:45 -0300 Subject: [PATCH 427/567] rubintv: update app version to 
v2.5.2 for summit and usdf production deployments. --- applications/rubintv/values-summit.yaml | 2 +- applications/rubintv/values-usdfprod.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index 74aadee21c..a575b87a4a 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -20,7 +20,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.5.1 + tag: v2.5.2 pullPolicy: Always workers: diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index 35b51e3fe4..d6755ff37e 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -16,7 +16,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.5.1 + tag: v2.5.2 pullPolicy: Always workers: From cbac53ed121bae3798a0019911546a3d0b9bbba4 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Tue, 5 Nov 2024 09:13:56 -0800 Subject: [PATCH 428/567] Configure per-filter cache size for Prompt Processing. This config serves as a floor to the cache size that guarantees at least one dataset per filter can be cached at once. 
--- applications/prompt-proto-service-hsc-gpu/README.md | 1 + applications/prompt-proto-service-hsc-gpu/values.yaml | 3 +++ applications/prompt-proto-service-hsc/README.md | 1 + applications/prompt-proto-service-hsc/values.yaml | 3 +++ applications/prompt-proto-service-latiss/README.md | 1 + applications/prompt-proto-service-latiss/values.yaml | 3 +++ applications/prompt-proto-service-lsstcam/README.md | 1 + applications/prompt-proto-service-lsstcam/values.yaml | 3 +++ applications/prompt-proto-service-lsstcomcam/README.md | 1 + applications/prompt-proto-service-lsstcomcam/values.yaml | 3 +++ applications/prompt-proto-service-lsstcomcamsim/README.md | 1 + applications/prompt-proto-service-lsstcomcamsim/values.yaml | 3 +++ charts/prompt-proto-service/README.md | 1 + .../prompt-proto-service/templates/prompt-proto-service.yaml | 2 ++ charts/prompt-proto-service/values.yaml | 3 +++ 15 files changed, 30 insertions(+) diff --git a/applications/prompt-proto-service-hsc-gpu/README.md b/applications/prompt-proto-service-hsc-gpu/README.md index 2415159676..c15afbac89 100644 --- a/applications/prompt-proto-service-hsc-gpu/README.md +++ b/applications/prompt-proto-service-hsc-gpu/README.md @@ -19,6 +19,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | +| prompt-proto-service.cache.maxFilters | int | `20` | The maximum number of datasets of a given type the service might load if the filter is unknown. 
Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. | | prompt-proto-service.cache.patchesPerImage | int | `4` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. | | prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | diff --git a/applications/prompt-proto-service-hsc-gpu/values.yaml b/applications/prompt-proto-service-hsc-gpu/values.yaml index 7efc93a3bb..b769efe5e4 100644 --- a/applications/prompt-proto-service-hsc-gpu/values.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values.yaml @@ -60,6 +60,9 @@ prompt-proto-service: refcatsPerImage: 4 # -- A factor by which to multiply `baseSize` for templates and other patch-based datasets. patchesPerImage: 4 + # -- The maximum number of datasets of a given type the service might load if the filter is unknown. + # Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. + maxFilters: 20 s3: # -- Bucket containing the incoming raw images diff --git a/applications/prompt-proto-service-hsc/README.md b/applications/prompt-proto-service-hsc/README.md index fbb60fceae..e42e98570b 100644 --- a/applications/prompt-proto-service-hsc/README.md +++ b/applications/prompt-proto-service-hsc/README.md @@ -19,6 +19,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. 
The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | +| prompt-proto-service.cache.maxFilters | int | `20` | The maximum number of datasets of a given type the service might load if the filter is unknown. Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. | | prompt-proto-service.cache.patchesPerImage | int | `4` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. | | prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | diff --git a/applications/prompt-proto-service-hsc/values.yaml b/applications/prompt-proto-service-hsc/values.yaml index c3921fcb42..4f2bb6153d 100644 --- a/applications/prompt-proto-service-hsc/values.yaml +++ b/applications/prompt-proto-service-hsc/values.yaml @@ -60,6 +60,9 @@ prompt-proto-service: refcatsPerImage: 4 # -- A factor by which to multiply `baseSize` for templates and other patch-based datasets. patchesPerImage: 4 + # -- The maximum number of datasets of a given type the service might load if the filter is unknown. + # Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. + maxFilters: 20 s3: # -- Bucket containing the incoming raw images diff --git a/applications/prompt-proto-service-latiss/README.md b/applications/prompt-proto-service-latiss/README.md index 941c350a20..ce765f2da7 100644 --- a/applications/prompt-proto-service-latiss/README.md +++ b/applications/prompt-proto-service-latiss/README.md @@ -19,6 +19,7 @@ Prompt Proto Service is an event driven service for processing camera images. 
Th | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | +| prompt-proto-service.cache.maxFilters | int | `10` | The maximum number of datasets of a given type the service might load if the filter is unknown. Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. | | prompt-proto-service.cache.patchesPerImage | int | `6` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. | | prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | diff --git a/applications/prompt-proto-service-latiss/values.yaml b/applications/prompt-proto-service-latiss/values.yaml index 38fddacd35..fed814b736 100644 --- a/applications/prompt-proto-service-latiss/values.yaml +++ b/applications/prompt-proto-service-latiss/values.yaml @@ -61,6 +61,9 @@ prompt-proto-service: refcatsPerImage: 4 # -- A factor by which to multiply `baseSize` for templates and other patch-based datasets. patchesPerImage: 6 + # -- The maximum number of datasets of a given type the service might load if the filter is unknown. + # Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. 
+ maxFilters: 10 s3: # -- Bucket containing the incoming raw images diff --git a/applications/prompt-proto-service-lsstcam/README.md b/applications/prompt-proto-service-lsstcam/README.md index b2d000f026..b49f608383 100644 --- a/applications/prompt-proto-service-lsstcam/README.md +++ b/applications/prompt-proto-service-lsstcam/README.md @@ -19,6 +19,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | +| prompt-proto-service.cache.maxFilters | int | `20` | The maximum number of datasets of a given type the service might load if the filter is unknown. Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. | | prompt-proto-service.cache.patchesPerImage | int | `4` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. 
| | prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | diff --git a/applications/prompt-proto-service-lsstcam/values.yaml b/applications/prompt-proto-service-lsstcam/values.yaml index a590661413..c72da7e373 100644 --- a/applications/prompt-proto-service-lsstcam/values.yaml +++ b/applications/prompt-proto-service-lsstcam/values.yaml @@ -60,6 +60,9 @@ prompt-proto-service: refcatsPerImage: 4 # -- A factor by which to multiply `baseSize` for templates and other patch-based datasets. patchesPerImage: 4 + # -- The maximum number of datasets of a given type the service might load if the filter is unknown. + # Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. + maxFilters: 20 s3: # -- Bucket containing the incoming raw images diff --git a/applications/prompt-proto-service-lsstcomcam/README.md b/applications/prompt-proto-service-lsstcomcam/README.md index 59907947a7..b7991bf994 100644 --- a/applications/prompt-proto-service-lsstcomcam/README.md +++ b/applications/prompt-proto-service-lsstcomcam/README.md @@ -19,6 +19,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | +| prompt-proto-service.cache.maxFilters | int | `20` | The maximum number of datasets of a given type the service might load if the filter is unknown. 
Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. | | prompt-proto-service.cache.patchesPerImage | int | `16` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `6` | A factor by which to multiply `baseSize` for refcat datasets. | | prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | diff --git a/applications/prompt-proto-service-lsstcomcam/values.yaml b/applications/prompt-proto-service-lsstcomcam/values.yaml index fd880bdaca..52e133a11a 100644 --- a/applications/prompt-proto-service-lsstcomcam/values.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values.yaml @@ -63,6 +63,9 @@ prompt-proto-service: refcatsPerImage: 6 # -- A factor by which to multiply `baseSize` for templates and other patch-based datasets. patchesPerImage: 16 + # -- The maximum number of datasets of a given type the service might load if the filter is unknown. + # Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. + maxFilters: 20 s3: # -- Bucket containing the incoming raw images diff --git a/applications/prompt-proto-service-lsstcomcamsim/README.md b/applications/prompt-proto-service-lsstcomcamsim/README.md index 6854bea8e2..7fd4eb76a1 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/README.md +++ b/applications/prompt-proto-service-lsstcomcamsim/README.md @@ -19,6 +19,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | prompt-proto-service.apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. 
| | prompt-proto-service.cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | +| prompt-proto-service.cache.maxFilters | int | `3` | The maximum number of datasets of a given type the service might load if the filter is unknown. Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. | | prompt-proto-service.cache.patchesPerImage | int | `16` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | prompt-proto-service.cache.refcatsPerImage | int | `6` | A factor by which to multiply `baseSize` for refcat datasets. | | prompt-proto-service.containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | diff --git a/applications/prompt-proto-service-lsstcomcamsim/values.yaml b/applications/prompt-proto-service-lsstcomcamsim/values.yaml index 4b5797b52d..ac825014c7 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values.yaml @@ -63,6 +63,9 @@ prompt-proto-service: refcatsPerImage: 6 # -- A factor by which to multiply `baseSize` for templates and other patch-based datasets. patchesPerImage: 16 + # -- The maximum number of datasets of a given type the service might load if the filter is unknown. + # Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. 
+ maxFilters: 3 s3: # -- Bucket containing the incoming raw images diff --git a/charts/prompt-proto-service/README.md b/charts/prompt-proto-service/README.md index 03390726d6..94f154cdcb 100644 --- a/charts/prompt-proto-service/README.md +++ b/charts/prompt-proto-service/README.md @@ -19,6 +19,7 @@ Event-driven processing of camera images | alerts.username | string | `"kafka-admin"` | Username for sending alerts to the alert stream | | apdb.config | string | None, must be set | URL to a serialized APDB configuration, or the "label:" prefix followed by the indexed name of such a config. | | cache.baseSize | int | `3` | The default number of datasets of each type to keep. The pipeline only needs one of most dataset types (one bias, one flat, etc.), so this is roughly the number of visits that fit in the cache. | +| cache.maxFilters | int | `20` | The maximum number of datasets of a given type the service might load if the filter is unknown. Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. | | cache.patchesPerImage | int | `4` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. | | cacheCalibs | bool | `true` | Whether or not calibs should be cached between runs of a pod. This is a temporary flag that should only be unset in specific circumstances, and only in the development environment. 
| diff --git a/charts/prompt-proto-service/templates/prompt-proto-service.yaml b/charts/prompt-proto-service/templates/prompt-proto-service.yaml index f08eb4b17e..b25784a038 100644 --- a/charts/prompt-proto-service/templates/prompt-proto-service.yaml +++ b/charts/prompt-proto-service/templates/prompt-proto-service.yaml @@ -115,6 +115,8 @@ spec: value: {{ .Values.cache.refcatsPerImage | toString | quote }} - name: PATCHES_PER_IMAGE value: {{ .Values.cache.patchesPerImage | toString | quote }} + - name: FILTERS_WITH_CALIBS + value: {{ .Values.cache.maxFilters | toString | quote }} - name: DEBUG_CACHE_CALIBS value: {{ if .Values.cacheCalibs }}'1'{{ else }}'0'{{ end }} volumeMounts: diff --git a/charts/prompt-proto-service/values.yaml b/charts/prompt-proto-service/values.yaml index 954c50e7d1..7169250899 100644 --- a/charts/prompt-proto-service/values.yaml +++ b/charts/prompt-proto-service/values.yaml @@ -62,6 +62,9 @@ cache: refcatsPerImage: 4 # -- A factor by which to multiply `baseSize` for templates and other patch-based datasets. patchesPerImage: 4 + # -- The maximum number of datasets of a given type the service might load if the filter is unknown. + # Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. + maxFilters: 20 s3: # -- Bucket containing the incoming raw images From 4cfecde7d73dd3886cb69f94b5af82b2130f3feb Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Tue, 5 Nov 2024 14:02:45 -0800 Subject: [PATCH 429/567] Deploy Prompt Processing 4.7.1 for ComCam. 
--- .../values-usdfprod-prompt-processing.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index 3e0de99ff0..6e8911b2a6 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.7.0 + tag: 4.7.1 instrument: pipelines: From 8aa9b0ac44d0109913fb23c0ca2ac866129eb74b Mon Sep 17 00:00:00 2001 From: Brianna Smart Date: Tue, 5 Nov 2024 16:12:16 -0800 Subject: [PATCH 430/567] Update IP --- .../alert-stream-broker/values-usdfdev-alert-stream-broker.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml b/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml index c80f69d767..3792a879b1 100644 --- a/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml +++ b/applications/alert-stream-broker/values-usdfdev-alert-stream-broker.yaml @@ -17,7 +17,7 @@ alert-stream-broker: enabled: false bootstrap: host: usdf-alert-stream-dev.lsst.cloud - ip: "" + ip: "134.79.23.185" annotations: metallb.universe.tf/address-pool: 'sdf-dmz' brokers: From 72eb1485b936cbe972d1a41a89aac80c270b6b29 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Tue, 5 Nov 2024 17:51:06 -0300 Subject: [PATCH 431/567] Update rubintv app version to v2.5.3 for summit and usdf production deployments. 
--- applications/rubintv/values-summit.yaml | 2 +- applications/rubintv/values-usdfprod.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index a575b87a4a..3d6ce653c9 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -20,7 +20,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.5.2 + tag: v2.5.3 pullPolicy: Always workers: diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index d6755ff37e..68c74079d4 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -16,7 +16,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.5.2 + tag: v2.5.3 pullPolicy: Always workers: From 77693b6f7d88fe09a0c3bcfeee10ec7874f55425 Mon Sep 17 00:00:00 2001 From: Dave McKay Date: Wed, 6 Nov 2024 13:55:48 +0000 Subject: [PATCH 432/567] Update values-roe.yaml --- applications/ingress-nginx/values-roe.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/ingress-nginx/values-roe.yaml b/applications/ingress-nginx/values-roe.yaml index e2e549ef68..57d502af02 100644 --- a/applications/ingress-nginx/values-roe.yaml +++ b/applications/ingress-nginx/values-roe.yaml @@ -7,7 +7,7 @@ ingress-nginx: use-proxy-protocol: "false" enable-health-monitor: "false" service: - loadBalancerIP: "192.41.122.16" + loadBalancerIP: "192.41.122.216" annotations: kubernetes.io/ingress.class: "openstack" loadbalancer.openstack.org/enable-health-monitor: "false" From fe066b0f45cc128db0cd6b9c3442c1cebd607cdb Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Wed, 6 Nov 2024 08:57:38 -0800 Subject: [PATCH 433/567] Update dependencies Update Python and pre-commit dependencies. 
--- .pre-commit-config.yaml | 2 +- requirements/dev.txt | 219 +++++++++++++++++++--------------------- requirements/tox.txt | 44 ++++---- 3 files changed, 126 insertions(+), 139 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4c56ca5f21..829634ac21 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,7 +46,7 @@ repos: - --template-files=../helm-docs.md.gotmpl - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.1 + rev: v0.7.2 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/requirements/dev.txt b/requirements/dev.txt index 6611f5e909..54483ec8d0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -342,9 +342,9 @@ decorator==5.1.1 \ --hash=sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330 \ --hash=sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186 # via ipython -diagrams==0.23.4 \ - --hash=sha256:1ba69d98fcf8d768dbddf07d2c77aba6cc95c2e6f90f37146c04c96bc6765450 \ - --hash=sha256:b7ada0b119b5189dd021b1dc1467fad3704737452bb18b1e06d05e4d1fa48ed7 +diagrams==0.24.0 \ + --hash=sha256:0ded0099c70aa847ceec341c8224aaf2a46fcd180402fa50638a77cf231b761f \ + --hash=sha256:da20d7e326fd55631a86386ee484832fd00ddecf9ef0c07a42e299bf2a5cb6a2 # via sphinx-diagrams documenteer==1.4.2 \ --hash=sha256:03a4cf3b8ffa4905c59662131f87afe77417238f10e9f01075d849f08a32e99d \ @@ -867,9 +867,9 @@ pydantic-core==2.23.4 \ # via # -c requirements/main.txt # pydantic -pydantic-settings==2.6.0 \ - --hash=sha256:44a1804abffac9e6a30372bb45f6cafab945ef5af25e66b1c634c01dd39e0188 \ - --hash=sha256:4a819166f119b74d7f8c765196b165f95cc7487ce58ea27dec8a5a26be0970e0 +pydantic-settings==2.6.1 \ + --hash=sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87 \ + --hash=sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0 # via autodoc-pydantic pydata-sphinx-theme==0.12.0 \ 
--hash=sha256:7a07c3ac1fb1cfbb5f7d1e147a9500fb120e329d610e0fa2caac4a645141bdd9 \ @@ -893,9 +893,9 @@ pytest==8.3.3 \ # -r requirements/dev.in # pytest-cov # pytest-sugar -pytest-cov==5.0.0 \ - --hash=sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652 \ - --hash=sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857 +pytest-cov==6.0.0 \ + --hash=sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35 \ + --hash=sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0 # via -r requirements/dev.in pytest-sugar==1.0.0 \ --hash=sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a \ @@ -1121,116 +1121,103 @@ requests==2.32.3 \ # documenteer # sphinx # sphinxcontrib-youtube -rpds-py==0.20.0 \ - --hash=sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c \ - --hash=sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585 \ - --hash=sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5 \ - --hash=sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6 \ - --hash=sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef \ - --hash=sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2 \ - --hash=sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29 \ - --hash=sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318 \ - --hash=sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b \ - --hash=sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399 \ - --hash=sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739 \ - --hash=sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee \ - --hash=sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174 \ - --hash=sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a \ - 
--hash=sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344 \ - --hash=sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2 \ - --hash=sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03 \ - --hash=sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5 \ - --hash=sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22 \ - --hash=sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e \ - --hash=sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96 \ - --hash=sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91 \ - --hash=sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752 \ - --hash=sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075 \ - --hash=sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253 \ - --hash=sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee \ - --hash=sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad \ - --hash=sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5 \ - --hash=sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce \ - --hash=sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7 \ - --hash=sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b \ - --hash=sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8 \ - --hash=sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57 \ - --hash=sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3 \ - --hash=sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec \ - --hash=sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209 \ - --hash=sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921 \ - --hash=sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045 \ - 
--hash=sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074 \ - --hash=sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580 \ - --hash=sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7 \ - --hash=sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5 \ - --hash=sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3 \ - --hash=sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0 \ - --hash=sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24 \ - --hash=sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139 \ - --hash=sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db \ - --hash=sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc \ - --hash=sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789 \ - --hash=sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f \ - --hash=sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2 \ - --hash=sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c \ - --hash=sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232 \ - --hash=sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6 \ - --hash=sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c \ - --hash=sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29 \ - --hash=sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489 \ - --hash=sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94 \ - --hash=sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751 \ - --hash=sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2 \ - --hash=sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda \ - --hash=sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9 \ - 
--hash=sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51 \ - --hash=sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c \ - --hash=sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8 \ - --hash=sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989 \ - --hash=sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511 \ - --hash=sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1 \ - --hash=sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2 \ - --hash=sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150 \ - --hash=sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c \ - --hash=sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965 \ - --hash=sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f \ - --hash=sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58 \ - --hash=sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b \ - --hash=sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f \ - --hash=sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d \ - --hash=sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821 \ - --hash=sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de \ - --hash=sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121 \ - --hash=sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855 \ - --hash=sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272 \ - --hash=sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60 \ - --hash=sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02 \ - --hash=sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1 \ - --hash=sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140 \ - 
--hash=sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879 \ - --hash=sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940 \ - --hash=sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364 \ - --hash=sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4 \ - --hash=sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e \ - --hash=sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420 \ - --hash=sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5 \ - --hash=sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24 \ - --hash=sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c \ - --hash=sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf \ - --hash=sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f \ - --hash=sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e \ - --hash=sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab \ - --hash=sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08 \ - --hash=sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92 \ - --hash=sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a \ - --hash=sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8 +rpds-py==0.21.0 \ + --hash=sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba \ + --hash=sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d \ + --hash=sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e \ + --hash=sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a \ + --hash=sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202 \ + --hash=sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271 \ + 
--hash=sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250 \ + --hash=sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d \ + --hash=sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928 \ + --hash=sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0 \ + --hash=sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d \ + --hash=sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333 \ + --hash=sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e \ + --hash=sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a \ + --hash=sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18 \ + --hash=sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044 \ + --hash=sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677 \ + --hash=sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664 \ + --hash=sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75 \ + --hash=sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89 \ + --hash=sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027 \ + --hash=sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9 \ + --hash=sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e \ + --hash=sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8 \ + --hash=sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44 \ + --hash=sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3 \ + --hash=sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95 \ + --hash=sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd \ + --hash=sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab \ + --hash=sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a \ + 
--hash=sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560 \ + --hash=sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035 \ + --hash=sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919 \ + --hash=sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c \ + --hash=sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266 \ + --hash=sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e \ + --hash=sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592 \ + --hash=sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9 \ + --hash=sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3 \ + --hash=sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624 \ + --hash=sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9 \ + --hash=sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b \ + --hash=sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f \ + --hash=sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca \ + --hash=sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1 \ + --hash=sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8 \ + --hash=sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590 \ + --hash=sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed \ + --hash=sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952 \ + --hash=sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11 \ + --hash=sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061 \ + --hash=sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c \ + --hash=sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74 \ + --hash=sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c \ + 
--hash=sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94 \ + --hash=sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c \ + --hash=sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8 \ + --hash=sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf \ + --hash=sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a \ + --hash=sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5 \ + --hash=sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6 \ + --hash=sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5 \ + --hash=sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3 \ + --hash=sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed \ + --hash=sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87 \ + --hash=sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b \ + --hash=sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72 \ + --hash=sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05 \ + --hash=sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed \ + --hash=sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f \ + --hash=sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c \ + --hash=sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153 \ + --hash=sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b \ + --hash=sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0 \ + --hash=sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d \ + --hash=sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d \ + --hash=sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e \ + --hash=sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e \ + 
--hash=sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd \ + --hash=sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682 \ + --hash=sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4 \ + --hash=sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db \ + --hash=sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976 \ + --hash=sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937 \ + --hash=sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1 \ + --hash=sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb \ + --hash=sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a \ + --hash=sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7 \ + --hash=sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356 \ + --hash=sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be # via # jsonschema # referencing -setuptools==75.2.0 \ - --hash=sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec \ - --hash=sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8 +setuptools==75.3.0 \ + --hash=sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd \ + --hash=sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686 # via # documenteer # sphinxcontrib-bibtex diff --git a/requirements/tox.txt b/requirements/tox.txt index 5feefbdbb3..06fb544a53 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -56,29 +56,29 @@ tox==4.23.2 \ # via # -r requirements/tox.in # tox-uv -tox-uv==1.15.0 \ - --hash=sha256:a5f08c80a3eabc47881e378700e5044b67ba94b03181ae38246627127f6a9183 \ - --hash=sha256:dfe7e48274248458349b47780da7db815c0156bd3751b6486152bbf01d7672fb +tox-uv==1.16.0 \ + --hash=sha256:71b2e2fa6c35c1360b91a302df1d65b3e5a1f656b321c5ebf7b84545804c9f01 \ + 
--hash=sha256:e6f0b525a687e745ab878d07cbf5c7e85d582028d4a7c8935f95e84350651432 # via -r requirements/tox.in -uv==0.4.27 \ - --hash=sha256:07d693092ad1f2536fec59e1ad5170fab10a214e9d2e39f9cf385cccbf426aa7 \ - --hash=sha256:0a7d8041f80bf59fac1d3a630ad5ed9d91008c85edc03e318e3016122235c568 \ - --hash=sha256:0bae39264d575d16d5bb3b40699396afb2b27f987d7d7cfe8f336c24d26eda87 \ - --hash=sha256:2035efeb39d8d86355d9002e129a76a032a54b47b1332c6952225f48aa9b583c \ - --hash=sha256:3dd79e9392af6f41c470f9a95a2f3f8e73cde585eecb2df721f0716cd6134893 \ - --hash=sha256:4d249ca5e5444de4dd4984627bef6f077ffdb45c3ad6b27413ddfb1146daf79b \ - --hash=sha256:6c5782274a8d3075f4bf82e90c90b0a960abc11424ab353dc559e9329b479681 \ - --hash=sha256:6d335e40658a6c23554683410e710e5f54374fec20642e459771f50c8736d600 \ - --hash=sha256:ae4f45a0640de23c880bd5bdb27b1d3a059b45c9f73c2f7d53e392664efeca10 \ - --hash=sha256:b05165b0b24573c509286b87825c619658162079e2d3b20fea01d0dd9f444238 \ - --hash=sha256:b7a858209dfaab2527c547836cf823aef5cc1e051c5b15df4ba445a71b252df8 \ - --hash=sha256:b92728ba102ac7284f560c144507961be5aca5263d7a0d70a6896bba7660271c \ - --hash=sha256:b9e9b8b4062388df4c7a5d1e6c692dc8929242f883e1509010efb2b766ac4edd \ - --hash=sha256:bb5ced184be4e7611d983462a9f31a24a2e66de60f688ded6a8c36dc701a58ef \ - --hash=sha256:c0a5a40f23b61b2c693f6fa6f08b920c7d8b9058ce7ca20f18856844d2f11b2c \ - --hash=sha256:c13eea45257362ecfa2a2b31de9b62fbd0542e211a573562d98ab7c8fc50d8fc \ - --hash=sha256:d1731252da1a71a9f38e5864eb037401340a17eab519ad32e9a9f8fd54b7ada9 \ - --hash=sha256:f552967f4b392f880a1a50d3f57b9372a9666da274ea7826ee14e024ba035f4e +uv==0.4.30 \ + --hash=sha256:0c89f2eff63a08d04e81629611f43b1ffa668af6de0382b95a71599af7d4b77c \ + --hash=sha256:1a83df281c5d900b4758b1a3969b3cff57231f9027db8508b71dce1f2da78684 \ + --hash=sha256:232575f30ed971ea32d4a525b7146c4b088a07ed6e70a31da63792d563fcac44 \ + --hash=sha256:353617bfcf72e1eabade426d83fb86a69d11273d1612aabc3f4566d41c596c97 \ + 
--hash=sha256:444468ad0e94b35cbf6acfc8a28589cfe1247136d43895e60a18955ff89a07ad \ + --hash=sha256:44c5aeb5b374f9fd1083959934daa9020db3610f0405198c5e3d8ec1f23d961d \ + --hash=sha256:4aecd9fb39cf018e129627090a1d35af2b0184bb87078d573c9998f5e4072416 \ + --hash=sha256:4d41d09cabba1988728c2d9b9ad25f79233c2aa3d6ecd724c36f4678c4c89711 \ + --hash=sha256:4ddad09385221fa5c609169e4a0dd5bee27cf56c1dc450d4cdc113122c54bb09 \ + --hash=sha256:63196143f45018364c450ba94279a5bcff8562c14ba63deb41a92ed30baa6e22 \ + --hash=sha256:6395820540f368f622e818735862abd633dfe7e729c450fca56b65bab4b46661 \ + --hash=sha256:7f09bd6a853767863e2fb905f0eb1a0ed7afa9ea118852e5c02d2b451944e1cf \ + --hash=sha256:9e17a799c6279800996828e10288ca8ccc40cc883d8998802b938aa671dfa9ce \ + --hash=sha256:9ed0183e747065b9b1bcfb699ff10df671ebe6259709ce83e709f86cea564aee \ + --hash=sha256:d9de718380e2f167243ca5e1dccea781e06404158442491255fec5955d57fed9 \ + --hash=sha256:dedcae3619f0eb181459b597fefefd99cb21fe5a5a48a530be6f5ad934399bfb \ + --hash=sha256:ea55ca0fe5bdd04e46deaf395b3daf4fa92392f774e83610d066a2b272af5d3f \ + --hash=sha256:f63d6646acdf2f38a5afca9fb9eeac62efa663a57f3c134f735a5f575b4e748f # via tox-uv virtualenv==20.27.1 \ --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ From f5c1004c9bf1e02934ea01ecdf0afd0c07cc1c77 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Wed, 6 Nov 2024 14:08:01 -0700 Subject: [PATCH 434/567] Add LEDProjector to summit and USDF-prod connectors/sinks. 
--- applications/sasquatch/values-summit.yaml | 4 ++-- applications/sasquatch/values-usdfprod.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 3919490042..f7447f323b 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -212,7 +212,7 @@ kafka-connect-manager: calsys: enabled: true repairerConnector: false - topicsRegex: ".*ATMonochromator|.*ATWhiteLight|.*CBP|.*Electrometer|.*FiberSpectrograph|.*LinearStage|.*TunableLaser" + topicsRegex: ".*ATMonochromator|.*ATWhiteLight|.*CBP|.*Electrometer|.*FiberSpectrograph|.*LEDProjector|.*LinearStage|.*TunableLaser" mtaircompressor: enabled: true repairerConnector: false @@ -381,7 +381,7 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.ATMonochromator", "lsst.sal.ATWhiteLight", "lsst.sal.CBP", "lsst.sal.Electrometer", "lsst.sal.FiberSpectrograph", "lsst.sal.LinearStage", "lsst.sal.TunableLaser" ] + [ "lsst.sal.ATMonochromator", "lsst.sal.ATWhiteLight", "lsst.sal.CBP", "lsst.sal.Electrometer", "lsst.sal.FiberSpectrograph", "lsst.sal.LEDProjector", "lsst.sal.LinearStage", "lsst.sal.TunableLaser" ] debug: true mtaircompressor: enabled: true diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 1ea6936014..7e93e3caac 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -213,7 +213,7 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.ATMonochromator", "lsst.sal.ATWhiteLight", "lsst.sal.CBP", "lsst.sal.Electrometer", "lsst.sal.FiberSpectrograph", "lsst.sal.LinearStage", "lsst.sal.TunableLaser" ] + [ "lsst.sal.ATMonochromator", "lsst.sal.ATWhiteLight", "lsst.sal.CBP", "lsst.sal.Electrometer", "lsst.sal.FiberSpectrograph", 
"lsst.sal.LEDProjector", "lsst.sal.LinearStage", "lsst.sal.TunableLaser" ] debug: true mtaircompressor: enabled: true From 0908139ac5a115e2d956022308e4f0157bcc21b6 Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Fri, 18 Oct 2024 13:08:25 -0700 Subject: [PATCH 435/567] Remove ir2 repo from data-dev Butler server This is no longer needed and the configuration is in the way of other changes. --- applications/butler/README.md | 1 - applications/butler/secrets.yaml | 6 ------ applications/butler/templates/deployment.yaml | 7 ------- applications/butler/values-idfdev.yaml | 2 -- applications/butler/values.yaml | 5 ----- 5 files changed, 21 deletions(-) diff --git a/applications/butler/README.md b/applications/butler/README.md index a3d2d49811..b4a3ff5180 100644 --- a/applications/butler/README.md +++ b/applications/butler/README.md @@ -15,7 +15,6 @@ Server for Butler data abstraction service | autoscaling.maxReplicas | int | `100` | Maximum number of butler deployment pods | | autoscaling.minReplicas | int | `1` | Minimum number of butler deployment pods | | autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization of butler deployment pods | -| config.additionalS3ProfileName | string | No second S3 profile is available. | Profile name identifying a second S3 endpoint and set of credentials to use for accessing files in the datastore. | | config.dp02ClientServerIsDefault | bool | `false` | True if the 'dp02' Butler repository alias should use client/server Butler. False if it should use DirectButler. | | config.dp02PostgresUri | string | No configuration file for DP02 will be generated. | Postgres connection string pointing to the registry database hosting Data Preview 0.2 data. | | config.pathPrefix | string | `"/api/butler"` | The prefix of the path portion of the URL where the Butler service will be exposed. 
For example, if the service should be exposed at `https://data.lsst.cloud/api/butler`, this should be set to `/api/butler` | diff --git a/applications/butler/secrets.yaml b/applications/butler/secrets.yaml index 23ee59d217..1b2d88511e 100644 --- a/applications/butler/secrets.yaml +++ b/applications/butler/secrets.yaml @@ -18,9 +18,3 @@ copy: application: nublado key: "postgres-credentials.txt" -"additional-s3-profile": - description: >- - Credentials and endpoint for a second S3 profile to use, in addition to the - default endpoint. For docs on format see - https://github.com/lsst/resources/blob/a34598e125919799d3db4bd8a2363087c3de434e/python/lsst/resources/s3utils.py#L201 - if: config.additionalS3ProfileName diff --git a/applications/butler/templates/deployment.yaml b/applications/butler/templates/deployment.yaml index c7e3f06b4c..9ba64a4257 100644 --- a/applications/butler/templates/deployment.yaml +++ b/applications/butler/templates/deployment.yaml @@ -65,13 +65,6 @@ spec: - name: PGUSER value: {{ .Values.config.pguser | quote }} {{ end }} - {{ if .Values.config.additionalS3ProfileName }} - - name: LSST_RESOURCES_S3_PROFILE_{{ .Values.config.additionalS3ProfileName }} - valueFrom: - secretKeyRef: - name: {{ include "butler.fullname" . 
}} - key: additional-s3-profile - {{ end }} volumeMounts: - name: "butler-secrets" mountPath: "/opt/lsst/butler/secrets" diff --git a/applications/butler/values-idfdev.yaml b/applications/butler/values-idfdev.yaml index 92cc0e6897..4b1a531e4a 100644 --- a/applications/butler/values-idfdev.yaml +++ b/applications/butler/values-idfdev.yaml @@ -5,7 +5,5 @@ config: dp02ClientServerIsDefault: true dp02PostgresUri: postgresql://postgres@sqlproxy-butler-int.sqlproxy-cross-project:5432/dp02 s3EndpointUrl: "https://storage.googleapis.com" - additionalS3ProfileName: "ir2" repositories: dp02: "file:///opt/lsst/butler/config/dp02.yaml" - ir2: "s3://butler-us-central1-panda-dev/ir2/butler-ir2.yaml" diff --git a/applications/butler/values.yaml b/applications/butler/values.yaml index 51ec757201..c3342afdcd 100644 --- a/applications/butler/values.yaml +++ b/applications/butler/values.yaml @@ -96,11 +96,6 @@ config: # -- URL for the S3 service where files for datasets are stored by Butler. s3EndpointUrl: "" - # -- Profile name identifying a second S3 endpoint and set of credentials - # to use for accessing files in the datastore. - # @default -- No second S3 profile is available. - additionalS3ProfileName: "" - # -- The prefix of the path portion of the URL where the Butler service will # be exposed. For example, if the service should be exposed at # `https://data.lsst.cloud/api/butler`, this should be set to `/api/butler` From 4a7c662f3d73b7f113178e4a4df69de96fa9d0f4 Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Mon, 21 Oct 2024 10:00:08 -0700 Subject: [PATCH 436/567] Update usdf Butler server for new embargo repo The new embargo repo is using a different S3 profile and configuration file than the old one. SQRE is setting up an SIAv2 service that needs to access data from the new repo. 
--- applications/butler/README.md | 3 ++- applications/butler/templates/deployment.yaml | 4 ++++ applications/butler/values-usdfdev.yaml | 4 +++- applications/butler/values-usdfint.yaml | 4 +++- applications/butler/values-usdfprod.yaml | 4 +++- applications/butler/values.yaml | 7 ++++++- 6 files changed, 21 insertions(+), 5 deletions(-) diff --git a/applications/butler/README.md b/applications/butler/README.md index b4a3ff5180..0f3f968ca2 100644 --- a/applications/butler/README.md +++ b/applications/butler/README.md @@ -15,12 +15,13 @@ Server for Butler data abstraction service | autoscaling.maxReplicas | int | `100` | Maximum number of butler deployment pods | | autoscaling.minReplicas | int | `1` | Minimum number of butler deployment pods | | autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization of butler deployment pods | +| config.additionalS3EndpointUrls | object | No additional URLs | Endpoint URLs for additional S3 services used by the Butler, as a mapping from profile name to URL. | | config.dp02ClientServerIsDefault | bool | `false` | True if the 'dp02' Butler repository alias should use client/server Butler. False if it should use DirectButler. | | config.dp02PostgresUri | string | No configuration file for DP02 will be generated. | Postgres connection string pointing to the registry database hosting Data Preview 0.2 data. | | config.pathPrefix | string | `"/api/butler"` | The prefix of the path portion of the URL where the Butler service will be exposed. For example, if the service should be exposed at `https://data.lsst.cloud/api/butler`, this should be set to `/api/butler` | | config.pguser | string | Use values specified in per-repository Butler config files. | Postgres username used to connect to the Butler DB | | config.repositories | object | `{}` | Mapping from Butler repository label to Butler configuration URI for repositories which will be hosted by this server. 
| -| config.s3EndpointUrl | string | `""` | URL for the S3 service where files for datasets are stored by Butler. | +| config.s3EndpointUrl | string | `""` | URL for the primary S3 service where files for datasets are stored by Butler. | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | diff --git a/applications/butler/templates/deployment.yaml b/applications/butler/templates/deployment.yaml index 9ba64a4257..ebbe277955 100644 --- a/applications/butler/templates/deployment.yaml +++ b/applications/butler/templates/deployment.yaml @@ -52,6 +52,10 @@ spec: value: "/opt/lsst/butler/secrets/butler-gcs-creds.json" - name: S3_ENDPOINT_URL value: {{ .Values.config.s3EndpointUrl | quote }} + {{- range $name, $url := .Values.config.additionalS3EndpointUrls }} + - name: LSST_RESOURCES_S3_PROFILE_{{ $name }} + value: {{ $url }} + {{ end }} - name: DAF_BUTLER_REPOSITORIES value: {{ .Values.config.repositories | toJson | quote }} # Serve the configuration files generated by configmap.yaml via diff --git a/applications/butler/values-usdfdev.yaml b/applications/butler/values-usdfdev.yaml index 68098547ab..8ce6a0660a 100644 --- a/applications/butler/values-usdfdev.yaml +++ b/applications/butler/values-usdfdev.yaml @@ -2,4 +2,6 @@ config: pguser: "rubin" s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu" repositories: - embargo: "s3://rubin-summit-users/butler.yaml" + embargo: s3://embargo@rubin-summit-users/butler.yaml + additionalS3EndpointUrls: + embargo: "https://sdfembs3.sdf.slac.stanford.edu" diff --git a/applications/butler/values-usdfint.yaml b/applications/butler/values-usdfint.yaml index 68098547ab..8ce6a0660a 100644 --- a/applications/butler/values-usdfint.yaml +++ b/applications/butler/values-usdfint.yaml @@ -2,4 +2,6 @@ config: pguser: "rubin" s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu" 
repositories: - embargo: "s3://rubin-summit-users/butler.yaml" + embargo: s3://embargo@rubin-summit-users/butler.yaml + additionalS3EndpointUrls: + embargo: "https://sdfembs3.sdf.slac.stanford.edu" diff --git a/applications/butler/values-usdfprod.yaml b/applications/butler/values-usdfprod.yaml index 68098547ab..8ce6a0660a 100644 --- a/applications/butler/values-usdfprod.yaml +++ b/applications/butler/values-usdfprod.yaml @@ -2,4 +2,6 @@ config: pguser: "rubin" s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu" repositories: - embargo: "s3://rubin-summit-users/butler.yaml" + embargo: s3://embargo@rubin-summit-users/butler.yaml + additionalS3EndpointUrls: + embargo: "https://sdfembs3.sdf.slac.stanford.edu" diff --git a/applications/butler/values.yaml b/applications/butler/values.yaml index c3342afdcd..3ea128cf97 100644 --- a/applications/butler/values.yaml +++ b/applications/butler/values.yaml @@ -93,9 +93,14 @@ config: # @default -- Use values specified in per-repository Butler config files. pguser: "" - # -- URL for the S3 service where files for datasets are stored by Butler. + # -- URL for the primary S3 service where files for datasets are stored by Butler. s3EndpointUrl: "" + # -- Endpoint URLs for additional S3 services used by the Butler, as a + # mapping from profile name to URL. + # @default -- No additional URLs + additionalS3EndpointUrls: {} + # -- The prefix of the path portion of the URL where the Butler service will # be exposed. 
For example, if the service should be exposed at # `https://data.lsst.cloud/api/butler`, this should be set to `/api/butler` From be254398ced41601622e59f0c32c84c0ab90fa45 Mon Sep 17 00:00:00 2001 From: Dave McKay Date: Thu, 7 Nov 2024 13:05:51 +0000 Subject: [PATCH 437/567] Update values-roe.yaml - updateSchema --- applications/gafaelfawr/values-roe.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/gafaelfawr/values-roe.yaml b/applications/gafaelfawr/values-roe.yaml index 8bfe88e382..0cc138ca67 100644 --- a/applications/gafaelfawr/values-roe.yaml +++ b/applications/gafaelfawr/values-roe.yaml @@ -4,7 +4,7 @@ redis: config: internalDatabase: true - updateSchema: false + updateSchema: true github: clientId: "10172b4db1b67ee31620" From 321a04273d1d4c0dcf14149ac10d0a1f94942c9d Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 5 Nov 2024 10:57:21 -0700 Subject: [PATCH 438/567] Enable InfluxDB Enterprise on data-dev - This environment will be used temporarily to restore a shard from USDF and test query performance --- applications/sasquatch/values-idfdev.yaml | 42 +++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/applications/sasquatch/values-idfdev.yaml b/applications/sasquatch/values-idfdev.yaml index 5b9101ccb5..62603995ee 100644 --- a/applications/sasquatch/values-idfdev.yaml +++ b/applications/sasquatch/values-idfdev.yaml @@ -64,6 +64,48 @@ influxdb: memory: 16Gi cpu: 2 +influxdb-enterprise: + enabled: true + license: + secret: + name: sasquatch + key: influxdb-enterprise-license + meta: + ingress: + enabled: true + hostname: data-dev.lsst.cloud + persistence: + enabled: true + accessMode: ReadWriteOnce + size: 16Gi + sharedSecret: + secret: + name: sasquatch + key: influxdb-enterprise-shared-secret + resources: + requests: + memory: 2Gi + cpu: 1 + limits: + memory: 4Gi + cpu: 2 + data: + replicas: 2 + ingress: + enabled: true + hostname: data-dev.lsst.cloud + persistence: + enabled: true + accessMode: 
ReadWriteOnce + size: 1Ti + resources: + requests: + memory: 8Gi + cpu: 2 + limits: + memory: 16Gi + cpu: 4 + telegraf-kafka-consumer: enabled: true kafkaConsumers: From 8b170e0f7e74a9328e8b19bcd6931be40675bfd8 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 6 Nov 2024 11:40:50 -0700 Subject: [PATCH 439/567] Disable AE service --- applications/sasquatch/values-idfdev.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/applications/sasquatch/values-idfdev.yaml b/applications/sasquatch/values-idfdev.yaml index 62603995ee..4b95d39543 100644 --- a/applications/sasquatch/values-idfdev.yaml +++ b/applications/sasquatch/values-idfdev.yaml @@ -91,6 +91,9 @@ influxdb-enterprise: cpu: 2 data: replicas: 2 + config: + antiEntropy: + enabled: false ingress: enabled: true hostname: data-dev.lsst.cloud From 47a34b7e97032245b4b94f5d9722c8df48cce70c Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 6 Nov 2024 11:56:08 -0700 Subject: [PATCH 440/567] Reduce number of replicas to one Removed shard from data-1, we expect to have it on data-0 (the only replica) --- applications/sasquatch/values-idfdev.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/sasquatch/values-idfdev.yaml b/applications/sasquatch/values-idfdev.yaml index 4b95d39543..589be378d1 100644 --- a/applications/sasquatch/values-idfdev.yaml +++ b/applications/sasquatch/values-idfdev.yaml @@ -90,7 +90,7 @@ influxdb-enterprise: memory: 4Gi cpu: 2 data: - replicas: 2 + replicas: 1 config: antiEntropy: enabled: false From dea5c918820bc8bbf188f128b7cb72cd87caa8b7 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 6 Nov 2024 12:11:45 -0700 Subject: [PATCH 441/567] Add data-1 back - So I can remove shard metadata from data-1 --- applications/sasquatch/values-idfdev.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/sasquatch/values-idfdev.yaml b/applications/sasquatch/values-idfdev.yaml index 589be378d1..4b95d39543 100644 --- 
a/applications/sasquatch/values-idfdev.yaml +++ b/applications/sasquatch/values-idfdev.yaml @@ -90,7 +90,7 @@ influxdb-enterprise: memory: 4Gi cpu: 2 data: - replicas: 1 + replicas: 2 config: antiEntropy: enabled: false From b081728d10c70394459cdeb0f55ed178ff1e5041 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 6 Nov 2024 12:18:52 -0700 Subject: [PATCH 442/567] Back to one replica --- applications/sasquatch/values-idfdev.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/sasquatch/values-idfdev.yaml b/applications/sasquatch/values-idfdev.yaml index 4b95d39543..589be378d1 100644 --- a/applications/sasquatch/values-idfdev.yaml +++ b/applications/sasquatch/values-idfdev.yaml @@ -90,7 +90,7 @@ influxdb-enterprise: memory: 4Gi cpu: 2 data: - replicas: 2 + replicas: 1 config: antiEntropy: enabled: false From af2465c8961f7b59afebf1fa172f01ec582181b6 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 7 Nov 2024 10:07:05 -0700 Subject: [PATCH 443/567] Back to two replicas and enable AE service --- applications/sasquatch/values-idfdev.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/sasquatch/values-idfdev.yaml b/applications/sasquatch/values-idfdev.yaml index 589be378d1..76740fa521 100644 --- a/applications/sasquatch/values-idfdev.yaml +++ b/applications/sasquatch/values-idfdev.yaml @@ -90,10 +90,10 @@ influxdb-enterprise: memory: 4Gi cpu: 2 data: - replicas: 1 + replicas: 2 config: antiEntropy: - enabled: false + enabled: true ingress: enabled: true hostname: data-dev.lsst.cloud From b93206e84f45fe60ab78be7c7eb8a0b11ddbcc92 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 23 Oct 2024 06:14:38 +0200 Subject: [PATCH 444/567] Enable repairer connector for lsst.dm namespace --- applications/sasquatch/values-usdfdev.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/sasquatch/values-usdfdev.yaml b/applications/sasquatch/values-usdfdev.yaml index a08a521d88..0461f60200 
100644 --- a/applications/sasquatch/values-usdfdev.yaml +++ b/applications/sasquatch/values-usdfdev.yaml @@ -98,6 +98,7 @@ kafka-connect-manager: topicsRegex: ".*GIS" lsstdm: enabled: true + repairerConnector: true timestamp: "timestamp" connectInfluxDb: "lsst.dm" topicsRegex: "lsst.dm.*" From c873a8dfabe4f1922cfee96ef8bc105aaf6bab60 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 23 Oct 2024 06:53:43 +0200 Subject: [PATCH 445/567] Allow an unlimited number of series per database - The default configuration drops points when we reach one million series. --- applications/sasquatch/README.md | 1 + applications/sasquatch/values.yaml | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index bc03b4ca9e..2cfb99b5bd 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -40,6 +40,7 @@ Rubin Observatory's telemetry service | influxdb.config.coordinator.query-timeout | string | `"30s"` | Maximum duration a query is allowed to run before it is killed | | influxdb.config.coordinator.write-timeout | string | `"1h"` | Duration a write request waits before timeout is returned to the caller | | influxdb.config.data.cache-max-memory-size | int | `0` | Maximum size a shared cache can reach before it starts rejecting writes | +| influxdb.config.data.max-series-per-database | int | `0` | Maximum number of series allowed per database before writes are dropped. Change the setting to 0 to allow an unlimited number of series per database. | | influxdb.config.data.trace-logging-enabled | bool | `true` | Whether to enable verbose logging of additional debug information within the TSM engine and WAL | | influxdb.config.data.wal-fsync-delay | string | `"100ms"` | Duration a write will wait before fsyncing. This is useful for slower disks or when WAL write contention is present. 
| | influxdb.config.http.auth-enabled | bool | `true` | Whether authentication is required | diff --git a/applications/sasquatch/values.yaml b/applications/sasquatch/values.yaml index cc9fff35e6..6eb5775989 100644 --- a/applications/sasquatch/values.yaml +++ b/applications/sasquatch/values.yaml @@ -93,6 +93,11 @@ influxdb: # within the TSM engine and WAL trace-logging-enabled: true + # -- Maximum number of series allowed per database before writes are + # dropped. Change the setting to 0 to allow an unlimited number of series per + # database. + max-series-per-database: 0 + http: # -- Whether to enable the HTTP endpoints enabled: true From dd60ea9c3987d660accae7dffbb023abac7583ee Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 23 Oct 2024 07:10:36 +0200 Subject: [PATCH 446/567] Disable repairer connector --- applications/sasquatch/values-usdfdev.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/applications/sasquatch/values-usdfdev.yaml b/applications/sasquatch/values-usdfdev.yaml index 0461f60200..a08a521d88 100644 --- a/applications/sasquatch/values-usdfdev.yaml +++ b/applications/sasquatch/values-usdfdev.yaml @@ -98,7 +98,6 @@ kafka-connect-manager: topicsRegex: ".*GIS" lsstdm: enabled: true - repairerConnector: true timestamp: "timestamp" connectInfluxDb: "lsst.dm" topicsRegex: "lsst.dm.*" From 416cb2a1588a7c22ddf16f851f0a25cb2e213e8e Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Wed, 6 Nov 2024 15:09:58 -0800 Subject: [PATCH 447/567] Order ComCam test blocks in numberical order. This change makes it easier to find a block and check whether it's configured, once there are more than two. 
--- .../values-usdfprod-prompt-processing.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index 6e8911b2a6..8c6a222b35 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -22,13 +22,13 @@ prompt-proto-service: (survey="comcam-ap")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/SingleFrame.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr.yaml] - (survey="BLOCK-T208")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] (survey="BLOCK-T60")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] + (survey="BLOCK-T208")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] (survey="")=[] preprocessing: >- (survey="comcam-ap")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml] - (survey="BLOCK-T208")=[] (survey="BLOCK-T60")=[] + (survey="BLOCK-T208")=[] (survey="")=[] calibRepo: s3://rubin-summit-users From e8be12e25ec2f72974f19b153823b69b9562c6d3 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Wed, 6 Nov 2024 15:44:38 -0800 Subject: [PATCH 448/567] Add long-term ComCam surveys to Prompt Processing. The blocks included on this list are those that a) have been scheduled daily, and could plausibly be part of the standard observing routine, and b) generate nextVisit messages. The list excludes blocks that have only instrument characterization value, even if they are regularly scheduled. 
--- .../values-usdfprod-prompt-processing.yaml | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index 8c6a222b35..97e5675b4d 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -18,17 +18,28 @@ prompt-proto-service: instrument: pipelines: + # BLOCK-T60 is optics alignment + # BLOCK-T75 is giant donuts + # BLOCK-T88 is optics alignment + # BLOCK-T246 is instrument checkout + # BLOCK-T249 is AOS alignment main: >- - (survey="comcam-ap")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml, + (survey="PP-SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/SingleFrame.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr.yaml] (survey="BLOCK-T60")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] - (survey="BLOCK-T208")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] + (survey="BLOCK-T75")=[] + (survey="BLOCK-T88")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] + (survey="BLOCK-T246")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] + (survey="BLOCK-T249")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] (survey="")=[] preprocessing: >- - (survey="comcam-ap")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml] + (survey="PP-SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml] (survey="BLOCK-T60")=[] - (survey="BLOCK-T208")=[] + (survey="BLOCK-T75")=[] + (survey="BLOCK-T88")=[] + (survey="BLOCK-T246")=[] + (survey="BLOCK-T249")=[] (survey="")=[] calibRepo: s3://rubin-summit-users From b23523fc417083c2fb0a8aa99e7c8c63dcf9fe4a Mon Sep 17 00:00:00 2001 From: 
Krzysztof Findeisen Date: Wed, 6 Nov 2024 15:48:50 -0800 Subject: [PATCH 449/567] Set scale for Prompt Processing ComCam-prod. --- .../values-usdfprod-prompt-processing.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index 97e5675b4d..dc1d2d0db3 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -2,8 +2,9 @@ prompt-proto-service: podAnnotations: # HACK: disable autoscaling as workaround for DM-41829 - autoscaling.knative.dev/min-scale: "20" - autoscaling.knative.dev/max-scale: "20" + autoscaling.knative.dev/min-scale: "200" + # see values.yaml for calculation of max-scale + autoscaling.knative.dev/max-scale: "200" # Update this field if using latest or static image tag in dev revision: "1" From 4b648ff81e5255a3d9f8c781aaacefb067d3a88b Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Thu, 7 Nov 2024 12:57:02 -0800 Subject: [PATCH 450/567] Update Kafka server for Prompt Processing dev. Kafka had to be redeployed under a new name because of an unrelated problem. 
--- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfdev-prompt-processing.yaml | 2 +- .../values-usdfdev-prompt-processing.yaml | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml index 08310c2342..33666a8bf6 100644 --- a/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml @@ -21,7 +21,7 @@ prompt-proto-service: endpointUrl: https://s3dfrgw.slac.stanford.edu imageNotifications: - kafkaClusterAddress: prompt-processing-kafka-bootstrap.kafka:9092 + kafkaClusterAddress: prompt-processing2-kafka-bootstrap.kafka:9092 topic: prompt-processing-dev apdb: diff --git a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml index 857f01a6ae..6ef44857a0 100644 --- a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml @@ -22,7 +22,7 @@ prompt-proto-service: endpointUrl: https://s3dfrgw.slac.stanford.edu imageNotifications: - kafkaClusterAddress: prompt-processing-kafka-bootstrap.kafka:9092 + kafkaClusterAddress: prompt-processing2-kafka-bootstrap.kafka:9092 topic: prompt-processing-dev apdb: diff --git a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml index 1d459bf40f..4bd5b8032d 100644 --- a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml +++ 
b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml @@ -22,7 +22,7 @@ prompt-proto-service: endpointUrl: https://s3dfrgw.slac.stanford.edu imageNotifications: - kafkaClusterAddress: prompt-processing-kafka-bootstrap.kafka:9092 + kafkaClusterAddress: prompt-processing2-kafka-bootstrap.kafka:9092 topic: prompt-processing-dev apdb: diff --git a/applications/prompt-proto-service-lsstcam/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-lsstcam/values-usdfdev-prompt-processing.yaml index 818307f6ca..eb1c6de38f 100644 --- a/applications/prompt-proto-service-lsstcam/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcam/values-usdfdev-prompt-processing.yaml @@ -19,7 +19,7 @@ prompt-proto-service: disableBucketValidation: 1 imageNotifications: - kafkaClusterAddress: prompt-processing-kafka-bootstrap.kafka:9092 + kafkaClusterAddress: prompt-processing2-kafka-bootstrap.kafka:9092 topic: rubin-prompt-processing alerts: diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml index 4b8736f8ab..e8fbd39cd6 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml @@ -22,7 +22,7 @@ prompt-proto-service: endpointUrl: https://s3dfrgw.slac.stanford.edu imageNotifications: - kafkaClusterAddress: prompt-processing-kafka-bootstrap.kafka:9092 + kafkaClusterAddress: prompt-processing2-kafka-bootstrap.kafka:9092 topic: prompt-processing-dev apdb: diff --git a/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml index 9a8af83570..17d39cf975 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml +++ 
b/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml @@ -23,7 +23,7 @@ prompt-proto-service: endpointUrl: https://s3dfrgw.slac.stanford.edu imageNotifications: - kafkaClusterAddress: prompt-processing-kafka-bootstrap.kafka:9092 + kafkaClusterAddress: prompt-processing2-kafka-bootstrap.kafka:9092 topic: prompt-processing-dev apdb: From 2ece2909327ae630324852e622243b22787d218c Mon Sep 17 00:00:00 2001 From: Adam Thornton Date: Tue, 5 Nov 2024 10:41:45 -0700 Subject: [PATCH 451/567] external client for noteburst/idf-dev --- applications/noteburst/values-idfdev.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/noteburst/values-idfdev.yaml b/applications/noteburst/values-idfdev.yaml index b1f15683d5..9486828271 100644 --- a/applications/noteburst/values-idfdev.yaml +++ b/applications/noteburst/values-idfdev.yaml @@ -1,5 +1,6 @@ image: pullPolicy: Always + tag: tickets-DM-47329 config: logLevel: "DEBUG" From 1cda529b2dab84e4e453551ca0c527633a3b54b1 Mon Sep 17 00:00:00 2001 From: adam Date: Thu, 7 Nov 2024 14:43:43 -0700 Subject: [PATCH 452/567] Try new release on idfdev --- applications/noteburst/values-idfdev.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/applications/noteburst/values-idfdev.yaml b/applications/noteburst/values-idfdev.yaml index 9486828271..3baa2ed30d 100644 --- a/applications/noteburst/values-idfdev.yaml +++ b/applications/noteburst/values-idfdev.yaml @@ -1,6 +1,5 @@ image: - pullPolicy: Always - tag: tickets-DM-47329 + tag: "0.14.0" config: logLevel: "DEBUG" From b4b97daa3c48f76bd1baf9662882c4c9563edb82 Mon Sep 17 00:00:00 2001 From: adam Date: Thu, 7 Nov 2024 14:48:36 -0700 Subject: [PATCH 453/567] Update Chart.yaml for noteburst to point to 0.14.0; pin back usdfdev --- applications/noteburst/Chart.yaml | 2 +- applications/noteburst/values-idfdev.yaml | 3 --- applications/noteburst/values-usdfdev.yaml | 2 +- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git 
a/applications/noteburst/Chart.yaml b/applications/noteburst/Chart.yaml index d47db25b12..93965bf255 100644 --- a/applications/noteburst/Chart.yaml +++ b/applications/noteburst/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v2 name: noteburst version: 1.0.0 -appVersion: "0.13.0" +appVersion: "0.14.0" description: Noteburst is a notebook execution service for the Rubin Science Platform. type: application home: https://noteburst.lsst.io/ diff --git a/applications/noteburst/values-idfdev.yaml b/applications/noteburst/values-idfdev.yaml index 3baa2ed30d..3fd9be7aae 100644 --- a/applications/noteburst/values-idfdev.yaml +++ b/applications/noteburst/values-idfdev.yaml @@ -1,6 +1,3 @@ -image: - tag: "0.14.0" - config: logLevel: "DEBUG" worker: diff --git a/applications/noteburst/values-usdfdev.yaml b/applications/noteburst/values-usdfdev.yaml index 75c085515f..612ce2000e 100644 --- a/applications/noteburst/values-usdfdev.yaml +++ b/applications/noteburst/values-usdfdev.yaml @@ -1,5 +1,5 @@ image: - pullPolicy: Always + tag: "0.13.0" replicaCount: 3 From e5b8c586c830946bc2b09407f287e4940d3781b6 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Thu, 7 Nov 2024 15:58:09 -0700 Subject: [PATCH 454/567] Increase M1M3 connector batch size - We notice the M1M3 cannot keep up with the default batch_size of 1000 messages and increased its value to 2500 --- applications/sasquatch/values-summit.yaml | 1 + applications/sasquatch/values-usdfprod.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index f7447f323b..24afd30be1 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -343,6 +343,7 @@ telegraf-kafka-consumer: debug: true m1m3: enabled: true + metric_batch_size: 2500 database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 
7e93e3caac..035ec26702 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -170,6 +170,7 @@ telegraf-kafka-consumer: m1m3: enabled: true repair: false + metric_batch_size: 2500 database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | From 1a3ed1fa4b36c85bcaaaa444292c53d3427381f2 Mon Sep 17 00:00:00 2001 From: gpfrancis <30290454+gpfrancis@users.noreply.github.com> Date: Fri, 8 Nov 2024 09:54:32 +0000 Subject: [PATCH 455/567] change storageclass to ceph-hdd --- applications/postgres/values-roe.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/postgres/values-roe.yaml b/applications/postgres/values-roe.yaml index b827fda643..376aec149b 100644 --- a/applications/postgres/values-roe.yaml +++ b/applications/postgres/values-roe.yaml @@ -7,4 +7,4 @@ nublado3_db: gafaelfawr_db: user: "gafaelfawr" db: "gafaelfawr" -postgresStorageClass: "standard" +postgresStorageClass: "ceph-hdd" From e0b5c0bd462d5de2692acaa631fb4b0ab94aa597 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Fri, 8 Nov 2024 16:35:16 -0300 Subject: [PATCH 456/567] Add AWS configurations for exposurelog on USDF-prod. --- applications/exposurelog/secrets-usdfprod.yaml | 12 ++++++++++++ applications/exposurelog/values-usdfprod.yaml | 13 ++++++++++++- 2 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 applications/exposurelog/secrets-usdfprod.yaml diff --git a/applications/exposurelog/secrets-usdfprod.yaml b/applications/exposurelog/secrets-usdfprod.yaml new file mode 100644 index 0000000000..317e9c5aab --- /dev/null +++ b/applications/exposurelog/secrets-usdfprod.yaml @@ -0,0 +1,12 @@ +"aws-credentials.ini": + description: >- + S3 Butler credentials in AWS format. + copy: + application: nublado + key: "aws-credentials.ini" +"postgres-credentials.txt": + description: >- + PostgreSQL credentials in its pgpass format for the Butler database. 
+ copy: + application: nublado + key: "postgres-credentials.txt" diff --git a/applications/exposurelog/values-usdfprod.yaml b/applications/exposurelog/values-usdfprod.yaml index 8f4f585d48..a2edbfd410 100644 --- a/applications/exposurelog/values-usdfprod.yaml +++ b/applications/exposurelog/values-usdfprod.yaml @@ -1,6 +1,17 @@ config: site_id: usdfprod - butler_uri_1: s3://rubin-summit-users/butler.yaml + butler_uri_1: s3://embargo@rubin-summit-users/butler.yaml db: host: usdf-summitdb.slac.stanford.edu user: usdf +env: + - name: AWS_SHARED_CREDENTIALS_FILE + value: "/var/secrets/butler/aws-credentials.ini" + - name: DAF_BUTLER_REPOSITORY_INDEX + value: "/project/data-repos.yaml" + - name: S3_ENDPOINT_URL + value: "https://sdfembs3.sdf.slac.stanford.edu" + - name: PGPASSFILE + value: "/var/secrets/butler/postgres-credentials.txt" + - name: PGUSER + value: "rubin" From eb75b9e2bae0df617478491ab3baab1f43dc4cbd Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Fri, 8 Nov 2024 13:35:37 -0800 Subject: [PATCH 457/567] Deploy Prompt Processing 4.7.2. --- .../values-usdfprod-prompt-processing.yaml | 2 +- .../values-usdfprod-prompt-processing.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index 844c69f0a3..1f93567fdd 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. 
- tag: 4.7.0 + tag: 4.7.2 instrument: pipelines: diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index dc1d2d0db3..955b5135e5 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -15,7 +15,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.7.1 + tag: 4.7.2 instrument: pipelines: From f4067848ba82e9d322c69bb4b4b8ae318582628d Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 13:58:16 +0000 Subject: [PATCH 458/567] Update Helm release vault to v0.29.0 --- applications/vault/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/vault/Chart.yaml b/applications/vault/Chart.yaml index 4c89b4f645..8226ae401f 100644 --- a/applications/vault/Chart.yaml +++ b/applications/vault/Chart.yaml @@ -4,5 +4,5 @@ version: 1.0.0 description: Secret Storage dependencies: - name: vault - version: 0.28.1 + version: 0.29.0 repository: https://helm.releases.hashicorp.com From 5e42571dd6e124d2822b6bb151be8a07cc176629 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 16:41:18 +0000 Subject: [PATCH 459/567] Update Helm release argo-cd to v7.7.2 --- applications/argocd/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argocd/Chart.yaml b/applications/argocd/Chart.yaml index db65dd7abc..1a7593b5d3 100644 --- a/applications/argocd/Chart.yaml +++ b/applications/argocd/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-cd - version: 7.6.12 + version: 7.7.2 repository: 
https://argoproj.github.io/argo-helm From 4e121c6ada9404f7289b01df1ce38ec5b46ae65d Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Tue, 12 Nov 2024 08:44:52 -0800 Subject: [PATCH 460/567] Remove labels from GafaelfawrIngress tmeplate exposurelog, narrativelog, and nightreport incorrectly added the Helm labels to the template for the Ingress, but that's not allowed by the GafaelfawrIngress schema. Remove them. --- applications/exposurelog/templates/ingress.yaml | 2 -- applications/narrativelog/templates/ingress.yaml | 2 -- applications/nightreport/templates/ingress.yaml | 2 -- 3 files changed, 6 deletions(-) diff --git a/applications/exposurelog/templates/ingress.yaml b/applications/exposurelog/templates/ingress.yaml index c929dfb9a8..c5eba0a88a 100644 --- a/applications/exposurelog/templates/ingress.yaml +++ b/applications/exposurelog/templates/ingress.yaml @@ -18,8 +18,6 @@ config: template: metadata: name: {{ template "exposurelog.fullname" . }} - labels: - {{- include "exposurelog.labels" . | nindent 4 }} spec: rules: - host: {{ required "global.host must be set" .Values.global.host | quote }} diff --git a/applications/narrativelog/templates/ingress.yaml b/applications/narrativelog/templates/ingress.yaml index f81cd57dd6..796e78fd1d 100644 --- a/applications/narrativelog/templates/ingress.yaml +++ b/applications/narrativelog/templates/ingress.yaml @@ -18,8 +18,6 @@ config: template: metadata: name: {{ template "narrativelog.fullname" . }} - labels: - {{- include "narrativelog.labels" . | nindent 4 }} spec: rules: - host: {{ required "global.host must be set" .Values.global.host | quote }} diff --git a/applications/nightreport/templates/ingress.yaml b/applications/nightreport/templates/ingress.yaml index 4cdf367a87..930d61eab8 100644 --- a/applications/nightreport/templates/ingress.yaml +++ b/applications/nightreport/templates/ingress.yaml @@ -18,8 +18,6 @@ config: template: metadata: name: {{ template "nightreport.fullname" . 
}} - labels: - {{- include "nightreport.labels" . | nindent 4 }} spec: rules: - host: {{ required "global.host must be set" .Values.global.host | quote }} From 4db8fbe47d9024dc27190147cd229555194ae263 Mon Sep 17 00:00:00 2001 From: Dan Fuchs Date: Tue, 12 Nov 2024 11:16:12 -0600 Subject: [PATCH 461/567] DM-47518: don't copy summit consdb secrets from `auxtel` --- applications/consdb/secrets-summit.yaml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/applications/consdb/secrets-summit.yaml b/applications/consdb/secrets-summit.yaml index 2695612b18..f7baafb1ec 100644 --- a/applications/consdb/secrets-summit.yaml +++ b/applications/consdb/secrets-summit.yaml @@ -1,12 +1,6 @@ lfa-password: description: >- LFA password, used for retrieving Header Service objects. - copy: - application: auxtel - key: aws-secret-access-key lfa-key: description: >- LFA key, used for retrieving Header Service objects. - copy: - application: auxtel - key: aws-access-key-id From 1052e0c68f5eb9b69f60eeb0648fd48f637e3fdc Mon Sep 17 00:00:00 2001 From: Dan Fuchs Date: Tue, 12 Nov 2024 11:49:01 -0600 Subject: [PATCH 462/567] DM-47518: don't copy consdb secrets from auxtel in tucson-teststand --- applications/consdb/secrets-tucson-teststand.yaml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/applications/consdb/secrets-tucson-teststand.yaml b/applications/consdb/secrets-tucson-teststand.yaml index 2695612b18..f7baafb1ec 100644 --- a/applications/consdb/secrets-tucson-teststand.yaml +++ b/applications/consdb/secrets-tucson-teststand.yaml @@ -1,12 +1,6 @@ lfa-password: description: >- LFA password, used for retrieving Header Service objects. - copy: - application: auxtel - key: aws-secret-access-key lfa-key: description: >- LFA key, used for retrieving Header Service objects. 
- copy: - application: auxtel - key: aws-access-key-id From 0d88330a19c58698754b5df7213bc508d8677461 Mon Sep 17 00:00:00 2001 From: pav511 <38131208+pav511@users.noreply.github.com> Date: Tue, 12 Nov 2024 12:02:25 -0800 Subject: [PATCH 463/567] saranda argocd rbac --- applications/argocd/values-usdfint.yaml | 1 + applications/argocd/values-usdfprod.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/applications/argocd/values-usdfint.yaml b/applications/argocd/values-usdfint.yaml index 12ba88dd96..db73444c20 100644 --- a/applications/argocd/values-usdfint.yaml +++ b/applications/argocd/values-usdfint.yaml @@ -58,6 +58,7 @@ argo-cd: g, spothi@slac.stanford.edu, role:developer g, bbrond@slac.stanford.edu, role:developer g, vbecker@slac.stanford.edu, role:developer + g, saranda@slac.stanford.edu, role:developer scopes: "[email]" server: diff --git a/applications/argocd/values-usdfprod.yaml b/applications/argocd/values-usdfprod.yaml index e97bea0f23..b39289de10 100644 --- a/applications/argocd/values-usdfprod.yaml +++ b/applications/argocd/values-usdfprod.yaml @@ -55,6 +55,7 @@ argo-cd: g, spothi@slac.stanford.edu, role:developer g, bbrond@slac.stanford.edu, role:developer g, vbecker@slac.stanford.edu, role:developer + g, saranda@slac.stanford.edu, role:developer scopes: "[email]" server: From b9298c28ab5c348efdd6756102d1300952afb836 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 12 Nov 2024 13:08:30 -0700 Subject: [PATCH 464/567] Adjust Kafka resources requests and limits - One of the kafka borkers is not being scheduled in TTS because of lack of resources. 
--- applications/sasquatch/values-tucson-teststand.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index cba6baf46d..3cfe4b3025 100644 --- a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -23,11 +23,11 @@ strimzi-kafka: host: sasquatch-tts-kafka-2.lsst.codes resources: requests: - memory: 80Gi - cpu: 4 + memory: 8Gi + cpu: 1 limits: - memory: 80Gi - cpu: 4 + memory: 8Gi + cpu: 1 metricsConfig: enabled: true kafkaExporter: From 792cf5d0b761292f42a3805450284f482da0dfc5 Mon Sep 17 00:00:00 2001 From: Dan Fuchs Date: Fri, 8 Nov 2024 12:32:21 -0600 Subject: [PATCH 465/567] DM-45394 mobu: Config from a single YAML configmap --- applications/mobu/Chart.yaml | 2 +- applications/mobu/README.md | 7 +-- .../mobu/templates/configmap-autostart.yaml | 11 ----- .../templates/configmap-github-ci-app.yaml | 11 ----- .../configmap-github-refresh-app.yaml | 11 ----- applications/mobu/templates/configmap.yaml | 9 ++++ applications/mobu/templates/deployment.yaml | 48 ++----------------- .../tests/github_ci_app_enabled_test.yaml | 36 +++++++------- .../mobu/tests/github_disabled_test.yaml | 44 ++++++----------- .../github_refresh_app_enabled_test.yaml | 24 +++++----- applications/mobu/values-idfdemo.yaml | 3 +- applications/mobu/values-idfdev.yaml | 3 +- applications/mobu/values-roundtable-dev.yaml | 1 - applications/mobu/values-usdfdev.yaml | 1 - applications/mobu/values-usdfint.yaml | 1 - applications/mobu/values-usdfprod.yaml | 1 - applications/mobu/values.yaml | 15 ++++-- 17 files changed, 80 insertions(+), 148 deletions(-) delete mode 100644 applications/mobu/templates/configmap-autostart.yaml delete mode 100644 applications/mobu/templates/configmap-github-ci-app.yaml delete mode 100644 applications/mobu/templates/configmap-github-refresh-app.yaml create mode 100644 
applications/mobu/templates/configmap.yaml diff --git a/applications/mobu/Chart.yaml b/applications/mobu/Chart.yaml index 453ec7eb7f..25cf5bdeeb 100644 --- a/applications/mobu/Chart.yaml +++ b/applications/mobu/Chart.yaml @@ -5,4 +5,4 @@ description: "Continuous integration testing" home: https://mobu.lsst.io/ sources: - "https://github.com/lsst-sqre/mobu" -appVersion: 12.0.2 +appVersion: 13.0.0 diff --git a/applications/mobu/README.md b/applications/mobu/README.md index bf46cb4c6a..4bbc954dc1 100644 --- a/applications/mobu/README.md +++ b/applications/mobu/README.md @@ -14,10 +14,11 @@ Continuous integration testing |-----|------|---------|-------------| | affinity | object | `{}` | Affinity rules for the mobu frontend pod | | config.autostart | list | `[]` | Autostart specification. Must be a list of mobu flock specifications. Each flock listed will be automatically started when mobu is started. | -| config.debug | bool | `false` | If set to true, include the output from all flocks in the main mobu log and disable structured JSON logging. | -| config.githubCiApp | object | disabled. | Configuration for the GitHub refresh app integration. See https://mobu.lsst.io/operations/github_ci_app.html#add-phalanx-configuration | -| config.githubRefreshApp | object | disabled. | Configuration for the GitHub refresh app integration. See https://mobu.lsst.io/operations/github_refresh_app.html#add-phalanx-configuration | +| config.githubCiApp | string | disabled. | Configuration for the GitHub refresh app integration. See https://mobu.lsst.io/operations/github_ci_app.html#add-phalanx-configuration | +| config.githubRefreshApp | string | disabled. | Configuration for the GitHub refresh app integration. See https://mobu.lsst.io/operations/github_refresh_app.html#add-phalanx-configuration | +| config.logLevel | string | `"INFO"` | Log level. Set to 'DEBUG' to include the output from all flocks in the main mobu log. 
| | config.pathPrefix | string | `"/mobu"` | Prefix for mobu's API routes. | +| config.profile | string | `"production"` | One of 'production' or 'development'. 'production' configures structured JSON logging, and 'development' configures unstructured human readable logging. | | config.slackAlerts | bool | `true` | Whether to send alerts and status to Slack. | | fullnameOverride | string | `""` | Override the full name for resources (includes the release name) | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | diff --git a/applications/mobu/templates/configmap-autostart.yaml b/applications/mobu/templates/configmap-autostart.yaml deleted file mode 100644 index 93537ae0b6..0000000000 --- a/applications/mobu/templates/configmap-autostart.yaml +++ /dev/null @@ -1,11 +0,0 @@ -{{- if .Values.config.autostart -}} -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ include "mobu.fullname" . }}-autostart - labels: - {{- include "mobu.labels" . | nindent 4 }} -data: - autostart.yaml: | - {{- toYaml .Values.config.autostart | nindent 4 }} -{{- end }} diff --git a/applications/mobu/templates/configmap-github-ci-app.yaml b/applications/mobu/templates/configmap-github-ci-app.yaml deleted file mode 100644 index a90491cd72..0000000000 --- a/applications/mobu/templates/configmap-github-ci-app.yaml +++ /dev/null @@ -1,11 +0,0 @@ -{{- if .Values.config.githubCiApp -}} -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ include "mobu.fullname" . }}-github-ci-app - labels: - {{- include "mobu.labels" . 
| nindent 4 }} -data: - github-ci-app.yaml: | - {{- toYaml .Values.config.githubCiApp | nindent 4 }} -{{- end }} diff --git a/applications/mobu/templates/configmap-github-refresh-app.yaml b/applications/mobu/templates/configmap-github-refresh-app.yaml deleted file mode 100644 index 04910d22ef..0000000000 --- a/applications/mobu/templates/configmap-github-refresh-app.yaml +++ /dev/null @@ -1,11 +0,0 @@ -{{- if .Values.config.githubRefreshApp -}} -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ include "mobu.fullname" . }}-github-refresh-app - labels: - {{- include "mobu.labels" . | nindent 4 }} -data: - github-refresh-app.yaml: | - {{- toYaml .Values.config.githubRefreshApp | nindent 4 }} -{{- end }} diff --git a/applications/mobu/templates/configmap.yaml b/applications/mobu/templates/configmap.yaml new file mode 100644 index 0000000000..d4e1f8f851 --- /dev/null +++ b/applications/mobu/templates/configmap.yaml @@ -0,0 +1,9 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "mobu.fullname" . }} + labels: + {{- include "mobu.labels" . | nindent 4 }} +data: + config.yaml: | + {{- toYaml .Values.config | nindent 4 }} diff --git a/applications/mobu/templates/deployment.yaml b/applications/mobu/templates/deployment.yaml index 07ab0f54c6..bbb05a3396 100644 --- a/applications/mobu/templates/deployment.yaml +++ b/applications/mobu/templates/deployment.yaml @@ -13,8 +13,10 @@ spec: type: "Recreate" template: metadata: - {{- with .Values.podAnnotations }} annotations: + # Force the pod to restart when the config map is updated. + checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} + {{- with .Values.podAnnotations }} {{- toYaml . | nindent 8 }} {{- end }} labels: @@ -31,10 +33,6 @@ spec: name: {{ template "mobu.fullname" . 
}}-secret key: "ALERT_HOOK" {{- end }} - {{- if .Values.config.autostart }} - - name: "MOBU_AUTOSTART_PATH" - value: "/etc/mobu/autostart.yaml" - {{- end }} - name: "MOBU_ENVIRONMENT_URL" value: {{ .Values.global.baseUrl }} - name: "MOBU_GAFAELFAWR_TOKEN" @@ -43,8 +41,6 @@ spec: name: {{ template "mobu.fullname" . }}-gafaelfawr-token key: "token" {{- if .Values.config.githubRefreshApp }} - - name: "MOBU_GITHUB_REFRESH_APP_CONFIG_PATH" - value: "/etc/mobu/github-refresh-app.yaml" - name: "MOBU_GITHUB_REFRESH_APP_WEBHOOK_SECRET" valueFrom: secretKeyRef: @@ -52,8 +48,6 @@ spec: key: "github-refresh-app-webhook-secret" {{- end}} {{- if .Values.config.githubCiApp }} - - name: "MOBU_GITHUB_CI_APP_CONFIG_PATH" - value: "/etc/mobu/github-ci-app.yaml" - name: "MOBU_GITHUB_CI_APP_ID" valueFrom: secretKeyRef: @@ -70,17 +64,6 @@ spec: name: {{ template "mobu.fullname" . }}-secret key: "github-ci-app-webhook-secret" {{- end}} - - name: "MOBU_PATH_PREFIX" - value: {{ .Values.config.pathPrefix | quote }} - {{- if .Values.config.debug }} - - name: "MOBU_LOGGING_PROFILE" - value: "development" - - name: "MOBU_LOG_LEVEL" - value: "DEBUG" - {{- else }} - - name: "MOBU_LOGGING_PROFILE" - value: "production" - {{- end }} image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy | quote }} ports: @@ -114,29 +97,8 @@ spec: runAsGroup: 1000 volumes: - name: "config" - projected: - sources: - {{- if .Values.config.autostart }} - - configMap: - name: {{ include "mobu.fullname" . }}-autostart - items: - - key: autostart.yaml - path: autostart.yaml - {{- end }} - {{- if .Values.config.githubCiApp }} - - configMap: - name: {{ include "mobu.fullname" . }}-github-ci-app - items: - - key: github-ci-app.yaml - path: github-ci-app.yaml - {{- end }} - {{- if .Values.config.githubRefreshApp }} - - configMap: - name: {{ include "mobu.fullname" . 
}}-github-refresh-app - items: - - key: github-refresh-app.yaml - path: github-refresh-app.yaml - {{- end }} + configMap: + name: "mobu" - name: "tmp" emptyDir: {} {{- with .Values.nodeSelector }} diff --git a/applications/mobu/tests/github_ci_app_enabled_test.yaml b/applications/mobu/tests/github_ci_app_enabled_test.yaml index fcc0995c1e..edb5723900 100644 --- a/applications/mobu/tests/github_ci_app_enabled_test.yaml +++ b/applications/mobu/tests/github_ci_app_enabled_test.yaml @@ -2,7 +2,7 @@ suite: Github CI App Integration Enabled set: config: githubCiApp: - accepted_github_orgs: + acceptedGithubOrgs: - "org1" - "org2" users: @@ -27,11 +27,6 @@ tests: - it: "Should inject secrets into the Deployment env" template: "deployment.yaml" asserts: - - contains: - path: "spec.template.spec.containers[0].env" - any: true - content: - name: "MOBU_GITHUB_CI_APP_CONFIG_PATH" - contains: path: "spec.template.spec.containers[0].env" any: true @@ -48,20 +43,27 @@ tests: content: name: "MOBU_GITHUB_CI_APP_WEBHOOK_SECRET" - it: "Should create a ConfigMap" - template: "configmap-github-ci-app.yaml" + template: "configmap.yaml" asserts: - containsDocument: kind: "ConfigMap" apiVersion: v1 - equal: - path: "data['github-ci-app.yaml']" + path: "data['config.yaml']" value: | - accepted_github_orgs: - - org1 - - org2 - scopes: - - exec:notebook - - read:tap - users: - - bot-mobu-ci-user-1 - - bot-mobu-ci-user-2 + autostart: [] + githubCiApp: + acceptedGithubOrgs: + - org1 + - org2 + scopes: + - exec:notebook + - read:tap + users: + - bot-mobu-ci-user-1 + - bot-mobu-ci-user-2 + githubRefreshApp: null + logLevel: INFO + pathPrefix: /mobu + profile: production + slackAlerts: true diff --git a/applications/mobu/tests/github_disabled_test.yaml b/applications/mobu/tests/github_disabled_test.yaml index ee33c78981..223cf551d3 100644 --- a/applications/mobu/tests/github_disabled_test.yaml +++ b/applications/mobu/tests/github_disabled_test.yaml @@ -4,34 +4,25 @@ set: global: host: 
"example.com" tests: - - it: "Should not create a GitHub CI app ingress" - template: "ingress-github-ci-app.yaml" + - it: "Should not include github things in the config file ConfigMap" + template: "configmap.yaml" asserts: - - hasDocuments: - count: 0 - - it: "Should not create a GitHub refresh app ingress" - template: "ingress-github-refresh-app.yaml" - asserts: - - hasDocuments: - count: 0 - - it: "Should not create a GitHub CI app ConfigMap" - template: "configmap-github-ci-app.yaml" - asserts: - - hasDocuments: - count: 0 - - it: "Should not create a GitHub refresh app ConfigMap" - template: "configmap-github-refresh-app.yaml" - asserts: - - hasDocuments: - count: 0 + - containsDocument: + kind: "ConfigMap" + apiVersion: v1 + - equal: + path: "data['config.yaml']" + value: | + autostart: [] + githubCiApp: null + githubRefreshApp: null + logLevel: INFO + pathPrefix: /mobu + profile: production + slackAlerts: true - it: "Should not inject GitHub CI app secrets into the Deployment env" template: "deployment.yaml" asserts: - - notContains: - path: "spec.template.spec.containers[0].env" - any: true - content: - name: "MOBU_GITHUB_CI_APP_CONFIG_PATH" - notContains: path: "spec.template.spec.containers[0].env" any: true @@ -50,11 +41,6 @@ tests: - it: "Should not inject GitHub refresh app secrets into the Deployment env" template: "deployment.yaml" asserts: - - notContains: - path: "spec.template.spec.containers[0].env" - any: true - content: - name: "MOBU_GITHUB_REFRESH_APP_CONFIG_PATH" - notContains: path: "spec.template.spec.containers[0].env" any: true diff --git a/applications/mobu/tests/github_refresh_app_enabled_test.yaml b/applications/mobu/tests/github_refresh_app_enabled_test.yaml index 13e84ec0e5..c038f8e334 100644 --- a/applications/mobu/tests/github_refresh_app_enabled_test.yaml +++ b/applications/mobu/tests/github_refresh_app_enabled_test.yaml @@ -2,7 +2,7 @@ suite: Github Refresh App Integration Enabled set: config: githubRefreshApp: - 
accepted_github_orgs: + acceptedGithubOrgs: - "org1" - "org2" global: @@ -21,25 +21,27 @@ tests: - it: "Should inject secrets into the Deployment env" template: "deployment.yaml" asserts: - - contains: - path: "spec.template.spec.containers[0].env" - any: true - content: - name: "MOBU_GITHUB_REFRESH_APP_CONFIG_PATH" - contains: path: "spec.template.spec.containers[0].env" any: true content: name: "MOBU_GITHUB_REFRESH_APP_WEBHOOK_SECRET" - it: "Should create a ConfigMap" - template: "configmap-github-refresh-app.yaml" + template: "configmap.yaml" asserts: - containsDocument: kind: "ConfigMap" apiVersion: v1 - equal: - path: "data['github-refresh-app.yaml']" + path: "data['config.yaml']" value: | - accepted_github_orgs: - - org1 - - org2 + autostart: [] + githubCiApp: null + githubRefreshApp: + acceptedGithubOrgs: + - org1 + - org2 + logLevel: INFO + pathPrefix: /mobu + profile: production + slackAlerts: true diff --git a/applications/mobu/values-idfdemo.yaml b/applications/mobu/values-idfdemo.yaml index ee9e81f4cc..74cb36b14c 100644 --- a/applications/mobu/values-idfdemo.yaml +++ b/applications/mobu/values-idfdemo.yaml @@ -1,3 +1,4 @@ config: - debug: true + logLevel: DEBUG + profile: development autostart: [] diff --git a/applications/mobu/values-idfdev.yaml b/applications/mobu/values-idfdev.yaml index 7bc506a8bc..5ec676ec7e 100644 --- a/applications/mobu/values-idfdev.yaml +++ b/applications/mobu/values-idfdev.yaml @@ -1,5 +1,6 @@ config: - debug: true + logLevel: DEBUG + profile: development githubRefreshApp: acceptedGithubOrgs: - lsst-sqre diff --git a/applications/mobu/values-roundtable-dev.yaml b/applications/mobu/values-roundtable-dev.yaml index f69f4ff4dd..ba29bd722d 100644 --- a/applications/mobu/values-roundtable-dev.yaml +++ b/applications/mobu/values-roundtable-dev.yaml @@ -1,5 +1,4 @@ config: - debug: true autostart: - name: "gitlfs" count: 1 diff --git a/applications/mobu/values-usdfdev.yaml b/applications/mobu/values-usdfdev.yaml index 
87c824fe18..873086520e 100644 --- a/applications/mobu/values-usdfdev.yaml +++ b/applications/mobu/values-usdfdev.yaml @@ -1,5 +1,4 @@ config: - debug: true githubRefreshApp: acceptedGithubOrgs: - lsst-sqre diff --git a/applications/mobu/values-usdfint.yaml b/applications/mobu/values-usdfint.yaml index 438dee94a6..cb733ed173 100644 --- a/applications/mobu/values-usdfint.yaml +++ b/applications/mobu/values-usdfint.yaml @@ -1,5 +1,4 @@ config: - debug: true githubRefreshApp: acceptedGithubOrgs: - lsst-sqre diff --git a/applications/mobu/values-usdfprod.yaml b/applications/mobu/values-usdfprod.yaml index b10d42b593..d8097ed9f6 100644 --- a/applications/mobu/values-usdfprod.yaml +++ b/applications/mobu/values-usdfprod.yaml @@ -1,5 +1,4 @@ config: - debug: true autostart: - name: "firefighter" count: 1 diff --git a/applications/mobu/values.yaml b/applications/mobu/values.yaml index 6c1dd4851f..8b88740624 100644 --- a/applications/mobu/values.yaml +++ b/applications/mobu/values.yaml @@ -29,17 +29,22 @@ config: # -- Configuration for the GitHub refresh app integration. # See https://mobu.lsst.io/operations/github_refresh_app.html#add-phalanx-configuration # @default -- disabled. - githubRefreshApp: {} + githubRefreshApp: null # -- Configuration for the GitHub CI app integration. # -- Configuration for the GitHub refresh app integration. # See https://mobu.lsst.io/operations/github_ci_app.html#add-phalanx-configuration # @default -- disabled. - githubCiApp: {} + githubCiApp: null - # -- If set to true, include the output from all flocks in the main mobu log - # and disable structured JSON logging. - debug: false + # -- Log level. Set to 'DEBUG' to include the output from all flocks in the + # main mobu log. + logLevel: INFO + + # -- One of 'production' or 'development'. 'production' configures structured + # JSON logging, and 'development' configures unstructured human readable + # logging. + profile: production # -- Whether to send alerts and status to Slack. 
slackAlerts: true From 17bc3711e371dddd7b8183948177923137cbd7b6 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Tue, 12 Nov 2024 15:37:46 -0800 Subject: [PATCH 466/567] Update Python and pre-commit dependencies --- .pre-commit-config.yaml | 2 +- requirements/dev.txt | 127 +++++++++++++--------------------------- requirements/main.txt | 6 +- requirements/tox.txt | 44 +++++++------- 4 files changed, 68 insertions(+), 111 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 829634ac21..604fb6c2dd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,7 +46,7 @@ repos: - --template-files=../helm-docs.md.gotmpl - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.2 + rev: v0.7.3 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/requirements/dev.txt b/requirements/dev.txt index 54483ec8d0..52cd42a798 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -310,41 +310,41 @@ coverage==7.6.4 \ # via # -r requirements/dev.in # pytest-cov -debugpy==1.8.7 \ - --hash=sha256:11ad72eb9ddb436afb8337891a986302e14944f0f755fd94e90d0d71e9100bba \ - --hash=sha256:171899588bcd412151e593bd40d9907133a7622cd6ecdbdb75f89d1551df13c2 \ - --hash=sha256:18b8f731ed3e2e1df8e9cdaa23fb1fc9c24e570cd0081625308ec51c82efe42e \ - --hash=sha256:29e1571c276d643757ea126d014abda081eb5ea4c851628b33de0c2b6245b037 \ - --hash=sha256:2efb84d6789352d7950b03d7f866e6d180284bc02c7e12cb37b489b7083d81aa \ - --hash=sha256:2f729228430ef191c1e4df72a75ac94e9bf77413ce5f3f900018712c9da0aaca \ - --hash=sha256:45c30aaefb3e1975e8a0258f5bbd26cd40cde9bfe71e9e5a7ac82e79bad64e39 \ - --hash=sha256:4b908291a1d051ef3331484de8e959ef3e66f12b5e610c203b5b75d2725613a7 \ - --hash=sha256:4d27d842311353ede0ad572600c62e4bcd74f458ee01ab0dd3a1a4457e7e3706 \ - --hash=sha256:57b00de1c8d2c84a61b90880f7e5b6deaf4c312ecbde3a0e8912f2a56c4ac9ae \ - --hash=sha256:628a11f4b295ffb4141d8242a9bb52b77ad4a63a2ad19217a93be0f77f2c28c9 \ - 
--hash=sha256:6a9d9d6d31846d8e34f52987ee0f1a904c7baa4912bf4843ab39dadf9b8f3e0d \ - --hash=sha256:6e1c4ffb0c79f66e89dfd97944f335880f0d50ad29525dc792785384923e2211 \ - --hash=sha256:703c1fd62ae0356e194f3e7b7a92acd931f71fe81c4b3be2c17a7b8a4b546ec2 \ - --hash=sha256:85ce9c1d0eebf622f86cc68618ad64bf66c4fc3197d88f74bb695a416837dd55 \ - --hash=sha256:90d93e4f2db442f8222dec5ec55ccfc8005821028982f1968ebf551d32b28907 \ - --hash=sha256:93176e7672551cb5281577cdb62c63aadc87ec036f0c6a486f0ded337c504596 \ - --hash=sha256:95fe04a573b8b22896c404365e03f4eda0ce0ba135b7667a1e57bd079793b96b \ - --hash=sha256:a6cf2510740e0c0b4a40330640e4b454f928c7b99b0c9dbf48b11efba08a8cda \ - --hash=sha256:b12515e04720e9e5c2216cc7086d0edadf25d7ab7e3564ec8b4521cf111b4f8c \ - --hash=sha256:b6db2a370e2700557a976eaadb16243ec9c91bd46f1b3bb15376d7aaa7632c81 \ - --hash=sha256:caf528ff9e7308b74a1749c183d6808ffbedbb9fb6af78b033c28974d9b8831f \ - --hash=sha256:cba1d078cf2e1e0b8402e6bda528bf8fda7ccd158c3dba6c012b7897747c41a0 \ - --hash=sha256:d050a1ec7e925f514f0f6594a1e522580317da31fbda1af71d1530d6ea1f2b40 \ - --hash=sha256:da8df5b89a41f1fd31503b179d0a84a5fdb752dddd5b5388dbd1ae23cda31ce9 \ - --hash=sha256:f2f4349a28e3228a42958f8ddaa6333d6f8282d5edaea456070e48609c5983b7 +debugpy==1.8.8 \ + --hash=sha256:09cc7b162586ea2171eea055985da2702b0723f6f907a423c9b2da5996ad67ba \ + --hash=sha256:0cc94186340be87b9ac5a707184ec8f36547fb66636d1029ff4f1cc020e53996 \ + --hash=sha256:143ef07940aeb8e7316de48f5ed9447644da5203726fca378f3a6952a50a9eae \ + --hash=sha256:19ffbd84e757a6ca0113574d1bf5a2298b3947320a3e9d7d8dc3377f02d9f864 \ + --hash=sha256:26b461123a030e82602a750fb24d7801776aa81cd78404e54ab60e8b5fecdad5 \ + --hash=sha256:3a9c013077a3a0000e83d97cf9cc9328d2b0bbb31f56b0e99ea3662d29d7a6a2 \ + --hash=sha256:4b93e4832fd4a759a0c465c967214ed0c8a6e8914bced63a28ddb0dd8c5f078b \ + --hash=sha256:535f4fb1c024ddca5913bb0eb17880c8f24ba28aa2c225059db145ee557035e9 \ + 
--hash=sha256:53709d4ec586b525724819dc6af1a7703502f7e06f34ded7157f7b1f963bb854 \ + --hash=sha256:5c0e5a38c7f9b481bf31277d2f74d2109292179081f11108e668195ef926c0f9 \ + --hash=sha256:5c6e885dbf12015aed73770f29dec7023cb310d0dc2ba8bfbeb5c8e43f80edc9 \ + --hash=sha256:64674e95916e53c2e9540a056e5f489e0ad4872645399d778f7c598eacb7b7f9 \ + --hash=sha256:705cd123a773d184860ed8dae99becd879dfec361098edbefb5fc0d3683eb804 \ + --hash=sha256:890fd16803f50aa9cb1a9b9b25b5ec321656dd6b78157c74283de241993d086f \ + --hash=sha256:90244598214bbe704aa47556ec591d2f9869ff9e042e301a2859c57106649add \ + --hash=sha256:a6531d952b565b7cb2fbd1ef5df3d333cf160b44f37547a4e7cf73666aca5d8d \ + --hash=sha256:b01f4a5e5c5fb1d34f4ccba99a20ed01eabc45a4684f4948b5db17a319dfb23f \ + --hash=sha256:c399023146e40ae373753a58d1be0a98bf6397fadc737b97ad612886b53df318 \ + --hash=sha256:d4483836da2a533f4b1454dffc9f668096ac0433de855f0c22cdce8c9f7e10c4 \ + --hash=sha256:e59b1607c51b71545cb3496876544f7186a7a27c00b436a62f285603cc68d1c6 \ + --hash=sha256:e6355385db85cbd666be703a96ab7351bc9e6c61d694893206f8001e22aee091 \ + --hash=sha256:ec684553aba5b4066d4de510859922419febc710df7bba04fe9e7ef3de15d34f \ + --hash=sha256:eea8821d998ebeb02f0625dd0d76839ddde8cbf8152ebbe289dd7acf2cdc6b98 \ + --hash=sha256:f3cbf1833e644a3100eadb6120f25be8a532035e8245584c4f7532937edc652a \ + --hash=sha256:f95651bdcbfd3b27a408869a53fbefcc2bcae13b694daee5f1365b1b83a00113 \ + --hash=sha256:ffe94dd5e9a6739a75f0b85316dc185560db3e97afa6b215628d1b6a17561cb2 # via ipykernel decorator==5.1.1 \ --hash=sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330 \ --hash=sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186 # via ipython -diagrams==0.24.0 \ - --hash=sha256:0ded0099c70aa847ceec341c8224aaf2a46fcd180402fa50638a77cf231b761f \ - --hash=sha256:da20d7e326fd55631a86386ee484832fd00ddecf9ef0c07a42e299bf2a5cb6a2 +diagrams==0.24.1 \ + --hash=sha256:47b77a0e4dac926a095ff2ae4dd4ec1a192be781799befc660a8f5ce6ea1052f \ + 
--hash=sha256:c1e3267b018bdb66886a09214c7a7884796a0c28456f8aefdf38916a232c2362 # via sphinx-diagrams documenteer==1.4.2 \ --hash=sha256:03a4cf3b8ffa4905c59662131f87afe77417238f10e9f01075d849f08a32e99d \ @@ -493,9 +493,9 @@ ipython==8.29.0 \ # via # ipykernel # myst-nb -jedi==0.19.1 \ - --hash=sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd \ - --hash=sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0 +jedi==0.19.2 \ + --hash=sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0 \ + --hash=sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9 # via ipython jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ @@ -694,9 +694,9 @@ nest-asyncio==1.6.0 \ --hash=sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe \ --hash=sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c # via ipykernel -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +packaging==24.2 \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f # via # ipykernel # pydata-sphinx-theme @@ -1215,9 +1215,9 @@ rpds-py==0.21.0 \ # via # jsonschema # referencing -setuptools==75.3.0 \ - --hash=sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd \ - --hash=sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686 +setuptools==75.4.0 \ + --hash=sha256:1dc484f5cf56fd3fe7216d7b8df820802e7246cfb534a1db2aa64f14fcb9cdcb \ + --hash=sha256:b3c5d862f98500b06ffdf7cc4499b48c46c317d8d56cb30b5c8bce4d88f5c216 # via # documenteer # sphinxcontrib-bibtex @@ -1454,49 +1454,6 @@ traitlets==5.14.3 \ # matplotlib-inline # nbclient # nbformat -typed-ast==1.5.5 \ - 
--hash=sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10 \ - --hash=sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede \ - --hash=sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e \ - --hash=sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c \ - --hash=sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d \ - --hash=sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8 \ - --hash=sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e \ - --hash=sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5 \ - --hash=sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155 \ - --hash=sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4 \ - --hash=sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba \ - --hash=sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5 \ - --hash=sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a \ - --hash=sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b \ - --hash=sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311 \ - --hash=sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769 \ - --hash=sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686 \ - --hash=sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d \ - --hash=sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2 \ - --hash=sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814 \ - --hash=sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9 \ - --hash=sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b \ - --hash=sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b \ - --hash=sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4 \ - 
--hash=sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd \ - --hash=sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18 \ - --hash=sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa \ - --hash=sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6 \ - --hash=sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee \ - --hash=sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88 \ - --hash=sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4 \ - --hash=sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431 \ - --hash=sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04 \ - --hash=sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d \ - --hash=sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02 \ - --hash=sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8 \ - --hash=sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437 \ - --hash=sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274 \ - --hash=sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f \ - --hash=sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a \ - --hash=sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2 - # via diagrams types-pyyaml==6.0.12.20240917 \ --hash=sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570 \ --hash=sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587 @@ -1527,7 +1484,7 @@ wcwidth==0.2.13 \ --hash=sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859 \ --hash=sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5 # via prompt-toolkit -zipp==3.20.2 \ - --hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ - 
--hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 +zipp==3.21.0 \ + --hash=sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4 \ + --hash=sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931 # via importlib-metadata diff --git a/requirements/main.txt b/requirements/main.txt index 4aba020f7c..e95e00df81 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -264,9 +264,9 @@ cryptography==43.0.3 \ # phalanx (pyproject.toml) # pyjwt # safir -fastapi==0.115.4 \ - --hash=sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742 \ - --hash=sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349 +fastapi==0.115.5 \ + --hash=sha256:0e7a4d0dc0d01c68df21887cce0945e72d3c48b9f4f79dfe7a7d53aa08fbb289 \ + --hash=sha256:596b95adbe1474da47049e802f9a65ab2ffa9c2b07e7efee70eb8a66c9f2f796 # via safir gidgethub==5.3.0 \ --hash=sha256:4dd92f2252d12756b13f9dd15cde322bfb0d625b6fb5d680da1567ec74b462c0 \ diff --git a/requirements/tox.txt b/requirements/tox.txt index 06fb544a53..44e69d23fe 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -25,9 +25,9 @@ filelock==3.16.1 \ # via # tox # virtualenv -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +packaging==24.2 \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f # via # -c requirements/dev.txt # pyproject-api @@ -60,25 +60,25 @@ tox-uv==1.16.0 \ --hash=sha256:71b2e2fa6c35c1360b91a302df1d65b3e5a1f656b321c5ebf7b84545804c9f01 \ --hash=sha256:e6f0b525a687e745ab878d07cbf5c7e85d582028d4a7c8935f95e84350651432 # via -r requirements/tox.in -uv==0.4.30 \ - --hash=sha256:0c89f2eff63a08d04e81629611f43b1ffa668af6de0382b95a71599af7d4b77c \ - 
--hash=sha256:1a83df281c5d900b4758b1a3969b3cff57231f9027db8508b71dce1f2da78684 \ - --hash=sha256:232575f30ed971ea32d4a525b7146c4b088a07ed6e70a31da63792d563fcac44 \ - --hash=sha256:353617bfcf72e1eabade426d83fb86a69d11273d1612aabc3f4566d41c596c97 \ - --hash=sha256:444468ad0e94b35cbf6acfc8a28589cfe1247136d43895e60a18955ff89a07ad \ - --hash=sha256:44c5aeb5b374f9fd1083959934daa9020db3610f0405198c5e3d8ec1f23d961d \ - --hash=sha256:4aecd9fb39cf018e129627090a1d35af2b0184bb87078d573c9998f5e4072416 \ - --hash=sha256:4d41d09cabba1988728c2d9b9ad25f79233c2aa3d6ecd724c36f4678c4c89711 \ - --hash=sha256:4ddad09385221fa5c609169e4a0dd5bee27cf56c1dc450d4cdc113122c54bb09 \ - --hash=sha256:63196143f45018364c450ba94279a5bcff8562c14ba63deb41a92ed30baa6e22 \ - --hash=sha256:6395820540f368f622e818735862abd633dfe7e729c450fca56b65bab4b46661 \ - --hash=sha256:7f09bd6a853767863e2fb905f0eb1a0ed7afa9ea118852e5c02d2b451944e1cf \ - --hash=sha256:9e17a799c6279800996828e10288ca8ccc40cc883d8998802b938aa671dfa9ce \ - --hash=sha256:9ed0183e747065b9b1bcfb699ff10df671ebe6259709ce83e709f86cea564aee \ - --hash=sha256:d9de718380e2f167243ca5e1dccea781e06404158442491255fec5955d57fed9 \ - --hash=sha256:dedcae3619f0eb181459b597fefefd99cb21fe5a5a48a530be6f5ad934399bfb \ - --hash=sha256:ea55ca0fe5bdd04e46deaf395b3daf4fa92392f774e83610d066a2b272af5d3f \ - --hash=sha256:f63d6646acdf2f38a5afca9fb9eeac62efa663a57f3c134f735a5f575b4e748f +uv==0.5.1 \ + --hash=sha256:01c40f756e9536c05fdf3485c1dfe3da610c3169195bbe20fab03a4c4b7a0d98 \ + --hash=sha256:3db7513c804fb89dcde671ba917cc486cfb574408d6257e19b19ae6b55f5982f \ + --hash=sha256:3ffb230be0f6552576da67a2737a32a6a640e4b3f42144088222a669802d7f10 \ + --hash=sha256:4601d40b0c02aff9fb791efa5b6f4c7dbad0970e13ac679aa8fb07365f331354 \ + --hash=sha256:4d1ec4a1bc19b523a84fc1bf2a92e9c4d982c831d3da450af71fc3057999d456 \ + --hash=sha256:6a76765c3cc49268f3c6773bd89a0dacf8a91b040fc3faea6c527ef6f2308eba \ + --hash=sha256:6ec61220d883751777cbabf0b076607cfbdeb812bc52c28722e897271461e589 
\ + --hash=sha256:72b54a3308e13a81aa2df19baea40611fc344c7556f75d2113f9b9b5a894355e \ + --hash=sha256:73853b98bce9e118cda2d64360ddd7e0f79e237aca8cd2f28b6d5679400b239e \ + --hash=sha256:821b6a9d591d3e951fbe81c53d32499d11500100d66b1c119e183f3d4a6cd07c \ + --hash=sha256:8dce5b6d6dea41db71fe8d9895167cc5abf3e7b28c016174b1b9a9aecb74d483 \ + --hash=sha256:922685dcaa1c9b6663649b379f9bdbe5b87af230f512e69398efc51bd9d8b8eb \ + --hash=sha256:93f0a02ea9149f4e7e359ef92da6f221da2ecf458cda2af729a1f6fa8c3ed1d2 \ + --hash=sha256:aaa63053ff6dc4456e2ac2a9b6a8eda0cfaa1e0f861633d9e7315c7df9a0a525 \ + --hash=sha256:ac3fce68002e79f3c070f3e7d914e992f205f05af00bfffbe6c44d37aa39c86a \ + --hash=sha256:ad2dd8a994a8334a5d4b354589be4b8c4b3b2ebb7bb2f2976c8e21d2799f45a9 \ + --hash=sha256:c4d209164448c8529e21aca4ef1e3da94303b1bf726924786feffd87ed93ab4a \ + --hash=sha256:f66859e67d10ffff8b17c67c7ede207d67487cef20c3d17bc427b690f9dff795 # via tox-uv virtualenv==20.27.1 \ --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ From b754fc5e9ca5ff0fb9fcbfec3c62bf7499c7d60f Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Tue, 12 Nov 2024 15:47:58 -0800 Subject: [PATCH 467/567] Add TAXICAB to unfurlbot Jira topics Also clean up the unfurlbot `values.yaml` documentation a bit. The addition was requested by Eli Rykoff. --- applications/unfurlbot/README.md | 2 +- applications/unfurlbot/values.yaml | 14 ++++++++++---- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/applications/unfurlbot/README.md b/applications/unfurlbot/README.md index c9f6b75900..4d615367d9 100644 --- a/applications/unfurlbot/README.md +++ b/applications/unfurlbot/README.md @@ -15,7 +15,7 @@ Squarebot backend that unfurls Jira issues. 
| autoscaling.maxReplicas | int | `100` | Maximum number of unfurlbot deployment pods | | autoscaling.minReplicas | int | `1` | Minimum number of unfurlbot deployment pods | | autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization of unfurlbot deployment pods | -| config.jiraProjects | string | `"ADMIN, CCB, CAP, COMCAM, COMT, DM, EPO, FRACAS, IAM, IHS, IT, ITRFC, LOVE, LASD, LIT, LOPS, LVV, M1M3V, OPSIM, PHOSIM, PST, PSV, PUB, RFC, RM, SAFE, SIM, SPP, SBTT, SE, SUMMIT, TSAIV, TCT, SECMVERIF, TMDC, TPC, TSEIA, TAS, TELV, TSSAL, TSS, TSSPP, WMP, PREOPS, OBS, SITCOM, BLOCK\n"` | Names of Jira projects to unfurl (comma-separated) | +| config.jiraProjects | string | See `values.yaml` | Names of Jira projects to unfurl (comma-separated) | | config.jiraUrl | string | `"https://rubinobs.atlassian.net/"` | Jira base URL | | config.logLevel | string | `"INFO"` | Logging level: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL" | | config.redisUrl | string | `"redis://unfurlbot-redis:6379/0"` | URL to the local redis instance | diff --git a/applications/unfurlbot/values.yaml b/applications/unfurlbot/values.yaml index df1aece2f7..207f34358e 100644 --- a/applications/unfurlbot/values.yaml +++ b/applications/unfurlbot/values.yaml @@ -26,22 +26,27 @@ config: # -- Kafka topic name for the Slack `app_mention` events slackAppMention: "lsst.square-events.squarebot.slack.app.mention" - # -- Kafka topic name for the Slack `message.channels` events (public channels) + # -- Kafka topic name for the Slack `message.channels` events (public + # channels) slackMessageChannels: "lsst.square-events.squarebot.slack.message.channels" - # -- Kafka topic name for the Slack `message.groups` events (private channels) + # -- Kafka topic name for the Slack `message.groups` events (private + # channels) slackMessageGroups: "lsst.square-events.squarebot.slack.message.groups" - # -- Kafka topic name for the Slack `message.im` events (direct message channels) + # -- Kafka topic 
name for the Slack `message.im` events (direct message + # channels) slackMessageIm: "lsst.square-events.squarebot.slack.message.im" - # -- Kafka topic name for the Slack `message.mpim` events (multi-person direct messages) + # -- Kafka topic name for the Slack `message.mpim` events (multi-person + # direct messages) slackMessageMpim: "lsst.square-events.squarebot.slack.message.mpim" # -- Jira base URL jiraUrl: "https://rubinobs.atlassian.net/" # -- Names of Jira projects to unfurl (comma-separated) + # @default -- See `values.yaml` jiraProjects: > ADMIN, CCB, @@ -81,6 +86,7 @@ config: TPC, TSEIA, TAS, + TAXICAB, TELV, TSSAL, TSS, From 1914963ece78278a93a7b80f6e4eb0541165b7d9 Mon Sep 17 00:00:00 2001 From: Colin Slater Date: Wed, 6 Nov 2024 13:07:38 -0800 Subject: [PATCH 468/567] Switch from butler pod to production_tools. --- applications/plot-navigator/README.md | 1 - .../templates/butler-deployment.yaml | 125 ------------------ .../templates/butler-service.yaml | 12 -- .../plot-navigator/templates/ingress.yaml | 7 + .../templates/production-tools-worker.yaml | 2 +- .../templates/production-tools.yaml | 2 +- .../plot-navigator/values-usdfdev.yaml | 28 +--- .../plot-navigator/values-usdfint.yaml | 28 +--- .../plot-navigator/values-usdfprod.yaml | 28 +--- applications/plot-navigator/values.yaml | 10 -- 10 files changed, 21 insertions(+), 222 deletions(-) delete mode 100644 applications/plot-navigator/templates/butler-deployment.yaml delete mode 100644 applications/plot-navigator/templates/butler-service.yaml diff --git a/applications/plot-navigator/README.md b/applications/plot-navigator/README.md index 0f4f7590c0..7bb6496183 100644 --- a/applications/plot-navigator/README.md +++ b/applications/plot-navigator/README.md @@ -10,7 +10,6 @@ Plot-navigator | Key | Type | Default | Description | |-----|------|---------|-------------| -| butlerResources | object | see `values.yaml` | Resource limits and requests for the butler pod | | config.persistentVolumeClaims | list 
| `[]` | PersistentVolumeClaims to create. | | config.separateSecrets | bool | `false` | Whether to use the new secrets management scheme | | config.volume_mounts | list | `[]` | Mount points for additional volumes | diff --git a/applications/plot-navigator/templates/butler-deployment.yaml b/applications/plot-navigator/templates/butler-deployment.yaml deleted file mode 100644 index 1d53204d70..0000000000 --- a/applications/plot-navigator/templates/butler-deployment.yaml +++ /dev/null @@ -1,125 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: "internal-butler" - labels: - {{- include "plot-navigator.labels" . | nindent 4 }} - app.kubernetes.io/component: butler -spec: - selector: - matchLabels: - {{- include "plot-navigator.selectorLabels" . | nindent 6 }} - app.kubernetes.io/component: butler - template: - metadata: - {{- with .Values.podAnnotations }} - annotations: - {{- toYaml . | nindent 8 }} - {{- end }} - labels: - {{- include "plot-navigator.selectorLabels" . | nindent 8 }} - app.kubernetes.io/component: butler - spec: - automountServiceAccountToken: false - containers: - - name: internal-butler - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - "all" - readOnlyRootFilesystem: true - image: ghcr.io/ctslater/daf_butler:test_cts - imagePullPolicy: Always - ports: - - name: "http" - containerPort: 8080 - protocol: "TCP" - readinessProbe: - httpGet: - path: "/" - port: "http" - resources: - {{- toYaml .Values.butlerResources | nindent 12 }} - env: - - name: AWS_SHARED_CREDENTIALS_FILE - value: "/opt/lsst/butler/secrets/aws-credentials.ini" - - name: PGPASSFILE - value: "/opt/lsst/butler/secrets/postgres-credentials.txt" - - name: GOOGLE_APPLICATION_CREDENTIALS - value: "/opt/lsst/butler/secrets/butler-gcs-creds.json" - - name: S3_ENDPOINT_URL - value: {{ .Values.butlerConfig.s3EndpointUrl | quote }} - - name: DAF_BUTLER_REPOSITORIES - value: {{ .Values.butlerConfig.repositories | toJson | quote }} - {{ if 
.Values.butlerConfig.pguser }} - - name: PGUSER - value: {{ .Values.butlerConfig.pguser | quote }} - {{ end }} - volumeMounts: - - name: "butler-secrets" - mountPath: "/opt/lsst/butler/secrets" - readOnly: true - {{- with .Values.butlerConfig.volume_mounts }} - {{- . | toYaml | nindent 12 }} - {{- end }} - volumes: - # butler-secrets-raw pulls in the secrets from the vault as files. - # These files are owned by root and group/world readable. - # This volume is not used directly by the container running the actual - # Butler application. - - name: "butler-secrets-raw" - secret: - secretName: {{ include "plot-navigator.fullname" . }} - # Postgres will not use a pgpass file (postgres-credentials.txt in the - # vault) if it is group/world writeable or owned by a different user. - # So the initContainers below copies the files from butler-secrets-raw - # to butlet-secrets, changing the owner and permissions. - # This volume is the one used by the container running the actual - # Butler application. - - name: "butler-secrets" - emptyDir: {} - {{- with .Values.butlerConfig.volumes }} - {{- . | toYaml | nindent 8 }} - {{- end }} - initContainers: - # To deal with the Postgres file permission issued mentioned above, - # copy the secrets from butler-secrets-raw to butler-secrets. - # This initContainer definition is borrowed from obsloctap's - # deployment.yaml. 
- - name: fix-secret-permissions - image: ghcr.io/ctslater/daf_butler:test_cts - imagePullPolicy: Always - command: - - "/bin/sh" - - "-c" - - | - cp -RL /tmp/butler-secrets-raw/* /opt/lsst/butler/secrets/ - chmod 0400 /opt/lsst/butler/secrets/* - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - "all" - volumeMounts: - - name: "butler-secrets" - mountPath: "/opt/lsst/butler/secrets" - - name: "butler-secrets-raw" - mountPath: "/tmp/butler-secrets-raw" - readOnly: true - securityContext: - runAsNonRoot: true - runAsUser: 1000 - runAsGroup: 4085 - {{- with .Values.nodeSelector }} - nodeSelector: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.affinity }} - affinity: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.tolerations }} - tolerations: - {{- toYaml . | nindent 8 }} - {{- end }} diff --git a/applications/plot-navigator/templates/butler-service.yaml b/applications/plot-navigator/templates/butler-service.yaml deleted file mode 100644 index 7482986a49..0000000000 --- a/applications/plot-navigator/templates/butler-service.yaml +++ /dev/null @@ -1,12 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: internal-butler -spec: - selector: - {{- include "plot-navigator.selectorLabels" . 
| nindent 4 }} - app.kubernetes.io/component: butler - ports: - - port: 80 - protocol: TCP - targetPort: 8080 diff --git a/applications/plot-navigator/templates/ingress.yaml b/applications/plot-navigator/templates/ingress.yaml index f431ffabc1..a0c1d19102 100644 --- a/applications/plot-navigator/templates/ingress.yaml +++ b/applications/plot-navigator/templates/ingress.yaml @@ -47,3 +47,10 @@ template: name: "production-tools" port: number: 8080 + - path: "/plot-navigator/images" + pathType: "Prefix" + backend: + service: + name: "production-tools" + port: + number: 8080 diff --git a/applications/plot-navigator/templates/production-tools-worker.yaml b/applications/plot-navigator/templates/production-tools-worker.yaml index 1b233ffeee..6c670bf996 100644 --- a/applications/plot-navigator/templates/production-tools-worker.yaml +++ b/applications/plot-navigator/templates/production-tools-worker.yaml @@ -45,7 +45,7 @@ spec: secretKeyRef: name: {{ include "plot-navigator.fullname" . }} key: S3_SECRET - - name: CM_ARQ_REDIS_PASSWORD + - name: REDIS_PASSWORD valueFrom: secretKeyRef: name: redis-secret diff --git a/applications/plot-navigator/templates/production-tools.yaml b/applications/plot-navigator/templates/production-tools.yaml index 093a4e705f..f5e8298e9e 100644 --- a/applications/plot-navigator/templates/production-tools.yaml +++ b/applications/plot-navigator/templates/production-tools.yaml @@ -45,7 +45,7 @@ spec: secretKeyRef: name: {{ include "plot-navigator.fullname" . 
}} key: S3_SECRET - - name: CM_ARQ_REDIS_PASSWORD + - name: REDIS_PASSWORD valueFrom: secretKeyRef: name: redis-secret diff --git a/applications/plot-navigator/values-usdfdev.yaml b/applications/plot-navigator/values-usdfdev.yaml index 7412d951f0..5af84b8757 100644 --- a/applications/plot-navigator/values-usdfdev.yaml +++ b/applications/plot-navigator/values-usdfdev.yaml @@ -28,16 +28,18 @@ config: productionTools: image: repository: ghcr.io/lsst-dm/production_tools - tag: 0.1 + tag: 0.1.2 env: DAF_BUTLER_REPOSITORY_INDEX: "/sdf/group/rubin/shared/data-repos.yaml" PGPASSFILE: "/opt/lsst/butler/secrets/postgres-credentials.txt" PGUSER: "rubin" AWS_SHARED_CREDENTIALS_FILE: "/opt/lsst/butler/secrets/aws-credentials.ini" S3_ENDPOINT_URL: "https://s3dfrgw.slac.stanford.edu" + LSST_RESOURCES_S3_PROFILE_embargo: "https://sdfembs3.sdf.slac.stanford.edu" LSST_DISABLE_BUCKET_VALIDATION: "1" REDIS_HOST: "plot-navigator-redis" REDIS_PORT: "6379" + BUTLER_REPO_NAMES: "embargo,/repo/embargo,/repo/main,/repo/dc2" persistentVolumeClaims: - name: sdf-group-rubin storageClassName: sdf-group-rubin @@ -58,27 +60,5 @@ productionTools: mountPath: /sdf/data/rubin readOnly: true -butlerConfig: - pguser: "rubin" - s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu" - repositories: - embargo: "s3://rubin-summit-users/butler.yaml" - main: "/sdf/group/rubin/repo/main" - dc2: "/sdf/group/rubin/repo/dc2" - volumes: - - name: sdf-group-rubin - persistentVolumeClaim: - claimName: sdf-group-rubin - - name: sdf-data-rubin - persistentVolumeClaim: - claimName: sdf-data-rubin - volume_mounts: - - name: sdf-group-rubin - mountPath: /sdf/group/rubin - readOnly: true - - name: sdf-data-rubin - mountPath: /sdf/data/rubin - readOnly: true - image: - tag: v0.2.1 + tag: v0.2.2 diff --git a/applications/plot-navigator/values-usdfint.yaml b/applications/plot-navigator/values-usdfint.yaml index 7412d951f0..5af84b8757 100644 --- a/applications/plot-navigator/values-usdfint.yaml +++ 
b/applications/plot-navigator/values-usdfint.yaml @@ -28,16 +28,18 @@ config: productionTools: image: repository: ghcr.io/lsst-dm/production_tools - tag: 0.1 + tag: 0.1.2 env: DAF_BUTLER_REPOSITORY_INDEX: "/sdf/group/rubin/shared/data-repos.yaml" PGPASSFILE: "/opt/lsst/butler/secrets/postgres-credentials.txt" PGUSER: "rubin" AWS_SHARED_CREDENTIALS_FILE: "/opt/lsst/butler/secrets/aws-credentials.ini" S3_ENDPOINT_URL: "https://s3dfrgw.slac.stanford.edu" + LSST_RESOURCES_S3_PROFILE_embargo: "https://sdfembs3.sdf.slac.stanford.edu" LSST_DISABLE_BUCKET_VALIDATION: "1" REDIS_HOST: "plot-navigator-redis" REDIS_PORT: "6379" + BUTLER_REPO_NAMES: "embargo,/repo/embargo,/repo/main,/repo/dc2" persistentVolumeClaims: - name: sdf-group-rubin storageClassName: sdf-group-rubin @@ -58,27 +60,5 @@ productionTools: mountPath: /sdf/data/rubin readOnly: true -butlerConfig: - pguser: "rubin" - s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu" - repositories: - embargo: "s3://rubin-summit-users/butler.yaml" - main: "/sdf/group/rubin/repo/main" - dc2: "/sdf/group/rubin/repo/dc2" - volumes: - - name: sdf-group-rubin - persistentVolumeClaim: - claimName: sdf-group-rubin - - name: sdf-data-rubin - persistentVolumeClaim: - claimName: sdf-data-rubin - volume_mounts: - - name: sdf-group-rubin - mountPath: /sdf/group/rubin - readOnly: true - - name: sdf-data-rubin - mountPath: /sdf/data/rubin - readOnly: true - image: - tag: v0.2.1 + tag: v0.2.2 diff --git a/applications/plot-navigator/values-usdfprod.yaml b/applications/plot-navigator/values-usdfprod.yaml index 7412d951f0..5af84b8757 100644 --- a/applications/plot-navigator/values-usdfprod.yaml +++ b/applications/plot-navigator/values-usdfprod.yaml @@ -28,16 +28,18 @@ config: productionTools: image: repository: ghcr.io/lsst-dm/production_tools - tag: 0.1 + tag: 0.1.2 env: DAF_BUTLER_REPOSITORY_INDEX: "/sdf/group/rubin/shared/data-repos.yaml" PGPASSFILE: "/opt/lsst/butler/secrets/postgres-credentials.txt" PGUSER: "rubin" 
AWS_SHARED_CREDENTIALS_FILE: "/opt/lsst/butler/secrets/aws-credentials.ini" S3_ENDPOINT_URL: "https://s3dfrgw.slac.stanford.edu" + LSST_RESOURCES_S3_PROFILE_embargo: "https://sdfembs3.sdf.slac.stanford.edu" LSST_DISABLE_BUCKET_VALIDATION: "1" REDIS_HOST: "plot-navigator-redis" REDIS_PORT: "6379" + BUTLER_REPO_NAMES: "embargo,/repo/embargo,/repo/main,/repo/dc2" persistentVolumeClaims: - name: sdf-group-rubin storageClassName: sdf-group-rubin @@ -58,27 +60,5 @@ productionTools: mountPath: /sdf/data/rubin readOnly: true -butlerConfig: - pguser: "rubin" - s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu" - repositories: - embargo: "s3://rubin-summit-users/butler.yaml" - main: "/sdf/group/rubin/repo/main" - dc2: "/sdf/group/rubin/repo/dc2" - volumes: - - name: sdf-group-rubin - persistentVolumeClaim: - claimName: sdf-group-rubin - - name: sdf-data-rubin - persistentVolumeClaim: - claimName: sdf-data-rubin - volume_mounts: - - name: sdf-group-rubin - mountPath: /sdf/group/rubin - readOnly: true - - name: sdf-data-rubin - mountPath: /sdf/data/rubin - readOnly: true - image: - tag: v0.2.1 + tag: v0.2.2 diff --git a/applications/plot-navigator/values.yaml b/applications/plot-navigator/values.yaml index a18145869f..9a69783083 100644 --- a/applications/plot-navigator/values.yaml +++ b/applications/plot-navigator/values.yaml @@ -51,16 +51,6 @@ resources: cpu: "50m" memory: "256Mi" -# -- Resource limits and requests for the butler pod -# @default -- see `values.yaml` -butlerResources: - limits: - cpu: "1" - memory: "324Mi" - requests: - cpu: "15m" - memory: "150Mi" - redis: config: # -- Name of secret containing Redis password From 7cae6f36170cd4411ce05a2cc2bf722a9c1c3d4f Mon Sep 17 00:00:00 2001 From: Hsin-Fang Chiang Date: Wed, 28 Aug 2024 12:38:56 -0700 Subject: [PATCH 469/567] Add PRELOAD_PADDING env var to Prompt Processing This number will be used to set the amount of region padding for spatial preload such as refcat template, and DIA preload. 
--- applications/prompt-proto-service-hsc-gpu/README.md | 1 + applications/prompt-proto-service-hsc-gpu/values.yaml | 2 ++ applications/prompt-proto-service-hsc/README.md | 1 + applications/prompt-proto-service-hsc/values.yaml | 2 ++ applications/prompt-proto-service-latiss/README.md | 1 + applications/prompt-proto-service-latiss/values.yaml | 2 ++ applications/prompt-proto-service-lsstcam/README.md | 1 + applications/prompt-proto-service-lsstcam/values.yaml | 2 ++ applications/prompt-proto-service-lsstcomcam/README.md | 1 + applications/prompt-proto-service-lsstcomcam/values.yaml | 2 ++ applications/prompt-proto-service-lsstcomcamsim/README.md | 1 + applications/prompt-proto-service-lsstcomcamsim/values.yaml | 2 ++ charts/prompt-proto-service/README.md | 1 + charts/prompt-proto-service/templates/prompt-proto-service.yaml | 2 ++ charts/prompt-proto-service/values.yaml | 2 ++ 15 files changed, 23 insertions(+) diff --git a/applications/prompt-proto-service-hsc-gpu/README.md b/applications/prompt-proto-service-hsc-gpu/README.md index c15afbac89..ac15e45b47 100644 --- a/applications/prompt-proto-service-hsc-gpu/README.md +++ b/applications/prompt-proto-service-hsc-gpu/README.md @@ -33,6 +33,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.name | string | `"HSC"` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. 
| +| prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"hsc_rings_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-hsc-gpu/values.yaml b/applications/prompt-proto-service-hsc-gpu/values.yaml index b769efe5e4..9ccc7af9dd 100644 --- a/applications/prompt-proto-service-hsc-gpu/values.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values.yaml @@ -47,6 +47,8 @@ prompt-proto-service: preprocessing: "" # -- Skymap to use with the instrument skymap: hsc_rings_v1 + # -- Number of arcseconds to pad the spatial region in preloading. + preloadPadding: 30 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. # @default -- None, must be set diff --git a/applications/prompt-proto-service-hsc/README.md b/applications/prompt-proto-service-hsc/README.md index e42e98570b..ac633519c8 100644 --- a/applications/prompt-proto-service-hsc/README.md +++ b/applications/prompt-proto-service-hsc/README.md @@ -33,6 +33,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.name | string | `"HSC"` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. 
| | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"hsc_rings_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-hsc/values.yaml b/applications/prompt-proto-service-hsc/values.yaml index 4f2bb6153d..46ac0611d7 100644 --- a/applications/prompt-proto-service-hsc/values.yaml +++ b/applications/prompt-proto-service-hsc/values.yaml @@ -47,6 +47,8 @@ prompt-proto-service: preprocessing: "" # -- Skymap to use with the instrument skymap: hsc_rings_v1 + # -- Number of arcseconds to pad the spatial region in preloading. + preloadPadding: 30 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. # @default -- None, must be set diff --git a/applications/prompt-proto-service-latiss/README.md b/applications/prompt-proto-service-latiss/README.md index ce765f2da7..b5028a2171 100644 --- a/applications/prompt-proto-service-latiss/README.md +++ b/applications/prompt-proto-service-latiss/README.md @@ -33,6 +33,7 @@ Prompt Proto Service is an event driven service for processing camera images. 
Th | prompt-proto-service.instrument.name | string | `"LATISS"` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"latiss_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-latiss/values.yaml b/applications/prompt-proto-service-latiss/values.yaml index fed814b736..9229ff3319 100644 --- a/applications/prompt-proto-service-latiss/values.yaml +++ b/applications/prompt-proto-service-latiss/values.yaml @@ -48,6 +48,8 @@ prompt-proto-service: preprocessing: "" # -- Skymap to use with the instrument skymap: latiss_v1 + # -- Number of arcseconds to pad the spatial region in preloading. + preloadPadding: 30 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. 
# @default -- None, must be set diff --git a/applications/prompt-proto-service-lsstcam/README.md b/applications/prompt-proto-service-lsstcam/README.md index b49f608383..e1071dcac0 100644 --- a/applications/prompt-proto-service-lsstcam/README.md +++ b/applications/prompt-proto-service-lsstcam/README.md @@ -33,6 +33,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.name | string | `""` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `""` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-lsstcam/values.yaml b/applications/prompt-proto-service-lsstcam/values.yaml index c72da7e373..4ff089ed5e 100644 --- a/applications/prompt-proto-service-lsstcam/values.yaml +++ b/applications/prompt-proto-service-lsstcam/values.yaml @@ -47,6 +47,8 @@ prompt-proto-service: preprocessing: "" # -- Skymap to use with the instrument skymap: "" + # -- Number of arcseconds to pad the spatial region in preloading. 
+ preloadPadding: 30 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. # @default -- None, must be set diff --git a/applications/prompt-proto-service-lsstcomcam/README.md b/applications/prompt-proto-service-lsstcomcam/README.md index b7991bf994..81edb0b954 100644 --- a/applications/prompt-proto-service-lsstcomcam/README.md +++ b/applications/prompt-proto-service-lsstcomcam/README.md @@ -33,6 +33,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.name | string | `"LSSTComCam"` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"ops_rehersal_prep_2k_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). 
| diff --git a/applications/prompt-proto-service-lsstcomcam/values.yaml b/applications/prompt-proto-service-lsstcomcam/values.yaml index 52e133a11a..a6116e6e11 100644 --- a/applications/prompt-proto-service-lsstcomcam/values.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values.yaml @@ -50,6 +50,8 @@ prompt-proto-service: preprocessing: "" # -- Skymap to use with the instrument skymap: "ops_rehersal_prep_2k_v1" + # -- Number of arcseconds to pad the spatial region in preloading. + preloadPadding: 30 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. # @default -- None, must be set diff --git a/applications/prompt-proto-service-lsstcomcamsim/README.md b/applications/prompt-proto-service-lsstcomcamsim/README.md index 7fd4eb76a1..f2874d9eae 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/README.md +++ b/applications/prompt-proto-service-lsstcomcamsim/README.md @@ -33,6 +33,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.name | string | `"LSSTComCamSim"` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. 
| | prompt-proto-service.instrument.skymap | string | `"ops_rehersal_prep_2k_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-lsstcomcamsim/values.yaml b/applications/prompt-proto-service-lsstcomcamsim/values.yaml index ac825014c7..1f977a62bb 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values.yaml @@ -50,6 +50,8 @@ prompt-proto-service: preprocessing: "" # -- Skymap to use with the instrument skymap: ops_rehersal_prep_2k_v1 + # -- Number of arcseconds to pad the spatial region in preloading. + preloadPadding: 30 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. # @default -- None, must be set diff --git a/charts/prompt-proto-service/README.md b/charts/prompt-proto-service/README.md index 94f154cdcb..06c8d5eff9 100644 --- a/charts/prompt-proto-service/README.md +++ b/charts/prompt-proto-service/README.md @@ -36,6 +36,7 @@ Event-driven processing of camera images | instrument.name | string | None, must be set | The "short" name of the instrument | | instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits' raws. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. 
| +| instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | | instrument.skymap | string | `""` | Skymap to use with the instrument | | knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | diff --git a/charts/prompt-proto-service/templates/prompt-proto-service.yaml b/charts/prompt-proto-service/templates/prompt-proto-service.yaml index b25784a038..aca24d55ba 100644 --- a/charts/prompt-proto-service/templates/prompt-proto-service.yaml +++ b/charts/prompt-proto-service/templates/prompt-proto-service.yaml @@ -47,6 +47,8 @@ spec: value: {{ .Values.instrument.pipelines.main }} - name: SKYMAP value: {{ .Values.instrument.skymap }} + - name: PRELOAD_PADDING + value: {{ .Values.instrument.preloadPadding | toString | quote }} - name: IMAGE_BUCKET value: {{ .Values.s3.imageBucket }} - name: BUCKET_TOPIC diff --git a/charts/prompt-proto-service/values.yaml b/charts/prompt-proto-service/values.yaml index 7169250899..04c55b7472 100644 --- a/charts/prompt-proto-service/values.yaml +++ b/charts/prompt-proto-service/values.yaml @@ -49,6 +49,8 @@ instrument: preprocessing: "" # -- Skymap to use with the instrument skymap: "" + # -- Number of arcseconds to pad the spatial region in preloading. + preloadPadding: 30 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. 
# @default -- None, must be set From e2246acb90a3f0207866341a56f317a0603379c2 Mon Sep 17 00:00:00 2001 From: MAINETTI Gabriele Date: Wed, 13 Nov 2024 10:00:44 +0100 Subject: [PATCH 470/567] set CC-IN2P3 qserv for TAP --- applications/tap/values-ccin2p3.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/tap/values-ccin2p3.yaml b/applications/tap/values-ccin2p3.yaml index ac85822b0e..cf30c0a947 100644 --- a/applications/tap/values-ccin2p3.yaml +++ b/applications/tap/values-ccin2p3.yaml @@ -1,8 +1,8 @@ cadc-tap: tapSchema: image: - repository: "stvoutsin/tap-schema-roe" - tag: 2.3.0 + repository: "gabrimaine/tap-schema-ccin2p3" + tag: 2.4.1 config: gcsBucket: "lsstrsp:async-results.lsst.codes" @@ -11,4 +11,4 @@ cadc-tap: datalinkPayloadUrl: "https://github.com/gabrimaine/sdm_schemas/releases/download/2.4.1/datalink-snippets.zip" qserv: - host: "192.41.122.85:30040" + host: "ccqserv201.in2p3.fr:30040" From 171aeed5e1796a99f8c81b8a18836b3424ebc5ac Mon Sep 17 00:00:00 2001 From: Colin Slater Date: Wed, 13 Nov 2024 09:57:21 -0800 Subject: [PATCH 471/567] Disable secrets copying for plot-navigator. --- applications/plot-navigator/secrets.yaml | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/applications/plot-navigator/secrets.yaml b/applications/plot-navigator/secrets.yaml index 3b763e3aa1..cb657b2df6 100644 --- a/applications/plot-navigator/secrets.yaml +++ b/applications/plot-navigator/secrets.yaml @@ -2,22 +2,9 @@ description: >- Google Cloud Storage credentials to the Butler data store, formatted using AWS syntax for use with boto. - copy: - application: nublado - key: "aws-credentials.ini" -"butler-gcs-idf-creds.json": - description: >- - Google Cloud Storage credentials to the Butler data store in the native - Google syntax, containing the private asymmetric key. 
- copy: - application: nublado - key: "butler-gcs-idf-creds.json" "postgres-credentials.txt": description: >- PostgreSQL credentials in its pgpass format for the Butler database. - copy: - application: nublado - key: "postgres-credentials.txt" redis-password: description: >- Password used to authenticate production-tools to the arq redis server. From 4426c67e7bbd03fd60ba37ef3affcd30d3f45829 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 13 Nov 2024 16:33:01 -0700 Subject: [PATCH 472/567] Turn off AE service - As recommended by InfluxData as this might compete with compaction processes. --- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/influxdb-enterprise/README.md | 2 +- applications/sasquatch/charts/influxdb-enterprise/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 2cfb99b5bd..c3c19d70a0 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -108,7 +108,7 @@ Rubin Observatory's telemetry service | influxdb-enterprise.bootstrap.ddldml.configMap | string | Do not run DDL or DML | A config map containing DDL and DML that define databases, retention policies, and inject some data. The keys `ddl` and `dml` must exist, even if one of them is empty. DDL is executed before DML to ensure databases and retention policies exist. 
| | influxdb-enterprise.bootstrap.ddldml.resources | object | `{}` | Kubernetes resources and limits for the bootstrap job | | influxdb-enterprise.data.affinity | object | See `values.yaml` | Affinity rules for data pods | -| influxdb-enterprise.data.config.antiEntropy.enabled | bool | `true` | Enable the anti-entropy service, which copies and repairs shards | +| influxdb-enterprise.data.config.antiEntropy.enabled | bool | `false` | Enable the anti-entropy service, which copies and repairs shards | | influxdb-enterprise.data.config.cluster.log-queries-after | string | `"15s"` | Maximum duration a query can run before InfluxDB logs it as a slow query | | influxdb-enterprise.data.config.cluster.max-concurrent-queries | int | `1000` | Maximum number of running queries allowed on the instance (0 is unlimited) | | influxdb-enterprise.data.config.cluster.query-timeout | string | `"300s"` | Maximum duration a query is allowed to run before it is killed | diff --git a/applications/sasquatch/charts/influxdb-enterprise/README.md b/applications/sasquatch/charts/influxdb-enterprise/README.md index 12233edf75..1f95a590c9 100644 --- a/applications/sasquatch/charts/influxdb-enterprise/README.md +++ b/applications/sasquatch/charts/influxdb-enterprise/README.md @@ -14,7 +14,7 @@ Run InfluxDB Enterprise on Kubernetes | bootstrap.ddldml.configMap | string | Do not run DDL or DML | A config map containing DDL and DML that define databases, retention policies, and inject some data. The keys `ddl` and `dml` must exist, even if one of them is empty. DDL is executed before DML to ensure databases and retention policies exist. 
| | bootstrap.ddldml.resources | object | `{}` | Kubernetes resources and limits for the bootstrap job | | data.affinity | object | See `values.yaml` | Affinity rules for data pods | -| data.config.antiEntropy.enabled | bool | `true` | Enable the anti-entropy service, which copies and repairs shards | +| data.config.antiEntropy.enabled | bool | `false` | Enable the anti-entropy service, which copies and repairs shards | | data.config.cluster.log-queries-after | string | `"15s"` | Maximum duration a query can run before InfluxDB logs it as a slow query | | data.config.cluster.max-concurrent-queries | int | `1000` | Maximum number of running queries allowed on the instance (0 is unlimited) | | data.config.cluster.query-timeout | string | `"300s"` | Maximum duration a query is allowed to run before it is killed | diff --git a/applications/sasquatch/charts/influxdb-enterprise/values.yaml b/applications/sasquatch/charts/influxdb-enterprise/values.yaml index 0709b449c6..5df8482d84 100644 --- a/applications/sasquatch/charts/influxdb-enterprise/values.yaml +++ b/applications/sasquatch/charts/influxdb-enterprise/values.yaml @@ -364,7 +364,7 @@ data: antiEntropy: # -- Enable the anti-entropy service, which copies and repairs shards - enabled: true + enabled: false http: # -- Whether to enable the Flux query endpoint From eb56f69601d7c9c10b19ad97654d218abd6e60f9 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 12:17:51 +0000 Subject: [PATCH 473/567] chore(deps): update kapacitor docker tag to v1.7.6 --- applications/sasquatch/README.md | 2 +- applications/sasquatch/values.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index c3c19d70a0..a3a058d559 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -69,7 +69,7 @@ Rubin Observatory's telemetry service | kapacitor.envVars | object 
| See `values.yaml` | Additional environment variables to set | | kapacitor.existingSecret | string | `"sasquatch"` | Use `influxdb-user` and `influxdb-password` keys from this secret | | kapacitor.image.repository | string | `"kapacitor"` | Docker image to use for Kapacitor | -| kapacitor.image.tag | string | `"1.7.5"` | Tag to use for Kapacitor | +| kapacitor.image.tag | string | `"1.7.6"` | Tag to use for Kapacitor | | kapacitor.influxURL | string | `"http://sasquatch-influxdb.sasquatch:8086"` | InfluxDB connection URL | | kapacitor.persistence.enabled | bool | `true` | Whether to enable Kapacitor data persistence | | kapacitor.persistence.size | string | `"100Gi"` | Size of storage to request if enabled | diff --git a/applications/sasquatch/values.yaml b/applications/sasquatch/values.yaml index 6eb5775989..589d55b00e 100644 --- a/applications/sasquatch/values.yaml +++ b/applications/sasquatch/values.yaml @@ -237,7 +237,7 @@ kapacitor: repository: kapacitor # -- Tag to use for Kapacitor - tag: 1.7.5 + tag: 1.7.6 persistence: # -- Whether to enable Kapacitor data persistence From 62b2a2fc9250c704f39cd68a6cdeb22eb856caa8 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 8 Nov 2024 14:17:25 -0700 Subject: [PATCH 474/567] Use kafka-broker node label - Use another node label to make it clear the node is dedicated to a kafka broker --- applications/sasquatch/values-base.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 4440c387ea..8947a770e6 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -119,7 +119,7 @@ strimzi-kafka: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - - key: local-storage + - key: kafka-broker operator: In values: - "true" From 98056351ce564cfc3c9a3ad0514f9fa49ffd4279 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Fri, 8 Nov 2024 14:21:05 
-0700 Subject: [PATCH 475/567] Add tolerations for scheduling kafka broker pods - Add tolerations so that the kafka brokers are allowed to run on the kafka nodes, after tainting the kafka nodes with NoSchedule. --- .../sasquatch/charts/strimzi-kafka/templates/kafka.yaml | 4 ++++ applications/sasquatch/values-base.yaml | 5 +++++ 2 files changed, 9 insertions(+) diff --git a/applications/sasquatch/charts/strimzi-kafka/templates/kafka.yaml b/applications/sasquatch/charts/strimzi-kafka/templates/kafka.yaml index 0fac119249..ac8e91832a 100644 --- a/applications/sasquatch/charts/strimzi-kafka/templates/kafka.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/templates/kafka.yaml @@ -83,6 +83,10 @@ spec: affinity: {{- toYaml . | nindent 10 }} {{- end }} + {{- with .Values.brokerStorage.tolerations }} + tolerations: + {{- toYaml . | nindent 10 }} + {{- end}} {{- with .Values.kafka.resources }} resources: {{- toYaml . | nindent 6 }} diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 8947a770e6..38df1a1590 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -123,6 +123,11 @@ strimzi-kafka: operator: In values: - "true" + tolerations: + - key: "kafka-broker" + operator: "Equal" + value: "true" + effect: "NoSchedule" influxdb: persistence: From ba1edb69699b5ffc2646aef0308feb10a663add8 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 12 Nov 2024 14:29:15 -0700 Subject: [PATCH 476/567] Configure Kafka external listeners on BTS to use the regular node IP pool --- applications/sasquatch/values-base.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 38df1a1590..d34b7659b3 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -35,7 +35,7 @@ strimzi-kafka: tls: enabled: true bootstrap: - loadBalancerIP: "139.229.153.65" + 
loadBalancerIP: "139.229.151.176" host: sasquatch-base-kafka-bootstrap.lsst.codes brokers: - broker: 6 From fead991becd9cbedc3965bf37b43e5f233c0b1ee Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Wed, 13 Nov 2024 10:44:58 -0700 Subject: [PATCH 477/567] Grant permissions to ts-salkafka topic - ts-salkafka Kafka user needs permissions on lsst.s3 topics --- .../sasquatch/charts/strimzi-kafka/templates/users.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml b/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml index 5b30f2a6a3..22174b52e7 100644 --- a/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/templates/users.yaml @@ -29,6 +29,13 @@ spec: type: allow host: "*" operation: All + - resource: + type: topic + name: "lsst.s3" + patternType: prefix + type: allow + host: "*" + operation: All - resource: type: cluster operations: From bce1600ce8b4e0de8dee9417cf2d02b3b9c2346c Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Thu, 14 Nov 2024 14:47:05 -0700 Subject: [PATCH 478/567] Test new version of obsenv-ui. --- applications/obsenv-management/values-tucson-teststand.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 7fc3005cf9..5d45d1cc9c 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -11,7 +11,7 @@ obsenv-api: obsenv-ui: image: repository: rubin-cr.lsst.org/obsenv-ui - tag: 0.2.1 + tag: tickets-DM-47564 pullPolicy: Always config: pathPrefix: /obsenv-management From fc0bc04c75105d545f134e95413152b0bc6a8cae Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Thu, 14 Nov 2024 15:17:23 -0700 Subject: [PATCH 479/567] Release new obsenv-ui everywhere. 
--- applications/obsenv-management/charts/obsenv-ui/Chart.yaml | 2 +- applications/obsenv-management/values-base.yaml | 2 +- applications/obsenv-management/values-summit.yaml | 2 +- applications/obsenv-management/values-tucson-teststand.yaml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/obsenv-management/charts/obsenv-ui/Chart.yaml b/applications/obsenv-management/charts/obsenv-ui/Chart.yaml index 2692bbac6b..ee0e1d1a4b 100644 --- a/applications/obsenv-management/charts/obsenv-ui/Chart.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/Chart.yaml @@ -2,4 +2,4 @@ name: obsenv-ui apiVersion: v2 version: 1.0.0 description: Helm chart for the Observatory Environment Management UI. -appVersion: "0.2.1" +appVersion: "0.3.0" diff --git a/applications/obsenv-management/values-base.yaml b/applications/obsenv-management/values-base.yaml index cf94687ec3..c0534703db 100644 --- a/applications/obsenv-management/values-base.yaml +++ b/applications/obsenv-management/values-base.yaml @@ -11,7 +11,7 @@ obsenv-api: obsenv-ui: image: repository: rubin-cr.lsst.org/obsenv-ui - tag: 0.2.1 + tag: 0.3.0 pullPolicy: Always config: pathPrefix: /obsenv-management diff --git a/applications/obsenv-management/values-summit.yaml b/applications/obsenv-management/values-summit.yaml index 78b60b8048..4ba6a8c0bc 100644 --- a/applications/obsenv-management/values-summit.yaml +++ b/applications/obsenv-management/values-summit.yaml @@ -11,7 +11,7 @@ obsenv-api: obsenv-ui: image: repository: rubin-cr.lsst.org/obsenv-ui - tag: 0.2.1 + tag: 0.3.0 pullPolicy: Always config: pathPrefix: /obsenv-management diff --git a/applications/obsenv-management/values-tucson-teststand.yaml b/applications/obsenv-management/values-tucson-teststand.yaml index 5d45d1cc9c..5ac85bb7ac 100644 --- a/applications/obsenv-management/values-tucson-teststand.yaml +++ b/applications/obsenv-management/values-tucson-teststand.yaml @@ -11,7 +11,7 @@ obsenv-api: obsenv-ui: image: repository: 
rubin-cr.lsst.org/obsenv-ui - tag: tickets-DM-47564 + tag: 0.3.0 pullPolicy: Always config: pathPrefix: /obsenv-management From 19d2c394b3a83a6a7a66b57d41e5debb6fd08cac Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Wed, 13 Nov 2024 14:01:24 -0800 Subject: [PATCH 480/567] Update Prompt Processing to 4.8.0. --- .../values-usdfprod-prompt-processing.yaml | 2 +- .../values-usdfprod-prompt-processing.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index 1f93567fdd..7ee999aeb8 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.7.2 + tag: 4.8.0 instrument: pipelines: diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index 955b5135e5..672788f165 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -15,7 +15,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. 
- tag: 4.7.2 + tag: 4.8.0 instrument: pipelines: From 22a34f1f434ec93b4934f4dde68741735a2c9d68 Mon Sep 17 00:00:00 2001 From: pav511 <38131208+pav511@users.noreply.github.com> Date: Fri, 15 Nov 2024 09:45:29 -0800 Subject: [PATCH 481/567] bump recommended image prod/int --- applications/nublado/values-usdfdev.yaml | 2 +- applications/nublado/values-usdfint.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/nublado/values-usdfdev.yaml b/applications/nublado/values-usdfdev.yaml index 83928e10cb..0e680ff040 100644 --- a/applications/nublado/values-usdfdev.yaml +++ b/applications/nublado/values-usdfdev.yaml @@ -7,7 +7,7 @@ controller: registry: "docker-registry.slac.stanford.edu" repository: "lsstsqre/sciplat-lab" pin: - - "w_2024_32" + - "w_2024_42" lab: env: AWS_SHARED_CREDENTIALS_FILE: "/opt/lsst/software/jupyterlab/secrets/aws-credentials.ini" diff --git a/applications/nublado/values-usdfint.yaml b/applications/nublado/values-usdfint.yaml index 83928e10cb..0e680ff040 100644 --- a/applications/nublado/values-usdfint.yaml +++ b/applications/nublado/values-usdfint.yaml @@ -7,7 +7,7 @@ controller: registry: "docker-registry.slac.stanford.edu" repository: "lsstsqre/sciplat-lab" pin: - - "w_2024_32" + - "w_2024_42" lab: env: AWS_SHARED_CREDENTIALS_FILE: "/opt/lsst/software/jupyterlab/secrets/aws-credentials.ini" From 62fec529158bc7d372718150a0b4ac1e010370eb Mon Sep 17 00:00:00 2001 From: Eric Neilsen Date: Fri, 15 Nov 2024 12:26:16 -0800 Subject: [PATCH 482/567] Update schedview-snapshot to schedview v0.15.0 --- applications/schedview-snapshot/Chart.yaml | 2 +- applications/schedview-snapshot/values-usdfdev.yaml | 2 +- applications/schedview-snapshot/values-usdfint.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/schedview-snapshot/Chart.yaml b/applications/schedview-snapshot/Chart.yaml index f16fd1a7bd..1f34b86432 100644 --- a/applications/schedview-snapshot/Chart.yaml +++ 
b/applications/schedview-snapshot/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: v0.12.0 +appVersion: v0.15.0 description: Dashboard for examination of scheduler snapshots name: schedview-snapshot sources: diff --git a/applications/schedview-snapshot/values-usdfdev.yaml b/applications/schedview-snapshot/values-usdfdev.yaml index 564723a628..a2a71bce62 100644 --- a/applications/schedview-snapshot/values-usdfdev.yaml +++ b/applications/schedview-snapshot/values-usdfdev.yaml @@ -1,3 +1,3 @@ image: # -- Overrides the image tag whose default is the chart appVersion. - tag: "v0.12.0" + tag: "v0.15.0" diff --git a/applications/schedview-snapshot/values-usdfint.yaml b/applications/schedview-snapshot/values-usdfint.yaml index 70bdc55d00..ad1ab415d9 100644 --- a/applications/schedview-snapshot/values-usdfint.yaml +++ b/applications/schedview-snapshot/values-usdfint.yaml @@ -1,6 +1,6 @@ image: # -- Overrides the image tag whose default is the chart appVersion. - tag: "v0.12.0" + tag: "v0.15.0" resources: limits: From 3df15284aab7d730abbd8784b0629f20860fe3e7 Mon Sep 17 00:00:00 2001 From: Erin Howard Date: Sun, 17 Nov 2024 21:39:22 -0500 Subject: [PATCH 483/567] Update Prompt Processing to 4.8.1. --- .../values-usdfprod-prompt-processing.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index 672788f165..d10f3bc4cf 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -15,7 +15,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. 
- tag: 4.8.0 + tag: 4.8.1 instrument: pipelines: From 0877394d905e91e6a3a239cd05e5969c087838d1 Mon Sep 17 00:00:00 2001 From: Erin Howard Date: Sun, 17 Nov 2024 21:42:29 -0500 Subject: [PATCH 484/567] Update Prompt Processing to 4.8.1. --- .../values-usdfprod-prompt-processing.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index 7ee999aeb8..8f1f5bfe48 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.8.0 + tag: 4.8.1 instrument: pipelines: From 8b322c6fa77a176ab550d8e3bd2c18e526ed2d68 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 10:06:44 +0000 Subject: [PATCH 485/567] chore(deps): update helm release argo-cd to v7.7.3 --- applications/argocd/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argocd/Chart.yaml b/applications/argocd/Chart.yaml index 1a7593b5d3..11b5e03524 100644 --- a/applications/argocd/Chart.yaml +++ b/applications/argocd/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-cd - version: 7.7.2 + version: 7.7.3 repository: https://argoproj.github.io/argo-helm From 0dbe7460895f3a01117d859d9229d1f8d415a5af Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 10:06:49 +0000 Subject: [PATCH 486/567] chore(deps): update helm release kubernetes-replicator to v2.11.0 --- applications/kubernetes-replicator/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/applications/kubernetes-replicator/Chart.yaml b/applications/kubernetes-replicator/Chart.yaml index 335507f312..90365efd2c 100644 --- a/applications/kubernetes-replicator/Chart.yaml +++ b/applications/kubernetes-replicator/Chart.yaml @@ -7,5 +7,5 @@ sources: - https://github.com/mittwald/kubernetes-replicator dependencies: - name: kubernetes-replicator - version: 2.10.2 + version: 2.11.0 repository: https://helm.mittwald.de From 7225994376990fbb96b8e10769f27a1f8749557d Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 18 Nov 2024 13:49:01 -0800 Subject: [PATCH 487/567] Update pre-commit and Python dependencies --- .pre-commit-config.yaml | 2 +- requirements/dev.txt | 138 ++++++++++++++++++++-------------------- requirements/main.txt | 6 +- requirements/tox.txt | 38 +++++------ 4 files changed, 92 insertions(+), 92 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 604fb6c2dd..a5a2f68946 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,7 +46,7 @@ repos: - --template-files=../helm-docs.md.gotmpl - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.3 + rev: v0.7.4 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/requirements/dev.txt b/requirements/dev.txt index 52cd42a798..3c3e316dab 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -244,69 +244,69 @@ comm==0.2.2 \ --hash=sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e \ --hash=sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3 # via ipykernel -coverage==7.6.4 \ - --hash=sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376 \ - --hash=sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9 \ - --hash=sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111 \ - --hash=sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172 \ - 
--hash=sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491 \ - --hash=sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546 \ - --hash=sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2 \ - --hash=sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11 \ - --hash=sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08 \ - --hash=sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c \ - --hash=sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2 \ - --hash=sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963 \ - --hash=sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613 \ - --hash=sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0 \ - --hash=sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db \ - --hash=sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf \ - --hash=sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73 \ - --hash=sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117 \ - --hash=sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1 \ - --hash=sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e \ - --hash=sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522 \ - --hash=sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25 \ - --hash=sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc \ - --hash=sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea \ - --hash=sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52 \ - --hash=sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a \ - --hash=sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07 \ - --hash=sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06 \ - 
--hash=sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa \ - --hash=sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901 \ - --hash=sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b \ - --hash=sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17 \ - --hash=sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0 \ - --hash=sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21 \ - --hash=sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19 \ - --hash=sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5 \ - --hash=sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51 \ - --hash=sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3 \ - --hash=sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3 \ - --hash=sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f \ - --hash=sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076 \ - --hash=sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a \ - --hash=sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718 \ - --hash=sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba \ - --hash=sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e \ - --hash=sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27 \ - --hash=sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e \ - --hash=sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09 \ - --hash=sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e \ - --hash=sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70 \ - --hash=sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f \ - --hash=sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72 \ - 
--hash=sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a \ - --hash=sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef \ - --hash=sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b \ - --hash=sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b \ - --hash=sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f \ - --hash=sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806 \ - --hash=sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b \ - --hash=sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1 \ - --hash=sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c \ - --hash=sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858 +coverage==7.6.7 \ + --hash=sha256:0266b62cbea568bd5e93a4da364d05de422110cbed5056d69339bd5af5685433 \ + --hash=sha256:0573f5cbf39114270842d01872952d301027d2d6e2d84013f30966313cadb529 \ + --hash=sha256:0ddcb70b3a3a57581b450571b31cb774f23eb9519c2aaa6176d3a84c9fc57671 \ + --hash=sha256:108bb458827765d538abcbf8288599fee07d2743357bdd9b9dad456c287e121e \ + --hash=sha256:14045b8bfd5909196a90da145a37f9d335a5d988a83db34e80f41e965fb7cb42 \ + --hash=sha256:1a5407a75ca4abc20d6252efeb238377a71ce7bda849c26c7a9bece8680a5d99 \ + --hash=sha256:2bc3e45c16564cc72de09e37413262b9f99167803e5e48c6156bccdfb22c8327 \ + --hash=sha256:2d608a7808793e3615e54e9267519351c3ae204a6d85764d8337bd95993581a8 \ + --hash=sha256:34d23e28ccb26236718a3a78ba72744212aa383141961dd6825f6595005c8b06 \ + --hash=sha256:37a15573f988b67f7348916077c6d8ad43adb75e478d0910957394df397d2874 \ + --hash=sha256:3c0317288f032221d35fa4cbc35d9f4923ff0dfd176c79c9b356e8ef8ef2dff4 \ + --hash=sha256:3c42ec2c522e3ddd683dec5cdce8e62817afb648caedad9da725001fa530d354 \ + --hash=sha256:3c6b24007c4bcd0b19fac25763a7cac5035c735ae017e9a349b927cfc88f31c1 \ + 
--hash=sha256:40cca284c7c310d622a1677f105e8507441d1bb7c226f41978ba7c86979609ab \ + --hash=sha256:46f21663e358beae6b368429ffadf14ed0a329996248a847a4322fb2e35d64d3 \ + --hash=sha256:49ed5ee4109258973630c1f9d099c7e72c5c36605029f3a91fe9982c6076c82b \ + --hash=sha256:5c95e0fa3d1547cb6f021ab72f5c23402da2358beec0a8e6d19a368bd7b0fb37 \ + --hash=sha256:5dd4e4a49d9c72a38d18d641135d2fb0bdf7b726ca60a103836b3d00a1182acd \ + --hash=sha256:5e444b8e88339a2a67ce07d41faabb1d60d1004820cee5a2c2b54e2d8e429a0f \ + --hash=sha256:60dcf7605c50ea72a14490d0756daffef77a5be15ed1b9fea468b1c7bda1bc3b \ + --hash=sha256:623e6965dcf4e28a3debaa6fcf4b99ee06d27218f46d43befe4db1c70841551c \ + --hash=sha256:673184b3156cba06154825f25af33baa2671ddae6343f23175764e65a8c4c30b \ + --hash=sha256:6cf96ceaa275f071f1bea3067f8fd43bec184a25a962c754024c973af871e1b7 \ + --hash=sha256:70a56a2ec1869e6e9fa69ef6b76b1a8a7ef709972b9cc473f9ce9d26b5997ce3 \ + --hash=sha256:77256ad2345c29fe59ae861aa11cfc74579c88d4e8dbf121cbe46b8e32aec808 \ + --hash=sha256:796c9b107d11d2d69e1849b2dfe41730134b526a49d3acb98ca02f4985eeff7a \ + --hash=sha256:7c07de0d2a110f02af30883cd7dddbe704887617d5c27cf373362667445a4c76 \ + --hash=sha256:7e61b0e77ff4dddebb35a0e8bb5a68bf0f8b872407d8d9f0c726b65dfabe2469 \ + --hash=sha256:82c809a62e953867cf57e0548c2b8464207f5f3a6ff0e1e961683e79b89f2c55 \ + --hash=sha256:850cfd2d6fc26f8346f422920ac204e1d28814e32e3a58c19c91980fa74d8289 \ + --hash=sha256:87ea64b9fa52bf395272e54020537990a28078478167ade6c61da7ac04dc14bc \ + --hash=sha256:90746521206c88bdb305a4bf3342b1b7316ab80f804d40c536fc7d329301ee13 \ + --hash=sha256:951aade8297358f3618a6e0660dc74f6b52233c42089d28525749fc8267dccd2 \ + --hash=sha256:963e4a08cbb0af6623e61492c0ec4c0ec5c5cf74db5f6564f98248d27ee57d30 \ + --hash=sha256:987a8e3da7da4eed10a20491cf790589a8e5e07656b6dc22d3814c4d88faf163 \ + --hash=sha256:9c2eb378bebb2c8f65befcb5147877fc1c9fbc640fc0aad3add759b5df79d55d \ + --hash=sha256:a1ab9763d291a17b527ac6fd11d1a9a9c358280adb320e9c2672a97af346ac2c \ + 
--hash=sha256:a3b925300484a3294d1c70f6b2b810d6526f2929de954e5b6be2bf8caa1f12c1 \ + --hash=sha256:acbb8af78f8f91b3b51f58f288c0994ba63c646bc1a8a22ad072e4e7e0a49f1c \ + --hash=sha256:ad32a981bcdedb8d2ace03b05e4fd8dace8901eec64a532b00b15217d3677dd2 \ + --hash=sha256:aee9cf6b0134d6f932d219ce253ef0e624f4fa588ee64830fcba193269e4daa3 \ + --hash=sha256:af05bbba896c4472a29408455fe31b3797b4d8648ed0a2ccac03e074a77e2314 \ + --hash=sha256:b6cce5c76985f81da3769c52203ee94722cd5d5889731cd70d31fee939b74bf0 \ + --hash=sha256:bb684694e99d0b791a43e9fc0fa58efc15ec357ac48d25b619f207c41f2fd384 \ + --hash=sha256:c132b5a22821f9b143f87446805e13580b67c670a548b96da945a8f6b4f2efbb \ + --hash=sha256:c296263093f099da4f51b3dff1eff5d4959b527d4f2f419e16508c5da9e15e8c \ + --hash=sha256:c973b2fe4dc445cb865ab369df7521df9c27bf40715c837a113edaa2aa9faf45 \ + --hash=sha256:cdd94501d65adc5c24f8a1a0eda110452ba62b3f4aeaba01e021c1ed9cb8f34a \ + --hash=sha256:d79d4826e41441c9a118ff045e4bccb9fdbdcb1d02413e7ea6eb5c87b5439d24 \ + --hash=sha256:dbba8210f5067398b2c4d96b4e64d8fb943644d5eb70be0d989067c8ca40c0f8 \ + --hash=sha256:df002e59f2d29e889c37abd0b9ee0d0e6e38c24f5f55d71ff0e09e3412a340ec \ + --hash=sha256:dfd14bcae0c94004baba5184d1c935ae0d1231b8409eb6c103a5fd75e8ecdc56 \ + --hash=sha256:e25bacb53a8c7325e34d45dddd2f2fbae0dbc230d0e2642e264a64e17322a777 \ + --hash=sha256:e2c8e3384c12dfa19fa9a52f23eb091a8fad93b5b81a41b14c17c78e23dd1d8b \ + --hash=sha256:e5f2a0f161d126ccc7038f1f3029184dbdf8f018230af17ef6fd6a707a5b881f \ + --hash=sha256:e69ad502f1a2243f739f5bd60565d14a278be58be4c137d90799f2c263e7049a \ + --hash=sha256:ead9b9605c54d15be228687552916c89c9683c215370c4a44f1f217d2adcc34d \ + --hash=sha256:f07ff574986bc3edb80e2c36391678a271d555f91fd1d332a1e0f4b5ea4b6ea9 \ + --hash=sha256:f2c7a045eef561e9544359a0bf5784b44e55cefc7261a20e730baa9220c83413 \ + --hash=sha256:f3e8796434a8106b3ac025fd15417315d7a58ee3e600ad4dbcfddc3f4b14342c \ + --hash=sha256:f63e21ed474edd23f7501f89b53280014436e383a14b9bd77a648366c81dce7b \ + 
--hash=sha256:fd49c01e5057a451c30c9b892948976f5d38f2cbd04dc556a82743ba8e27ed8c # via # -r requirements/dev.in # pytest-cov @@ -517,9 +517,9 @@ jsonschema-specifications==2024.10.1 \ --hash=sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272 \ --hash=sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf # via jsonschema -jupyter-cache==1.0.0 \ - --hash=sha256:594b1c4e29b488b36547e12477645f489dbdc62cc939b2408df5679f79245078 \ - --hash=sha256:d0fa7d7533cd5798198d8889318269a8c1382ed3b22f622c09a9356521f48687 +jupyter-cache==1.0.1 \ + --hash=sha256:16e808eb19e3fb67a223db906e131ea6e01f03aa27f49a7214ce6a5fec186fb9 \ + --hash=sha256:9c3cafd825ba7da8b5830485343091143dff903e4d8c69db9349b728b140abf6 # via myst-nb jupyter-client==8.6.3 \ --hash=sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419 \ @@ -1215,9 +1215,9 @@ rpds-py==0.21.0 \ # via # jsonschema # referencing -setuptools==75.4.0 \ - --hash=sha256:1dc484f5cf56fd3fe7216d7b8df820802e7246cfb534a1db2aa64f14fcb9cdcb \ - --hash=sha256:b3c5d862f98500b06ffdf7cc4499b48c46c317d8d56cb30b5c8bce4d88f5c216 +setuptools==75.5.0 \ + --hash=sha256:5c4ccb41111392671f02bb5f8436dfc5a9a7185e80500531b133f5775c4163ef \ + --hash=sha256:87cb777c3b96d638ca02031192d40390e0ad97737e27b6b4fa831bea86f2f829 # via # documenteer # sphinxcontrib-bibtex diff --git a/requirements/main.txt b/requirements/main.txt index e95e00df81..489a168c0c 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -480,9 +480,9 @@ pydantic-core==2.23.4 \ # via # pydantic # safir -pyjwt==2.9.0 \ - --hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ - --hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c +pyjwt==2.10.0 \ + --hash=sha256:543b77207db656de204372350926bed5a86201c4cbff159f623f79c7bb487a15 \ + --hash=sha256:7628a7eb7938959ac1b26e819a1df0fd3259505627b575e4bad6d08f76db695c # via gidgethub python-dateutil==2.9.0.post0 \ 
--hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ diff --git a/requirements/tox.txt b/requirements/tox.txt index 44e69d23fe..a3206bec05 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -60,25 +60,25 @@ tox-uv==1.16.0 \ --hash=sha256:71b2e2fa6c35c1360b91a302df1d65b3e5a1f656b321c5ebf7b84545804c9f01 \ --hash=sha256:e6f0b525a687e745ab878d07cbf5c7e85d582028d4a7c8935f95e84350651432 # via -r requirements/tox.in -uv==0.5.1 \ - --hash=sha256:01c40f756e9536c05fdf3485c1dfe3da610c3169195bbe20fab03a4c4b7a0d98 \ - --hash=sha256:3db7513c804fb89dcde671ba917cc486cfb574408d6257e19b19ae6b55f5982f \ - --hash=sha256:3ffb230be0f6552576da67a2737a32a6a640e4b3f42144088222a669802d7f10 \ - --hash=sha256:4601d40b0c02aff9fb791efa5b6f4c7dbad0970e13ac679aa8fb07365f331354 \ - --hash=sha256:4d1ec4a1bc19b523a84fc1bf2a92e9c4d982c831d3da450af71fc3057999d456 \ - --hash=sha256:6a76765c3cc49268f3c6773bd89a0dacf8a91b040fc3faea6c527ef6f2308eba \ - --hash=sha256:6ec61220d883751777cbabf0b076607cfbdeb812bc52c28722e897271461e589 \ - --hash=sha256:72b54a3308e13a81aa2df19baea40611fc344c7556f75d2113f9b9b5a894355e \ - --hash=sha256:73853b98bce9e118cda2d64360ddd7e0f79e237aca8cd2f28b6d5679400b239e \ - --hash=sha256:821b6a9d591d3e951fbe81c53d32499d11500100d66b1c119e183f3d4a6cd07c \ - --hash=sha256:8dce5b6d6dea41db71fe8d9895167cc5abf3e7b28c016174b1b9a9aecb74d483 \ - --hash=sha256:922685dcaa1c9b6663649b379f9bdbe5b87af230f512e69398efc51bd9d8b8eb \ - --hash=sha256:93f0a02ea9149f4e7e359ef92da6f221da2ecf458cda2af729a1f6fa8c3ed1d2 \ - --hash=sha256:aaa63053ff6dc4456e2ac2a9b6a8eda0cfaa1e0f861633d9e7315c7df9a0a525 \ - --hash=sha256:ac3fce68002e79f3c070f3e7d914e992f205f05af00bfffbe6c44d37aa39c86a \ - --hash=sha256:ad2dd8a994a8334a5d4b354589be4b8c4b3b2ebb7bb2f2976c8e21d2799f45a9 \ - --hash=sha256:c4d209164448c8529e21aca4ef1e3da94303b1bf726924786feffd87ed93ab4a \ - --hash=sha256:f66859e67d10ffff8b17c67c7ede207d67487cef20c3d17bc427b690f9dff795 +uv==0.5.2 \ + 
--hash=sha256:15c7ffa08ae21abd221dbdf9ba25c8969235f587cec6df8035552434e5ca1cc5 \ + --hash=sha256:2597e91be45b3f4458d0d16a5a1cda7e93af7d6dbfddf251aae5377f9187fa88 \ + --hash=sha256:27d666da8fbb0f87d9df67abf9feea0da4ee1336730f2c4be29a11f3feaa0a29 \ + --hash=sha256:374e9498e155fcaa8728a6770b84f03781106d705332f4ec059e1cc93c8f4d8a \ + --hash=sha256:5052758d374dd769efd0c70b4789ffb08439567eb114ad8fe728536bb5cc5299 \ + --hash=sha256:675ca34829ceca3e9de395cf05e8f881334a24488f97dd923c463830270d52a7 \ + --hash=sha256:67776d34cba359c63919c5ad50331171261d2ec7a83fd07f032eb8cc22e22b8e \ + --hash=sha256:71467545d51883d1af7094c8f6da69b55e7d49b742c2dc707d644676dcb66515 \ + --hash=sha256:772b32d157ec8f27c0099ecac94cf5cd298bce72f1a1f512205591de4e9f0c5c \ + --hash=sha256:7bde66f13571e437fd45f32f5742ab53d5e011b4edb1c74cb74cb8b1cbb828b5 \ + --hash=sha256:89e60ad9601f35f187326de84f35e7517c6eb1438359da42ec85cfd9c1895957 \ + --hash=sha256:a4d4fdad03e6dc3e8216192b8a12bcf2c71c8b12046e755575c7f262cbb61924 \ + --hash=sha256:a8a9897dd7657258c53f41aecdbe787da99f4fc0775f19826ab65cc0a7136cbf \ + --hash=sha256:c9795b990fb0b2a18d3a8cef8822e13c6a6f438bc16d34ccf01d931c76cfd5da \ + --hash=sha256:cfba5b0070652da4174083b78852f3ab3d262ba1c8b63a4d5ae497263b02b834 \ + --hash=sha256:d0834c6b37750c045bbea80600d3ae3e95becc4db148f5c0d0bc3ec6a7924e8f \ + --hash=sha256:d1fe4e025dbb9ec5c9250bfc1231847b8487706538f94d10c769f0a54db3e0af \ + --hash=sha256:dfcd8275ff8cb59d5f26f826a44270b2fe8f38aa7188d7355c48d3e9b759d0c0 # via tox-uv virtualenv==20.27.1 \ --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ From 78a63799ea124c92b058f57b0efded0ec5f3e2d2 Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Mon, 18 Nov 2024 15:14:05 -0700 Subject: [PATCH 488/567] Upgrade Butler server Upgrade Butler server to add a new `/query/all_datasets` endpoint. 
--- applications/butler/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/butler/Chart.yaml b/applications/butler/Chart.yaml index 5216bf15ad..9bae1440de 100644 --- a/applications/butler/Chart.yaml +++ b/applications/butler/Chart.yaml @@ -4,4 +4,4 @@ version: 1.0.0 description: Server for Butler data abstraction service sources: - https://github.com/lsst/daf_butler -appVersion: server-2.2.1 +appVersion: server-2.3.0 From a039835bba9f52931666d36557c9782a825d93c6 Mon Sep 17 00:00:00 2001 From: Angelo Fausti Date: Tue, 19 Nov 2024 11:09:26 -0700 Subject: [PATCH 489/567] Update schema registry version to 7.7.1 --- .../charts/strimzi-kafka/templates/schema-registry.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/sasquatch/charts/strimzi-kafka/templates/schema-registry.yaml b/applications/sasquatch/charts/strimzi-kafka/templates/schema-registry.yaml index 8f15e429a5..601864b6f1 100644 --- a/applications/sasquatch/charts/strimzi-kafka/templates/schema-registry.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/templates/schema-registry.yaml @@ -5,6 +5,7 @@ metadata: spec: listener: tls compatibilityLevel: none + registryImageTag: "7.7.1" cpuLimit: {{ .Values.registry.resources.limits.cpu | quote }} cpuRequest: {{ .Values.registry.resources.requests.cpu | quote }} memoryLimit: {{ .Values.registry.resources.limits.memory | quote }} From 09eaa37007b7e3a9a70e9d47ee90570158bf335b Mon Sep 17 00:00:00 2001 From: adam Date: Tue, 19 Nov 2024 11:11:17 -0700 Subject: [PATCH 490/567] Bump mobu version --- applications/mobu/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/mobu/Chart.yaml b/applications/mobu/Chart.yaml index 25cf5bdeeb..351942bf12 100644 --- a/applications/mobu/Chart.yaml +++ b/applications/mobu/Chart.yaml @@ -5,4 +5,4 @@ description: "Continuous integration testing" home: https://mobu.lsst.io/ sources: - "https://github.com/lsst-sqre/mobu" -appVersion: 13.0.0 
+appVersion: 13.0.1 From 3b71f3aee320c9769bafe42f80d30462397a4abe Mon Sep 17 00:00:00 2001 From: Amanda Ibsen Date: Fri, 15 Nov 2024 11:31:38 -0300 Subject: [PATCH 491/567] Update rubintv worker image to cycle 39 --- applications/rubintv/values-summit.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index 3d6ce653c9..eef9d8dec8 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -27,7 +27,7 @@ rubintv: replicas: 1 image: repository: ts-dockerhub.lsst.org/rapid-analysis - tag: c0037 + tag: c0039 pullPolicy: Always uid: 73006 gid: 73006 From 25dd1b41ef64a15135b39e4710fdb375d272ffce Mon Sep 17 00:00:00 2001 From: adam Date: Tue, 19 Nov 2024 12:49:59 -0700 Subject: [PATCH 492/567] Change users to not be logged out on lab cull --- applications/nublado/README.md | 2 +- applications/nublado/values.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/nublado/README.md b/applications/nublado/README.md index 69b6fe4279..1ecc7508f2 100644 --- a/applications/nublado/README.md +++ b/applications/nublado/README.md @@ -90,7 +90,7 @@ JupyterHub and custom spawner for the Rubin Science Platform | jupyterhub.cull.maxAge | int | 2160000 (25 days) | Maximum age of a lab regardless of activity | | jupyterhub.cull.removeNamedServers | bool | `true` | Whether to remove named servers when culling their lab | | jupyterhub.cull.timeout | int | 432000 (5 days) | Default idle timeout before the lab is automatically deleted in seconds | -| jupyterhub.cull.users | bool | `true` | Whether to log out the server when culling their lab | +| jupyterhub.cull.users | bool | `false` | Whether to log out the user (from JupyterHub) when culling their lab | | jupyterhub.hub.authenticatePrometheus | bool | `false` | Whether to require metrics requests to be authenticated | | jupyterhub.hub.baseUrl | string | `"/nb"` | 
Base URL on which JupyterHub listens | | jupyterhub.hub.containerSecurityContext | object | `{"allowPrivilegeEscalation":false,"runAsGroup":768,"runAsUser":768}` | Security context for JupyterHub container | diff --git a/applications/nublado/values.yaml b/applications/nublado/values.yaml index 433bd6c695..79049d8170 100644 --- a/applications/nublado/values.yaml +++ b/applications/nublado/values.yaml @@ -539,8 +539,8 @@ jupyterhub: # @default -- 300 (5 minutes) every: 300 - # -- Whether to log out the server when culling their lab - users: true + # -- Whether to log out the user (from JupyterHub) when culling their lab + users: false # -- Whether to remove named servers when culling their lab removeNamedServers: true From f721444caf7749aa55bf85b3abd9193d51e56cb6 Mon Sep 17 00:00:00 2001 From: adam Date: Tue, 19 Nov 2024 13:01:26 -0700 Subject: [PATCH 493/567] Bump Nublado version --- applications/nublado/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/nublado/Chart.yaml b/applications/nublado/Chart.yaml index 430008b4fc..ffffd4b4fe 100644 --- a/applications/nublado/Chart.yaml +++ b/applications/nublado/Chart.yaml @@ -5,7 +5,7 @@ description: JupyterHub and custom spawner for the Rubin Science Platform sources: - https://github.com/lsst-sqre/nublado home: https://nublado.lsst.io/ -appVersion: 7.2.0 +appVersion: 8.0.3 dependencies: - name: jupyterhub From 2f7af952e880811ddaf78086f17ff9ba0b5eebb5 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Tue, 19 Nov 2024 14:26:28 -0700 Subject: [PATCH 494/567] Add topic creation to ts-salkafka user in strimzi-kafka chart. 
--- applications/sasquatch/README.md | 1 + .../sasquatch/charts/strimzi-kafka/README.md | 1 + .../charts/strimzi-kafka/templates/topics.yaml | 15 +++++++++++++++ .../sasquatch/charts/strimzi-kafka/values.yaml | 3 +++ 4 files changed, 20 insertions(+) create mode 100644 applications/sasquatch/charts/strimzi-kafka/templates/topics.yaml diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index a3a058d559..148e7b4ed9 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -419,6 +419,7 @@ Rubin Observatory's telemetry service | strimzi-kafka.users.replicator.enabled | bool | `false` | Enable user replicator (used by Mirror Maker 2 and required at both source and target clusters) | | strimzi-kafka.users.telegraf.enabled | bool | `false` | Enable user telegraf (deployed by parent Sasquatch chart) | | strimzi-kafka.users.tsSalKafka.enabled | bool | `false` | Enable user ts-salkafka, used at the telescope environments | +| strimzi-kafka.users.tsSalKafka.topics | list | `[]` | Create topics for the ts-salkafka user not controlled by CSCs | | telegraf-kafka-consumer.affinity | object | `{}` | Affinity for pod assignment | | telegraf-kafka-consumer.args | list | `[]` | Arguments passed to the Telegraf agent containers | | telegraf-kafka-consumer.enabled | bool | `false` | Wether the Telegraf Kafka Consumer is enabled | diff --git a/applications/sasquatch/charts/strimzi-kafka/README.md b/applications/sasquatch/charts/strimzi-kafka/README.md index 10f3965922..7fbeddcc65 100644 --- a/applications/sasquatch/charts/strimzi-kafka/README.md +++ b/applications/sasquatch/charts/strimzi-kafka/README.md @@ -72,3 +72,4 @@ A subchart to deploy Strimzi Kafka components for Sasquatch. 
| users.replicator.enabled | bool | `false` | Enable user replicator (used by Mirror Maker 2 and required at both source and target clusters) | | users.telegraf.enabled | bool | `false` | Enable user telegraf (deployed by parent Sasquatch chart) | | users.tsSalKafka.enabled | bool | `false` | Enable user ts-salkafka, used at the telescope environments | +| users.tsSalKafka.topics | list | `[]` | Create topics for the ts-salkafka user not controlled by CSCs | diff --git a/applications/sasquatch/charts/strimzi-kafka/templates/topics.yaml b/applications/sasquatch/charts/strimzi-kafka/templates/topics.yaml new file mode 100644 index 0000000000..4c323af95b --- /dev/null +++ b/applications/sasquatch/charts/strimzi-kafka/templates/topics.yaml @@ -0,0 +1,15 @@ +{{- if .Values.users.tsSalKafka.enabled }} +{{- $cluster := .Values.cluster.name }} +{{- range $topic := .Values.users.tsSalKafka.topics }} +--- +apiVersion: kafka.strimzi.io/v1beta1 +kind: KafkaTopic +metadata: + name: {{ $topic }} + labels: + strimzi.io/cluster: {{ $cluster }} +spec: + replicas: 3 + partitions: 1 +{{- end }} +{{- end }} diff --git a/applications/sasquatch/charts/strimzi-kafka/values.yaml b/applications/sasquatch/charts/strimzi-kafka/values.yaml index 8f0eab97bf..47ded52266 100644 --- a/applications/sasquatch/charts/strimzi-kafka/values.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/values.yaml @@ -263,6 +263,9 @@ users: # -- Enable user ts-salkafka, used at the telescope environments enabled: false + # -- Create topics for the ts-salkafka user not controlled by CSCs + topics: [] + kafdrop: # -- Enable user Kafdrop (deployed by parent Sasquatch chart). enabled: false From be5f538788b43a188d9631a5723eab69cf15b7c5 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Tue, 19 Nov 2024 14:29:15 -0700 Subject: [PATCH 495/567] Add ts-salkafka topics to base. 
--- applications/sasquatch/values-base.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index d34b7659b3..142a86e212 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -82,6 +82,9 @@ strimzi-kafka: enabled: true tsSalKafka: enabled: true + topics: + - lsst.s3.raw.latiss + - lsst.s3.raw.lsstcam kafdrop: enabled: true telegraf: From 7e224a5117bba6645dbe377c1269350b37917f90 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Tue, 19 Nov 2024 14:51:40 -0800 Subject: [PATCH 496/567] Update Prompt Processing to 4.8.2. --- .../values-usdfprod-prompt-processing.yaml | 2 +- .../values-usdfprod-prompt-processing.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index 8f1f5bfe48..8325a34a2a 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.8.1 + tag: 4.8.2 instrument: pipelines: diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index d10f3bc4cf..7a6a4ee05a 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -15,7 +15,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. 
- tag: 4.8.1 + tag: 4.8.2 instrument: pipelines: From 73193cd81e3b33e1f4d88f644546afe24515288d Mon Sep 17 00:00:00 2001 From: Hsin-Fang Chiang Date: Tue, 19 Nov 2024 14:07:38 -0800 Subject: [PATCH 497/567] Update the ComCam skymap to lsst_cells_v1 --- applications/prompt-proto-service-lsstcomcam/README.md | 2 +- applications/prompt-proto-service-lsstcomcam/values.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/prompt-proto-service-lsstcomcam/README.md b/applications/prompt-proto-service-lsstcomcam/README.md index 81edb0b954..f4334af2ea 100644 --- a/applications/prompt-proto-service-lsstcomcam/README.md +++ b/applications/prompt-proto-service-lsstcomcam/README.md @@ -34,7 +34,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | -| prompt-proto-service.instrument.skymap | string | `"ops_rehersal_prep_2k_v1"` | Skymap to use with the instrument | +| prompt-proto-service.instrument.skymap | string | `"lsst_cells_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). 
| | prompt-proto-service.knative.ephemeralStorageLimit | string | `"5Gi"` | The maximum storage space allowed for each container (mostly local Butler). This allocation is for the full pod (see `containerConcurrency`) | diff --git a/applications/prompt-proto-service-lsstcomcam/values.yaml b/applications/prompt-proto-service-lsstcomcam/values.yaml index a6116e6e11..85b421ac96 100644 --- a/applications/prompt-proto-service-lsstcomcam/values.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values.yaml @@ -49,7 +49,7 @@ prompt-proto-service: # @default -- None, must be set preprocessing: "" # -- Skymap to use with the instrument - skymap: "ops_rehersal_prep_2k_v1" + skymap: "lsst_cells_v1" # -- Number of arcseconds to pad the spatial region in preloading. preloadPadding: 30 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. From e0c84f1381ccd45d32222147924b768937dbe993 Mon Sep 17 00:00:00 2001 From: Hsin-Fang Chiang Date: Mon, 18 Nov 2024 16:15:41 -0800 Subject: [PATCH 498/567] Update the nextVisit handling in LSSTComCam Prompt Processing BLOCK-320 is the block to perform the observations for the individual visits that are queued up by the Feature Based Scheduler. BLOCK-320 is expected to be the standard json block for taking science images with ComCam going forward. Ignore all T blocks except BLOCK-T246 for now. 
--- .../values-usdfprod-prompt-processing.yaml | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index 7a6a4ee05a..6e3885b223 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -22,25 +22,37 @@ prompt-proto-service: # BLOCK-T60 is optics alignment # BLOCK-T75 is giant donuts # BLOCK-T88 is optics alignment + # BLOCK-T215 is evening twilight flats + # BLOCK-T216 is morning twilight flats + # BLOCK-T219 is pretty picture # BLOCK-T246 is instrument checkout # BLOCK-T249 is AOS alignment + # BLOCK-T250 is TMA daytime checkout main: >- - (survey="PP-SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml, + (survey="BLOCK-320")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/SingleFrame.yaml, ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr.yaml] - (survey="BLOCK-T60")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] + (survey="BLOCK-T60")=[] (survey="BLOCK-T75")=[] - (survey="BLOCK-T88")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] + (survey="BLOCK-T88")=[] + (survey="BLOCK-T215")=[] + (survey="BLOCK-T216")=[] + (survey="BLOCK-T219")=[] (survey="BLOCK-T246")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] - (survey="BLOCK-T249")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] + (survey="BLOCK-T249")=[] + (survey="BLOCK-T250")=[] (survey="")=[] preprocessing: >- - (survey="PP-SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml] + (survey="BLOCK-320")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml] (survey="BLOCK-T60")=[] (survey="BLOCK-T75")=[] 
(survey="BLOCK-T88")=[] + (survey="BLOCK-T215")=[] + (survey="BLOCK-T216")=[] + (survey="BLOCK-T219")=[] (survey="BLOCK-T246")=[] (survey="BLOCK-T249")=[] + (survey="BLOCK-T250")=[] (survey="")=[] calibRepo: s3://rubin-summit-users From 9f7625fdfed971ce4b4aa5aa3b1c60386a5d5867 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Tue, 19 Nov 2024 11:46:04 -0800 Subject: [PATCH 499/567] Fix style in Next Visit Fan Out template. --- applications/next-visit-fan-out/templates/deployment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/next-visit-fan-out/templates/deployment.yaml b/applications/next-visit-fan-out/templates/deployment.yaml index 2ec0ec5307..39416d74e2 100644 --- a/applications/next-visit-fan-out/templates/deployment.yaml +++ b/applications/next-visit-fan-out/templates/deployment.yaml @@ -52,7 +52,7 @@ spec: key: kafka_pp_sasl_password name: {{ template "next-visit-fan-out.fullname" . }}-secret - name: DEBUG_LOGS - value: {{ ternary "true" "false" .Values.debug | quote}} + value: {{ ternary "true" "false" .Values.debug | quote }} - name: SUPPORTED_INSTRUMENTS value: {{ .Values.instruments }} - name: INSTRUMENT_CONFIG_FILE From 1c2cbc717f38c4caab42668970713c56351dd27b Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Tue, 19 Nov 2024 11:48:40 -0800 Subject: [PATCH 500/567] Add retry option to Next Visit Fan Out. The flag (on by default) controls whether Fan Out retries certain requests (currently HTTP 503), or treats them as failed. 
--- applications/next-visit-fan-out/README.md | 1 + applications/next-visit-fan-out/templates/deployment.yaml | 2 ++ .../next-visit-fan-out/values-usdfdev-prompt-processing.yaml | 1 + applications/next-visit-fan-out/values.yaml | 2 ++ 4 files changed, 6 insertions(+) diff --git a/applications/next-visit-fan-out/README.md b/applications/next-visit-fan-out/README.md index c9d89654df..5f48aa4d07 100644 --- a/applications/next-visit-fan-out/README.md +++ b/applications/next-visit-fan-out/README.md @@ -22,6 +22,7 @@ Poll next visit events from Kafka, duplicate them, and send them to all applicat | kafka.saslMechamism | string | `"SCRAM-SHA-512"` | | | kafka.securityProtocol | string | `"SASL_PLAINTEXT"` | | | knative.maxMessages | string | None, must be set. | The maximum number of messages that can be forwarded to all Knative instances combined. | +| knative.retryRequests | bool | `true` | Whether or not to retry requests that returned a suitable response. | | knative.urls | object | See `values.yaml`. | A mapping of instrument to that instrument's Knative service. 
| | nameOverride | string | `""` | | | nodeSelector | object | `{}` | Node selection rules for the next-visit-fan-out deployment pod | diff --git a/applications/next-visit-fan-out/templates/deployment.yaml b/applications/next-visit-fan-out/templates/deployment.yaml index 39416d74e2..499ffd23b5 100644 --- a/applications/next-visit-fan-out/templates/deployment.yaml +++ b/applications/next-visit-fan-out/templates/deployment.yaml @@ -41,6 +41,8 @@ spec: value: {{ .Values.kafka.securityProtocol }} - name: MAX_FAN_OUT_MESSAGES value: {{ .Values.knative.maxMessages | toString | quote }} + - name: RETRY_KNATIVE_REQUESTS + value: {{ ternary "true" "false" .Values.knative.retryRequests | quote }} - name: SASL_USERNAME valueFrom: secretKeyRef: diff --git a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml index e2ac0f59a6..ca025d6fc4 100644 --- a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml @@ -1,5 +1,6 @@ knative: maxMessages: 150 + retryRequests: false kafka: schemaRegistryUrl: http://10.96.181.159:8081 diff --git a/applications/next-visit-fan-out/values.yaml b/applications/next-visit-fan-out/values.yaml index 750b483c14..dc95396e8c 100644 --- a/applications/next-visit-fan-out/values.yaml +++ b/applications/next-visit-fan-out/values.yaml @@ -10,6 +10,8 @@ knative: # -- The maximum number of messages that can be forwarded to all Knative instances combined. # @default -- None, must be set. maxMessages: "" + # -- Whether or not to retry requests that returned a suitable response. + retryRequests: true kafka: offset: latest From d16c49a0fcb8dee3e152f1a7a7867c42f94878eb Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Tue, 19 Nov 2024 16:52:38 -0800 Subject: [PATCH 501/567] Update Next Visit Fan Out to 2.5.0. 
--- .../next-visit-fan-out/values-usdfdev-prompt-processing.yaml | 2 +- .../next-visit-fan-out/values-usdfprod-prompt-processing.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml index ca025d6fc4..fc97581a64 100644 --- a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml @@ -14,6 +14,6 @@ image: repository: ghcr.io/lsst-dm/next_visit_fan_out pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 2.4.0 + tag: 2.5.0 instruments: "LATISS LSSTComCam LSSTComCamSim HSC" diff --git a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml index 73bcb8144d..75a9eccb55 100644 --- a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml @@ -11,6 +11,6 @@ image: repository: ghcr.io/lsst-dm/next_visit_fan_out pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 2.4.0 + tag: 2.5.0 instruments: "LATISS LSSTComCam" From 50348eb9ff1136855339e5a4435c288d24fd0e56 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Tue, 19 Nov 2024 17:20:21 -0800 Subject: [PATCH 502/567] Update Gafaelfawr to 12.1.1 Update the Gafaelfawr release to 12.1.1, which fixes doubled slashes in the OpenID Connect configuration endpoint. Note that this version of Gafaelfawr no longer supports direct upgrades from versions prior to 10.0.0. Upgrade to an earlier version first, complete the schema migration, and then upgrade to the latest version. 
--- applications/gafaelfawr/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/gafaelfawr/Chart.yaml b/applications/gafaelfawr/Chart.yaml index 0b6a7b9af0..c2b7e19a1e 100644 --- a/applications/gafaelfawr/Chart.yaml +++ b/applications/gafaelfawr/Chart.yaml @@ -5,7 +5,7 @@ description: "Authentication and identity system" home: "https://gafaelfawr.lsst.io/" sources: - "https://github.com/lsst-sqre/gafaelfawr" -appVersion: 12.1.0 +appVersion: 12.1.1 dependencies: - name: "redis" From 346ae0f3b5e34312ad9cf62a6329e3f5d000b647 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Wed, 20 Nov 2024 12:01:30 -0300 Subject: [PATCH 503/567] Add secrets for rubintv on summit. --- applications/rubintv/secrets-summit.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 applications/rubintv/secrets-summit.yaml diff --git a/applications/rubintv/secrets-summit.yaml b/applications/rubintv/secrets-summit.yaml new file mode 100644 index 0000000000..e059b139ff --- /dev/null +++ b/applications/rubintv/secrets-summit.yaml @@ -0,0 +1,5 @@ +"postgres-credentials.txt": + description: >- + PostgreSQL credentials in its pgpass format for the Butler and Exposurelog database. + onepassword: + encoded: true From e684503955db5230153c96a728e8f4cc80d3de77 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Sat, 16 Nov 2024 18:38:53 -0800 Subject: [PATCH 504/567] Add s3proxy application. 
--- applications/s3proxy/.helmignore | 23 +++++ applications/s3proxy/Chart.yaml | 8 ++ applications/s3proxy/README.md | 30 +++++++ applications/s3proxy/secrets.yaml | 8 ++ applications/s3proxy/templates/_helpers.tpl | 26 ++++++ applications/s3proxy/templates/configmap.yaml | 14 +++ .../s3proxy/templates/deployment.yaml | 89 +++++++++++++++++++ applications/s3proxy/templates/ingress.yaml | 34 +++++++ .../s3proxy/templates/networkpolicy.yaml | 21 +++++ applications/s3proxy/templates/service.yaml | 15 ++++ .../s3proxy/templates/vault-secret.yaml | 9 ++ applications/s3proxy/values-usdfdev.yaml | 16 ++++ applications/s3proxy/values.yaml | 69 ++++++++++++++ docs/applications/rubin.rst | 1 + docs/applications/s3proxy/index.rst | 19 ++++ docs/applications/s3proxy/values.md | 12 +++ environments/README.md | 1 + .../templates/applications/rubin/s3proxy.yaml | 34 +++++++ environments/values-usdfdev.yaml | 1 + environments/values.yaml | 3 + 20 files changed, 433 insertions(+) create mode 100644 applications/s3proxy/.helmignore create mode 100644 applications/s3proxy/Chart.yaml create mode 100644 applications/s3proxy/README.md create mode 100644 applications/s3proxy/secrets.yaml create mode 100644 applications/s3proxy/templates/_helpers.tpl create mode 100644 applications/s3proxy/templates/configmap.yaml create mode 100644 applications/s3proxy/templates/deployment.yaml create mode 100644 applications/s3proxy/templates/ingress.yaml create mode 100644 applications/s3proxy/templates/networkpolicy.yaml create mode 100644 applications/s3proxy/templates/service.yaml create mode 100644 applications/s3proxy/templates/vault-secret.yaml create mode 100644 applications/s3proxy/values-usdfdev.yaml create mode 100644 applications/s3proxy/values.yaml create mode 100644 docs/applications/s3proxy/index.rst create mode 100644 docs/applications/s3proxy/values.md create mode 100644 environments/templates/applications/rubin/s3proxy.yaml diff --git a/applications/s3proxy/.helmignore 
b/applications/s3proxy/.helmignore new file mode 100644 index 0000000000..0e8a0eb36f --- /dev/null +++ b/applications/s3proxy/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/applications/s3proxy/Chart.yaml b/applications/s3proxy/Chart.yaml new file mode 100644 index 0000000000..53fe481e1c --- /dev/null +++ b/applications/s3proxy/Chart.yaml @@ -0,0 +1,8 @@ +apiVersion: v2 +appVersion: 0.1.0 +description: Simple application to gateway S3 URLs to HTTPS +name: s3proxy +sources: +- https://github.com/lsst-dm/s3proxy +type: application +version: 1.0.0 diff --git a/applications/s3proxy/README.md b/applications/s3proxy/README.md new file mode 100644 index 0000000000..d8c3df29e6 --- /dev/null +++ b/applications/s3proxy/README.md @@ -0,0 +1,30 @@ +# s3proxy + +Simple application to gateway S3 URLs to HTTPS + +## Source Code + +* + +## Values + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| affinity | object | `{}` | Affinity rules for the s3proxy deployment pod | +| config.logLevel | string | `"INFO"` | Logging level | +| config.logProfile | string | `"production"` | Logging profile (`production` for JSON, `development` for human-friendly) | +| config.pathPrefix | string | `"/s3proxy"` | URL path prefix | +| config.profiles | list | `[]` | Profiles using different endpoint URLs and credentials | +| config.s3EndpointUrl | string | `""` | Default S3 endpoint URL | +| global.baseUrl | string | Set by Argo CD | Base URL for the environment | +| global.host | string | Set by Argo CD | Host name for ingress | +| global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault 
secrets | +| image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the s3proxy image | +| image.repository | string | `"ghcr.io/lsst-sqre/s3proxy"` | Image to use in the s3proxy deployment | +| image.tag | string | The appVersion of the chart | Tag of image to use | +| ingress.annotations | object | `{}` | Additional annotations for the ingress rule | +| nodeSelector | object | `{}` | Node selection rules for the s3proxy deployment pod | +| podAnnotations | object | `{}` | Annotations for the s3proxy deployment pod | +| replicaCount | int | `1` | Number of web deployment pods to start | +| resources | object | See `values.yaml` | Resource limits and requests for the s3proxy deployment pod | +| tolerations | list | `[]` | Tolerations for the s3proxy deployment pod | diff --git a/applications/s3proxy/secrets.yaml b/applications/s3proxy/secrets.yaml new file mode 100644 index 0000000000..fd1f46e6ee --- /dev/null +++ b/applications/s3proxy/secrets.yaml @@ -0,0 +1,8 @@ +"aws-credentials.ini": + description: | + S3 credentials in AWS INI format. + Each section corresponds to a profile. + Each section contains an aws_access_key_id and an aws_secret_access_key. + copy: + application: nublado + key: "aws-credentials.ini" diff --git a/applications/s3proxy/templates/_helpers.tpl b/applications/s3proxy/templates/_helpers.tpl new file mode 100644 index 0000000000..67e025f3fc --- /dev/null +++ b/applications/s3proxy/templates/_helpers.tpl @@ -0,0 +1,26 @@ +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "s3proxy.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "s3proxy.labels" -}} +helm.sh/chart: {{ include "s3proxy.chart" . }} +{{ include "s3proxy.selectorLabels" . 
}} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "s3proxy.selectorLabels" -}} +app.kubernetes.io/name: "s3proxy" +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} diff --git a/applications/s3proxy/templates/configmap.yaml b/applications/s3proxy/templates/configmap.yaml new file mode 100644 index 0000000000..261cbd8bf3 --- /dev/null +++ b/applications/s3proxy/templates/configmap.yaml @@ -0,0 +1,14 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: "s3proxy" + labels: + {{- include "s3proxy.labels" . | nindent 4 }} +data: + S3PROXY_LOG_LEVEL: {{ .Values.config.logLevel | quote }} + S3PROXY_PATH_PREFIX: {{ .Values.config.pathPrefix | quote }} + S3PROXY_PROFILE: {{ .Values.config.logProfile | quote }} + S3_ENDPOINT_URL: {{ .Values.config.s3EndpointUrl | quote }} + {{- range .Values.config.profiles }} + LSST_RESOURCES_S3_PROFILE_{{ .name | quote }}: {{ .url | quote }} + {{- end }} diff --git a/applications/s3proxy/templates/deployment.yaml b/applications/s3proxy/templates/deployment.yaml new file mode 100644 index 0000000000..0a93824c54 --- /dev/null +++ b/applications/s3proxy/templates/deployment.yaml @@ -0,0 +1,89 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: "s3proxy" + labels: + {{- include "s3proxy.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: + {{- include "s3proxy.selectorLabels" . | nindent 6 }} + template: + metadata: + annotations: + checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} + {{- with .Values.podAnnotations }} + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "s3proxy.selectorLabels" . | nindent 8 }} + spec: + {{- with .Values.affinity }} + affinity: + {{- toYaml . 
| nindent 8 }} + {{- end }} + automountServiceAccountToken: false + containers: + - name: {{ .Chart.Name }} + env: + - name: AWS_SHARED_CREDENTIALS_FILE + value: /pod-secrets/aws-credentials.ini + envFrom: + - configMapRef: + name: "s3proxy" + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.image.pullPolicy }} + ports: + - name: "http" + containerPort: 8080 + protocol: "TCP" + readinessProbe: + httpGet: + path: "/" + port: "http" + resources: + {{- toYaml .Values.resources | nindent 12 }} + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - "all" + readOnlyRootFilesystem: true + volumes: + - name: pod-secrets + mountPath: /pod-secrets + initContainers: + - name: secret-setup + command: + - /bin/ash + - "-c" + - | + cp -R /secrets /pod-secrets + chmod -R go-rwx /pod-secrets + image: alpine + volumeMounts: + - name: secret-volume + mountPath: /secrets + readOnly: true + - name: pod-secrets + mountPath: /pod-secrets + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} + securityContext: + runAsNonRoot: true + runAsUser: 1000 + runAsGroup: 1000 + volumes: + - name: pod-secrets + emptyDir: + sizeLimit: 1Mi + - name: secret-volume + secret: + secretName: "aws-credentials.ini" diff --git a/applications/s3proxy/templates/ingress.yaml b/applications/s3proxy/templates/ingress.yaml new file mode 100644 index 0000000000..61ce54a2ff --- /dev/null +++ b/applications/s3proxy/templates/ingress.yaml @@ -0,0 +1,34 @@ +apiVersion: gafaelfawr.lsst.io/v1alpha1 +kind: GafaelfawrIngress +metadata: + name: "s3proxy" + labels: + {{- include "s3proxy.labels" . 
| nindent 4 }} +config: + authCacheDuration: 5m + baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true + onlyServices: + - portal + scopes: + all: + - "read:image" +template: + metadata: + name: "s3proxy" + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . | nindent 6 }} + {{- end }} + spec: + rules: + - host: {{ required "global.host must be set" .Values.global.host | quote }} + http: + paths: + - path: {{ .Values.config.pathPrefix | quote }} + pathType: "Prefix" + backend: + service: + name: "s3proxy" + port: + number: 8080 diff --git a/applications/s3proxy/templates/networkpolicy.yaml b/applications/s3proxy/templates/networkpolicy.yaml new file mode 100644 index 0000000000..70d2c1e80d --- /dev/null +++ b/applications/s3proxy/templates/networkpolicy.yaml @@ -0,0 +1,21 @@ +apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: "s3proxy" +spec: + podSelector: + matchLabels: + {{- include "s3proxy.selectorLabels" . | nindent 6 }} + policyTypes: + - "Ingress" + ingress: + # Allow inbound access from pods (in any namespace) labeled + # gafaelfawr.lsst.io/ingress: true. + - from: + - namespaceSelector: {} + podSelector: + matchLabels: + gafaelfawr.lsst.io/ingress: "true" + ports: + - protocol: "TCP" + port: 8080 diff --git a/applications/s3proxy/templates/service.yaml b/applications/s3proxy/templates/service.yaml new file mode 100644 index 0000000000..ffe65b1110 --- /dev/null +++ b/applications/s3proxy/templates/service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: "s3proxy" + labels: + {{- include "s3proxy.labels" . | nindent 4 }} +spec: + type: "ClusterIP" + ports: + - port: 8080 + targetPort: "http" + protocol: "TCP" + name: "http" + selector: + {{- include "s3proxy.selectorLabels" . 
| nindent 4 }} diff --git a/applications/s3proxy/templates/vault-secret.yaml b/applications/s3proxy/templates/vault-secret.yaml new file mode 100644 index 0000000000..f1db384244 --- /dev/null +++ b/applications/s3proxy/templates/vault-secret.yaml @@ -0,0 +1,9 @@ +apiVersion: ricoberger.de/v1alpha1 +kind: VaultSecret +metadata: + name: "s3proxy" + labels: + {{- include "s3proxy.labels" . | nindent 4 }} +spec: + path: "{{ .Values.global.vaultSecretsPath }}/s3proxy" + type: Opaque diff --git a/applications/s3proxy/values-usdfdev.yaml b/applications/s3proxy/values-usdfdev.yaml new file mode 100644 index 0000000000..783b055acc --- /dev/null +++ b/applications/s3proxy/values-usdfdev.yaml @@ -0,0 +1,16 @@ +image: + tag: tickets/DM-47606 + +config: + profiles: + - name: embargo + url: "https://sdfembs3.sdf.slac.stanford.edu/" + s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu/" + +resources: + limits: + compute: 500m + memory: 1Gi + requests: + compute: 200m + memory: 100Mi diff --git a/applications/s3proxy/values.yaml b/applications/s3proxy/values.yaml new file mode 100644 index 0000000000..ed8ba92582 --- /dev/null +++ b/applications/s3proxy/values.yaml @@ -0,0 +1,69 @@ +# Default values for s3proxy. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. 
+ +# -- Number of web deployment pods to start +replicaCount: 1 + +image: + # -- Image to use in the s3proxy deployment + repository: "ghcr.io/lsst-sqre/s3proxy" + + # -- Pull policy for the s3proxy image + pullPolicy: "IfNotPresent" + + # -- Tag of image to use + # @default -- The appVersion of the chart + tag: 0.1.0 + +config: + # -- Logging level + logLevel: "INFO" + + # -- Logging profile (`production` for JSON, `development` for + # human-friendly) + logProfile: "production" + + # -- URL path prefix + pathPrefix: "/s3proxy" + + # -- Profiles using different endpoint URLs and credentials + profiles: [] + + # -- Default S3 endpoint URL + s3EndpointUrl: "" + +ingress: + # -- Additional annotations for the ingress rule + annotations: {} + +# -- Affinity rules for the s3proxy deployment pod +affinity: {} + +# -- Node selection rules for the s3proxy deployment pod +nodeSelector: {} + +# -- Annotations for the s3proxy deployment pod +podAnnotations: {} + +# -- Resource limits and requests for the s3proxy deployment pod +# @default -- See `values.yaml` +resources: {} + +# -- Tolerations for the s3proxy deployment pod +tolerations: [] + +# The following will be set by parameters injected by Argo CD and should not +# be set in the individual environment values files. 
+global: + # -- Base URL for the environment + # @default -- Set by Argo CD + baseUrl: null + + # -- Host name for ingress + # @default -- Set by Argo CD + host: null + + # -- Base path for Vault secrets + # @default -- Set by Argo CD + vaultSecretsPath: null diff --git a/docs/applications/rubin.rst b/docs/applications/rubin.rst index 483f483438..ebd6ab8be3 100644 --- a/docs/applications/rubin.rst +++ b/docs/applications/rubin.rst @@ -23,4 +23,5 @@ Argo CD project: ``rubin`` rapid-analysis/index rubintv/index rubintv-dev/index + s3proxy/index schedview-snapshot/index diff --git a/docs/applications/s3proxy/index.rst b/docs/applications/s3proxy/index.rst new file mode 100644 index 0000000000..7ed9bd8ccc --- /dev/null +++ b/docs/applications/s3proxy/index.rst @@ -0,0 +1,19 @@ +.. px-app:: s3proxy + +######################################################## +s3proxy — Simple application to gateway S3 URLs to HTTPS +######################################################## + +This application provides authenticated internal links to S3 resources. +It is intended for deployment only at the USDF, but it could be used elsewhere. + +.. jinja:: s3proxy + :file: applications/_summary.rst.jinja + +Guides +====== + +.. toctree:: + :maxdepth: 1 + + values diff --git a/docs/applications/s3proxy/values.md b/docs/applications/s3proxy/values.md new file mode 100644 index 0000000000..1e546df95b --- /dev/null +++ b/docs/applications/s3proxy/values.md @@ -0,0 +1,12 @@ +```{px-app-values} s3proxy +``` + +# s3proxy Helm values reference + +Helm values reference table for the {px-app}`s3proxy` application. 
+ +```{include} ../../../applications/s3proxy/README.md +--- +start-after: "## Values" +--- +``` \ No newline at end of file diff --git a/environments/README.md b/environments/README.md index e92de4f65a..3fce8c9d31 100644 --- a/environments/README.md +++ b/environments/README.md @@ -53,6 +53,7 @@ | applications.prompt-proto-service-lsstcomcamsim | bool | `false` | Enable the prompt-proto-service-lsstcomcamsim application | | applications.rubintv | bool | `false` | Enable the rubintv application | | applications.rubintv-dev | bool | `false` | Enable the rubintv-dev application | +| applications.s3proxy | bool | `false` | Enable the s3proxy application | | applications.sasquatch | bool | `false` | Enable the sasquatch application | | applications.sasquatch-backpack | bool | `false` | Enable the sasquatch-backpack application | | applications.schedview-snapshot | bool | `false` | Enable the schedview-snapshot application | diff --git a/environments/templates/applications/rubin/s3proxy.yaml b/environments/templates/applications/rubin/s3proxy.yaml new file mode 100644 index 0000000000..1f33f79960 --- /dev/null +++ b/environments/templates/applications/rubin/s3proxy.yaml @@ -0,0 +1,34 @@ +{{- if (index .Values "applications" "s3proxy") -}} +apiVersion: v1 +kind: Namespace +metadata: + name: "s3proxy" +--- +apiVersion: argoproj.io/v1alpha1 +kind: Application +metadata: + name: "s3proxy" + namespace: "argocd" + finalizers: + - "resources-finalizer.argocd.argoproj.io" +spec: + destination: + namespace: "s3proxy" + server: "https://kubernetes.default.svc" + project: "rubin" + source: + path: "applications/s3proxy" + repoURL: {{ .Values.repoUrl | quote }} + targetRevision: {{ .Values.targetRevision | quote }} + helm: + parameters: + - name: "global.host" + value: {{ .Values.fqdn | quote }} + - name: "global.baseUrl" + value: "https://{{ .Values.fqdn }}" + - name: "global.vaultSecretsPath" + value: {{ .Values.vaultPathPrefix | quote }} + valueFiles: + - "values.yaml" + - 
"values-{{ .Values.name }}.yaml" +{{- end -}} \ No newline at end of file diff --git a/environments/values-usdfdev.yaml b/environments/values-usdfdev.yaml index d186cba8d2..ee2390b317 100644 --- a/environments/values-usdfdev.yaml +++ b/environments/values-usdfdev.yaml @@ -29,6 +29,7 @@ applications: postgres: true ppdb-replication: true rubintv: true + s3proxy: true sasquatch: true schedview-snapshot: true semaphore: true diff --git a/environments/values.yaml b/environments/values.yaml index b65965df03..25500c34d3 100644 --- a/environments/values.yaml +++ b/environments/values.yaml @@ -162,6 +162,9 @@ applications: # -- Enable the rubintv-dev application rubintv-dev: false + # -- Enable the s3proxy application + s3proxy: false + # -- Enable the sasquatch application sasquatch: false From 2d700a7e56b9c00c755eddeedbb9bb7033044a39 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Wed, 20 Nov 2024 12:34:25 -0800 Subject: [PATCH 505/567] Use tagged version. --- applications/s3proxy/Chart.yaml | 2 +- applications/s3proxy/values-usdfdev.yaml | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/applications/s3proxy/Chart.yaml b/applications/s3proxy/Chart.yaml index 53fe481e1c..a165868d44 100644 --- a/applications/s3proxy/Chart.yaml +++ b/applications/s3proxy/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: 0.1.0 +appVersion: 1.0.0 description: Simple application to gateway S3 URLs to HTTPS name: s3proxy sources: diff --git a/applications/s3proxy/values-usdfdev.yaml b/applications/s3proxy/values-usdfdev.yaml index 783b055acc..13cfe53903 100644 --- a/applications/s3proxy/values-usdfdev.yaml +++ b/applications/s3proxy/values-usdfdev.yaml @@ -1,6 +1,3 @@ -image: - tag: tickets/DM-47606 - config: profiles: - name: embargo From cd198addf97e0e53a9f02caf6fe813768bb9908f Mon Sep 17 00:00:00 2001 From: Hsin-Fang Chiang Date: Tue, 19 Nov 2024 14:02:31 -0800 Subject: [PATCH 506/567] Update the preload padding configs --- 
applications/prompt-proto-service-hsc-gpu/README.md | 2 +- applications/prompt-proto-service-hsc-gpu/values.yaml | 2 +- applications/prompt-proto-service-hsc/README.md | 2 +- applications/prompt-proto-service-hsc/values.yaml | 2 +- applications/prompt-proto-service-lsstcam/README.md | 2 +- applications/prompt-proto-service-lsstcam/values.yaml | 2 +- applications/prompt-proto-service-lsstcomcam/README.md | 2 +- applications/prompt-proto-service-lsstcomcam/values.yaml | 2 +- applications/prompt-proto-service-lsstcomcamsim/README.md | 2 +- applications/prompt-proto-service-lsstcomcamsim/values.yaml | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/applications/prompt-proto-service-hsc-gpu/README.md b/applications/prompt-proto-service-hsc-gpu/README.md index ac15e45b47..7f119bce01 100644 --- a/applications/prompt-proto-service-hsc-gpu/README.md +++ b/applications/prompt-proto-service-hsc-gpu/README.md @@ -33,7 +33,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.name | string | `"HSC"` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | -| prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | +| prompt-proto-service.instrument.preloadPadding | int | `42` | Number of arcseconds to pad the spatial region in preloading. 
| | prompt-proto-service.instrument.skymap | string | `"hsc_rings_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-hsc-gpu/values.yaml b/applications/prompt-proto-service-hsc-gpu/values.yaml index 9ccc7af9dd..4fbc4354e8 100644 --- a/applications/prompt-proto-service-hsc-gpu/values.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values.yaml @@ -48,7 +48,7 @@ prompt-proto-service: # -- Skymap to use with the instrument skymap: hsc_rings_v1 # -- Number of arcseconds to pad the spatial region in preloading. - preloadPadding: 30 + preloadPadding: 42 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. # @default -- None, must be set diff --git a/applications/prompt-proto-service-hsc/README.md b/applications/prompt-proto-service-hsc/README.md index ac633519c8..33a4a6d8b7 100644 --- a/applications/prompt-proto-service-hsc/README.md +++ b/applications/prompt-proto-service-hsc/README.md @@ -33,7 +33,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.name | string | `"HSC"` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. 
| | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | -| prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | +| prompt-proto-service.instrument.preloadPadding | int | `42` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"hsc_rings_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-hsc/values.yaml b/applications/prompt-proto-service-hsc/values.yaml index 46ac0611d7..f2e5eba4a8 100644 --- a/applications/prompt-proto-service-hsc/values.yaml +++ b/applications/prompt-proto-service-hsc/values.yaml @@ -48,7 +48,7 @@ prompt-proto-service: # -- Skymap to use with the instrument skymap: hsc_rings_v1 # -- Number of arcseconds to pad the spatial region in preloading. - preloadPadding: 30 + preloadPadding: 42 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. # @default -- None, must be set diff --git a/applications/prompt-proto-service-lsstcam/README.md b/applications/prompt-proto-service-lsstcam/README.md index e1071dcac0..ca502f5749 100644 --- a/applications/prompt-proto-service-lsstcam/README.md +++ b/applications/prompt-proto-service-lsstcam/README.md @@ -33,7 +33,7 @@ Prompt Proto Service is an event driven service for processing camera images. 
Th | prompt-proto-service.instrument.name | string | `""` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | -| prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | +| prompt-proto-service.instrument.preloadPadding | int | `50` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `""` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-lsstcam/values.yaml b/applications/prompt-proto-service-lsstcam/values.yaml index 4ff089ed5e..3eecc4bf32 100644 --- a/applications/prompt-proto-service-lsstcam/values.yaml +++ b/applications/prompt-proto-service-lsstcam/values.yaml @@ -48,7 +48,7 @@ prompt-proto-service: # -- Skymap to use with the instrument skymap: "" # -- Number of arcseconds to pad the spatial region in preloading. - preloadPadding: 30 + preloadPadding: 50 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. 
# @default -- None, must be set diff --git a/applications/prompt-proto-service-lsstcomcam/README.md b/applications/prompt-proto-service-lsstcomcam/README.md index f4334af2ea..bb28592707 100644 --- a/applications/prompt-proto-service-lsstcomcam/README.md +++ b/applications/prompt-proto-service-lsstcomcam/README.md @@ -33,7 +33,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.name | string | `"LSSTComCam"` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | -| prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | +| prompt-proto-service.instrument.preloadPadding | int | `50` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"lsst_cells_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). 
| diff --git a/applications/prompt-proto-service-lsstcomcam/values.yaml b/applications/prompt-proto-service-lsstcomcam/values.yaml index 85b421ac96..13339b8fb8 100644 --- a/applications/prompt-proto-service-lsstcomcam/values.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values.yaml @@ -51,7 +51,7 @@ prompt-proto-service: # -- Skymap to use with the instrument skymap: "lsst_cells_v1" # -- Number of arcseconds to pad the spatial region in preloading. - preloadPadding: 30 + preloadPadding: 50 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. # @default -- None, must be set diff --git a/applications/prompt-proto-service-lsstcomcamsim/README.md b/applications/prompt-proto-service-lsstcomcamsim/README.md index f2874d9eae..2dd70b63f0 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/README.md +++ b/applications/prompt-proto-service-lsstcomcamsim/README.md @@ -33,7 +33,7 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.instrument.name | string | `"LSSTComCamSim"` | The "short" name of the instrument | | prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | | prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | -| prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. 
| +| prompt-proto-service.instrument.preloadPadding | int | `50` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"ops_rehersal_prep_2k_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | | prompt-proto-service.knative.cpuRequest | int | `1` | The cpu cores requested for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-lsstcomcamsim/values.yaml b/applications/prompt-proto-service-lsstcomcamsim/values.yaml index 1f977a62bb..fc8dce6b9b 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values.yaml @@ -51,7 +51,7 @@ prompt-proto-service: # -- Skymap to use with the instrument skymap: ops_rehersal_prep_2k_v1 # -- Number of arcseconds to pad the spatial region in preloading. - preloadPadding: 30 + preloadPadding: 50 # -- URI to the shared repo used for calibrations, templates, and pipeline outputs. # If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. # @default -- None, must be set From f16fa67dc326e7e762c407a4c8aab8ede79b36c9 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Wed, 20 Nov 2024 14:58:30 -0700 Subject: [PATCH 507/567] Make doc string more specific. 
--- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/strimzi-kafka/README.md | 2 +- applications/sasquatch/charts/strimzi-kafka/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 148e7b4ed9..7ed157c4f2 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -419,7 +419,7 @@ Rubin Observatory's telemetry service | strimzi-kafka.users.replicator.enabled | bool | `false` | Enable user replicator (used by Mirror Maker 2 and required at both source and target clusters) | | strimzi-kafka.users.telegraf.enabled | bool | `false` | Enable user telegraf (deployed by parent Sasquatch chart) | | strimzi-kafka.users.tsSalKafka.enabled | bool | `false` | Enable user ts-salkafka, used at the telescope environments | -| strimzi-kafka.users.tsSalKafka.topics | list | `[]` | Create topics for the ts-salkafka user not controlled by CSCs | +| strimzi-kafka.users.tsSalKafka.topics | list | `[]` | Create lsst.s3.* related topics for the ts-salkafka user. | | telegraf-kafka-consumer.affinity | object | `{}` | Affinity for pod assignment | | telegraf-kafka-consumer.args | list | `[]` | Arguments passed to the Telegraf agent containers | | telegraf-kafka-consumer.enabled | bool | `false` | Wether the Telegraf Kafka Consumer is enabled | diff --git a/applications/sasquatch/charts/strimzi-kafka/README.md b/applications/sasquatch/charts/strimzi-kafka/README.md index 7fbeddcc65..bb6ee20e92 100644 --- a/applications/sasquatch/charts/strimzi-kafka/README.md +++ b/applications/sasquatch/charts/strimzi-kafka/README.md @@ -72,4 +72,4 @@ A subchart to deploy Strimzi Kafka components for Sasquatch. 
| users.replicator.enabled | bool | `false` | Enable user replicator (used by Mirror Maker 2 and required at both source and target clusters) | | users.telegraf.enabled | bool | `false` | Enable user telegraf (deployed by parent Sasquatch chart) | | users.tsSalKafka.enabled | bool | `false` | Enable user ts-salkafka, used at the telescope environments | -| users.tsSalKafka.topics | list | `[]` | Create topics for the ts-salkafka user not controlled by CSCs | +| users.tsSalKafka.topics | list | `[]` | Create lsst.s3.* related topics for the ts-salkafka user. | diff --git a/applications/sasquatch/charts/strimzi-kafka/values.yaml b/applications/sasquatch/charts/strimzi-kafka/values.yaml index 47ded52266..85abde504a 100644 --- a/applications/sasquatch/charts/strimzi-kafka/values.yaml +++ b/applications/sasquatch/charts/strimzi-kafka/values.yaml @@ -263,7 +263,7 @@ users: # -- Enable user ts-salkafka, used at the telescope environments enabled: false - # -- Create topics for the ts-salkafka user not controlled by CSCs + # -- Create lsst.s3.* related topics for the ts-salkafka user. topics: [] kafdrop: From 33ec664f471cde61e1fde6482a2ffabfed498109 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Wed, 20 Nov 2024 14:52:55 -0800 Subject: [PATCH 508/567] Remove quoting. 
--- applications/s3proxy/templates/configmap.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/s3proxy/templates/configmap.yaml b/applications/s3proxy/templates/configmap.yaml index 261cbd8bf3..7284a9598d 100644 --- a/applications/s3proxy/templates/configmap.yaml +++ b/applications/s3proxy/templates/configmap.yaml @@ -10,5 +10,5 @@ data: S3PROXY_PROFILE: {{ .Values.config.logProfile | quote }} S3_ENDPOINT_URL: {{ .Values.config.s3EndpointUrl | quote }} {{- range .Values.config.profiles }} - LSST_RESOURCES_S3_PROFILE_{{ .name | quote }}: {{ .url | quote }} + LSST_RESOURCES_S3_PROFILE_{{ .name }}: {{ .url | quote }} {{- end }} From 43ecca7bf38935ff206de9e6681586c6c8e4e88d Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Wed, 20 Nov 2024 14:54:55 -0800 Subject: [PATCH 509/567] Fix resources. --- applications/s3proxy/values-usdfdev.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/s3proxy/values-usdfdev.yaml b/applications/s3proxy/values-usdfdev.yaml index 13cfe53903..87d1d48df5 100644 --- a/applications/s3proxy/values-usdfdev.yaml +++ b/applications/s3proxy/values-usdfdev.yaml @@ -6,8 +6,8 @@ config: resources: limits: - compute: 500m + cpu: 500m memory: 1Gi requests: - compute: 200m + cpu: 200m memory: 100Mi From 7200734d0db2814ffdae4ca4013170a298e0b457 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Wed, 20 Nov 2024 15:24:51 -0800 Subject: [PATCH 510/567] Fix volumes. 
--- applications/s3proxy/templates/deployment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/s3proxy/templates/deployment.yaml b/applications/s3proxy/templates/deployment.yaml index 0a93824c54..b3630f7b9c 100644 --- a/applications/s3proxy/templates/deployment.yaml +++ b/applications/s3proxy/templates/deployment.yaml @@ -50,7 +50,7 @@ spec: drop: - "all" readOnlyRootFilesystem: true - volumes: + volumeMounts: - name: pod-secrets mountPath: /pod-secrets initContainers: From 2a5b1d8a90439bb6629bffa5b6e2769ac4e90336 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Wed, 20 Nov 2024 15:31:16 -0800 Subject: [PATCH 511/567] Fix secret. --- applications/s3proxy/templates/deployment.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/applications/s3proxy/templates/deployment.yaml b/applications/s3proxy/templates/deployment.yaml index b3630f7b9c..35b5856c22 100644 --- a/applications/s3proxy/templates/deployment.yaml +++ b/applications/s3proxy/templates/deployment.yaml @@ -28,7 +28,7 @@ spec: - name: {{ .Chart.Name }} env: - name: AWS_SHARED_CREDENTIALS_FILE - value: /pod-secrets/aws-credentials.ini + value: /pod-secrets/secrets/aws-credentials.ini envFrom: - configMapRef: name: "s3proxy" @@ -86,4 +86,5 @@ spec: sizeLimit: 1Mi - name: secret-volume secret: - secretName: "aws-credentials.ini" + secretName: "s3proxy" + secretKey: "aws-credentials.ini" From c5c3da1930482a266b5a91faa067d9021ff47a59 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Wed, 20 Nov 2024 15:34:05 -0800 Subject: [PATCH 512/567] Fix command. 
--- applications/s3proxy/templates/deployment.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/s3proxy/templates/deployment.yaml b/applications/s3proxy/templates/deployment.yaml index 35b5856c22..d96e861819 100644 --- a/applications/s3proxy/templates/deployment.yaml +++ b/applications/s3proxy/templates/deployment.yaml @@ -59,8 +59,8 @@ spec: - /bin/ash - "-c" - | - cp -R /secrets /pod-secrets - chmod -R go-rwx /pod-secrets + cp -L -r /secrets /pod-secrets + chmod -R go-rwx /pod-secrets/* image: alpine volumeMounts: - name: secret-volume From ef2f4a077f198af58b4763897e2d9464d1322861 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Wed, 20 Nov 2024 15:40:38 -0800 Subject: [PATCH 513/567] Don't override app version. --- applications/s3proxy/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/s3proxy/values.yaml b/applications/s3proxy/values.yaml index ed8ba92582..4c855a038b 100644 --- a/applications/s3proxy/values.yaml +++ b/applications/s3proxy/values.yaml @@ -14,7 +14,7 @@ image: # -- Tag of image to use # @default -- The appVersion of the chart - tag: 0.1.0 + tag: "" config: # -- Logging level From 70a011ba6300a5efac29c7c2bf31935c52dcac33 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Wed, 20 Nov 2024 15:41:55 -0800 Subject: [PATCH 514/567] Fix org. 
--- applications/s3proxy/README.md | 2 +- applications/s3proxy/values.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/s3proxy/README.md b/applications/s3proxy/README.md index d8c3df29e6..aace497a8f 100644 --- a/applications/s3proxy/README.md +++ b/applications/s3proxy/README.md @@ -20,7 +20,7 @@ Simple application to gateway S3 URLs to HTTPS | global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | | image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the s3proxy image | -| image.repository | string | `"ghcr.io/lsst-sqre/s3proxy"` | Image to use in the s3proxy deployment | +| image.repository | string | `"ghcr.io/lsst-dm/s3proxy"` | Image to use in the s3proxy deployment | | image.tag | string | The appVersion of the chart | Tag of image to use | | ingress.annotations | object | `{}` | Additional annotations for the ingress rule | | nodeSelector | object | `{}` | Node selection rules for the s3proxy deployment pod | diff --git a/applications/s3proxy/values.yaml b/applications/s3proxy/values.yaml index 4c855a038b..c422f53ccc 100644 --- a/applications/s3proxy/values.yaml +++ b/applications/s3proxy/values.yaml @@ -7,7 +7,7 @@ replicaCount: 1 image: # -- Image to use in the s3proxy deployment - repository: "ghcr.io/lsst-sqre/s3proxy" + repository: "ghcr.io/lsst-dm/s3proxy" # -- Pull policy for the s3proxy image pullPolicy: "IfNotPresent" From 2eaea20ce1649f45a843cec95eb0490cf1b68461 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Wed, 20 Nov 2024 17:10:29 -0300 Subject: [PATCH 515/567] rubintv: add NFS mount for ddv configurations on summit prod. 
--- applications/rubintv/values-summit.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index eef9d8dec8..6fe16e6ee8 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -42,6 +42,12 @@ rubintv: value: "/sdf/group/rubin/repo/ir2/butler.yaml" - name: DEPLOY_BRANCH value: *dbE + nfsMountpoint: + - name: project-rubintv-ddv-config + containerPath: /var/ddv-config + readOnly: false + server: nfs1.cp.lsst.org + serverPath: /project/rubintv/ddv-config resources: requests: cpu: 0.5 From 8fb1565d2a1829502abffc5eaf11798d177c3e2e Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Wed, 20 Nov 2024 17:27:36 -0300 Subject: [PATCH 516/567] rubintv: add PVC mount for ddv configurations on usdfprod. --- applications/rubintv/values-usdfprod.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index 68c74079d4..3d433d5d0b 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -53,6 +53,14 @@ rubintv: capacity: 1Gi accessMode: ReadOnlyMany mountPath: /sdf/data/rubin + - name: sdf-data-rubin-rubintv-ddv-config + persistentVolumeClaim: + name: sdf-data-rubin-rubintv-ddv-config + storageClassName: sdf-data-rubin + capacity: 1Gi + accessMode: ReadWriteMany + mountPath: /var/ddv-config + subPath: shared/rubintv-ddv-config resources: limits: cpu: 2.0 From b68060725d9f9ff19583974caf5f1f6aa34efba0 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Wed, 20 Nov 2024 17:28:35 -0300 Subject: [PATCH 517/567] rubintv: activate 1 workers replica on usdfprod. 
--- applications/rubintv/values-usdfprod.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index 3d433d5d0b..0d7df00a4b 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -20,7 +20,7 @@ rubintv: pullPolicy: Always workers: - replicas: 0 + replicas: 1 image: repository: ts-dockerhub.lsst.org/rapid-analysis tag: c0037 From 3f818e8172293796c03ec4acef60938dfe9c65f2 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Wed, 20 Nov 2024 17:31:17 -0300 Subject: [PATCH 518/567] rubintv: rename volumes entry to pvcMountpoint on usdfprod. --- applications/rubintv/values-usdfprod.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index 0d7df00a4b..753dc8bcd6 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -38,7 +38,7 @@ rubintv: value: "/sdf/group/rubin/repo/ir2/butler.yaml" - name: DEPLOY_BRANCH value: *dbE - volumes: + pvcMountpoint: - name: sdf-group-rubin persistentVolumeClaim: name: sdf-group-rubin From bd9036e4b9803b4385dc9c7aef7c13cdbc863765 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Wed, 20 Nov 2024 17:34:38 -0300 Subject: [PATCH 519/567] rubintv: change workers image to use usdf version and match user permissions. 
--- applications/rubintv/values-usdfprod.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index 753dc8bcd6..2390b798fa 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -22,11 +22,11 @@ rubintv: workers: replicas: 1 image: - repository: ts-dockerhub.lsst.org/rapid-analysis - tag: c0037 + repository: lsstts/rapid-analysis + tag: c0039_usdf pullPolicy: Always - uid: 73006 - gid: 73006 + uid: 17951 + gid: 4085 scriptsLocation: /repos/rubintv_analysis_service/scripts script: rubintv_worker.py -a rubintv -p 8080 -l usdf env: From 26e31d022af3055d7bfac95332287f7467b3b60d Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Thu, 21 Nov 2024 16:43:19 -0300 Subject: [PATCH 520/567] rubintv: add siteTag on usdfprod. --- applications/rubintv/values-usdfprod.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index 2390b798fa..bbb17cccc6 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -1,4 +1,6 @@ rubintv: + siteTag: "usdf-prod" + imagePullSecrets: - name: pull-secret From 2a5ba2e58e862a3f704790bf78a6faa18ef8ec9f Mon Sep 17 00:00:00 2001 From: adam Date: Thu, 21 Nov 2024 14:26:32 -0700 Subject: [PATCH 521/567] Turn off PDB for monitoring/telegraf --- applications/monitoring/README.md | 1 + applications/monitoring/values.yaml | 5 +++++ applications/telegraf/README.md | 1 + applications/telegraf/values.yaml | 2 ++ 4 files changed, 9 insertions(+) diff --git a/applications/monitoring/README.md b/applications/monitoring/README.md index 68e2befc31..b4d77d93ad 100644 --- a/applications/monitoring/README.md +++ b/applications/monitoring/README.md @@ -43,6 +43,7 @@ Monitoring suite: InfluxDB2, Chronograf, telegraf | influxdb2.ingress | object | disabled, must 
be enabled and configured at each site | InfluxDB2 ingress configuration. | | influxdb2.livenessProbe.failureThreshold | int | `10` | Number of checks to conclude whether InfluxDB has died | | influxdb2.livenessProbe.periodSeconds | int | `10` | Period between checks for whether InfluxDB is still alive | +| influxdb2.pdb | object | disabled; nonsensical for single replica | InfluxDB2 pod disruption budget. | | influxdb2.resources | object | See `values.yaml` | Resource limits and requests for the InfluxDB server instance | | influxdb2.startupProbe.enabled | bool | `true` | Whether to enable a startup probe | | influxdb2.startupProbe.failureThreshold | int | `60` | Number of checks to conclude whether InfluxDB won't start. High to allow up to 10 minutes for startup, because checking many shards can be slow. | diff --git a/applications/monitoring/values.yaml b/applications/monitoring/values.yaml index d0035f2dac..725c06d060 100644 --- a/applications/monitoring/values.yaml +++ b/applications/monitoring/values.yaml @@ -23,6 +23,11 @@ influxdb2: # -- Where we store secrets to run the server existingSecret: monitoring + # -- InfluxDB2 pod disruption budget. + # @default -- disabled; nonsensical for single replica + pdb: + create: false + # -- InfluxDB2 ingress configuration. 
# @default -- disabled, must be enabled and configured at each site ingress: diff --git a/applications/telegraf/README.md b/applications/telegraf/README.md index 84412df72c..574082f499 100644 --- a/applications/telegraf/README.md +++ b/applications/telegraf/README.md @@ -28,6 +28,7 @@ Application telemetry collection service | telegraf.env[0].valueFrom.secretKeyRef.name | string | `"telegraf"` | | | telegraf.mountPoints[0].mountPath | string | `"/etc/telegraf-generated"` | | | telegraf.mountPoints[0].name | string | `"telegraf-generated-config"` | | +| telegraf.pdb.create | bool | `false` | | | telegraf.podLabels."hub.jupyter.org/network-access-hub" | string | `"true"` | | | telegraf.rbac.clusterWide | bool | `true` | | | telegraf.resources.limits.cpu | string | `"1"` | | diff --git a/applications/telegraf/values.yaml b/applications/telegraf/values.yaml index fe48caaf04..3bdbd8065f 100644 --- a/applications/telegraf/values.yaml +++ b/applications/telegraf/values.yaml @@ -13,6 +13,8 @@ telegraf: requests: memory: "350Mi" cpu: "50m" + pdb: + create: false args: - "--config" - "/etc/telegraf-generated/telegraf-generated.conf" From a0a1652479568a8ad685ad3d508a88a11ed7bc4a Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Thu, 21 Nov 2024 15:13:40 -0800 Subject: [PATCH 522/567] Use weekly images for noteburst on usdfdev --- applications/noteburst/values-usdfdev.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/noteburst/values-usdfdev.yaml b/applications/noteburst/values-usdfdev.yaml index 612ce2000e..cffd799774 100644 --- a/applications/noteburst/values-usdfdev.yaml +++ b/applications/noteburst/values-usdfdev.yaml @@ -13,6 +13,7 @@ config: - username: "bot-noteburst03" - username: "bot-noteburst04" - username: "bot-noteburst05" + imageSelector: "weekly" # Use SSD for Redis storage. 
redis: From 4aa2ed55e0e733be7f7ef50cbba2f5471b0592df Mon Sep 17 00:00:00 2001 From: Tim Jenness Date: Thu, 21 Nov 2024 17:10:06 -0700 Subject: [PATCH 523/567] Add SP jira tickets to unfurlbot --- applications/unfurlbot/values.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/unfurlbot/values.yaml b/applications/unfurlbot/values.yaml index 207f34358e..d27c557272 100644 --- a/applications/unfurlbot/values.yaml +++ b/applications/unfurlbot/values.yaml @@ -95,6 +95,7 @@ config: PREOPS, OBS, SITCOM, + SP, BLOCK ingress: From adabe189fccc2fc09cf672aa8f98fafeef2312db Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 22 Nov 2024 07:42:28 -0700 Subject: [PATCH 524/567] BTS: Update to k0003 tag. --- environments/values-base.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environments/values-base.yaml b/environments/values-base.yaml index 03455ce0a7..ea94b64e03 100644 --- a/environments/values-base.yaml +++ b/environments/values-base.yaml @@ -35,6 +35,6 @@ applications: uws: true controlSystem: - imageTag: "k0002" + imageTag: "k0003" siteTag: "base" s3EndpointUrl: "https://s3.ls.lsst.org" From f40c3deedfdac0b0d06aaef99d55206e2cfbecb4 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Fri, 22 Nov 2024 15:30:55 -0300 Subject: [PATCH 525/567] rubintv: Add TODO for better handling of secrets. --- charts/rubintv/templates/deployment-workers.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/charts/rubintv/templates/deployment-workers.yaml b/charts/rubintv/templates/deployment-workers.yaml index b4784e839e..51010dce2f 100644 --- a/charts/rubintv/templates/deployment-workers.yaml +++ b/charts/rubintv/templates/deployment-workers.yaml @@ -100,6 +100,9 @@ spec: initContainers: - name: "secret-perm-fixer" image: "busybox" + # TODO: Note that rubintv-secrets and butler-secrets share the same + # aws-credentials.ini and postgres-credentials.txt files so this will + # need to be fixed. See DM-47762. 
command: - "/bin/sh" - "-c" From c6e0954ed97f5a89abcee519d9b9232e11f27e8d Mon Sep 17 00:00:00 2001 From: Erin Howard Date: Fri, 22 Nov 2024 15:28:35 -0500 Subject: [PATCH 526/567] Update Prompt Processing to 4.9.0. --- .../values-usdfprod-prompt-processing.yaml | 2 +- .../values-usdfprod-prompt-processing.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index 8325a34a2a..b8ff8138bc 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -14,7 +14,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.8.2 + tag: 4.9.0 instrument: pipelines: diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index 6e3885b223..ca5eec290a 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -15,7 +15,7 @@ prompt-proto-service: image: pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: 4.8.2 + tag: 4.9.0 instrument: pipelines: From 8e7da4c10811d795291f3569a79bc939ad692dc4 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Fri, 22 Nov 2024 13:56:36 -0700 Subject: [PATCH 527/567] Add EAS to TTS, BTS, summit and USDF-prod sasquatch consumers. 
--- applications/sasquatch/values-base.yaml | 2 +- applications/sasquatch/values-summit.yaml | 4 ++-- applications/sasquatch/values-tucson-teststand.yaml | 2 +- applications/sasquatch/values-usdfprod.yaml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/applications/sasquatch/values-base.yaml b/applications/sasquatch/values-base.yaml index 142a86e212..3700e87ec7 100644 --- a/applications/sasquatch/values-base.yaml +++ b/applications/sasquatch/values-base.yaml @@ -164,7 +164,7 @@ telegraf-kafka-consumer: enabled: true database: "efd" topicRegexps: | - [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + [ "lsst.sal.DIMM", "lsst.sal.DSM", "lsst.sal.EAS", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] debug: true latiss: enabled: true diff --git a/applications/sasquatch/values-summit.yaml b/applications/sasquatch/values-summit.yaml index 24afd30be1..2e8b86a7ff 100644 --- a/applications/sasquatch/values-summit.yaml +++ b/applications/sasquatch/values-summit.yaml @@ -180,7 +180,7 @@ kafka-connect-manager: eas: enabled: true repairerConnector: false - topicsRegex: ".*DIMM|.*DREAM|.*DSM|.*EPM|.*ESS|.*HVAC|.*WeatherForecast" + topicsRegex: ".*DIMM|.*DREAM|.*DSM|.*EAS|.*EPM|.*ESS|.*HVAC|.*WeatherForecast" latiss: enabled: true repairerConnector: false @@ -339,7 +339,7 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.DIMM", "lsst.sal.DREAM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + [ "lsst.sal.DIMM", "lsst.sal.DREAM", "lsst.sal.DSM", "lsst.sal.EAS", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] debug: true m1m3: enabled: true diff --git a/applications/sasquatch/values-tucson-teststand.yaml b/applications/sasquatch/values-tucson-teststand.yaml index 3cfe4b3025..2fb04f5339 100644 --- 
a/applications/sasquatch/values-tucson-teststand.yaml +++ b/applications/sasquatch/values-tucson-teststand.yaml @@ -104,7 +104,7 @@ telegraf-kafka-consumer: metric_batch_size: 100 flush_interval: 20s topicRegexps: | - [ "lsst.sal.DIMM", "lsst.sal.DREAM", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + [ "lsst.sal.DIMM", "lsst.sal.DREAM", "lsst.sal.DSM", "lsst.sal.EAS", "lsst.sal.EPM", "lsst.sal.ESS", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] debug: true latiss: enabled: true diff --git a/applications/sasquatch/values-usdfprod.yaml b/applications/sasquatch/values-usdfprod.yaml index 035ec26702..5aa6b0e191 100644 --- a/applications/sasquatch/values-usdfprod.yaml +++ b/applications/sasquatch/values-usdfprod.yaml @@ -165,7 +165,7 @@ telegraf-kafka-consumer: database: "efd" timestamp_field: "private_efdStamp" topicRegexps: | - [ "lsst.sal.DIMM", "lsst.sal.DREAM", "lsst.sal.ESS", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] + [ "lsst.sal.DIMM", "lsst.sal.DREAM", "lsst.sal.EAS", "lsst.sal.ESS", "lsst.sal.DSM", "lsst.sal.EPM", "lsst.sal.HVAC", "lsst.sal.WeatherForecast" ] debug: true m1m3: enabled: true From d8490f25894fcd7331f3200399cb28f31731aacf Mon Sep 17 00:00:00 2001 From: dspeck1 Date: Fri, 22 Nov 2024 16:12:21 -0600 Subject: [PATCH 528/567] Updated IP addresses for schema registry as they changed during sasquatch maintenance. 
--- .../next-visit-fan-out/values-usdfdev-prompt-processing.yaml | 2 +- .../next-visit-fan-out/values-usdfprod-prompt-processing.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml index fc97581a64..22c4ccfc71 100644 --- a/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfdev-prompt-processing.yaml @@ -3,7 +3,7 @@ knative: retryRequests: false kafka: - schemaRegistryUrl: http://10.96.181.159:8081 + schemaRegistryUrl: http://10.103.8.219:8081 sasquatchAddress: 10.100.226.209:9094 consumerGroup: test-group-3 nextVisitTopic: test.next-visit diff --git a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml index 75a9eccb55..c2fd7c9667 100644 --- a/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml +++ b/applications/next-visit-fan-out/values-usdfprod-prompt-processing.yaml @@ -1,8 +1,8 @@ knative: - maxMessages: 1000 # Kubernetes can't support more pods yet + maxMessages: 1000 # Kubernetes can't support more pods yet kafka: - schemaRegistryUrl: http://10.110.90.252:8081 + schemaRegistryUrl: http://10.96.24.88:8081 sasquatchAddress: 10.96.121.181:9094 consumerGroup: next-visit-fan-out-1 nextVisitTopic: lsst.sal.ScriptQueue.logevent_nextVisit From 03f04ca71991e3daa997e8f088cc9b8ad2418e2a Mon Sep 17 00:00:00 2001 From: "David H. Irving" Date: Wed, 20 Nov 2024 14:45:23 -0700 Subject: [PATCH 529/567] Enable Gafaelfawr caching for Butler server Butler clients often make large numbers of small requests using the same access token. Enabling this cache avoids hitting the Gafaelfawr service repeatedly for these requests, reducing load on the service and improving request latency. 
--- applications/butler/templates/ingress-authenticated.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/applications/butler/templates/ingress-authenticated.yaml b/applications/butler/templates/ingress-authenticated.yaml index bf7127de6e..d20e8bc8a2 100644 --- a/applications/butler/templates/ingress-authenticated.yaml +++ b/applications/butler/templates/ingress-authenticated.yaml @@ -5,6 +5,9 @@ metadata: labels: {{- include "butler.labels" . | nindent 4 }} config: + # The Butler server often services large numbers of small requests, + # so this cache reduces the load on Gafaelfawr. + authCacheDuration: 5m baseUrl: {{ .Values.global.baseUrl | quote }} scopes: all: From 96378fdea71bd78eb26728778c8af4b33d58f208 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Fri, 22 Nov 2024 17:19:04 -0800 Subject: [PATCH 530/567] Remove kubernetes-replicator kubernetes-replicator was previously used to copy Kafka secrets to other namespaces, but the new `KafkaAccess` Kubernetes resource now handles this in a cleaner way. Remove the replication annotations and the kubernetes-replicator application. 
--- applications/kubernetes-replicator/Chart.yaml | 11 ------ applications/kubernetes-replicator/README.md | 23 ------------- .../values-roundtable-dev.yaml | 0 .../values-roundtable-prod.yaml | 0 .../kubernetes-replicator/values.yaml | 18 ---------- .../square-events/templates/ook-user.yaml | 6 ---- .../templates/squarebot-user.yaml | 6 ---- .../templates/templatebot-user.yaml | 6 ---- .../templates/unfurlbot-user.yaml | 6 ---- .../kubernetes-replicator/index.rst | 17 ---------- .../kubernetes-replicator/values.md | 12 ------- docs/applications/support.rst | 1 - environments/README.md | 1 - .../support/kubernetes-replicator.yaml | 34 ------------------- environments/values-roundtable-dev.yaml | 1 - environments/values-roundtable-prod.yaml | 1 - environments/values.yaml | 3 -- 17 files changed, 146 deletions(-) delete mode 100644 applications/kubernetes-replicator/Chart.yaml delete mode 100644 applications/kubernetes-replicator/README.md delete mode 100644 applications/kubernetes-replicator/values-roundtable-dev.yaml delete mode 100644 applications/kubernetes-replicator/values-roundtable-prod.yaml delete mode 100644 applications/kubernetes-replicator/values.yaml delete mode 100644 docs/applications/kubernetes-replicator/index.rst delete mode 100644 docs/applications/kubernetes-replicator/values.md delete mode 100644 environments/templates/applications/support/kubernetes-replicator.yaml diff --git a/applications/kubernetes-replicator/Chart.yaml b/applications/kubernetes-replicator/Chart.yaml deleted file mode 100644 index 90365efd2c..0000000000 --- a/applications/kubernetes-replicator/Chart.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: v2 -name: kubernetes-replicator -version: 1.0.0 -description: Kafka secret replicator -home: https://github.com/mittwald/kubernetes-replicator -sources: - - https://github.com/mittwald/kubernetes-replicator -dependencies: - - name: kubernetes-replicator - version: 2.11.0 - repository: https://helm.mittwald.de diff --git 
a/applications/kubernetes-replicator/README.md b/applications/kubernetes-replicator/README.md deleted file mode 100644 index 400f41a809..0000000000 --- a/applications/kubernetes-replicator/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# kubernetes-replicator - -Kafka secret replicator - -**Homepage:** - -## Source Code - -* - -## Values - -| Key | Type | Default | Description | -|-----|------|---------|-------------| -| kubernetes-replicator.resources | object | See `values.yaml` | Resource requests and limits for kubernetes-replicator | -| kubernetes-replicator.serviceAccount.annotations | object | `{}` | | -| kubernetes-replicator.serviceAccount.create | bool | `true` | | -| kubernetes-replicator.serviceAccount.name | string | `nil` | | -| kubernetes-replicator.serviceAccount.privileges[0].apiGroups[0] | string | `""` | | -| kubernetes-replicator.serviceAccount.privileges[0].apiGroups[1] | string | `"apps"` | | -| kubernetes-replicator.serviceAccount.privileges[0].apiGroups[2] | string | `"extensions"` | | -| kubernetes-replicator.serviceAccount.privileges[0].resources[0] | string | `"secrets"` | | -| kubernetes-replicator.serviceAccount.privileges[0].resources[1] | string | `"configmaps"` | | diff --git a/applications/kubernetes-replicator/values-roundtable-dev.yaml b/applications/kubernetes-replicator/values-roundtable-dev.yaml deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/applications/kubernetes-replicator/values-roundtable-prod.yaml b/applications/kubernetes-replicator/values-roundtable-prod.yaml deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/applications/kubernetes-replicator/values.yaml b/applications/kubernetes-replicator/values.yaml deleted file mode 100644 index 38c314644b..0000000000 --- a/applications/kubernetes-replicator/values.yaml +++ /dev/null @@ -1,18 +0,0 @@ -kubernetes-replicator: - serviceAccount: - create: true - annotations: {} - name: - privileges: - - apiGroups: ["", "apps", "extensions"] - resources: 
["secrets", "configmaps"] - - # -- Resource requests and limits for kubernetes-replicator - # @default -- See `values.yaml` - resources: - limits: - cpu: "1" - memory: "32Mi" - requests: - cpu: "1m" - memory: "14Mi" diff --git a/applications/sasquatch/charts/square-events/templates/ook-user.yaml b/applications/sasquatch/charts/square-events/templates/ook-user.yaml index 0c3bb352cc..ee43ce4ca1 100644 --- a/applications/sasquatch/charts/square-events/templates/ook-user.yaml +++ b/applications/sasquatch/charts/square-events/templates/ook-user.yaml @@ -6,12 +6,6 @@ metadata: labels: strimzi.io/cluster: {{ .Values.cluster.name }} spec: - template: - secret: - metadata: - annotations: - replicator.v1.mittwald.de/replication-allowed: "true" - replicator.v1.mittwald.de/replication-allowed-namespaces: "ook" authentication: type: tls authorization: diff --git a/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml b/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml index 1285a4ec6f..65f6bf698b 100644 --- a/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml +++ b/applications/sasquatch/charts/square-events/templates/squarebot-user.yaml @@ -6,12 +6,6 @@ metadata: labels: strimzi.io/cluster: {{ .Values.cluster.name }} spec: - template: - secret: - metadata: - annotations: - replicator.v1.mittwald.de/replication-allowed: "true" - replicator.v1.mittwald.de/replication-allowed-namespaces: "squarebot" authentication: type: tls authorization: diff --git a/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml b/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml index 580bfa028f..e3ea612ed6 100644 --- a/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml +++ b/applications/sasquatch/charts/square-events/templates/templatebot-user.yaml @@ -6,12 +6,6 @@ metadata: labels: strimzi.io/cluster: {{ .Values.cluster.name }} spec: - template: - secret: - 
metadata: - annotations: - replicator.v1.mittwald.de/replication-allowed: "true" - replicator.v1.mittwald.de/replication-allowed-namespaces: "templatebot" authentication: type: tls authorization: diff --git a/applications/sasquatch/charts/square-events/templates/unfurlbot-user.yaml b/applications/sasquatch/charts/square-events/templates/unfurlbot-user.yaml index fe7a0c965a..1caaa805c2 100644 --- a/applications/sasquatch/charts/square-events/templates/unfurlbot-user.yaml +++ b/applications/sasquatch/charts/square-events/templates/unfurlbot-user.yaml @@ -6,12 +6,6 @@ metadata: labels: strimzi.io/cluster: {{ .Values.cluster.name }} spec: - template: - secret: - metadata: - annotations: - replicator.v1.mittwald.de/replication-allowed: "true" - replicator.v1.mittwald.de/replication-allowed-namespaces: "unfurlbot" authentication: type: tls authorization: diff --git a/docs/applications/kubernetes-replicator/index.rst b/docs/applications/kubernetes-replicator/index.rst deleted file mode 100644 index f7eeb2ccdd..0000000000 --- a/docs/applications/kubernetes-replicator/index.rst +++ /dev/null @@ -1,17 +0,0 @@ -.. px-app:: kubernetes-replicator - -################################################# -kubernetes-replicator — Cross-namespace resources -################################################# - -kubernetes-replicator is a Kubernetes operator that replicates resources across namespaces. - -.. jinja:: kubernetes-replicator - :file: applications/_summary.rst.jinja - -Guides -====== - -.. toctree:: - - values diff --git a/docs/applications/kubernetes-replicator/values.md b/docs/applications/kubernetes-replicator/values.md deleted file mode 100644 index da8e6f4d19..0000000000 --- a/docs/applications/kubernetes-replicator/values.md +++ /dev/null @@ -1,12 +0,0 @@ -```{px-app-values} kubernetes-replicator -``` - -# Kubernetes Helm values reference - -Helm values reference table for the {px-app}`kubernetes-replicator` application. 
- -```{include} ../../../applications/kubernetes-replicator/README.md ---- -start-after: "## Values" ---- -``` diff --git a/docs/applications/support.rst b/docs/applications/support.rst index 14c67ff93d..b43473ccca 100644 --- a/docs/applications/support.rst +++ b/docs/applications/support.rst @@ -11,7 +11,6 @@ Argo CD project: ``support`` :maxdepth: 1 ghostwriter/index - kubernetes-replicator/index postgres/index sqlproxy-cross-project/index strimzi/index diff --git a/environments/README.md b/environments/README.md index 3fce8c9d31..1ccb8660e5 100644 --- a/environments/README.md +++ b/environments/README.md @@ -26,7 +26,6 @@ | applications.hips | bool | `false` | Enable the HiPS application | | applications.ingress-nginx | bool | `true` | Enable the ingress-nginx application. This is required for all environments, but is still configurable because currently USDF uses an unsupported configuration with ingress-nginx deployed in a different cluster. | | applications.jira-data-proxy | bool | `false` | Enable the jira-data-proxy application | -| applications.kubernetes-replicator | bool | `false` | Enable the kubernetes-replicator application | | applications.livetap | bool | `false` | Enable the livetap application | | applications.love | bool | `false` | Enable the love control system application | | applications.mobu | bool | `false` | Enable the mobu application | diff --git a/environments/templates/applications/support/kubernetes-replicator.yaml b/environments/templates/applications/support/kubernetes-replicator.yaml deleted file mode 100644 index 8cdc7bd1a6..0000000000 --- a/environments/templates/applications/support/kubernetes-replicator.yaml +++ /dev/null @@ -1,34 +0,0 @@ -{{- if (index .Values "applications" "kubernetes-replicator") -}} -apiVersion: v1 -kind: Namespace -metadata: - name: "kubernetes-replicator" ---- -apiVersion: argoproj.io/v1alpha1 -kind: Application -metadata: - name: "kubernetes-replicator" - namespace: "argocd" - finalizers: - - 
"resources-finalizer.argocd.argoproj.io" -spec: - destination: - namespace: "kubernetes-replicator" - server: "https://kubernetes.default.svc" - project: "support" - source: - path: "applications/kubernetes-replicator" - repoURL: {{ .Values.repoUrl | quote }} - targetRevision: {{ .Values.targetRevision | quote }} - helm: - parameters: - - name: "global.host" - value: {{ .Values.fqdn | quote }} - - name: "global.baseUrl" - value: "https://{{ .Values.fqdn }}" - - name: "global.vaultSecretsPath" - value: {{ .Values.vaultPathPrefix | quote }} - valueFiles: - - "values.yaml" - - "values-{{ .Values.name }}.yaml" -{{- end -}} diff --git a/environments/values-roundtable-dev.yaml b/environments/values-roundtable-dev.yaml index a447b4b181..d97f6b3e86 100644 --- a/environments/values-roundtable-dev.yaml +++ b/environments/values-roundtable-dev.yaml @@ -13,7 +13,6 @@ vaultPathPrefix: "secret/phalanx/roundtable-dev" applications: giftless: true jira-data-proxy: true - kubernetes-replicator: true mobu: true monitoring: true onepassword-connect: true diff --git a/environments/values-roundtable-prod.yaml b/environments/values-roundtable-prod.yaml index c0625aa465..abf6d60994 100644 --- a/environments/values-roundtable-prod.yaml +++ b/environments/values-roundtable-prod.yaml @@ -14,7 +14,6 @@ applications: checkerboard: true giftless: true jira-data-proxy: true - kubernetes-replicator: true monitoring: true mobu: true onepassword-connect: true diff --git a/environments/values.yaml b/environments/values.yaml index 25500c34d3..5a4c7efe93 100644 --- a/environments/values.yaml +++ b/environments/values.yaml @@ -100,9 +100,6 @@ applications: # -- Enable the jira-data-proxy application jira-data-proxy: false - # -- Enable the kubernetes-replicator application - kubernetes-replicator: false - # -- Enable the livetap application livetap: false From 39598a2e7c24b85ff70fc07bc8f039c94e0e0496 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: 
Mon, 25 Nov 2024 10:34:19 +0000 Subject: [PATCH 531/567] Update Helm release argo-cd to v7.7.5 --- applications/argocd/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argocd/Chart.yaml b/applications/argocd/Chart.yaml index 11b5e03524..5a010b856e 100644 --- a/applications/argocd/Chart.yaml +++ b/applications/argocd/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-cd - version: 7.7.3 + version: 7.7.5 repository: https://argoproj.github.io/argo-helm From 75d99c1ca573b4854cf84f60115106b6889f5ef7 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 10:34:22 +0000 Subject: [PATCH 532/567] Update Helm release cert-manager to v1.16.2 --- applications/cert-manager/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/cert-manager/Chart.yaml b/applications/cert-manager/Chart.yaml index a1e9214962..4304834036 100644 --- a/applications/cert-manager/Chart.yaml +++ b/applications/cert-manager/Chart.yaml @@ -7,5 +7,5 @@ sources: - https://github.com/cert-manager/cert-manager dependencies: - name: cert-manager - version: v1.16.1 + version: v1.16.2 repository: https://charts.jetstack.io From 4ddb65f44197b07835b3e2976329c30e6583beee Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 17:05:27 +0000 Subject: [PATCH 533/567] Update Helm release vault to v0.29.1 --- applications/vault/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/vault/Chart.yaml b/applications/vault/Chart.yaml index 8226ae401f..ed7fcfcb46 100644 --- a/applications/vault/Chart.yaml +++ b/applications/vault/Chart.yaml @@ -4,5 +4,5 @@ version: 1.0.0 description: Secret Storage dependencies: - name: vault - version: 0.29.0 + version: 0.29.1 repository: https://helm.releases.hashicorp.com From 
b393b8cde1b24418fc81d23c3c2136a12ae5ff23 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 17:05:33 +0000 Subject: [PATCH 534/567] Update gcr.io/cloudsql-docker/gce-proxy Docker tag to v1.37.2 --- applications/gafaelfawr/values.yaml | 2 +- applications/nublado/values.yaml | 2 +- applications/sqlproxy-cross-project/values.yaml | 2 +- applications/times-square/values.yaml | 2 +- applications/vo-cutouts/values.yaml | 2 +- charts/cadc-tap/values.yaml | 2 +- starters/fastapi-safir-uws/values.yaml | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/applications/gafaelfawr/values.yaml b/applications/gafaelfawr/values.yaml index 95820eabb6..d51d343ee3 100644 --- a/applications/gafaelfawr/values.yaml +++ b/applications/gafaelfawr/values.yaml @@ -311,7 +311,7 @@ cloudsql: repository: "gcr.io/cloudsql-docker/gce-proxy" # -- Cloud SQL Auth Proxy tag to use - tag: "1.37.1" + tag: "1.37.2" # -- Tag suffix to use for the proxy for schema updates schemaUpdateTagSuffix: "-alpine" diff --git a/applications/nublado/values.yaml b/applications/nublado/values.yaml index 79049d8170..524c7c7788 100644 --- a/applications/nublado/values.yaml +++ b/applications/nublado/values.yaml @@ -585,7 +585,7 @@ cloudsql: pullPolicy: "IfNotPresent" # -- Cloud SQL Auth Proxy tag to use - tag: "1.37.1" + tag: "1.37.2" # -- Instance connection name for a Cloud SQL PostgreSQL instance # @default -- None, must be set if Cloud SQL Auth Proxy is enabled diff --git a/applications/sqlproxy-cross-project/values.yaml b/applications/sqlproxy-cross-project/values.yaml index ac677e8060..46d9ed7585 100644 --- a/applications/sqlproxy-cross-project/values.yaml +++ b/applications/sqlproxy-cross-project/values.yaml @@ -14,7 +14,7 @@ image: repository: "gcr.io/cloudsql-docker/gce-proxy" # -- Tag of Cloud SQL Proxy image to use - tag: "1.37.1" + tag: "1.37.2" # -- Pull policy for the Cloud SQL Proxy image pullPolicy: "IfNotPresent" diff 
--git a/applications/times-square/values.yaml b/applications/times-square/values.yaml index f5509e689f..cfe2ff9d4c 100644 --- a/applications/times-square/values.yaml +++ b/applications/times-square/values.yaml @@ -156,7 +156,7 @@ cloudsql: repository: "gcr.io/cloudsql-docker/gce-proxy" # -- Cloud SQL Auth Proxy tag to use - tag: "1.37.1" + tag: "1.37.2" # -- Pull policy for Cloud SQL Auth Proxy images pullPolicy: "IfNotPresent" diff --git a/applications/vo-cutouts/values.yaml b/applications/vo-cutouts/values.yaml index 199c0ce730..40e4a9aa56 100644 --- a/applications/vo-cutouts/values.yaml +++ b/applications/vo-cutouts/values.yaml @@ -94,7 +94,7 @@ cloudsql: repository: "gcr.io/cloudsql-docker/gce-proxy" # -- Cloud SQL Auth Proxy tag to use - tag: "1.37.1" + tag: "1.37.2" # -- Tag suffix to use for the proxy for schema updates schemaUpdateTagSuffix: "-alpine" diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index a755e17f89..e54ff5e156 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -229,7 +229,7 @@ cloudsql: repository: "gcr.io/cloudsql-docker/gce-proxy" # -- Cloud SQL Auth Proxy tag to use - tag: "1.37.1" + tag: "1.37.2" # -- Pull policy for Cloud SQL Auth Proxy images pullPolicy: "IfNotPresent" diff --git a/starters/fastapi-safir-uws/values.yaml b/starters/fastapi-safir-uws/values.yaml index 44d1783c3b..8a08de91f7 100644 --- a/starters/fastapi-safir-uws/values.yaml +++ b/starters/fastapi-safir-uws/values.yaml @@ -86,7 +86,7 @@ cloudsql: repository: "gcr.io/cloudsql-docker/gce-proxy" # -- Cloud SQL Auth Proxy tag to use - tag: "1.37.1" + tag: "1.37.2" # -- Pull policy for Cloud SQL Auth Proxy images pullPolicy: "IfNotPresent" From 205fe3e1522ff9c5837e4293de23eef61e3e0771 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 17:05:38 +0000 Subject: [PATCH 535/567] Update Helm release argo-workflows to v0.45.0 --- 
applications/argo-workflows/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argo-workflows/Chart.yaml b/applications/argo-workflows/Chart.yaml index 01ae908e43..265be52294 100644 --- a/applications/argo-workflows/Chart.yaml +++ b/applications/argo-workflows/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-workflows - version: 0.42.7 + version: 0.45.0 repository: https://argoproj.github.io/argo-helm From 434bc4f55ce573d19a386429b5e32019921c978b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 25 Nov 2024 17:05:45 +0000 Subject: [PATCH 536/567] Update postgres Docker tag to v17.2 --- applications/siav2/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/siav2/values.yaml b/applications/siav2/values.yaml index f9ce8acc2f..8c313d75ba 100644 --- a/applications/siav2/values.yaml +++ b/applications/siav2/values.yaml @@ -79,7 +79,7 @@ uws: pullPolicy: "IfNotPresent" # -- Tag of UWS database image to use - tag: "17.0" + tag: "17.2" # -- Resource limits and requests for the UWS database pod resources: From 409383c9bf8ac86458c898c6e29d23708a8f8aa7 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 25 Nov 2024 09:29:09 -0800 Subject: [PATCH 537/567] Regenerate Helm docs --- applications/gafaelfawr/README.md | 2 +- applications/nublado/README.md | 2 +- applications/sqlproxy-cross-project/README.md | 2 +- applications/times-square/README.md | 2 +- applications/vo-cutouts/README.md | 2 +- charts/cadc-tap/README.md | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/applications/gafaelfawr/README.md b/applications/gafaelfawr/README.md index 51df4c45c1..39c62850a4 100644 --- a/applications/gafaelfawr/README.md +++ b/applications/gafaelfawr/README.md @@ -18,7 +18,7 @@ Authentication and identity system | cloudsql.image.pullPolicy | string | `"IfNotPresent"` | Pull policy 
for Cloud SQL Auth Proxy images | | cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Auth Proxy image to use | | cloudsql.image.schemaUpdateTagSuffix | string | `"-alpine"` | Tag suffix to use for the proxy for schema updates | -| cloudsql.image.tag | string | `"1.37.1"` | Cloud SQL Auth Proxy tag to use | +| cloudsql.image.tag | string | `"1.37.2"` | Cloud SQL Auth Proxy tag to use | | cloudsql.instanceConnectionName | string | None, must be set if Cloud SQL Auth Proxy is enabled | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.nodeSelector | object | `{}` | Node selection rules for the Cloud SQL Proxy pod | | cloudsql.podAnnotations | object | `{}` | Annotations for the Cloud SQL Proxy pod | diff --git a/applications/nublado/README.md b/applications/nublado/README.md index 1ecc7508f2..0dff4471ee 100644 --- a/applications/nublado/README.md +++ b/applications/nublado/README.md @@ -17,7 +17,7 @@ JupyterHub and custom spawner for the Rubin Science Platform | cloudsql.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for Cloud SQL Auth Proxy images | | cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Auth Proxy image to use | | cloudsql.image.resources | object | See `values.yaml` | Resource requests and limits for Cloud SQL pod | -| cloudsql.image.tag | string | `"1.37.1"` | Cloud SQL Auth Proxy tag to use | +| cloudsql.image.tag | string | `"1.37.2"` | Cloud SQL Auth Proxy tag to use | | cloudsql.instanceConnectionName | string | None, must be set if Cloud SQL Auth Proxy is enabled | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.nodeSelector | object | `{}` | Node selection rules for the Cloud SQL Auth Proxy pod | | cloudsql.podAnnotations | object | `{}` | Annotations for the Cloud SQL Auth Proxy pod | diff --git a/applications/sqlproxy-cross-project/README.md b/applications/sqlproxy-cross-project/README.md index 
74b079d5d9..cc9e12ac31 100644 --- a/applications/sqlproxy-cross-project/README.md +++ b/applications/sqlproxy-cross-project/README.md @@ -19,7 +19,7 @@ GCP SQL Proxy as a service | fullnameOverride | string | `""` | Override the full name for resources (includes the release name) | | image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the Cloud SQL Proxy image | | image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Proxy image to use | -| image.tag | string | `"1.37.1"` | Tag of Cloud SQL Proxy image to use | +| image.tag | string | `"1.37.2"` | Tag of Cloud SQL Proxy image to use | | nameOverride | string | `""` | Override the base name for resources | | nodeSelector | object | `{}` | Node selector rules for the Cloud SQL Proxy pod | | podAnnotations | object | `{}` | Annotations for the Cloud SQL Proxy pod | diff --git a/applications/times-square/README.md b/applications/times-square/README.md index 366950ca28..3339c5b90e 100644 --- a/applications/times-square/README.md +++ b/applications/times-square/README.md @@ -19,7 +19,7 @@ An API service for managing and rendering parameterized Jupyter notebooks. 
| cloudsql.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for Cloud SQL Auth Proxy images | | cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Auth Proxy image to use | | cloudsql.image.resources | object | see `values.yaml` | Resource requests and limits for Cloud SQL pod | -| cloudsql.image.tag | string | `"1.37.1"` | Cloud SQL Auth Proxy tag to use | +| cloudsql.image.tag | string | `"1.37.2"` | Cloud SQL Auth Proxy tag to use | | cloudsql.instanceConnectionName | string | `""` | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.serviceAccount | string | `""` | The Google service account that has an IAM binding to the `times-square` Kubernetes service accounts and has the `cloudsql.client` role | | config.databaseUrl | string | None, must be set | URL for the PostgreSQL database | diff --git a/applications/vo-cutouts/README.md b/applications/vo-cutouts/README.md index 97213c69f0..0a51e70249 100644 --- a/applications/vo-cutouts/README.md +++ b/applications/vo-cutouts/README.md @@ -14,7 +14,7 @@ Image cutout service complying with IVOA SODA | cloudsql.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for Cloud SQL Auth Proxy images | | cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Auth Proxy image to use | | cloudsql.image.schemaUpdateTagSuffix | string | `"-alpine"` | Tag suffix to use for the proxy for schema updates | -| cloudsql.image.tag | string | `"1.37.1"` | Cloud SQL Auth Proxy tag to use | +| cloudsql.image.tag | string | `"1.37.2"` | Cloud SQL Auth Proxy tag to use | | cloudsql.instanceConnectionName | string | None, must be set if Cloud SQL is used | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.resources | object | See `values.yaml` | Resource limits and requests for the Cloud SQL Proxy container | | config.databaseUrl | string | None, must be set if `cloudsql.enabled` is false | URL for the PostgreSQL 
database if Cloud SQL is not in use | diff --git a/charts/cadc-tap/README.md b/charts/cadc-tap/README.md index 8adaa4f4fd..d66a03be28 100644 --- a/charts/cadc-tap/README.md +++ b/charts/cadc-tap/README.md @@ -17,7 +17,7 @@ IVOA TAP service | cloudsql.enabled | bool | `false` | Enable the Cloud SQL Auth Proxy sidecar, used with Cloud SQL databases on Google Cloud | | cloudsql.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for Cloud SQL Auth Proxy images | | cloudsql.image.repository | string | `"gcr.io/cloudsql-docker/gce-proxy"` | Cloud SQL Auth Proxy image to use | -| cloudsql.image.tag | string | `"1.37.1"` | Cloud SQL Auth Proxy tag to use | +| cloudsql.image.tag | string | `"1.37.2"` | Cloud SQL Auth Proxy tag to use | | cloudsql.instanceConnectionName | string | `""` | Instance connection name for a Cloud SQL PostgreSQL instance | | cloudsql.resources | object | See `values.yaml` | Resource limits and requests for the Cloud SQL Proxy container | | cloudsql.serviceAccount | string | None, must be set | The Google service account that has an IAM binding to the `cadc-tap` Kubernetes service accounts and has the `cloudsql.client` role, access | From ad89bca9937ba80a9fe4d5395ddccaf229f65661 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 25 Nov 2024 09:29:50 -0800 Subject: [PATCH 538/567] Regenerate Helm docs --- applications/siav2/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/siav2/README.md b/applications/siav2/README.md index 92993339b1..0f33a7137b 100644 --- a/applications/siav2/README.md +++ b/applications/siav2/README.md @@ -28,7 +28,7 @@ Simple Image Access v2 service | uws.affinity | object | `{}` | Affinity rules for the UWS database pod | | uws.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the UWS database image | | uws.image.repository | string | `"library/postgres"` | UWS database image to use | -| uws.image.tag | string | `"17.0"` | Tag of UWS database image to use | +| 
uws.image.tag | string | `"17.2"` | Tag of UWS database image to use | | uws.nodeSelector | object | `{}` | Node selection rules for the UWS database pod | | uws.podAnnotations | object | `{}` | Annotations for the UWS databse pod | | uws.resources | object | `{"limits":{"cpu":2,"memory":"4Gi"},"requests":{"cpu":0.25,"memory":"1Gi"}}` | Resource limits and requests for the UWS database pod | From eab1b5c59250ea9da39f1f3496dc21bb26d12e93 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 25 Nov 2024 09:33:41 -0800 Subject: [PATCH 539/567] Update pre-commit and Python dependencies Update the shared Ruff configuration file for some diagnostic renamings in Ruff 0.8.0. --- .pre-commit-config.yaml | 2 +- requirements/dev.txt | 407 +++++++++++++++++++++------------------- requirements/main.txt | 257 +++++++++++++------------ requirements/tox.txt | 38 ++-- ruff-shared.toml | 6 +- src/phalanx/cli.py | 4 +- 6 files changed, 367 insertions(+), 347 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a5a2f68946..5a4d453fff 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,7 +46,7 @@ repos: - --template-files=../helm-docs.md.gotmpl - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.4 + rev: v0.8.0 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/requirements/dev.txt b/requirements/dev.txt index 3c3e316dab..367b8ee354 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -244,99 +244,99 @@ comm==0.2.2 \ --hash=sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e \ --hash=sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3 # via ipykernel -coverage==7.6.7 \ - --hash=sha256:0266b62cbea568bd5e93a4da364d05de422110cbed5056d69339bd5af5685433 \ - --hash=sha256:0573f5cbf39114270842d01872952d301027d2d6e2d84013f30966313cadb529 \ - --hash=sha256:0ddcb70b3a3a57581b450571b31cb774f23eb9519c2aaa6176d3a84c9fc57671 \ - 
--hash=sha256:108bb458827765d538abcbf8288599fee07d2743357bdd9b9dad456c287e121e \ - --hash=sha256:14045b8bfd5909196a90da145a37f9d335a5d988a83db34e80f41e965fb7cb42 \ - --hash=sha256:1a5407a75ca4abc20d6252efeb238377a71ce7bda849c26c7a9bece8680a5d99 \ - --hash=sha256:2bc3e45c16564cc72de09e37413262b9f99167803e5e48c6156bccdfb22c8327 \ - --hash=sha256:2d608a7808793e3615e54e9267519351c3ae204a6d85764d8337bd95993581a8 \ - --hash=sha256:34d23e28ccb26236718a3a78ba72744212aa383141961dd6825f6595005c8b06 \ - --hash=sha256:37a15573f988b67f7348916077c6d8ad43adb75e478d0910957394df397d2874 \ - --hash=sha256:3c0317288f032221d35fa4cbc35d9f4923ff0dfd176c79c9b356e8ef8ef2dff4 \ - --hash=sha256:3c42ec2c522e3ddd683dec5cdce8e62817afb648caedad9da725001fa530d354 \ - --hash=sha256:3c6b24007c4bcd0b19fac25763a7cac5035c735ae017e9a349b927cfc88f31c1 \ - --hash=sha256:40cca284c7c310d622a1677f105e8507441d1bb7c226f41978ba7c86979609ab \ - --hash=sha256:46f21663e358beae6b368429ffadf14ed0a329996248a847a4322fb2e35d64d3 \ - --hash=sha256:49ed5ee4109258973630c1f9d099c7e72c5c36605029f3a91fe9982c6076c82b \ - --hash=sha256:5c95e0fa3d1547cb6f021ab72f5c23402da2358beec0a8e6d19a368bd7b0fb37 \ - --hash=sha256:5dd4e4a49d9c72a38d18d641135d2fb0bdf7b726ca60a103836b3d00a1182acd \ - --hash=sha256:5e444b8e88339a2a67ce07d41faabb1d60d1004820cee5a2c2b54e2d8e429a0f \ - --hash=sha256:60dcf7605c50ea72a14490d0756daffef77a5be15ed1b9fea468b1c7bda1bc3b \ - --hash=sha256:623e6965dcf4e28a3debaa6fcf4b99ee06d27218f46d43befe4db1c70841551c \ - --hash=sha256:673184b3156cba06154825f25af33baa2671ddae6343f23175764e65a8c4c30b \ - --hash=sha256:6cf96ceaa275f071f1bea3067f8fd43bec184a25a962c754024c973af871e1b7 \ - --hash=sha256:70a56a2ec1869e6e9fa69ef6b76b1a8a7ef709972b9cc473f9ce9d26b5997ce3 \ - --hash=sha256:77256ad2345c29fe59ae861aa11cfc74579c88d4e8dbf121cbe46b8e32aec808 \ - --hash=sha256:796c9b107d11d2d69e1849b2dfe41730134b526a49d3acb98ca02f4985eeff7a \ - --hash=sha256:7c07de0d2a110f02af30883cd7dddbe704887617d5c27cf373362667445a4c76 \ - 
--hash=sha256:7e61b0e77ff4dddebb35a0e8bb5a68bf0f8b872407d8d9f0c726b65dfabe2469 \ - --hash=sha256:82c809a62e953867cf57e0548c2b8464207f5f3a6ff0e1e961683e79b89f2c55 \ - --hash=sha256:850cfd2d6fc26f8346f422920ac204e1d28814e32e3a58c19c91980fa74d8289 \ - --hash=sha256:87ea64b9fa52bf395272e54020537990a28078478167ade6c61da7ac04dc14bc \ - --hash=sha256:90746521206c88bdb305a4bf3342b1b7316ab80f804d40c536fc7d329301ee13 \ - --hash=sha256:951aade8297358f3618a6e0660dc74f6b52233c42089d28525749fc8267dccd2 \ - --hash=sha256:963e4a08cbb0af6623e61492c0ec4c0ec5c5cf74db5f6564f98248d27ee57d30 \ - --hash=sha256:987a8e3da7da4eed10a20491cf790589a8e5e07656b6dc22d3814c4d88faf163 \ - --hash=sha256:9c2eb378bebb2c8f65befcb5147877fc1c9fbc640fc0aad3add759b5df79d55d \ - --hash=sha256:a1ab9763d291a17b527ac6fd11d1a9a9c358280adb320e9c2672a97af346ac2c \ - --hash=sha256:a3b925300484a3294d1c70f6b2b810d6526f2929de954e5b6be2bf8caa1f12c1 \ - --hash=sha256:acbb8af78f8f91b3b51f58f288c0994ba63c646bc1a8a22ad072e4e7e0a49f1c \ - --hash=sha256:ad32a981bcdedb8d2ace03b05e4fd8dace8901eec64a532b00b15217d3677dd2 \ - --hash=sha256:aee9cf6b0134d6f932d219ce253ef0e624f4fa588ee64830fcba193269e4daa3 \ - --hash=sha256:af05bbba896c4472a29408455fe31b3797b4d8648ed0a2ccac03e074a77e2314 \ - --hash=sha256:b6cce5c76985f81da3769c52203ee94722cd5d5889731cd70d31fee939b74bf0 \ - --hash=sha256:bb684694e99d0b791a43e9fc0fa58efc15ec357ac48d25b619f207c41f2fd384 \ - --hash=sha256:c132b5a22821f9b143f87446805e13580b67c670a548b96da945a8f6b4f2efbb \ - --hash=sha256:c296263093f099da4f51b3dff1eff5d4959b527d4f2f419e16508c5da9e15e8c \ - --hash=sha256:c973b2fe4dc445cb865ab369df7521df9c27bf40715c837a113edaa2aa9faf45 \ - --hash=sha256:cdd94501d65adc5c24f8a1a0eda110452ba62b3f4aeaba01e021c1ed9cb8f34a \ - --hash=sha256:d79d4826e41441c9a118ff045e4bccb9fdbdcb1d02413e7ea6eb5c87b5439d24 \ - --hash=sha256:dbba8210f5067398b2c4d96b4e64d8fb943644d5eb70be0d989067c8ca40c0f8 \ - --hash=sha256:df002e59f2d29e889c37abd0b9ee0d0e6e38c24f5f55d71ff0e09e3412a340ec \ - 
--hash=sha256:dfd14bcae0c94004baba5184d1c935ae0d1231b8409eb6c103a5fd75e8ecdc56 \ - --hash=sha256:e25bacb53a8c7325e34d45dddd2f2fbae0dbc230d0e2642e264a64e17322a777 \ - --hash=sha256:e2c8e3384c12dfa19fa9a52f23eb091a8fad93b5b81a41b14c17c78e23dd1d8b \ - --hash=sha256:e5f2a0f161d126ccc7038f1f3029184dbdf8f018230af17ef6fd6a707a5b881f \ - --hash=sha256:e69ad502f1a2243f739f5bd60565d14a278be58be4c137d90799f2c263e7049a \ - --hash=sha256:ead9b9605c54d15be228687552916c89c9683c215370c4a44f1f217d2adcc34d \ - --hash=sha256:f07ff574986bc3edb80e2c36391678a271d555f91fd1d332a1e0f4b5ea4b6ea9 \ - --hash=sha256:f2c7a045eef561e9544359a0bf5784b44e55cefc7261a20e730baa9220c83413 \ - --hash=sha256:f3e8796434a8106b3ac025fd15417315d7a58ee3e600ad4dbcfddc3f4b14342c \ - --hash=sha256:f63e21ed474edd23f7501f89b53280014436e383a14b9bd77a648366c81dce7b \ - --hash=sha256:fd49c01e5057a451c30c9b892948976f5d38f2cbd04dc556a82743ba8e27ed8c +coverage==7.6.8 \ + --hash=sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5 \ + --hash=sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf \ + --hash=sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb \ + --hash=sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638 \ + --hash=sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4 \ + --hash=sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc \ + --hash=sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed \ + --hash=sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a \ + --hash=sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d \ + --hash=sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649 \ + --hash=sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c \ + --hash=sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b \ + 
--hash=sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4 \ + --hash=sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443 \ + --hash=sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83 \ + --hash=sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee \ + --hash=sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e \ + --hash=sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e \ + --hash=sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3 \ + --hash=sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0 \ + --hash=sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb \ + --hash=sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076 \ + --hash=sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb \ + --hash=sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787 \ + --hash=sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1 \ + --hash=sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e \ + --hash=sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce \ + --hash=sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801 \ + --hash=sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764 \ + --hash=sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365 \ + --hash=sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf \ + --hash=sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6 \ + --hash=sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71 \ + --hash=sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002 \ + --hash=sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4 \ + --hash=sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c \ + 
--hash=sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8 \ + --hash=sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4 \ + --hash=sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146 \ + --hash=sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc \ + --hash=sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea \ + --hash=sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4 \ + --hash=sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad \ + --hash=sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28 \ + --hash=sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451 \ + --hash=sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50 \ + --hash=sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779 \ + --hash=sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63 \ + --hash=sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e \ + --hash=sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc \ + --hash=sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022 \ + --hash=sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d \ + --hash=sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94 \ + --hash=sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b \ + --hash=sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d \ + --hash=sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331 \ + --hash=sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a \ + --hash=sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0 \ + --hash=sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee \ + --hash=sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92 \ + 
--hash=sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a \ + --hash=sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9 # via # -r requirements/dev.in # pytest-cov -debugpy==1.8.8 \ - --hash=sha256:09cc7b162586ea2171eea055985da2702b0723f6f907a423c9b2da5996ad67ba \ - --hash=sha256:0cc94186340be87b9ac5a707184ec8f36547fb66636d1029ff4f1cc020e53996 \ - --hash=sha256:143ef07940aeb8e7316de48f5ed9447644da5203726fca378f3a6952a50a9eae \ - --hash=sha256:19ffbd84e757a6ca0113574d1bf5a2298b3947320a3e9d7d8dc3377f02d9f864 \ - --hash=sha256:26b461123a030e82602a750fb24d7801776aa81cd78404e54ab60e8b5fecdad5 \ - --hash=sha256:3a9c013077a3a0000e83d97cf9cc9328d2b0bbb31f56b0e99ea3662d29d7a6a2 \ - --hash=sha256:4b93e4832fd4a759a0c465c967214ed0c8a6e8914bced63a28ddb0dd8c5f078b \ - --hash=sha256:535f4fb1c024ddca5913bb0eb17880c8f24ba28aa2c225059db145ee557035e9 \ - --hash=sha256:53709d4ec586b525724819dc6af1a7703502f7e06f34ded7157f7b1f963bb854 \ - --hash=sha256:5c0e5a38c7f9b481bf31277d2f74d2109292179081f11108e668195ef926c0f9 \ - --hash=sha256:5c6e885dbf12015aed73770f29dec7023cb310d0dc2ba8bfbeb5c8e43f80edc9 \ - --hash=sha256:64674e95916e53c2e9540a056e5f489e0ad4872645399d778f7c598eacb7b7f9 \ - --hash=sha256:705cd123a773d184860ed8dae99becd879dfec361098edbefb5fc0d3683eb804 \ - --hash=sha256:890fd16803f50aa9cb1a9b9b25b5ec321656dd6b78157c74283de241993d086f \ - --hash=sha256:90244598214bbe704aa47556ec591d2f9869ff9e042e301a2859c57106649add \ - --hash=sha256:a6531d952b565b7cb2fbd1ef5df3d333cf160b44f37547a4e7cf73666aca5d8d \ - --hash=sha256:b01f4a5e5c5fb1d34f4ccba99a20ed01eabc45a4684f4948b5db17a319dfb23f \ - --hash=sha256:c399023146e40ae373753a58d1be0a98bf6397fadc737b97ad612886b53df318 \ - --hash=sha256:d4483836da2a533f4b1454dffc9f668096ac0433de855f0c22cdce8c9f7e10c4 \ - --hash=sha256:e59b1607c51b71545cb3496876544f7186a7a27c00b436a62f285603cc68d1c6 \ - --hash=sha256:e6355385db85cbd666be703a96ab7351bc9e6c61d694893206f8001e22aee091 \ - 
--hash=sha256:ec684553aba5b4066d4de510859922419febc710df7bba04fe9e7ef3de15d34f \ - --hash=sha256:eea8821d998ebeb02f0625dd0d76839ddde8cbf8152ebbe289dd7acf2cdc6b98 \ - --hash=sha256:f3cbf1833e644a3100eadb6120f25be8a532035e8245584c4f7532937edc652a \ - --hash=sha256:f95651bdcbfd3b27a408869a53fbefcc2bcae13b694daee5f1365b1b83a00113 \ - --hash=sha256:ffe94dd5e9a6739a75f0b85316dc185560db3e97afa6b215628d1b6a17561cb2 +debugpy==1.8.9 \ + --hash=sha256:1339e14c7d980407248f09824d1b25ff5c5616651689f1e0f0e51bdead3ea13e \ + --hash=sha256:17c5e0297678442511cf00a745c9709e928ea4ca263d764e90d233208889a19e \ + --hash=sha256:1efbb3ff61487e2c16b3e033bc8595aea578222c08aaf3c4bf0f93fadbd662ee \ + --hash=sha256:365e556a4772d7d0d151d7eb0e77ec4db03bcd95f26b67b15742b88cacff88e9 \ + --hash=sha256:3d9755e77a2d680ce3d2c5394a444cf42be4a592caaf246dbfbdd100ffcf7ae5 \ + --hash=sha256:3e59842d6c4569c65ceb3751075ff8d7e6a6ada209ceca6308c9bde932bcef11 \ + --hash=sha256:472a3994999fe6c0756945ffa359e9e7e2d690fb55d251639d07208dbc37caea \ + --hash=sha256:54a7e6d3014c408eb37b0b06021366ee985f1539e12fe49ca2ee0d392d9ceca5 \ + --hash=sha256:5e565fc54b680292b418bb809f1386f17081d1346dca9a871bf69a8ac4071afe \ + --hash=sha256:62d22dacdb0e296966d7d74a7141aaab4bec123fa43d1a35ddcb39bf9fd29d70 \ + --hash=sha256:66eeae42f3137eb428ea3a86d4a55f28da9bd5a4a3d369ba95ecc3a92c1bba53 \ + --hash=sha256:6953b335b804a41f16a192fa2e7851bdcfd92173cbb2f9f777bb934f49baab65 \ + --hash=sha256:7c4d65d03bee875bcb211c76c1d8f10f600c305dbd734beaed4077e902606fee \ + --hash=sha256:7e646e62d4602bb8956db88b1e72fe63172148c1e25c041e03b103a25f36673c \ + --hash=sha256:7e8b079323a56f719977fde9d8115590cb5e7a1cba2fcee0986ef8817116e7c1 \ + --hash=sha256:8138efff315cd09b8dcd14226a21afda4ca582284bf4215126d87342bba1cc66 \ + --hash=sha256:8e99c0b1cc7bf86d83fb95d5ccdc4ad0586d4432d489d1f54e4055bcc795f693 \ + --hash=sha256:957363d9a7a6612a37458d9a15e72d03a635047f946e5fceee74b50d52a9c8e2 \ + 
--hash=sha256:957ecffff80d47cafa9b6545de9e016ae8c9547c98a538ee96ab5947115fb3dd \ + --hash=sha256:ada7fb65102a4d2c9ab62e8908e9e9f12aed9d76ef44880367bc9308ebe49a0f \ + --hash=sha256:b74a49753e21e33e7cf030883a92fa607bddc4ede1aa4145172debc637780040 \ + --hash=sha256:c36856343cbaa448171cba62a721531e10e7ffb0abff838004701454149bc037 \ + --hash=sha256:cc37a6c9987ad743d9c3a14fa1b1a14b7e4e6041f9dd0c8abf8895fe7a97b899 \ + --hash=sha256:cfe1e6c6ad7178265f74981edf1154ffce97b69005212fbc90ca22ddfe3d017e \ + --hash=sha256:e46b420dc1bea64e5bbedd678148be512442bc589b0111bd799367cde051e71a \ + --hash=sha256:ff54ef77ad9f5c425398efb150239f6fe8e20c53ae2f68367eba7ece1e96226d # via ipykernel decorator==5.1.1 \ --hash=sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330 \ @@ -766,104 +766,115 @@ pycparser==2.22 ; implementation_name == 'pypy' \ # via # -c requirements/main.txt # cffi -pydantic==2.9.2 \ - --hash=sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f \ - --hash=sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12 +pydantic==2.10.1 \ + --hash=sha256:a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560 \ + --hash=sha256:a8d20db84de64cf4a7d59e899c2caf0fe9d660c7cfc482528e7020d7dd189a7e # via # -c requirements/main.txt # autodoc-pydantic # documenteer # pydantic-settings -pydantic-core==2.23.4 \ - --hash=sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36 \ - --hash=sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05 \ - --hash=sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071 \ - --hash=sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327 \ - --hash=sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c \ - --hash=sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36 \ - --hash=sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29 \ - 
--hash=sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744 \ - --hash=sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d \ - --hash=sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec \ - --hash=sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e \ - --hash=sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e \ - --hash=sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577 \ - --hash=sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232 \ - --hash=sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863 \ - --hash=sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6 \ - --hash=sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368 \ - --hash=sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480 \ - --hash=sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2 \ - --hash=sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2 \ - --hash=sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6 \ - --hash=sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769 \ - --hash=sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d \ - --hash=sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2 \ - --hash=sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84 \ - --hash=sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166 \ - --hash=sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271 \ - --hash=sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5 \ - --hash=sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb \ - --hash=sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13 \ - --hash=sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323 \ - 
--hash=sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556 \ - --hash=sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665 \ - --hash=sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef \ - --hash=sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb \ - --hash=sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119 \ - --hash=sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126 \ - --hash=sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510 \ - --hash=sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b \ - --hash=sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87 \ - --hash=sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f \ - --hash=sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc \ - --hash=sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8 \ - --hash=sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21 \ - --hash=sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f \ - --hash=sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6 \ - --hash=sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658 \ - --hash=sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b \ - --hash=sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3 \ - --hash=sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb \ - --hash=sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59 \ - --hash=sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24 \ - --hash=sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9 \ - --hash=sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3 \ - --hash=sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd \ - 
--hash=sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753 \ - --hash=sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55 \ - --hash=sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad \ - --hash=sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a \ - --hash=sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605 \ - --hash=sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e \ - --hash=sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b \ - --hash=sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433 \ - --hash=sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8 \ - --hash=sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07 \ - --hash=sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728 \ - --hash=sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0 \ - --hash=sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327 \ - --hash=sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555 \ - --hash=sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64 \ - --hash=sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6 \ - --hash=sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea \ - --hash=sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b \ - --hash=sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df \ - --hash=sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e \ - --hash=sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd \ - --hash=sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068 \ - --hash=sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3 \ - --hash=sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040 \ - 
--hash=sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12 \ - --hash=sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916 \ - --hash=sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f \ - --hash=sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f \ - --hash=sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801 \ - --hash=sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231 \ - --hash=sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5 \ - --hash=sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8 \ - --hash=sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee \ - --hash=sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607 +pydantic-core==2.27.1 \ + --hash=sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9 \ + --hash=sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b \ + --hash=sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c \ + --hash=sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529 \ + --hash=sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc \ + --hash=sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854 \ + --hash=sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d \ + --hash=sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278 \ + --hash=sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a \ + --hash=sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c \ + --hash=sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f \ + --hash=sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27 \ + --hash=sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f \ + 
--hash=sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac \ + --hash=sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2 \ + --hash=sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97 \ + --hash=sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a \ + --hash=sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919 \ + --hash=sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9 \ + --hash=sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4 \ + --hash=sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c \ + --hash=sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131 \ + --hash=sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5 \ + --hash=sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd \ + --hash=sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089 \ + --hash=sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107 \ + --hash=sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6 \ + --hash=sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60 \ + --hash=sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf \ + --hash=sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5 \ + --hash=sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08 \ + --hash=sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05 \ + --hash=sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2 \ + --hash=sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e \ + --hash=sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c \ + --hash=sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17 \ + --hash=sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62 \ + 
--hash=sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23 \ + --hash=sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be \ + --hash=sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067 \ + --hash=sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02 \ + --hash=sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f \ + --hash=sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235 \ + --hash=sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840 \ + --hash=sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5 \ + --hash=sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807 \ + --hash=sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16 \ + --hash=sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c \ + --hash=sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864 \ + --hash=sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e \ + --hash=sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a \ + --hash=sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35 \ + --hash=sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737 \ + --hash=sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a \ + --hash=sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3 \ + --hash=sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52 \ + --hash=sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05 \ + --hash=sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31 \ + --hash=sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89 \ + --hash=sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de \ + --hash=sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6 \ + 
--hash=sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36 \ + --hash=sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c \ + --hash=sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154 \ + --hash=sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb \ + --hash=sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e \ + --hash=sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd \ + --hash=sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3 \ + --hash=sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f \ + --hash=sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78 \ + --hash=sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960 \ + --hash=sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618 \ + --hash=sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08 \ + --hash=sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4 \ + --hash=sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c \ + --hash=sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c \ + --hash=sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330 \ + --hash=sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8 \ + --hash=sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792 \ + --hash=sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025 \ + --hash=sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9 \ + --hash=sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f \ + --hash=sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01 \ + --hash=sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337 \ + --hash=sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4 \ + 
--hash=sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f \ + --hash=sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd \ + --hash=sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51 \ + --hash=sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab \ + --hash=sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc \ + --hash=sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676 \ + --hash=sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381 \ + --hash=sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed \ + --hash=sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb \ + --hash=sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967 \ + --hash=sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073 \ + --hash=sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae \ + --hash=sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c \ + --hash=sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206 \ + --hash=sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b # via # -c requirements/main.txt # pydantic @@ -1215,9 +1226,9 @@ rpds-py==0.21.0 \ # via # jsonschema # referencing -setuptools==75.5.0 \ - --hash=sha256:5c4ccb41111392671f02bb5f8436dfc5a9a7185e80500531b133f5775c4163ef \ - --hash=sha256:87cb777c3b96d638ca02031192d40390e0ad97737e27b6b4fa831bea86f2f829 +setuptools==75.6.0 \ + --hash=sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6 \ + --hash=sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d # via # documenteer # sphinxcontrib-bibtex @@ -1427,18 +1438,18 @@ tomlkit==0.13.2 \ --hash=sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde \ --hash=sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79 # via documenteer 
-tornado==6.4.1 \ - --hash=sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8 \ - --hash=sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f \ - --hash=sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4 \ - --hash=sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3 \ - --hash=sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14 \ - --hash=sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842 \ - --hash=sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9 \ - --hash=sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698 \ - --hash=sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7 \ - --hash=sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d \ - --hash=sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4 +tornado==6.4.2 \ + --hash=sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803 \ + --hash=sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec \ + --hash=sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482 \ + --hash=sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634 \ + --hash=sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38 \ + --hash=sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b \ + --hash=sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c \ + --hash=sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf \ + --hash=sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946 \ + --hash=sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73 \ + --hash=sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1 # via # ipykernel # jupyter-client diff --git a/requirements/main.txt b/requirements/main.txt index 489a168c0c..bbd62ceaae 100644 --- 
a/requirements/main.txt +++ b/requirements/main.txt @@ -10,34 +10,32 @@ anyio==4.6.2.post1 \ # via # httpcore # starlette -bcrypt==4.2.0 \ - --hash=sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb \ - --hash=sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399 \ - --hash=sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291 \ - --hash=sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d \ - --hash=sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7 \ - --hash=sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170 \ - --hash=sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d \ - --hash=sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe \ - --hash=sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060 \ - --hash=sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184 \ - --hash=sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a \ - --hash=sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68 \ - --hash=sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c \ - --hash=sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458 \ - --hash=sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9 \ - --hash=sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328 \ - --hash=sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7 \ - --hash=sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34 \ - --hash=sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e \ - --hash=sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2 \ - --hash=sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5 \ - --hash=sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae \ - 
--hash=sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00 \ - --hash=sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841 \ - --hash=sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8 \ - --hash=sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221 \ - --hash=sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db +bcrypt==4.2.1 \ + --hash=sha256:041fa0155c9004eb98a232d54da05c0b41d4b8e66b6fc3cb71b4b3f6144ba837 \ + --hash=sha256:04e56e3fe8308a88b77e0afd20bec516f74aecf391cdd6e374f15cbed32783d6 \ + --hash=sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17 \ + --hash=sha256:533e7f3bcf2f07caee7ad98124fab7499cb3333ba2274f7a36cf1daee7409d99 \ + --hash=sha256:6765386e3ab87f569b276988742039baab087b2cdb01e809d74e74503c2faafe \ + --hash=sha256:687cf30e6681eeda39548a93ce9bfbb300e48b4d445a43db4298d2474d2a1e54 \ + --hash=sha256:76132c176a6d9953cdc83c296aeaed65e1a708485fd55abf163e0d9f8f16ce0e \ + --hash=sha256:76d3e352b32f4eeb34703370e370997065d28a561e4a18afe4fef07249cb4396 \ + --hash=sha256:807261df60a8b1ccd13e6599c779014a362ae4e795f5c59747f60208daddd96d \ + --hash=sha256:89df2aea2c43be1e1fa066df5f86c8ce822ab70a30e4c210968669565c0f4685 \ + --hash=sha256:8ad2f4528cbf0febe80e5a3a57d7a74e6635e41af1ea5675282a33d769fba413 \ + --hash=sha256:8c458cd103e6c5d1d85cf600e546a639f234964d0228909d8f8dbeebff82d526 \ + --hash=sha256:8dbd0747208912b1e4ce730c6725cb56c07ac734b3629b60d4398f082ea718ad \ + --hash=sha256:909faa1027900f2252a9ca5dfebd25fc0ef1417943824783d1c8418dd7d6df4a \ + --hash=sha256:aaa2e285be097050dba798d537b6efd9b698aa88eef52ec98d23dcd6d7cf6fea \ + --hash=sha256:adadd36274510a01f33e6dc08f5824b97c9580583bd4487c564fc4617b328005 \ + --hash=sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f \ + --hash=sha256:b588af02b89d9fad33e5f98f7838bf590d6d692df7153647724a7f20c186f6bf \ + 
--hash=sha256:b7703ede632dc945ed1172d6f24e9f30f27b1b1a067f32f68bf169c5f08d0425 \ + --hash=sha256:c6f5fa3775966cca251848d4d5393ab016b3afed251163c1436fefdec3b02c84 \ + --hash=sha256:cde78d385d5e93ece5479a0a87f73cd6fa26b171c786a884f955e165032b262c \ + --hash=sha256:cfdf3d7530c790432046c40cda41dfee8c83e29482e6a604f8930b9930e94139 \ + --hash=sha256:e158009a54c4c8bc91d5e0da80920d048f918c61a581f0a63e4e93bb556d362f \ + --hash=sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c \ + --hash=sha256:f85b1ffa09240c89aa2e1ae9f3b1c687104f7b2b9d2098da4e923f1b7082d331 # via phalanx (pyproject.toml) certifi==2024.8.30 \ --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ @@ -380,103 +378,114 @@ pycparser==2.22 ; platform_python_implementation != 'PyPy' \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pydantic==2.9.2 \ - --hash=sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f \ - --hash=sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12 +pydantic==2.10.1 \ + --hash=sha256:a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560 \ + --hash=sha256:a8d20db84de64cf4a7d59e899c2caf0fe9d660c7cfc482528e7020d7dd189a7e # via # phalanx (pyproject.toml) # fastapi # safir -pydantic-core==2.23.4 \ - --hash=sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36 \ - --hash=sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05 \ - --hash=sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071 \ - --hash=sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327 \ - --hash=sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c \ - --hash=sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36 \ - --hash=sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29 \ - 
--hash=sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744 \ - --hash=sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d \ - --hash=sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec \ - --hash=sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e \ - --hash=sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e \ - --hash=sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577 \ - --hash=sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232 \ - --hash=sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863 \ - --hash=sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6 \ - --hash=sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368 \ - --hash=sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480 \ - --hash=sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2 \ - --hash=sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2 \ - --hash=sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6 \ - --hash=sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769 \ - --hash=sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d \ - --hash=sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2 \ - --hash=sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84 \ - --hash=sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166 \ - --hash=sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271 \ - --hash=sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5 \ - --hash=sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb \ - --hash=sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13 \ - --hash=sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323 \ - 
--hash=sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556 \ - --hash=sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665 \ - --hash=sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef \ - --hash=sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb \ - --hash=sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119 \ - --hash=sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126 \ - --hash=sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510 \ - --hash=sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b \ - --hash=sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87 \ - --hash=sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f \ - --hash=sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc \ - --hash=sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8 \ - --hash=sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21 \ - --hash=sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f \ - --hash=sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6 \ - --hash=sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658 \ - --hash=sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b \ - --hash=sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3 \ - --hash=sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb \ - --hash=sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59 \ - --hash=sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24 \ - --hash=sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9 \ - --hash=sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3 \ - --hash=sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd \ - 
--hash=sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753 \ - --hash=sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55 \ - --hash=sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad \ - --hash=sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a \ - --hash=sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605 \ - --hash=sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e \ - --hash=sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b \ - --hash=sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433 \ - --hash=sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8 \ - --hash=sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07 \ - --hash=sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728 \ - --hash=sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0 \ - --hash=sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327 \ - --hash=sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555 \ - --hash=sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64 \ - --hash=sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6 \ - --hash=sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea \ - --hash=sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b \ - --hash=sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df \ - --hash=sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e \ - --hash=sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd \ - --hash=sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068 \ - --hash=sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3 \ - --hash=sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040 \ - 
--hash=sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12 \ - --hash=sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916 \ - --hash=sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f \ - --hash=sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f \ - --hash=sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801 \ - --hash=sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231 \ - --hash=sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5 \ - --hash=sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8 \ - --hash=sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee \ - --hash=sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607 +pydantic-core==2.27.1 \ + --hash=sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9 \ + --hash=sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b \ + --hash=sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c \ + --hash=sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529 \ + --hash=sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc \ + --hash=sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854 \ + --hash=sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d \ + --hash=sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278 \ + --hash=sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a \ + --hash=sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c \ + --hash=sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f \ + --hash=sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27 \ + --hash=sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f \ + 
--hash=sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac \ + --hash=sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2 \ + --hash=sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97 \ + --hash=sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a \ + --hash=sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919 \ + --hash=sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9 \ + --hash=sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4 \ + --hash=sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c \ + --hash=sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131 \ + --hash=sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5 \ + --hash=sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd \ + --hash=sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089 \ + --hash=sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107 \ + --hash=sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6 \ + --hash=sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60 \ + --hash=sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf \ + --hash=sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5 \ + --hash=sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08 \ + --hash=sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05 \ + --hash=sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2 \ + --hash=sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e \ + --hash=sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c \ + --hash=sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17 \ + --hash=sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62 \ + 
--hash=sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23 \ + --hash=sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be \ + --hash=sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067 \ + --hash=sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02 \ + --hash=sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f \ + --hash=sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235 \ + --hash=sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840 \ + --hash=sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5 \ + --hash=sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807 \ + --hash=sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16 \ + --hash=sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c \ + --hash=sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864 \ + --hash=sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e \ + --hash=sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a \ + --hash=sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35 \ + --hash=sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737 \ + --hash=sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a \ + --hash=sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3 \ + --hash=sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52 \ + --hash=sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05 \ + --hash=sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31 \ + --hash=sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89 \ + --hash=sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de \ + --hash=sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6 \ + 
--hash=sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36 \ + --hash=sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c \ + --hash=sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154 \ + --hash=sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb \ + --hash=sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e \ + --hash=sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd \ + --hash=sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3 \ + --hash=sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f \ + --hash=sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78 \ + --hash=sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960 \ + --hash=sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618 \ + --hash=sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08 \ + --hash=sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4 \ + --hash=sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c \ + --hash=sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c \ + --hash=sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330 \ + --hash=sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8 \ + --hash=sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792 \ + --hash=sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025 \ + --hash=sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9 \ + --hash=sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f \ + --hash=sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01 \ + --hash=sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337 \ + --hash=sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4 \ + 
--hash=sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f \ + --hash=sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd \ + --hash=sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51 \ + --hash=sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab \ + --hash=sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc \ + --hash=sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676 \ + --hash=sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381 \ + --hash=sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed \ + --hash=sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb \ + --hash=sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967 \ + --hash=sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073 \ + --hash=sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae \ + --hash=sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c \ + --hash=sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206 \ + --hash=sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b # via # pydantic # safir @@ -574,9 +583,9 @@ sniffio==1.3.1 \ # anyio # httpcore # httpx -starlette==0.41.2 \ - --hash=sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62 \ - --hash=sha256:fbc189474b4731cf30fcef52f18a8d070e3f3b46c6a04c97579e85e6ffca942d +starlette==0.41.3 \ + --hash=sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835 \ + --hash=sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7 # via # fastapi # safir diff --git a/requirements/tox.txt b/requirements/tox.txt index a3206bec05..dcbc6bf87d 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -60,25 +60,25 @@ tox-uv==1.16.0 \ --hash=sha256:71b2e2fa6c35c1360b91a302df1d65b3e5a1f656b321c5ebf7b84545804c9f01 \ 
--hash=sha256:e6f0b525a687e745ab878d07cbf5c7e85d582028d4a7c8935f95e84350651432 # via -r requirements/tox.in -uv==0.5.2 \ - --hash=sha256:15c7ffa08ae21abd221dbdf9ba25c8969235f587cec6df8035552434e5ca1cc5 \ - --hash=sha256:2597e91be45b3f4458d0d16a5a1cda7e93af7d6dbfddf251aae5377f9187fa88 \ - --hash=sha256:27d666da8fbb0f87d9df67abf9feea0da4ee1336730f2c4be29a11f3feaa0a29 \ - --hash=sha256:374e9498e155fcaa8728a6770b84f03781106d705332f4ec059e1cc93c8f4d8a \ - --hash=sha256:5052758d374dd769efd0c70b4789ffb08439567eb114ad8fe728536bb5cc5299 \ - --hash=sha256:675ca34829ceca3e9de395cf05e8f881334a24488f97dd923c463830270d52a7 \ - --hash=sha256:67776d34cba359c63919c5ad50331171261d2ec7a83fd07f032eb8cc22e22b8e \ - --hash=sha256:71467545d51883d1af7094c8f6da69b55e7d49b742c2dc707d644676dcb66515 \ - --hash=sha256:772b32d157ec8f27c0099ecac94cf5cd298bce72f1a1f512205591de4e9f0c5c \ - --hash=sha256:7bde66f13571e437fd45f32f5742ab53d5e011b4edb1c74cb74cb8b1cbb828b5 \ - --hash=sha256:89e60ad9601f35f187326de84f35e7517c6eb1438359da42ec85cfd9c1895957 \ - --hash=sha256:a4d4fdad03e6dc3e8216192b8a12bcf2c71c8b12046e755575c7f262cbb61924 \ - --hash=sha256:a8a9897dd7657258c53f41aecdbe787da99f4fc0775f19826ab65cc0a7136cbf \ - --hash=sha256:c9795b990fb0b2a18d3a8cef8822e13c6a6f438bc16d34ccf01d931c76cfd5da \ - --hash=sha256:cfba5b0070652da4174083b78852f3ab3d262ba1c8b63a4d5ae497263b02b834 \ - --hash=sha256:d0834c6b37750c045bbea80600d3ae3e95becc4db148f5c0d0bc3ec6a7924e8f \ - --hash=sha256:d1fe4e025dbb9ec5c9250bfc1231847b8487706538f94d10c769f0a54db3e0af \ - --hash=sha256:dfcd8275ff8cb59d5f26f826a44270b2fe8f38aa7188d7355c48d3e9b759d0c0 +uv==0.5.4 \ + --hash=sha256:05b45c7eefb178dcdab0d49cd642fb7487377d00727102a8d6d306cc034c0d83 \ + --hash=sha256:2118bb99cbc9787cb5e5cc4a507201e25a3fe88a9f389e8ffb84f242d96038c2 \ + --hash=sha256:30ce031e36c54d4ba791d743d992d0a4fd8d70480db781d30a2f6f5125f39194 \ + --hash=sha256:4432215deb8d5c1ccab17ee51cb80f5de1a20865ee02df47532f87442a3d6a58 \ + 
--hash=sha256:493aedc3c758bbaede83ecc8d5f7e6a9279ebec151c7f756aa9ea898c73f8ddb \ + --hash=sha256:69079e900bd26b0f65069ac6fa684c74662ed87121c076f2b1cbcf042539034c \ + --hash=sha256:8d7a4a3df943a7c16cd032ccbaab8ed21ff64f4cb090b3a0a15a8b7502ccd876 \ + --hash=sha256:928ed95fefe4e1338d0a7ad2f6b635de59e2ec92adaed4a267f7501a3b252263 \ + --hash=sha256:a79a0885df364b897da44aae308e6ed9cca3a189d455cf1c205bd6f7b03daafa \ + --hash=sha256:ca72e6a4c3c6b8b5605867e16a7f767f5c99b7f526de6bbb903c60eb44fd1e01 \ + --hash=sha256:cd7a5a3a36f975a7678f27849a2d49bafe7272143d938e9b6f3bf28392a3ba00 \ + --hash=sha256:dd2df2ba823e6684230ab4c581f2320be38d7f46de11ce21d2dbba631470d7b6 \ + --hash=sha256:df3cb58b7da91f4fc647d09c3e96006cd6c7bd424a81ce2308a58593c6887c39 \ + --hash=sha256:ed5659cde099f39995f4cb793fd939d2260b4a26e4e29412c91e7537f53d8d25 \ + --hash=sha256:f07e5e0df40a09154007da41b76932671333f9fecb0735c698b19da25aa08927 \ + --hash=sha256:f40c6c6c3a1b398b56d3a8b28f7b455ac1ce4cbb1469f8d35d3bbc804d83daa4 \ + --hash=sha256:f511faf719b797ef0f14688f1abe20b3fd126209cf58512354d1813249745119 \ + --hash=sha256:f806af0ee451a81099c449c4cff0e813056fdf7dd264f3d3a8fd321b17ff9efc # via tox-uv virtualenv==20.27.1 \ --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ diff --git a/ruff-shared.toml b/ruff-shared.toml index dc46f19b43..3617fad62c 100644 --- a/ruff-shared.toml +++ b/ruff-shared.toml @@ -57,9 +57,9 @@ ignore = [ "S607", # using PATH is not a security vulnerability "SIM102", # sometimes the formatting of nested if statements is clearer "SIM117", # sometimes nested with contexts are clearer - "TCH001", # we decided to not maintain separate TYPE_CHECKING blocks - "TCH002", # we decided to not maintain separate TYPE_CHECKING blocks - "TCH003", # we decided to not maintain separate TYPE_CHECKING blocks + "TC001", # we decided to not maintain separate TYPE_CHECKING blocks + "TC002", # we decided to not maintain separate TYPE_CHECKING blocks + "TC003", # we decided to not 
maintain separate TYPE_CHECKING blocks "TD003", # we don't require issues be created for TODOs "TID252", # if we're going to use relative imports, use them always "TRY003", # good general advice but lint is way too aggressive diff --git a/src/phalanx/cli.py b/src/phalanx/cli.py index fb1a6c4ba8..5401b4ee2f 100644 --- a/src/phalanx/cli.py +++ b/src/phalanx/cli.py @@ -34,8 +34,6 @@ P = ParamSpec("P") __all__ = [ - "main", - "help", "application", "application_add_helm_repos", "application_create", @@ -47,6 +45,8 @@ "environment_lint", "environment_schema", "environment_template", + "help", + "main", "secrets", "secrets_audit", "secrets_list", From 23e76e7f6a32273ca44f329106989dd217a2b956 Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Mon, 25 Nov 2024 16:59:48 -0300 Subject: [PATCH 540/567] Update rubintv app version to v2.5.4 for summit and usdf production deployments. --- applications/rubintv/values-summit.yaml | 2 +- applications/rubintv/values-usdfprod.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index 6fe16e6ee8..2f0cc03489 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -20,7 +20,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.5.3 + tag: v2.5.4 pullPolicy: Always workers: diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index bbb17cccc6..c86a44f612 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -18,7 +18,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.5.3 + tag: v2.5.4 pullPolicy: Always workers: From d7b0880629c3bbead336e4b4b05498885d0804f8 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Tue, 12 Nov 2024 14:25:07 -0800 Subject: [PATCH 541/567] Switch Prompt Processing pipelines config from 
Python-like to YAML. Delegating parsing to YAML makes it much easier to add more fields to the pipelines spec. The parsing is done by Prompt Processing instead of during Helm processing to keep the expanded configuration easier to read in Argo. --- .../prompt-proto-service-hsc-gpu/README.md | 4 +- .../values-usdfdev-prompt-processing.yaml | 8 +- .../prompt-proto-service-hsc-gpu/values.yaml | 6 +- .../prompt-proto-service-hsc/README.md | 4 +- .../values-usdfdev-prompt-processing.yaml | 8 +- .../prompt-proto-service-hsc/values.yaml | 6 +- .../prompt-proto-service-latiss/README.md | 4 +- .../values-usdfdev-prompt-processing.yaml | 13 +++- .../values-usdfprod-prompt-processing.yaml | 48 ++++++------ .../prompt-proto-service-latiss/values.yaml | 6 +- .../prompt-proto-service-lsstcam/README.md | 4 +- .../prompt-proto-service-lsstcam/values.yaml | 6 +- .../prompt-proto-service-lsstcomcam/README.md | 4 +- .../values-usdfdev-prompt-processing.yaml | 12 ++- .../values-usdfprod-prompt-processing.yaml | 73 ++++++++++--------- .../values.yaml | 6 +- .../README.md | 4 +- .../values-usdfdev-prompt-processing.yaml | 14 ++-- .../values.yaml | 6 +- charts/prompt-proto-service/README.md | 4 +- .../templates/prompt-proto-service.yaml | 6 +- charts/prompt-proto-service/values.yaml | 6 +- 22 files changed, 139 insertions(+), 113 deletions(-) diff --git a/applications/prompt-proto-service-hsc-gpu/README.md b/applications/prompt-proto-service-hsc-gpu/README.md index 7f119bce01..8d393c1e28 100644 --- a/applications/prompt-proto-service-hsc-gpu/README.md +++ b/applications/prompt-proto-service-hsc-gpu/README.md @@ -31,8 +31,8 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.imageNotifications.topic | string | None, must be set | Topic where raw image arrival notifications appear | | prompt-proto-service.instrument.calibRepo | string | None, must be set | URI to the shared repo used for calibrations, templates, and pipeline outputs. 
If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. | | prompt-proto-service.instrument.name | string | `"HSC"` | The "short" name of the instrument | -| prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | -| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| prompt-proto-service.instrument.pipelines.main | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run for which visits' raws. Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | +| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.preloadPadding | int | `42` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"hsc_rings_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). 
| diff --git a/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml index 33666a8bf6..0c3a27f4da 100644 --- a/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values-usdfdev-prompt-processing.yaml @@ -12,8 +12,12 @@ prompt-proto-service: instrument: pipelines: - main: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/ApPipe.yaml] - preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/Preprocessing.yaml] + main: |- + - survey: SURVEY + pipelines: ['${PROMPT_PROCESSING_DIR}/pipelines/HSC/ApPipe.yaml'] + preprocessing: |- + - survey: SURVEY + pipelines: ['${PROMPT_PROCESSING_DIR}/pipelines/HSC/Preprocessing.yaml'] calibRepo: s3://rubin-pp-dev-users/central_repo_2 s3: diff --git a/applications/prompt-proto-service-hsc-gpu/values.yaml b/applications/prompt-proto-service-hsc-gpu/values.yaml index 4fbc4354e8..6cf07db2d8 100644 --- a/applications/prompt-proto-service-hsc-gpu/values.yaml +++ b/applications/prompt-proto-service-hsc-gpu/values.yaml @@ -38,11 +38,11 @@ prompt-proto-service: # -- The "short" name of the instrument name: HSC pipelines: - # -- Machine-readable string describing which pipeline(s) should be run for which visits. - # Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. + # -- YAML-formatted config describing which pipeline(s) should be run for which visits' raws. + # Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. # @default -- None, must be set main: "" - # -- Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. 
+ # -- YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. # @default -- None, must be set preprocessing: "" # -- Skymap to use with the instrument diff --git a/applications/prompt-proto-service-hsc/README.md b/applications/prompt-proto-service-hsc/README.md index 33a4a6d8b7..3f6af2f7fb 100644 --- a/applications/prompt-proto-service-hsc/README.md +++ b/applications/prompt-proto-service-hsc/README.md @@ -31,8 +31,8 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.imageNotifications.topic | string | None, must be set | Topic where raw image arrival notifications appear | | prompt-proto-service.instrument.calibRepo | string | None, must be set | URI to the shared repo used for calibrations, templates, and pipeline outputs. If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. | | prompt-proto-service.instrument.name | string | `"HSC"` | The "short" name of the instrument | -| prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | -| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| prompt-proto-service.instrument.pipelines.main | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run for which visits' raws. Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. 
| +| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.preloadPadding | int | `42` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"hsc_rings_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml index 6ef44857a0..d7faeaf5c0 100644 --- a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml @@ -13,8 +13,12 @@ prompt-proto-service: instrument: pipelines: - main: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/ApPipe.yaml] - preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/HSC/Preprocessing.yaml] + main: |- + - survey: SURVEY + pipelines: ['${PROMPT_PROCESSING_DIR}/pipelines/HSC/ApPipe.yaml'] + preprocessing: |- + - survey: SURVEY + pipelines: ['${PROMPT_PROCESSING_DIR}/pipelines/HSC/Preprocessing.yaml'] calibRepo: s3://rubin-pp-dev-users/central_repo_2 s3: diff --git a/applications/prompt-proto-service-hsc/values.yaml b/applications/prompt-proto-service-hsc/values.yaml index f2e5eba4a8..9cfc798c4e 100644 --- a/applications/prompt-proto-service-hsc/values.yaml +++ b/applications/prompt-proto-service-hsc/values.yaml @@ -38,11 +38,11 @@ prompt-proto-service: # -- The "short" name of the instrument name: HSC pipelines: - # -- Machine-readable string describing which pipeline(s) should be run for which visits. 
- # Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. + # -- YAML-formatted config describing which pipeline(s) should be run for which visits' raws. + # Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. # @default -- None, must be set main: "" - # -- Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. + # -- YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. # @default -- None, must be set preprocessing: "" # -- Skymap to use with the instrument diff --git a/applications/prompt-proto-service-latiss/README.md b/applications/prompt-proto-service-latiss/README.md index b5028a2171..338d44300f 100644 --- a/applications/prompt-proto-service-latiss/README.md +++ b/applications/prompt-proto-service-latiss/README.md @@ -31,8 +31,8 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.imageNotifications.topic | string | None, must be set | Topic where raw image arrival notifications appear | | prompt-proto-service.instrument.calibRepo | string | None, must be set | URI to the shared repo used for calibrations, templates, and pipeline outputs. If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. | | prompt-proto-service.instrument.name | string | `"LATISS"` | The "short" name of the instrument | -| prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. 
| -| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| prompt-proto-service.instrument.pipelines.main | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run for which visits' raws. Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | +| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"latiss_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). 
| diff --git a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml index 4bd5b8032d..fbc8b126c0 100644 --- a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml @@ -11,10 +11,15 @@ prompt-proto-service: instrument: pipelines: - main: >- - (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/ApPipe.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr.yaml] - preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Preprocessing.yaml] + main: |- + - survey: SURVEY + pipelines: + - ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/ApPipe.yaml + # Fallback for an upload.py image without templates + - ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr.yaml + preprocessing: |- + - survey: SURVEY + pipelines: ['${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Preprocessing.yaml'] calibRepo: s3://rubin-pp-dev-users/central_repo_2 s3: diff --git a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml index b8ff8138bc..4cf418992a 100644 --- a/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfprod-prompt-processing.yaml @@ -18,30 +18,30 @@ prompt-proto-service: instrument: pipelines: - # BLOCK-306 is photographic imaging - # BLOCK-T17 is daytime checkout - # BLOCK-271 is photon transfer curve calibrations - # BLOCK-295 is the daily calibration sequence as of May 27, 2024 - main: >- - (survey="BLOCK-306")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/ApPipe.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/SingleFrame.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr.yaml] - (survey="BLOCK-T17")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr-cal.yaml] - 
(survey="cwfs")=[] - (survey="cwfs-focus-sweep")=[] - (survey="spec-survey")=[] - (survey="BLOCK-271")=[] - (survey="BLOCK-295")=[] - (survey="")=[] - preprocessing: >- - (survey="BLOCK-306")=[${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Preprocessing.yaml] - (survey="BLOCK-T17")=[] - (survey="cwfs")=[] - (survey="cwfs-focus-sweep")=[] - (survey="spec-survey")=[] - (survey="BLOCK-271")=[] - (survey="BLOCK-295")=[] - (survey="")=[] + main: |- + - survey: BLOCK-306 # photographic imaging + pipelines: + - ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/ApPipe.yaml + - ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/SingleFrame.yaml + - ${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr.yaml + - survey: BLOCK-T17 # daytime checkout + pipelines: ['${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Isr-cal.yaml'] + - {survey: cwfs, pipelines: []} + - {survey: cwfs-focus-sweep, pipelines: []} + - {survey: spec-survey, pipelines: []} + - {survey: BLOCK-271, pipelines: []} # photon transfer curve calibrations + - {survey: BLOCK-295, pipelines: []} # the daily calibration sequence as of May 27, 2024 + - {survey: "", pipelines: []} + preprocessing: |- + - survey: BLOCK-306 + pipelines: ['${PROMPT_PROCESSING_DIR}/pipelines/LATISS/Preprocessing.yaml'] + - {survey: BLOCK-T17, pipelines: []} + - {survey: cwfs, pipelines: []} + - {survey: cwfs-focus-sweep, pipelines: []} + - {survey: spec-survey, pipelines: []} + - {survey: BLOCK-271, pipelines: []} + - {survey: BLOCK-295, pipelines: []} + - {survey: "", pipelines: []} calibRepo: s3://rubin-summit-users s3: diff --git a/applications/prompt-proto-service-latiss/values.yaml b/applications/prompt-proto-service-latiss/values.yaml index 9229ff3319..10e1fdcd2b 100644 --- a/applications/prompt-proto-service-latiss/values.yaml +++ b/applications/prompt-proto-service-latiss/values.yaml @@ -39,11 +39,11 @@ prompt-proto-service: # -- The "short" name of the instrument name: LATISS pipelines: - # -- Machine-readable string describing which pipeline(s) should be run for 
which visits. - # Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. + # -- YAML-formatted config describing which pipeline(s) should be run for which visits' raws. + # Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. # @default -- None, must be set main: "" - # -- Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. + # -- YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. # @default -- None, must be set preprocessing: "" # -- Skymap to use with the instrument diff --git a/applications/prompt-proto-service-lsstcam/README.md b/applications/prompt-proto-service-lsstcam/README.md index ca502f5749..9ad358da9f 100644 --- a/applications/prompt-proto-service-lsstcam/README.md +++ b/applications/prompt-proto-service-lsstcam/README.md @@ -31,8 +31,8 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.imageNotifications.topic | string | None, must be set | Topic where raw image arrival notifications appear | | prompt-proto-service.instrument.calibRepo | string | None, must be set | URI to the shared repo used for calibrations, templates, and pipeline outputs. If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. | | prompt-proto-service.instrument.name | string | `""` | The "short" name of the instrument | -| prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. 
| -| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| prompt-proto-service.instrument.pipelines.main | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run for which visits' raws. Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | +| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.preloadPadding | int | `50` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `""` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-lsstcam/values.yaml b/applications/prompt-proto-service-lsstcam/values.yaml index 3eecc4bf32..d5c1b4d8e9 100644 --- a/applications/prompt-proto-service-lsstcam/values.yaml +++ b/applications/prompt-proto-service-lsstcam/values.yaml @@ -38,11 +38,11 @@ prompt-proto-service: # -- The "short" name of the instrument name: "" pipelines: - # -- Machine-readable string describing which pipeline(s) should be run for which visits. - # Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. + # -- YAML-formatted config describing which pipeline(s) should be run for which visits' raws. + # Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. 
# @default -- None, must be set main: "" - # -- Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. + # -- YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. # @default -- None, must be set preprocessing: "" # -- Skymap to use with the instrument diff --git a/applications/prompt-proto-service-lsstcomcam/README.md b/applications/prompt-proto-service-lsstcomcam/README.md index bb28592707..c08de7a78c 100644 --- a/applications/prompt-proto-service-lsstcomcam/README.md +++ b/applications/prompt-proto-service-lsstcomcam/README.md @@ -31,8 +31,8 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.imageNotifications.topic | string | None, must be set | Topic where raw image arrival notifications appear | | prompt-proto-service.instrument.calibRepo | string | None, must be set | URI to the shared repo used for calibrations, templates, and pipeline outputs. If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. | | prompt-proto-service.instrument.name | string | `"LSSTComCam"` | The "short" name of the instrument | -| prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | -| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| prompt-proto-service.instrument.pipelines.main | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run for which visits' raws. 
Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | +| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.preloadPadding | int | `50` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"lsst_cells_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml index e8fbd39cd6..deacc0bbc8 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml @@ -11,10 +11,14 @@ prompt-proto-service: instrument: pipelines: - main: >- - (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr.yaml] - preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml] + main: |- + - survey: SURVEY + pipelines: + - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml + - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr.yaml + preprocessing: |- + - survey: SURVEY + pipelines: ['${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml'] calibRepo: s3://rubin-pp-dev-users/central_repo_2 s3: diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml index ca5eec290a..72653c9f52 100644 --- 
a/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfprod-prompt-processing.yaml @@ -19,41 +19,44 @@ prompt-proto-service: instrument: pipelines: - # BLOCK-T60 is optics alignment - # BLOCK-T75 is giant donuts - # BLOCK-T88 is optics alignment - # BLOCK-T215 is evening twilight flats - # BLOCK-T216 is morning twilight flats - # BLOCK-T219 is pretty picture - # BLOCK-T246 is instrument checkout - # BLOCK-T249 is AOS alignment - # BLOCK-T250 is TMA daytime checkout - main: >- - (survey="BLOCK-320")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/SingleFrame.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr.yaml] - (survey="BLOCK-T60")=[] - (survey="BLOCK-T75")=[] - (survey="BLOCK-T88")=[] - (survey="BLOCK-T215")=[] - (survey="BLOCK-T216")=[] - (survey="BLOCK-T219")=[] - (survey="BLOCK-T246")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml] - (survey="BLOCK-T249")=[] - (survey="BLOCK-T250")=[] - (survey="")=[] - preprocessing: >- - (survey="BLOCK-320")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml] - (survey="BLOCK-T60")=[] - (survey="BLOCK-T75")=[] - (survey="BLOCK-T88")=[] - (survey="BLOCK-T215")=[] - (survey="BLOCK-T216")=[] - (survey="BLOCK-T219")=[] - (survey="BLOCK-T246")=[] - (survey="BLOCK-T249")=[] - (survey="BLOCK-T250")=[] - (survey="")=[] + main: |- + - survey: BLOCK-320 # Science Validation + pipelines: + - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/ApPipe.yaml + - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/SingleFrame.yaml + - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr.yaml + - survey: BLOCK-T60 # optics alignment + pipelines: [] + - survey: BLOCK-T75 # giant donuts + pipelines: [] + - survey: BLOCK-T88 # optics alignment + pipelines: [] + - survey: BLOCK-T215 # twilight flats + pipelines: [] + - survey: BLOCK-T216 # twilight flats + pipelines: [] + 
- survey: BLOCK-T219 # pretty pictures + pipeliens: [] + - survey: BLOCK-T246 # instrument checkout + pipelines: ['${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Isr-cal.yaml'] + - survey: BLOCK-T249 # AOS alignment + pipelines: [] + - survey: BLOCK-T250 # TMA daytime checkout + pipelines: [] + - {survey: "", pipelines: []} + preprocessing: |- + - survey: BLOCK-320 + pipelines: ['${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCam/Preprocessing.yaml'] + - {survey: BLOCK-T60, pipelines: []} + - {survey: BLOCK-T75, pipelines: []} + - {survey: BLOCK-T88, pipelines: []} + - {survey: BLOCK-T215, pipelines: []} + - {survey: BLOCK-T216, pipelines: []} + - {survey: BLOCK-T219, pipelines: []} + - {survey: BLOCK-T246, pipelines: []} + - {survey: BLOCK-T249, pipelines: []} + - {survey: BLOCK-T250, pipelines: []} + - {survey: "", pipelines: []} calibRepo: s3://rubin-summit-users s3: diff --git a/applications/prompt-proto-service-lsstcomcam/values.yaml b/applications/prompt-proto-service-lsstcomcam/values.yaml index 13339b8fb8..4a46d6bb1a 100644 --- a/applications/prompt-proto-service-lsstcomcam/values.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values.yaml @@ -41,11 +41,11 @@ prompt-proto-service: # -- The "short" name of the instrument name: LSSTComCam pipelines: - # -- Machine-readable string describing which pipeline(s) should be run for which visits. - # Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. + # -- YAML-formatted config describing which pipeline(s) should be run for which visits' raws. + # Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. # @default -- None, must be set main: "" - # -- Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. 
+ # -- YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. # @default -- None, must be set preprocessing: "" # -- Skymap to use with the instrument diff --git a/applications/prompt-proto-service-lsstcomcamsim/README.md b/applications/prompt-proto-service-lsstcomcamsim/README.md index 2dd70b63f0..9c1fa3e437 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/README.md +++ b/applications/prompt-proto-service-lsstcomcamsim/README.md @@ -31,8 +31,8 @@ Prompt Proto Service is an event driven service for processing camera images. Th | prompt-proto-service.imageNotifications.topic | string | None, must be set | Topic where raw image arrival notifications appear | | prompt-proto-service.instrument.calibRepo | string | None, must be set | URI to the shared repo used for calibrations, templates, and pipeline outputs. If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. | | prompt-proto-service.instrument.name | string | `"LSSTComCamSim"` | The "short" name of the instrument | -| prompt-proto-service.instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits. Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | -| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| prompt-proto-service.instrument.pipelines.main | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run for which visits' raws. Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. 
| +| prompt-proto-service.instrument.pipelines.preprocessing | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. | | prompt-proto-service.instrument.preloadPadding | int | `50` | Number of arcseconds to pad the spatial region in preloading. | | prompt-proto-service.instrument.skymap | string | `"ops_rehersal_prep_2k_v1"` | Skymap to use with the instrument | | prompt-proto-service.knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). | diff --git a/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml index 17d39cf975..194d937317 100644 --- a/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values-usdfdev-prompt-processing.yaml @@ -11,11 +11,15 @@ prompt-proto-service: instrument: pipelines: - main: >- - (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/ApPipe.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/SingleFrame.yaml, - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Isr.yaml] - preprocessing: (survey="SURVEY")=[${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Preprocessing.yaml] + main: |- + - survey: SURVEY + pipelines: + - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/ApPipe.yaml + - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/SingleFrame.yaml + - ${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Isr.yaml + preprocessing: |- + - survey: SURVEY + pipelines: ['${PROMPT_PROCESSING_DIR}/pipelines/LSSTComCamSim/Preprocessing.yaml'] calibRepo: s3://rubin-pp-dev-users/central_repo_2 s3: diff --git a/applications/prompt-proto-service-lsstcomcamsim/values.yaml b/applications/prompt-proto-service-lsstcomcamsim/values.yaml index fc8dce6b9b..2b80a022a7 100644 --- 
a/applications/prompt-proto-service-lsstcomcamsim/values.yaml +++ b/applications/prompt-proto-service-lsstcomcamsim/values.yaml @@ -41,11 +41,11 @@ prompt-proto-service: # -- The "short" name of the instrument name: LSSTComCamSim pipelines: - # -- Machine-readable string describing which pipeline(s) should be run for which visits. - # Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. + # -- YAML-formatted config describing which pipeline(s) should be run for which visits' raws. + # Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. # @default -- None, must be set main: "" - # -- Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. + # -- YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. # @default -- None, must be set preprocessing: "" # -- Skymap to use with the instrument diff --git a/charts/prompt-proto-service/README.md b/charts/prompt-proto-service/README.md index 06c8d5eff9..41c08554c0 100644 --- a/charts/prompt-proto-service/README.md +++ b/charts/prompt-proto-service/README.md @@ -34,8 +34,8 @@ Event-driven processing of camera images | imagePullSecrets | list | `[]` | | | instrument.calibRepo | string | None, must be set | URI to the shared repo used for calibrations, templates, and pipeline outputs. If `registry.centralRepoFile` is set, this URI points to a local redirect instead of the central repo itself. | | instrument.name | string | None, must be set | The "short" name of the instrument | -| instrument.pipelines.main | string | None, must be set | Machine-readable string describing which pipeline(s) should be run for which visits' raws. 
Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | -| instrument.pipelines.preprocessing | string | None, must be set | Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. | +| instrument.pipelines.main | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run for which visits' raws. Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. | +| instrument.pipelines.preprocessing | string | None, must be set | YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. | | instrument.preloadPadding | int | `30` | Number of arcseconds to pad the spatial region in preloading. | | instrument.skymap | string | `""` | Skymap to use with the instrument | | knative.cpuLimit | int | `1` | The maximum cpu cores for the full pod (see `containerConcurrency`). 
| diff --git a/charts/prompt-proto-service/templates/prompt-proto-service.yaml b/charts/prompt-proto-service/templates/prompt-proto-service.yaml index aca24d55ba..23eb47fd11 100644 --- a/charts/prompt-proto-service/templates/prompt-proto-service.yaml +++ b/charts/prompt-proto-service/templates/prompt-proto-service.yaml @@ -42,9 +42,11 @@ spec: - name: RUBIN_INSTRUMENT value: {{ .Values.instrument.name }} - name: PREPROCESSING_PIPELINES_CONFIG - value: {{ .Values.instrument.pipelines.preprocessing }} + value: |- + {{- .Values.instrument.pipelines.preprocessing | nindent 12 }} - name: MAIN_PIPELINES_CONFIG - value: {{ .Values.instrument.pipelines.main }} + value: |- + {{- .Values.instrument.pipelines.main | nindent 12 }} - name: SKYMAP value: {{ .Values.instrument.skymap }} - name: PRELOAD_PADDING diff --git a/charts/prompt-proto-service/values.yaml b/charts/prompt-proto-service/values.yaml index 04c55b7472..66fd0642e3 100644 --- a/charts/prompt-proto-service/values.yaml +++ b/charts/prompt-proto-service/values.yaml @@ -40,11 +40,11 @@ instrument: # @default -- None, must be set name: "" pipelines: - # -- Machine-readable string describing which pipeline(s) should be run for which visits' raws. - # Notation is complex and still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. + # -- YAML-formatted config describing which pipeline(s) should be run for which visits' raws. + # Fields are still in flux; see [the source code](https://github.com/lsst-dm/prompt_processing/blob/main/python/activator/config.py) for examples. # @default -- None, must be set main: "" - # -- Machine-readable string describing which pipeline(s) should be run before which visits' raw arrival. + # -- YAML-formatted config describing which pipeline(s) should be run before which visits' raw arrival. 
# @default -- None, must be set preprocessing: "" # -- Skymap to use with the instrument From 6061ca5722f5e7fbfb1353e29fda101c44d00714 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Tue, 26 Nov 2024 08:31:03 -0700 Subject: [PATCH 542/567] BTS: Remove tags from love config. --- applications/love/values-base.yaml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/applications/love/values-base.yaml b/applications/love/values-base.yaml index 090346b111..5beb740aeb 100644 --- a/applications/love/values-base.yaml +++ b/applications/love/values-base.yaml @@ -23,7 +23,6 @@ love-manager: frontend: image: repository: ts-dockerhub.lsst.org/love-manager - tag: k0002 pullPolicy: Always env: SERVER_URL: base-lsp.lsst.codes @@ -63,7 +62,6 @@ love-manager: - name: general image: repository: ts-dockerhub.lsst.org/love-manager - tag: k0002 pullPolicy: Always env: LOVE_SITE: base @@ -127,7 +125,6 @@ love-manager: - name: queue image: repository: ts-dockerhub.lsst.org/love-manager - tag: k0002 pullPolicy: Always env: LOVE_SITE: base @@ -191,7 +188,6 @@ love-manager: - name: m1m3 image: repository: ts-dockerhub.lsst.org/love-manager - tag: k0002 pullPolicy: Always env: LOVE_SITE: base From 796498662913223177a35decde94132fd2352659 Mon Sep 17 00:00:00 2001 From: pav511 <38131208+pav511@users.noreply.github.com> Date: Tue, 26 Nov 2024 11:11:55 -0800 Subject: [PATCH 543/567] tobyj usdf-cm[-dev] argocd rbac --- applications/argocd/values-usdf-cm-dev.yaml | 1 + applications/argocd/values-usdf-cm.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/applications/argocd/values-usdf-cm-dev.yaml b/applications/argocd/values-usdf-cm-dev.yaml index cd292535bc..cd9f3f6702 100644 --- a/applications/argocd/values-usdf-cm-dev.yaml +++ b/applications/argocd/values-usdf-cm-dev.yaml @@ -30,6 +30,7 @@ argo-cd: g, cslater@slac.stanford.edu, role:admin g, yusra@slac.stanford.edu, role:admin g, rra@slac.stanford.edu, role:admin + g, tobyj@slac.stanford.edu, role:admin scopes: "[email]" server: diff 
--git a/applications/argocd/values-usdf-cm.yaml b/applications/argocd/values-usdf-cm.yaml index 878e4d74d4..6dd18686df 100644 --- a/applications/argocd/values-usdf-cm.yaml +++ b/applications/argocd/values-usdf-cm.yaml @@ -29,6 +29,7 @@ argo-cd: g, fritzm@slac.stanford.edu, role:admin g, cslater@slac.stanford.edu, role:admin g, yusra@slac.stanford.edu, role:admin + g, tobyj@slac.stanford.edu, role:admin scopes: "[email]" server: From f8fdfc67a8f74947fd00157ceb96f1462c002a40 Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Mon, 25 Nov 2024 13:33:49 -0800 Subject: [PATCH 544/567] Add s3proxy to usdfprod. --- applications/s3proxy/values-usdfprod.yaml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 applications/s3proxy/values-usdfprod.yaml diff --git a/applications/s3proxy/values-usdfprod.yaml b/applications/s3proxy/values-usdfprod.yaml new file mode 100644 index 0000000000..87d1d48df5 --- /dev/null +++ b/applications/s3proxy/values-usdfprod.yaml @@ -0,0 +1,13 @@ +config: + profiles: + - name: embargo + url: "https://sdfembs3.sdf.slac.stanford.edu/" + s3EndpointUrl: "https://s3dfrgw.slac.stanford.edu/" + +resources: + limits: + cpu: 500m + memory: 1Gi + requests: + cpu: 200m + memory: 100Mi From ede1cf78407b78a79bd5ec2ab80f80fc6413b42f Mon Sep 17 00:00:00 2001 From: Kian-Tat Lim Date: Mon, 25 Nov 2024 13:44:06 -0800 Subject: [PATCH 545/567] Enable s3proxy in usdfprod environment. 
--- environments/values-usdfprod.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/environments/values-usdfprod.yaml b/environments/values-usdfprod.yaml index 641d4bad06..16e5b9da08 100644 --- a/environments/values-usdfprod.yaml +++ b/environments/values-usdfprod.yaml @@ -24,6 +24,7 @@ applications: portal: true postgres: true rubintv: true + s3proxy: true sasquatch: true semaphore: true siav2: true From 374f3d9e15345cfcadd937bd8262d26f257611a1 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Wed, 20 Nov 2024 16:09:23 -0800 Subject: [PATCH 546/567] Update to Gafaelfawr 12.2.0 Update Gafaelfawr to the 12.2.0 release, which allows the new CADC authentication code to use the OpenID Connect userinfo route, hopefully allowing us to drop the old CADC-specific route. This release also fixes some performance issues and adds optional Sentry support, which will let us send traces, errors, and other telemetry to Sentry. --- applications/gafaelfawr/Chart.yaml | 2 +- applications/gafaelfawr/README.md | 1 + applications/gafaelfawr/secrets.yaml | 4 ++++ applications/gafaelfawr/templates/_helpers.tpl | 11 +++++++++++ applications/gafaelfawr/templates/cronjob-audit.yaml | 2 +- .../gafaelfawr/templates/cronjob-maintenance.yaml | 2 +- .../gafaelfawr/templates/deployment-operator.yaml | 2 +- applications/gafaelfawr/templates/deployment.yaml | 2 +- .../gafaelfawr/templates/job-schema-update.yaml | 2 +- applications/gafaelfawr/values.yaml | 5 +++++ 10 files changed, 27 insertions(+), 6 deletions(-) diff --git a/applications/gafaelfawr/Chart.yaml b/applications/gafaelfawr/Chart.yaml index c2b7e19a1e..3f21bbb591 100644 --- a/applications/gafaelfawr/Chart.yaml +++ b/applications/gafaelfawr/Chart.yaml @@ -5,7 +5,7 @@ description: "Authentication and identity system" home: "https://gafaelfawr.lsst.io/" sources: - "https://github.com/lsst-sqre/gafaelfawr" -appVersion: 12.1.1 +appVersion: 12.2.0 dependencies: - name: "redis" diff --git a/applications/gafaelfawr/README.md 
b/applications/gafaelfawr/README.md index 39c62850a4..c609f0f9e4 100644 --- a/applications/gafaelfawr/README.md +++ b/applications/gafaelfawr/README.md @@ -33,6 +33,7 @@ Authentication and identity system | config.cilogon.test | bool | `false` | Whether to use the test instance of CILogon | | config.cilogon.usernameClaim | string | `"username"` | Claim from which to get the username | | config.databaseUrl | string | None, must be set if neither `cloudsql.enabled` nor | URL for the PostgreSQL database `config.internalDatabase` are true | +| config.enableSentry | bool | `false` | Whether to send trace and telemetry information to Sentry. This traces every call and therefore should only be enabled in non-production environments. | | config.errorFooter | string | `nil` | HTML footer to add to any login error page (will be enclosed in a

tag). | | config.firestore.project | string | Firestore support is disabled | If set, assign UIDs and GIDs using Google Firestore in the given project. Cloud SQL must be enabled and the Cloud SQL service account must have read/write access to that Firestore instance. | | config.github.clientId | string | `nil` | GitHub client ID. One and only one of this, `config.cilogon.clientId`, or `config.oidc.clientId` must be set. | diff --git a/applications/gafaelfawr/secrets.yaml b/applications/gafaelfawr/secrets.yaml index d4c8227de1..5846f0d884 100644 --- a/applications/gafaelfawr/secrets.yaml +++ b/applications/gafaelfawr/secrets.yaml @@ -58,6 +58,10 @@ redis-password: deployments will then have to be restarted to pick up the new value. generate: type: password +sentry-dsn: + description: >- + DSN URL to which Sentry trace and error logging will be sent. + if: config.enableSentry session-secret: description: >- Encryption key used to encrypt the contents of Redis and the cookie data diff --git a/applications/gafaelfawr/templates/_helpers.tpl b/applications/gafaelfawr/templates/_helpers.tpl index eb32f96cd7..c20121c751 100644 --- a/applications/gafaelfawr/templates/_helpers.tpl +++ b/applications/gafaelfawr/templates/_helpers.tpl @@ -141,4 +141,15 @@ Common environment variables name: "gafaelfawr-kafka" key: "securityProtocol" {{- end }} +{{- if .Values.config.enableSentry }} +- name: SENTRY_DSN + valueFrom: + secretKeyRef: + name: "gafaelfawr" + key: "sentry-dsn" +- name: SENTRY_RELEASE + value: {{ .Chart.Name }}@{{ .Chart.AppVersion }} +- name: SENTRY_ENVIRONMENT + value: {{ .Values.global.host }} +{{- end }} {{- end }} diff --git a/applications/gafaelfawr/templates/cronjob-audit.yaml b/applications/gafaelfawr/templates/cronjob-audit.yaml index df5bbd3453..6bf0c0aa2e 100644 --- a/applications/gafaelfawr/templates/cronjob-audit.yaml +++ b/applications/gafaelfawr/templates/cronjob-audit.yaml @@ -37,7 +37,7 @@ spec: - "gafaelfawr" - "audit" env: - {{- include 
"gafaelfawr.envVars" (dict "Release" .Release "Values" .Values) | nindent 16 }} + {{- include "gafaelfawr.envVars" (dict "Chart" .Chart "Release" .Release "Values" .Values) | nindent 16 }} {{- if .Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" diff --git a/applications/gafaelfawr/templates/cronjob-maintenance.yaml b/applications/gafaelfawr/templates/cronjob-maintenance.yaml index 7108a75266..d842a29cd0 100644 --- a/applications/gafaelfawr/templates/cronjob-maintenance.yaml +++ b/applications/gafaelfawr/templates/cronjob-maintenance.yaml @@ -36,7 +36,7 @@ spec: - "gafaelfawr" - "maintenance" env: - {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values) | nindent 16 }} + {{- include "gafaelfawr.envVars" (dict "Chart" .Chart "Release" .Release "Values" .Values) | nindent 16 }} {{- if .Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" diff --git a/applications/gafaelfawr/templates/deployment-operator.yaml b/applications/gafaelfawr/templates/deployment-operator.yaml index 821ca0fabc..977fefb83c 100644 --- a/applications/gafaelfawr/templates/deployment-operator.yaml +++ b/applications/gafaelfawr/templates/deployment-operator.yaml @@ -42,7 +42,7 @@ spec: - "-m" - "gafaelfawr.operator" env: - {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values) | nindent 12 }} + {{- include "gafaelfawr.envVars" (dict "Chart" .Chart "Release" .Release "Values" .Values) | nindent 12 }} {{- if .Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" diff --git a/applications/gafaelfawr/templates/deployment.yaml b/applications/gafaelfawr/templates/deployment.yaml index 22dc810318..bf8c7be68f 100644 --- a/applications/gafaelfawr/templates/deployment.yaml +++ b/applications/gafaelfawr/templates/deployment.yaml @@ -54,7 +54,7 @@ spec: {{- end }} - name: "gafaelfawr" env: - {{- include 
"gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "sidecar" true) | nindent 12 }} + {{- include "gafaelfawr.envVars" (dict "Chart" .Chart "Release" .Release "Values" .Values "sidecar" true) | nindent 12 }} {{- if .Values.config.metrics.enabled }} - name: "KAFKA_CLIENT_CERT_PATH" value: "/etc/gafaelfawr-kafka/user.crt" diff --git a/applications/gafaelfawr/templates/job-schema-update.yaml b/applications/gafaelfawr/templates/job-schema-update.yaml index 65bc2b52a0..b945525f3d 100644 --- a/applications/gafaelfawr/templates/job-schema-update.yaml +++ b/applications/gafaelfawr/templates/job-schema-update.yaml @@ -79,7 +79,7 @@ spec: gafaelfawr update-schema touch /lifecycle/main-terminated env: - {{- include "gafaelfawr.envVars" (dict "Release" .Release "Values" .Values "sidecar" true) | nindent 12 }} + {{- include "gafaelfawr.envVars" (dict "Chart" .Chart "Release" .Release "Values" .Values "sidecar" true) | nindent 12 }} image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" imagePullPolicy: {{ .Values.image.pullPolicy | quote }} {{- with .Values.resources }} diff --git a/applications/gafaelfawr/values.yaml b/applications/gafaelfawr/values.yaml index d51d343ee3..284f2a7fab 100644 --- a/applications/gafaelfawr/values.yaml +++ b/applications/gafaelfawr/values.yaml @@ -51,6 +51,11 @@ config: # `config.internalDatabase` are true databaseUrl: null + # -- Whether to send trace and telemetry information to Sentry. This traces + # every call and therefore should only be enabled in non-production + # environments. + enableSentry: false + # -- HTML footer to add to any login error page (will be enclosed in a

# tag). errorFooter: null From d45505b6447d190937b665ac34e533e8868db0a3 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Tue, 26 Nov 2024 14:09:03 -0800 Subject: [PATCH 547/567] Move Prompt Processing debugging flags into a debug block. This change keeps temporary "hack" settings together. --- .../values-usdfdev-prompt-processing.yaml | 5 +++-- .../values-usdfdev-prompt-processing.yaml | 5 +++-- .../values-usdfdev-prompt-processing.yaml | 5 +++-- charts/prompt-proto-service/README.md | 2 +- .../templates/prompt-proto-service.yaml | 2 +- charts/prompt-proto-service/values.yaml | 7 ++++--- 6 files changed, 15 insertions(+), 11 deletions(-) diff --git a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml index d7faeaf5c0..66dc8938bc 100644 --- a/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-hsc/values-usdfdev-prompt-processing.yaml @@ -39,7 +39,8 @@ prompt-proto-service: endpointUrl: https://usdf-rsp-dev.slac.stanford.edu/sasquatch-rest-proxy auth_env: false - # A cache efficiency workaround breaks on RC2 tests; see DM-43205. - cacheCalibs: false + debug: + # A cache efficiency workaround breaks on RC2 tests; see DM-43205. + cacheCalibs: false fullnameOverride: "prompt-proto-service-hsc" diff --git a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml index fbc8b126c0..fd5ab7ce4f 100644 --- a/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-latiss/values-usdfdev-prompt-processing.yaml @@ -40,7 +40,8 @@ prompt-proto-service: endpointUrl: https://usdf-rsp-dev.slac.stanford.edu/sasquatch-rest-proxy auth_env: false - # A cache efficiency workaround breaks when mixing observing dates; see DM-43205, DM-43913. 
- cacheCalibs: false + debug: + # A cache efficiency workaround breaks when mixing observing dates; see DM-43205, DM-43913. + cacheCalibs: false fullnameOverride: "prompt-proto-service-latiss" diff --git a/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml b/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml index deacc0bbc8..d7b6b0847e 100644 --- a/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml +++ b/applications/prompt-proto-service-lsstcomcam/values-usdfdev-prompt-processing.yaml @@ -39,7 +39,8 @@ prompt-proto-service: endpointUrl: https://usdf-rsp-dev.slac.stanford.edu/sasquatch-rest-proxy auth_env: false - # A cache efficiency workaround breaks when mixing observing dates; see DM-43205, DM-43913. - cacheCalibs: false + debug: + # A cache efficiency workaround breaks when mixing observing dates; see DM-43205, DM-43913. + cacheCalibs: false fullnameOverride: "prompt-proto-service-lsstcomcam" diff --git a/charts/prompt-proto-service/README.md b/charts/prompt-proto-service/README.md index 41c08554c0..c50a573380 100644 --- a/charts/prompt-proto-service/README.md +++ b/charts/prompt-proto-service/README.md @@ -22,8 +22,8 @@ Event-driven processing of camera images | cache.maxFilters | int | `20` | The maximum number of datasets of a given type the service might load if the filter is unknown. Should be greater than or equal to the number of filters that have e.g. flats or transmission curves. | | cache.patchesPerImage | int | `4` | A factor by which to multiply `baseSize` for templates and other patch-based datasets. | | cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. | -| cacheCalibs | bool | `true` | Whether or not calibs should be cached between runs of a pod. This is a temporary flag that should only be unset in specific circumstances, and only in the development environment. 
| | containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | +| debug.cacheCalibs | bool | `true` | Whether or not calibs should be cached between runs of a pod. This is a temporary flag that should only be unset in specific circumstances, and only in the development environment. | | fullnameOverride | string | `"prompt-proto-service"` | Override the full name for resources (includes the release name) | | image.pullPolicy | string | `IfNotPresent` in prod, `Always` in dev | Pull policy for the PP image | | image.repository | string | `"ghcr.io/lsst-dm/prompt-service"` | Image to use in the PP deployment | diff --git a/charts/prompt-proto-service/templates/prompt-proto-service.yaml b/charts/prompt-proto-service/templates/prompt-proto-service.yaml index 23eb47fd11..18eb30c683 100644 --- a/charts/prompt-proto-service/templates/prompt-proto-service.yaml +++ b/charts/prompt-proto-service/templates/prompt-proto-service.yaml @@ -122,7 +122,7 @@ spec: - name: FILTERS_WITH_CALIBS value: {{ .Values.cache.maxFilters | toString | quote }} - name: DEBUG_CACHE_CALIBS - value: {{ if .Values.cacheCalibs }}'1'{{ else }}'0'{{ end }} + value: {{ if .Values.debug.cacheCalibs }}'1'{{ else }}'0'{{ end }} volumeMounts: - mountPath: /tmp-butler name: ephemeral diff --git a/charts/prompt-proto-service/values.yaml b/charts/prompt-proto-service/values.yaml index 66fd0642e3..d32ef08132 100644 --- a/charts/prompt-proto-service/values.yaml +++ b/charts/prompt-proto-service/values.yaml @@ -168,9 +168,10 @@ fullnameOverride: "prompt-proto-service" # -- The number of Knative requests that can be handled simultaneously by one container containerConcurrency: 1 -# -- Whether or not calibs should be cached between runs of a pod. -# This is a temporary flag that should only be unset in specific circumstances, and only in the development environment. 
-cacheCalibs: true +debug: + # -- Whether or not calibs should be cached between runs of a pod. + # This is a temporary flag that should only be unset in specific circumstances, and only in the development environment. + cacheCalibs: true # -- Kubernetes YAML configs for extra container volume(s). # Any volumes required by other config options are automatically handled by the Helm chart. From 93ac0ead5a054150d45fdcc978736d4490e4ba26 Mon Sep 17 00:00:00 2001 From: Krzysztof Findeisen Date: Tue, 26 Nov 2024 14:14:02 -0800 Subject: [PATCH 548/567] Add Prompt Processing config for turning off repo export. This flag is used for tests where we want to avoid conflicting updates to the central repo. --- charts/prompt-proto-service/README.md | 1 + .../prompt-proto-service/templates/prompt-proto-service.yaml | 2 ++ charts/prompt-proto-service/values.yaml | 3 +++ 3 files changed, 6 insertions(+) diff --git a/charts/prompt-proto-service/README.md b/charts/prompt-proto-service/README.md index c50a573380..f2d8e7f96d 100644 --- a/charts/prompt-proto-service/README.md +++ b/charts/prompt-proto-service/README.md @@ -24,6 +24,7 @@ Event-driven processing of camera images | cache.refcatsPerImage | int | `4` | A factor by which to multiply `baseSize` for refcat datasets. | | containerConcurrency | int | `1` | The number of Knative requests that can be handled simultaneously by one container | | debug.cacheCalibs | bool | `true` | Whether or not calibs should be cached between runs of a pod. This is a temporary flag that should only be unset in specific circumstances, and only in the development environment. | +| debug.exportOutputs | bool | `true` | Whether or not pipeline outputs should be exported to the central repo. This flag does not turn off APDB writes or alert generation; those must be handled at the pipeline level or by setting up an alternative destination. 
| | fullnameOverride | string | `"prompt-proto-service"` | Override the full name for resources (includes the release name) | | image.pullPolicy | string | `IfNotPresent` in prod, `Always` in dev | Pull policy for the PP image | | image.repository | string | `"ghcr.io/lsst-dm/prompt-service"` | Image to use in the PP deployment | diff --git a/charts/prompt-proto-service/templates/prompt-proto-service.yaml b/charts/prompt-proto-service/templates/prompt-proto-service.yaml index 18eb30c683..ac77260360 100644 --- a/charts/prompt-proto-service/templates/prompt-proto-service.yaml +++ b/charts/prompt-proto-service/templates/prompt-proto-service.yaml @@ -123,6 +123,8 @@ spec: value: {{ .Values.cache.maxFilters | toString | quote }} - name: DEBUG_CACHE_CALIBS value: {{ if .Values.debug.cacheCalibs }}'1'{{ else }}'0'{{ end }} + - name: DEBUG_EXPORT_OUTPUTS + value: {{ if .Values.debug.exportOutputs }}'1'{{ else }}'0'{{ end }} volumeMounts: - mountPath: /tmp-butler name: ephemeral diff --git a/charts/prompt-proto-service/values.yaml b/charts/prompt-proto-service/values.yaml index d32ef08132..45eea955ed 100644 --- a/charts/prompt-proto-service/values.yaml +++ b/charts/prompt-proto-service/values.yaml @@ -172,6 +172,9 @@ debug: # -- Whether or not calibs should be cached between runs of a pod. # This is a temporary flag that should only be unset in specific circumstances, and only in the development environment. cacheCalibs: true + # -- Whether or not pipeline outputs should be exported to the central repo. + # This flag does not turn off APDB writes or alert generation; those must be handled at the pipeline level or by setting up an alternative destination. + exportOutputs: true # -- Kubernetes YAML configs for extra container volume(s). # Any volumes required by other config options are automatically handled by the Helm chart. 
From 10d8ce5f31711d52e329e0dc0d3244ee0fbf9d65 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Mon, 25 Nov 2024 16:54:08 -0700 Subject: [PATCH 549/567] Add resources to hexapod-sim chart. --- applications/auxtel/README.md | 4 ++++ applications/auxtel/charts/hexapod-sim/README.md | 4 ++++ .../charts/hexapod-sim/templates/deployment.yaml | 16 ++++++++++++++++ .../auxtel/charts/hexapod-sim/values.yaml | 8 ++++++++ applications/auxtel/values-base.yaml | 7 +++++++ 5 files changed, 39 insertions(+) diff --git a/applications/auxtel/README.md b/applications/auxtel/README.md index 776d27f8ae..ee0124b771 100644 --- a/applications/auxtel/README.md +++ b/applications/auxtel/README.md @@ -28,8 +28,12 @@ Deployment for the Auxiliary Telescope CSCs | atspectrograph-sim.enabled | bool | `false` | Enable the ATSpectograph simulator CSC | | atspectrograph.enabled | bool | `false` | Enable the ATSpectrograph CSC | | hexapod-sim.enabled | bool | `false` | Enable the hexapod controller simulator | +| hexapod-sim.affinity | object | `{}` | This specifies the scheduling constraints of the pod | | hexapod-sim.image | object | `{"pullPolicy":"Always","repository":"ts-dockerhub.lsst.org/hexapod_simulator","tag":"latest"}` | This section holds the configuration of the container image | | hexapod-sim.image.pullPolicy | string | `"Always"` | The policy to apply when pulling an image for deployment | | hexapod-sim.image.repository | string | `"ts-dockerhub.lsst.org/hexapod_simulator"` | The Docker registry name of the container image | | hexapod-sim.image.tag | string | `"latest"` | The tag of the container image | | hexapod-sim.namespace | string | `"auxtel"` | This is the namespace in which the hexapod controller simulator will be placed | +| hexapod-sim.nodeSelector | object | `{}` | This allows the specification of using specific nodes to run the pod | +| hexapod-sim.resources | object | `{}` | This allows the specification of resources (CPU, memory) requires to run the container | +| 
hexapod-sim.tolerations | list | `[]` | This specifies the tolerations of the pod for any system taints | diff --git a/applications/auxtel/charts/hexapod-sim/README.md b/applications/auxtel/charts/hexapod-sim/README.md index b5a01ae11b..07accd9bd1 100644 --- a/applications/auxtel/charts/hexapod-sim/README.md +++ b/applications/auxtel/charts/hexapod-sim/README.md @@ -6,8 +6,12 @@ Chart for the hexapod simulator that supports the ATHexapod | Key | Type | Default | Description | |-----|------|---------|-------------| +| affinity | object | `{}` | This specifies the scheduling constraints of the pod | | image | object | `{"pullPolicy":"Always","repository":"ts-dockerhub.lsst.org/hexapod_simulator","tag":"latest"}` | This section holds the configuration of the container image | | image.pullPolicy | string | `"Always"` | The policy to apply when pulling an image for deployment | | image.repository | string | `"ts-dockerhub.lsst.org/hexapod_simulator"` | The Docker registry name of the container image | | image.tag | string | `"latest"` | The tag of the container image | | namespace | string | `"auxtel"` | This is the namespace in which the hexapod controller simulator will be placed | +| nodeSelector | object | `{}` | This allows the specification of using specific nodes to run the pod | +| resources | object | `{}` | This allows the specification of resources (CPU, memory) requires to run the container | +| tolerations | list | `[]` | This specifies the tolerations of the pod for any system taints | diff --git a/applications/auxtel/charts/hexapod-sim/templates/deployment.yaml b/applications/auxtel/charts/hexapod-sim/templates/deployment.yaml index 5b874eed64..b03ba53a04 100644 --- a/applications/auxtel/charts/hexapod-sim/templates/deployment.yaml +++ b/applications/auxtel/charts/hexapod-sim/templates/deployment.yaml @@ -23,5 +23,21 @@ spec: imagePullPolicy: {{ .Values.image.pullPolicy }} stdin: true tty: true + {{- with .Values.resources }} + resources: + {{- toYaml . 
| nindent 12 }} + {{- end }} imagePullSecrets: - name: pull-secret + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} diff --git a/applications/auxtel/charts/hexapod-sim/values.yaml b/applications/auxtel/charts/hexapod-sim/values.yaml index e3daccc617..70ed9d97e1 100644 --- a/applications/auxtel/charts/hexapod-sim/values.yaml +++ b/applications/auxtel/charts/hexapod-sim/values.yaml @@ -8,3 +8,11 @@ image: tag: latest # -- The policy to apply when pulling an image for deployment pullPolicy: Always +# -- This allows the specification of resources (CPU, memory) requires to run the container +resources: {} +# -- This allows the specification of using specific nodes to run the pod +nodeSelector: {} +# -- This specifies the tolerations of the pod for any system taints +tolerations: [] +# -- This specifies the scheduling constraints of the pod +affinity: {} diff --git a/applications/auxtel/values-base.yaml b/applications/auxtel/values-base.yaml index c7350107ab..07cd34b437 100644 --- a/applications/auxtel/values-base.yaml +++ b/applications/auxtel/values-base.yaml @@ -144,3 +144,10 @@ atspectrograph-sim: hexapod-sim: enabled: true + resources: + limits: + cpu: 100m + memory: 500Mi + requests: + cpu: 20m + memory: 200Mi From 4008e23a815a0bedd86f7053af57379e0e25bb21 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Wed, 27 Nov 2024 08:38:32 -0700 Subject: [PATCH 550/567] Bump resource requests on M1M3 LOVE producer. 
--- applications/love/values-base.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/love/values-base.yaml b/applications/love/values-base.yaml index 5beb740aeb..97398a15ff 100644 --- a/applications/love/values-base.yaml +++ b/applications/love/values-base.yaml @@ -507,10 +507,10 @@ love-producer: WEBSOCKET_HOST: love-nginx-service/love/manager/m1m3/ws/subscription resources: requests: - cpu: 10m - memory: 200Mi - limits: cpu: 100m + memory: 220Mi + limits: + cpu: 500m memory: 600Mi - name: mtm2 csc: MTM2:0 --log-level 10 From a849d04d7c87c90466925dcf52f39ba07ad1ab98 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Wed, 27 Nov 2024 08:59:11 -0700 Subject: [PATCH 551/567] Add simulate flag to ATMCS and ATPneumatics. --- applications/auxtel/values-base.yaml | 4 ++++ applications/auxtel/values-tucson-teststand.yaml | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/applications/auxtel/values-base.yaml b/applications/auxtel/values-base.yaml index 07cd34b437..de3ccbcd9d 100644 --- a/applications/auxtel/values-base.yaml +++ b/applications/auxtel/values-base.yaml @@ -45,6 +45,8 @@ atmcs-sim: image: repository: ts-dockerhub.lsst.org/atmcs_sim pullPolicy: Always + env: + RUN_ARG: --simulate atoods: image: @@ -126,6 +128,8 @@ atpneumatics-sim: image: repository: ts-dockerhub.lsst.org/at_pneumatics_sim pullPolicy: Always + env: + RUN_ARG: --simulate atptg: image: diff --git a/applications/auxtel/values-tucson-teststand.yaml b/applications/auxtel/values-tucson-teststand.yaml index da292e8617..2f11bee094 100644 --- a/applications/auxtel/values-tucson-teststand.yaml +++ b/applications/auxtel/values-tucson-teststand.yaml @@ -45,6 +45,8 @@ atmcs-sim: image: repository: ts-dockerhub.lsst.org/atmcs_sim pullPolicy: Always + env: + RUN_ARG: --simulate atoods: image: @@ -126,6 +128,8 @@ atpneumatics-sim: image: repository: ts-dockerhub.lsst.org/at_pneumatics_sim pullPolicy: Always + env: + RUN_ARG: --simulate atptg: image: From 
5466fab191f542f5ebad629ba10bd52593c5cbf0 Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Wed, 27 Nov 2024 10:22:34 -0700 Subject: [PATCH 552/567] Handle ESS reindexing changes. --- applications/eas/Chart.yaml | 70 ++++++++------ applications/eas/README.md | 18 ++-- applications/eas/values-base.yaml | 62 ++++++------- applications/eas/values-tucson-teststand.yaml | 62 ++++++------- applications/eas/values.yaml | 64 +++++++------ applications/love/values-base.yaml | 27 +++--- .../love/values-tucson-teststand.yaml | 92 +++++++++++++++---- 7 files changed, 237 insertions(+), 158 deletions(-) diff --git a/applications/eas/Chart.yaml b/applications/eas/Chart.yaml index 8b77c6ca92..cb8006fe6b 100644 --- a/applications/eas/Chart.yaml +++ b/applications/eas/Chart.yaml @@ -47,24 +47,34 @@ dependencies: condition: auxtel-ess204-sim.enabled repository: file://../../charts/csc - name: csc - alias: auxtel-ess205 + alias: calibhill-ess301 version: 1.0.0 - condition: auxtel-ess205.enabled + condition: calibhill-ess301.enabled repository: file://../../charts/csc - name: csc - alias: auxtel-ess205-sim + alias: calibhill-ess301-sim version: 1.0.0 - condition: auxtel-ess205-sim.enabled + condition: calibhill-ess301-sim.enabled repository: file://../../charts/csc - name: csc - alias: calibhill-ess301 + alias: camera-ess111 version: 1.0.0 - condition: calibhill-ess301.enabled + condition: camera-ess111.enabled repository: file://../../charts/csc - name: csc - alias: calibhill-ess301-sim + alias: camera-ess111-sim version: 1.0.0 - condition: calibhill-ess301-sim.enabled + condition: camera-ess111-sim.enabled + repository: file://../../charts/csc +- name: csc + alias: cleanroom-ess109 + version: 1.0.0 + condition: cleanroom-ess109.enabled + repository: file://../../charts/csc +- name: csc + alias: cleanroom-ess109-sim + version: 1.0.0 + condition: cleanroom-ess109-sim.enabled repository: file://../../charts/csc - name: csc alias: dimm1 @@ -117,44 +127,34 @@ dependencies: condition: 
epm1-sim.enabled repository: file://../../charts/csc - name: csc - alias: m2-ess106 - version: 1.0.0 - condition: m2-ess106.enabled - repository: file://../../charts/csc -- name: csc - alias: m2-ess106-sim + alias: m1m3-ess113 version: 1.0.0 - condition: m2-ess106-sim.enabled + condition: m1m3-ess113.enabled repository: file://../../charts/csc - name: csc - alias: mtdome-ess101 + alias: m1m3-ess113-sim version: 1.0.0 - condition: mtdome-ess101.enabled + condition: m1m3-ess113-sim.enabled repository: file://../../charts/csc - name: csc - alias: mtdome-ess101-sim - version: 1.0.0 - condition: mtdome-ess101-sim.enabled - repository: file://../../charts/csc -- name: csc - alias: mtdome-ess102 + alias: m2-ess106 version: 1.0.0 - condition: mtdome-ess102.enabled + condition: m2-ess106.enabled repository: file://../../charts/csc - name: csc - alias: mtdome-ess102-sim + alias: m2-ess106-sim version: 1.0.0 - condition: mtdome-ess102-sim.enabled + condition: m2-ess106-sim.enabled repository: file://../../charts/csc - name: csc - alias: mtdome-ess103 + alias: m2-ess112 version: 1.0.0 - condition: mtdome-ess103.enabled + condition: m2-ess112.enabled repository: file://../../charts/csc - name: csc - alias: mtdome-ess103-sim + alias: m2-ess112-sim version: 1.0.0 - condition: mtdome-ess103-sim.enabled + condition: m2-ess112-sim.enabled repository: file://../../charts/csc - name: csc alias: mtdome-ess107 @@ -206,6 +206,16 @@ dependencies: version: 1.0.0 condition: tma-ess105-sim.enabled repository: file://../../charts/csc +- name: csc + alias: tma-ess110 + version: 1.0.0 + condition: tma-ess110.enabled + repository: file://../../charts/csc +- name: csc + alias: tma-ess110-sim + version: 1.0.0 + condition: tma-ess110-sim.enabled + repository: file://../../charts/csc - name: csc alias: weatherforecast version: 1.0.0 diff --git a/applications/eas/README.md b/applications/eas/README.md index e03edc0f9f..0523a891d1 100644 --- a/applications/eas/README.md +++ 
b/applications/eas/README.md @@ -14,10 +14,12 @@ Deployment for the Environmental Awareness Systems CSCs | auxtel-ess203.enabled | bool | `false` | Enable the ESS:203 CSC | | auxtel-ess204-sim.enabled | bool | `false` | Enable the ESS:204 simulator CSC | | auxtel-ess204.enabled | bool | `false` | Enable the ESS:204 CSC | -| auxtel-ess205-sim.enabled | bool | `false` | Enable the ESS:205 simulator CSC | -| auxtel-ess205.enabled | bool | `false` | Enable the ESS:205 CSC | | calibhill-ess301-sim.enabled | bool | `false` | Enable the ESS:301 simulator CSC | | calibhill-ess301.enabled | bool | `false` | Enable the ESS:301 CSC | +| camera-ess111-sim.enabled | bool | `false` | Enable the ESS:111 simulator CSC | +| camera-ess111.enabled | bool | `false` | Enable the ESS:111 CSC | +| cleanroom-ess109-sim.enabled | bool | `false` | Enable the ESS:109 simulator CSC | +| cleanroom-ess109.enabled | bool | `false` | Enable the ESS:109 CSC | | dimm1-sim.enabled | bool | `false` | Enable the DIMM:1 simulator CSC | | dimm1.enabled | bool | `false` | Enable the DIMM:1 CSC | | dimm2-sim.enabled | bool | `false` | Enable the DIMM:2 simulator CSC | @@ -39,14 +41,12 @@ Deployment for the Environmental Awareness Systems CSCs | global.controlSystem.topicName | string | Set by ArgoCD | Topic name tag for the control system deployment | | global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | +| m1m3-ess113-sim.enabled | bool | `false` | Enable the ESS:113 simulator CSC | +| m1m3-ess113.enabled | bool | `false` | Enable the ESS:113 CSC | | m2-ess106-sim.enabled | bool | `false` | Enable the ESS:106 simulator CSC | | m2-ess106.enabled | bool | `false` | Enable the ESS:106 CSC | -| mtdome-ess101-sim.enabled | bool | `false` | Enable the ESS:101 simulator CSC | -| mtdome-ess101.enabled | bool | `false` | Enable the ESS:101 CSC | -| mtdome-ess102-sim.enabled | bool | `false` | Enable the ESS:102 
simulator CSC | -| mtdome-ess102.enabled | bool | `false` | Enable the ESS:102 CSC | -| mtdome-ess103-sim.enabled | bool | `false` | Enable the ESS:103 simulator CSC | -| mtdome-ess103.enabled | bool | `false` | Enable the ESS:103 CSC | +| m2-ess112-sim.enabled | bool | `false` | Enable the ESS:112 simulator CSC | +| m2-ess112.enabled | bool | `false` | Enable the ESS:112 CSC | | mtdome-ess107-sim.enabled | bool | `false` | Enable the ESS:107 simulator CSC | | mtdome-ess107.enabled | bool | `false` | Enable the ESS:107 CSC | | mtdome-ess108-sim.enabled | bool | `false` | Enable the ESS:108 simulator CSC | @@ -57,3 +57,5 @@ Deployment for the Environmental Awareness Systems CSCs | tma-ess104.enabled | bool | `false` | Enable the ESS:104 CSC | | tma-ess105-sim.enabled | bool | `false` | Enable the ESS:105 simulator CSC | | tma-ess105.enabled | bool | `false` | Enable the ESS:105 CSC | +| tma-ess110-sim.enabled | bool | `false` | Enable the ESS:110 simulator CSC | +| tma-ess110.enabled | bool | `false` | Enable the ESS:110 CSC | diff --git a/applications/eas/values-base.yaml b/applications/eas/values-base.yaml index 5a3a9c3a69..38497e55c6 100644 --- a/applications/eas/values-base.yaml +++ b/applications/eas/values-base.yaml @@ -62,14 +62,14 @@ auxtel-ess204-sim: cpu: 190m memory: 900Mi -auxtel-ess205-sim: +calibhill-ess301-sim: enabled: true - classifier: ess205 + classifier: ess301 image: repository: ts-dockerhub.lsst.org/ess pullPolicy: Always env: - RUN_ARG: 205 --simulate + RUN_ARG: 301 --simulate resources: requests: cpu: 19m @@ -78,14 +78,14 @@ auxtel-ess205-sim: cpu: 190m memory: 900Mi -calibhill-ess301-sim: +camera-ess111-sim: enabled: true - classifier: ess301 + classifier: ess111 image: repository: ts-dockerhub.lsst.org/ess pullPolicy: Always env: - RUN_ARG: 301 --simulate + RUN_ARG: 111 --simulate resources: requests: cpu: 19m @@ -172,30 +172,14 @@ epm1-sim: cpu: 190m memory: 900Mi -m2-ess106-sim: - enabled: true - classifier: ess106 - image: - repository: 
ts-dockerhub.lsst.org/ess - pullPolicy: Always - env: - RUN_ARG: 106 --simulate - resources: - requests: - cpu: 19m - memory: 90Mi - limits: - cpu: 190m - memory: 900Mi - -mtdome-ess101-sim: +m1m3-ess113-sim: enabled: true - classifier: ess101 + classifier: ess113 image: repository: ts-dockerhub.lsst.org/ess pullPolicy: Always env: - RUN_ARG: 101 --simulate + RUN_ARG: 113 --simulate resources: requests: cpu: 19m @@ -204,14 +188,14 @@ mtdome-ess101-sim: cpu: 190m memory: 900Mi -mtdome-ess102-sim: +m2-ess106-sim: enabled: true - classifier: ess102 + classifier: ess106 image: repository: ts-dockerhub.lsst.org/ess pullPolicy: Always env: - RUN_ARG: 102 --simulate + RUN_ARG: 106 --simulate resources: requests: cpu: 19m @@ -220,14 +204,14 @@ mtdome-ess102-sim: cpu: 190m memory: 900Mi -mtdome-ess103-sim: +m2-ess112-sim: enabled: true - classifier: ess103 + classifier: ess112 image: repository: ts-dockerhub.lsst.org/ess pullPolicy: Always env: - RUN_ARG: 103 --simulate + RUN_ARG: 112 --simulate resources: requests: cpu: 19m @@ -316,6 +300,22 @@ tma-ess105-sim: cpu: 190m memory: 900Mi +tma-ess110-sim: + enabled: true + classifier: ess110 + image: + repository: ts-dockerhub.lsst.org/ess + pullPolicy: Always + env: + RUN_ARG: 110 --simulate + resources: + requests: + cpu: 19m + memory: 90Mi + limits: + cpu: 190m + memory: 900Mi + weatherforecast: image: repository: ts-dockerhub.lsst.org/weatherforecast diff --git a/applications/eas/values-tucson-teststand.yaml b/applications/eas/values-tucson-teststand.yaml index 5a3a9c3a69..38497e55c6 100644 --- a/applications/eas/values-tucson-teststand.yaml +++ b/applications/eas/values-tucson-teststand.yaml @@ -62,14 +62,14 @@ auxtel-ess204-sim: cpu: 190m memory: 900Mi -auxtel-ess205-sim: +calibhill-ess301-sim: enabled: true - classifier: ess205 + classifier: ess301 image: repository: ts-dockerhub.lsst.org/ess pullPolicy: Always env: - RUN_ARG: 205 --simulate + RUN_ARG: 301 --simulate resources: requests: cpu: 19m @@ -78,14 +78,14 @@ 
auxtel-ess205-sim: cpu: 190m memory: 900Mi -calibhill-ess301-sim: +camera-ess111-sim: enabled: true - classifier: ess301 + classifier: ess111 image: repository: ts-dockerhub.lsst.org/ess pullPolicy: Always env: - RUN_ARG: 301 --simulate + RUN_ARG: 111 --simulate resources: requests: cpu: 19m @@ -172,30 +172,14 @@ epm1-sim: cpu: 190m memory: 900Mi -m2-ess106-sim: - enabled: true - classifier: ess106 - image: - repository: ts-dockerhub.lsst.org/ess - pullPolicy: Always - env: - RUN_ARG: 106 --simulate - resources: - requests: - cpu: 19m - memory: 90Mi - limits: - cpu: 190m - memory: 900Mi - -mtdome-ess101-sim: +m1m3-ess113-sim: enabled: true - classifier: ess101 + classifier: ess113 image: repository: ts-dockerhub.lsst.org/ess pullPolicy: Always env: - RUN_ARG: 101 --simulate + RUN_ARG: 113 --simulate resources: requests: cpu: 19m @@ -204,14 +188,14 @@ mtdome-ess101-sim: cpu: 190m memory: 900Mi -mtdome-ess102-sim: +m2-ess106-sim: enabled: true - classifier: ess102 + classifier: ess106 image: repository: ts-dockerhub.lsst.org/ess pullPolicy: Always env: - RUN_ARG: 102 --simulate + RUN_ARG: 106 --simulate resources: requests: cpu: 19m @@ -220,14 +204,14 @@ mtdome-ess102-sim: cpu: 190m memory: 900Mi -mtdome-ess103-sim: +m2-ess112-sim: enabled: true - classifier: ess103 + classifier: ess112 image: repository: ts-dockerhub.lsst.org/ess pullPolicy: Always env: - RUN_ARG: 103 --simulate + RUN_ARG: 112 --simulate resources: requests: cpu: 19m @@ -316,6 +300,22 @@ tma-ess105-sim: cpu: 190m memory: 900Mi +tma-ess110-sim: + enabled: true + classifier: ess110 + image: + repository: ts-dockerhub.lsst.org/ess + pullPolicy: Always + env: + RUN_ARG: 110 --simulate + resources: + requests: + cpu: 19m + memory: 90Mi + limits: + cpu: 190m + memory: 900Mi + weatherforecast: image: repository: ts-dockerhub.lsst.org/weatherforecast diff --git a/applications/eas/values.yaml b/applications/eas/values.yaml index f46c764372..a05b191fd9 100644 --- a/applications/eas/values.yaml +++ 
b/applications/eas/values.yaml @@ -30,14 +30,6 @@ auxtel-ess204-sim: # -- Enable the ESS:204 simulator CSC enabled: false -auxtel-ess205: - # -- Enable the ESS:205 CSC - enabled: false - -auxtel-ess205-sim: - # -- Enable the ESS:205 simulator CSC - enabled: false - calibhill-ess301: # -- Enable the ESS:301 CSC enabled: false @@ -46,6 +38,22 @@ calibhill-ess301-sim: # -- Enable the ESS:301 simulator CSC enabled: false +camera-ess111: + # -- Enable the ESS:111 CSC + enabled: false + +camera-ess111-sim: + # -- Enable the ESS:111 simulator CSC + enabled: false + +cleanroom-ess109: + # -- Enable the ESS:109 CSC + enabled: false + +cleanroom-ess109-sim: + # -- Enable the ESS:109 simulator CSC + enabled: false + dimm1: # -- Enable the DIMM:1 CSC enabled: false @@ -86,36 +94,28 @@ epm1-sim: # -- Enable the EPM:1 simulator CSC enabled: false -m2-ess106: - # -- Enable the ESS:106 CSC - enabled: false - -m2-ess106-sim: - # -- Enable the ESS:106 simulator CSC +m1m3-ess113: + # -- Enable the ESS:113 CSC enabled: false -mtdome-ess101: - # -- Enable the ESS:101 CSC +m1m3-ess113-sim: + # -- Enable the ESS:113 simulator CSC enabled: false -mtdome-ess101-sim: - # -- Enable the ESS:101 simulator CSC - enabled: false - -mtdome-ess102: - # -- Enable the ESS:102 CSC +m2-ess106: + # -- Enable the ESS:106 CSC enabled: false -mtdome-ess102-sim: - # -- Enable the ESS:102 simulator CSC +m2-ess106-sim: + # -- Enable the ESS:106 simulator CSC enabled: false -mtdome-ess103: - # -- Enable the ESS:103 CSC +m2-ess112: + # -- Enable the ESS:112 CSC enabled: false -mtdome-ess103-sim: - # -- Enable the ESS:103 simulator CSC +m2-ess112-sim: + # -- Enable the ESS:112 simulator CSC enabled: false mtdome-ess107: @@ -158,6 +158,14 @@ tma-ess105-sim: # -- Enable the ESS:105 simulator CSC enabled: false +tma-ess110: + # -- Enable the ESS:110 CSC + enabled: false + +tma-ess110-sim: + # -- Enable the ESS:110 simulator CSC + enabled: false + # The following will be set by parameters injected by Argo CD and 
should not # be set in the individual environment values files. global: diff --git a/applications/love/values-base.yaml b/applications/love/values-base.yaml index 97398a15ff..68afdb0fa0 100644 --- a/applications/love/values-base.yaml +++ b/applications/love/values-base.yaml @@ -430,15 +430,18 @@ love-producer: - name: auxteless204 csc: ESS:204 --log-level 10 WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - - name: auxteless205 - csc: ESS:205 --log-level 10 - WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: calibhilless301 csc: ESS:301 --log-level 10 WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription + - name: cameraess111 + csc: ESS:111 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: camerahexapod csc: MTHexapod:1 --log-level 10 WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription + - name: cleanroomess109 + csc: ESS:109 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: dimm1 csc: DIMM:1 --log-level 10 WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription @@ -466,9 +469,15 @@ love-producer: - name: love csc: LOVE:0 --log-level 10 WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription + - name: m1m3ess113 + csc: ESS:113 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: m2ess106 csc: ESS:106 --log-level 10 WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription + - name: m2ess112 + csc: ESS:112 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: m2hexapod csc: MTHexapod:2 --log-level 10 WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription @@ -484,15 +493,6 @@ love-producer: - name: mtdome csc: MTDome:0 --log-level 10 WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - - 
name: mtdomeess101 - csc: ESS:101 --log-level 10 - WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - - name: mtdomeess102 - csc: ESS:102 --log-level 10 - WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - - name: mtdomeess103 - csc: ESS:103 --log-level 10 - WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdomeess107 csc: ESS:107 --log-level 10 WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription @@ -545,6 +545,9 @@ love-producer: - name: tmaess105 csc: ESS:105 --log-level 10 WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription + - name: tmaess110 + csc: ESS:110 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: watcher csc: Watcher:0 --log-level 10 WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription diff --git a/applications/love/values-tucson-teststand.yaml b/applications/love/values-tucson-teststand.yaml index 17839b75b6..e7333ceb30 100644 --- a/applications/love/values-tucson-teststand.yaml +++ b/applications/love/values-tucson-teststand.yaml @@ -200,8 +200,6 @@ love-producer: image: repository: ts-dockerhub.lsst.org/love-producer pullPolicy: Always - env: - WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription resources: requests: cpu: 10m @@ -212,121 +210,179 @@ love-producer: producers: - name: ataos csc: ATAOS:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atcamera csc: ATCamera:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atdome csc: ATDome:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atdometrajectory csc: ATDomeTrajectory:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atheaderservice csc: ATHeaderService:0 --log-level 10 + 
WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: athexapod csc: ATHexapod:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atmcs csc: ATMCS:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atocps csc: OCPS:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atoods csc: ATOODS:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atpneumatics csc: ATPneumatics:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atptg csc: ATPtg:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atscheduler csc: Scheduler:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: atscriptqueue csc: ScriptQueue:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/queue/ws/subscription - name: atspectrograph csc: ATSpectrograph:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: auxteless201 csc: ESS:201 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: auxteless202 csc: ESS:202 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: auxteless203 csc: ESS:203 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: auxteless204 csc: ESS:204 --log-level 10 - - name: auxteless205 - csc: ESS:205 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: calibhilless301 csc: ESS:301 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription + - name: cameraess111 + csc: ESS:111 --log-level 10 + WEBSOCKET_HOST: 
love-nginx-service/love/manager/producers/ws/subscription - name: camerahexapod csc: MTHexapod:1 --log-level 10 - - name: cccamera - csc: CCCamera:0 --log-level 10 - - name: ccheaderservice - csc: CCHeaderService:0 --log-level 10 - - name: ccoods - csc: CCOODS:0 --log-level 10 - - name: ccocps - csc: OCPS:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription + - name: cleanroomess109 + csc: ESS:109 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: dimm1 csc: DIMM:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: dimm2 csc: DIMM:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: dsm1 csc: DSM:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: dsm2 csc: DSM:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: epm1 csc: EPM:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: gcheaderservice1 csc: GCHeaderService:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: genericcamera1 csc: GenericCamera:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: lasertracker1 csc: LaserTracker:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: love csc: LOVE:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription + - name: m1m3ess113 + csc: ESS:113 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: m2ess106 csc: ESS:106 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription + - name: m2ess112 + csc: ESS:112 --log-level 10 + WEBSOCKET_HOST: 
love-nginx-service/love/manager/producers/ws/subscription - name: m2hexapod csc: MTHexapod:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtaircompressor1 csc: MTAirCompressor:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtaircompressor2 csc: MTAirCompressor:2 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtaos csc: MTAOS:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdome csc: MTDome:0 --log-level 10 - - name: mtdomeess101 - csc: ESS:101 --log-level 10 - - name: mtdomeess102 - csc: ESS:102 --log-level 10 - - name: mtdomeess103 - csc: ESS:103 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdomeess107 csc: ESS:107 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdomeess108 csc: ESS:108 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtdometrajectory csc: MTDomeTrajectory:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtm1m3 csc: MTM1M3:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/m1m3/ws/subscription + resources: + requests: + cpu: 100m + memory: 220Mi + limits: + cpu: 500m + memory: 600Mi - name: mtm2 csc: MTM2:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtmount csc: MTMount:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtptg csc: MTPtg:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtrotator csc: MTRotator:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtscheduler csc: Scheduler:1 --log-level 10 + 
WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: mtscriptqueue csc: ScriptQueue:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/queue/ws/subscription - name: ocsscheduler csc: Scheduler:3 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: ocsscriptqueue csc: ScriptQueue:3 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/queue/ws/subscription - name: tmaess001 csc: ESS:1 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: tmaess104 csc: ESS:104 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: tmaess105 csc: ESS:105 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription + - name: tmaess110 + csc: ESS:110 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: watcher csc: Watcher:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription - name: weatherforecast csc: WeatherForecast:0 --log-level 10 + WEBSOCKET_HOST: love-nginx-service/love/manager/producers/ws/subscription From 899da0f1805bcaef6126ce33553a3eab8c2698ae Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Wed, 27 Nov 2024 10:55:44 -0700 Subject: [PATCH 553/567] Update LOVE config for TTS. 
--- .../love/values-tucson-teststand.yaml | 154 +++++++++++++++++- 1 file changed, 153 insertions(+), 1 deletion(-) diff --git a/applications/love/values-tucson-teststand.yaml b/applications/love/values-tucson-teststand.yaml index e7333ceb30..6892caa197 100644 --- a/applications/love/values-tucson-teststand.yaml +++ b/applications/love/values-tucson-teststand.yaml @@ -59,19 +59,171 @@ love-manager: initialDelaySeconds: 20 periodSeconds: 10 producers: + - name: general image: repository: ts-dockerhub.lsst.org/love-manager pullPolicy: Always env: + LOVE_SITE: tucson SERVER_URL: tucson-teststand.lsst.codes OLE_API_HOSTNAME: tucson-teststand.lsst.codes AUTH_LDAP_1_SERVER_URI: ldap://ipa1.tu.lsst.org AUTH_LDAP_2_SERVER_URI: ldap://ipa2.tu.lsst.org AUTH_LDAP_3_SERVER_URI: ldap://ipa3.tu.lsst.org + COMMANDER_HOSTNAME: love-commander-service + COMMANDER_PORT: 5000 DB_HOST: postgresdb01.tu.lsst.org + DB_ENGINE: postgresql + DB_NAME: love + DB_PORT: 5432 + DB_USER: love + HEARTBEAT_QUERY_COMMANDER: false + JIRA_API_HOSTNAME: rubinobs.atlassian.net + JIRA_PROJECT_ID: 10063 + REDIS_CONFIG_CAPACITY: 5000 + REDIS_CONFIG_EXPIRY: 5 + REDIS_HOST: love-manager-redis-service + REMOTE_STORAGE: true + URL_SUBPATH: /love + envSecrets: + SECRET_KEY: manager-secret-key + PROCESS_CONNECTION_PASS: process-connection-pass + ADMIN_USER_PASS: admin-user-pass + USER_USER_PASS: user-user-pass + CMD_USER_PASS: cmd-user-pass + AUTHLIST_USER_PASS: authlist-user-pass + AUTH_LDAP_BIND_PASSWORD: auth-ldap-bind-password + DB_PASS: db-pass + REDIS_PASS: redis-pass + replicas: 10 + autoscaling: + enabled: false + minReplicas: 2 + maxReplicas: 25 + targetCPUUtilizationPercentage: 50 + scaleDownPolicy: + policies: + - type: Pods + value: 2 + periodSeconds: 120 + - type: Percent + value: 10 + periodSeconds: 120 + selectPolicy: Min + resources: + requests: + cpu: 150m + memory: 200Mi + limits: + cpu: 1000m + memory: 1500Mi + readinessProbe: + tcpSocket: + port: 8000 + initialDelaySeconds: 20 + periodSeconds: 
10 + - name: queue + image: + repository: ts-dockerhub.lsst.org/love-manager + pullPolicy: Always + env: LOVE_SITE: tucson + SERVER_URL: tucson-teststand.lsst.codes + OLE_API_HOSTNAME: tucson-teststand.lsst.codes + AUTH_LDAP_1_SERVER_URI: ldap://ipa1.tu.lsst.org + AUTH_LDAP_2_SERVER_URI: ldap://ipa2.tu.lsst.org + AUTH_LDAP_3_SERVER_URI: ldap://ipa3.tu.lsst.org + COMMANDER_HOSTNAME: love-commander-service + COMMANDER_PORT: 5000 + DB_HOST: postgresdb01.tu.lsst.org + DB_ENGINE: postgresql + DB_NAME: love + DB_PORT: 5432 + DB_USER: love + HEARTBEAT_QUERY_COMMANDER: false + JIRA_API_HOSTNAME: rubinobs.atlassian.net + JIRA_PROJECT_ID: 10063 + REDIS_CONFIG_CAPACITY: 5000 + REDIS_CONFIG_EXPIRY: 5 + REDIS_HOST: love-manager-redis-service + REMOTE_STORAGE: true + URL_SUBPATH: /love + envSecrets: + SECRET_KEY: manager-secret-key + PROCESS_CONNECTION_PASS: process-connection-pass + ADMIN_USER_PASS: admin-user-pass + USER_USER_PASS: user-user-pass + CMD_USER_PASS: cmd-user-pass + AUTHLIST_USER_PASS: authlist-user-pass + AUTH_LDAP_BIND_PASSWORD: auth-ldap-bind-password + DB_PASS: db-pass + REDIS_PASS: redis-pass + replicas: 3 autoscaling: - enabled: true + enabled: false + minReplicas: 2 + maxReplicas: 25 + targetCPUUtilizationPercentage: 50 + scaleDownPolicy: + policies: + - type: Pods + value: 2 + periodSeconds: 120 + - type: Percent + value: 10 + periodSeconds: 120 + selectPolicy: Min + resources: + requests: + cpu: 150m + memory: 200Mi + limits: + cpu: 1000m + memory: 1500Mi + readinessProbe: + tcpSocket: + port: 8000 + initialDelaySeconds: 20 + periodSeconds: 10 + - name: m1m3 + image: + repository: ts-dockerhub.lsst.org/love-manager + pullPolicy: Always + env: + LOVE_SITE: tucson + SERVER_URL: tucson-teststand.lsst.codes + OLE_API_HOSTNAME: tucson-teststand.lsst.codes + AUTH_LDAP_1_SERVER_URI: ldap://ipa1.tu.lsst.org + AUTH_LDAP_2_SERVER_URI: ldap://ipa2.tu.lsst.org + AUTH_LDAP_3_SERVER_URI: ldap://ipa3.tu.lsst.org + COMMANDER_HOSTNAME: love-commander-service + 
COMMANDER_PORT: 5000 + DB_HOST: postgresdb01.tu.lsst.org + DB_ENGINE: postgresql + DB_NAME: love + DB_PORT: 5432 + DB_USER: love + HEARTBEAT_QUERY_COMMANDER: false + JIRA_API_HOSTNAME: rubinobs.atlassian.net + JIRA_PROJECT_ID: 10063 + REDIS_CONFIG_CAPACITY: 5000 + REDIS_CONFIG_EXPIRY: 5 + REDIS_HOST: love-manager-redis-service + REMOTE_STORAGE: true + URL_SUBPATH: /love + envSecrets: + SECRET_KEY: manager-secret-key + PROCESS_CONNECTION_PASS: process-connection-pass + ADMIN_USER_PASS: admin-user-pass + USER_USER_PASS: user-user-pass + CMD_USER_PASS: cmd-user-pass + AUTHLIST_USER_PASS: authlist-user-pass + AUTH_LDAP_BIND_PASSWORD: auth-ldap-bind-password + DB_PASS: db-pass + REDIS_PASS: redis-pass + replicas: 1 + autoscaling: + enabled: false minReplicas: 2 maxReplicas: 25 targetCPUUtilizationPercentage: 50 From 2d068d43359d73798f659ce800d97904fe784ab2 Mon Sep 17 00:00:00 2001 From: Stelios Voutsinas Date: Mon, 25 Nov 2024 11:36:30 -0700 Subject: [PATCH 554/567] Change OIDC provider in the cadc-tap config to base path --- charts/cadc-tap/README.md | 6 +++--- charts/cadc-tap/templates/configmap.yaml | 2 +- charts/cadc-tap/values.yaml | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/charts/cadc-tap/README.md b/charts/cadc-tap/README.md index d66a03be28..9e9367dc3d 100644 --- a/charts/cadc-tap/README.md +++ b/charts/cadc-tap/README.md @@ -31,12 +31,12 @@ IVOA TAP service | config.pg.host | string | None, must be set if backend is `pg` | Host to connect to | | config.pg.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the TAP image | | config.pg.image.repository | string | `"ghcr.io/lsst-sqre/tap-postgres-service"` | TAP image to use | -| config.pg.image.tag | string | `"1.18.6"` | Tag of TAP image to use | +| config.pg.image.tag | string | `"1.19.0"` | Tag of TAP image to use | | config.pg.username | string | None, must be set if backend is `pg` | Username to connect with | | config.qserv.host | string | `"mock-db:3306"` (the mock 
QServ) | QServ hostname:port to connect to | | config.qserv.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the TAP image | | config.qserv.image.repository | string | `"ghcr.io/lsst-sqre/lsst-tap-service"` | TAP image to use | -| config.qserv.image.tag | string | `"2.4.7"` | Tag of TAP image to use | +| config.qserv.image.tag | string | `"2.5.0"` | Tag of TAP image to use | | config.qserv.jdbcParams | string | `""` | Extra JDBC connection parameters | | config.qserv.passwordEnabled | bool | false | Whether the Qserv database is password protected | | config.tapSchemaAddress | string | `"cadc-tap-schema-db:3306"` | Address to a MySQL database containing TAP schema data | @@ -78,7 +78,7 @@ IVOA TAP service | uws.affinity | object | `{}` | Affinity rules for the UWS database pod | | uws.image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the UWS database image | | uws.image.repository | string | `"ghcr.io/lsst-sqre/lsst-tap-uws-db"` | UWS database image to use | -| uws.image.tag | string | `"2.4.7"` | Tag of UWS database image to use | +| uws.image.tag | string | `"2.5.0"` | Tag of UWS database image to use | | uws.nodeSelector | object | `{}` | Node selection rules for the UWS database pod | | uws.podAnnotations | object | `{}` | Annotations for the UWS databse pod | | uws.resources | object | See `values.yaml` | Resource limits and requests for the UWS database pod | diff --git a/charts/cadc-tap/templates/configmap.yaml b/charts/cadc-tap/templates/configmap.yaml index c7a872633b..4bceb70294 100644 --- a/charts/cadc-tap/templates/configmap.yaml +++ b/charts/cadc-tap/templates/configmap.yaml @@ -6,7 +6,7 @@ metadata: {{- include "cadc-tap.labels" . 
| nindent 4 }} data: cadc-registry.properties: | - ivo://ivoa.net/sso#OpenID = {{ .Values.global.baseUrl }}/auth/cadc + ivo://ivoa.net/sso#OpenID = {{ .Values.global.baseUrl }} catalina.properties: | # tomcat properties tomcat.connector.connectionTimeout=20000 diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index e54ff5e156..da92d70b04 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -71,7 +71,7 @@ config: pullPolicy: "IfNotPresent" # -- Tag of TAP image to use - tag: "1.18.6" + tag: "1.19.0" qserv: # -- QServ hostname:port to connect to @@ -89,7 +89,7 @@ config: pullPolicy: "IfNotPresent" # -- Tag of TAP image to use - tag: "2.4.7" + tag: "2.5.0" # -- Whether the Qserv database is password protected # @default -- false @@ -195,7 +195,7 @@ uws: pullPolicy: "IfNotPresent" # -- Tag of UWS database image to use - tag: "2.4.7" + tag: "2.5.0" # -- Resource limits and requests for the UWS database pod # @default -- See `values.yaml` From 3ea497f2d8c834f707bdedb37f3ddf3b4178e36f Mon Sep 17 00:00:00 2001 From: Michael Reuter Date: Wed, 27 Nov 2024 12:05:11 -0700 Subject: [PATCH 555/567] Add ESS:109. 
--- applications/eas/values-base.yaml | 16 ++++++++++++++++ applications/eas/values-tucson-teststand.yaml | 16 ++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/applications/eas/values-base.yaml b/applications/eas/values-base.yaml index 38497e55c6..2a6c08a2f7 100644 --- a/applications/eas/values-base.yaml +++ b/applications/eas/values-base.yaml @@ -94,6 +94,22 @@ camera-ess111-sim: cpu: 190m memory: 900Mi +cleanroom-ess109-sim: + enabled: true + classifier: ess109 + image: + repository: ts-dockerhub.lsst.org/ess + pullPolicy: Always + env: + RUN_ARG: 109 --simulate + resources: + requests: + cpu: 19m + memory: 90Mi + limits: + cpu: 190m + memory: 900Mi + dimm1-sim: enabled: true image: diff --git a/applications/eas/values-tucson-teststand.yaml b/applications/eas/values-tucson-teststand.yaml index 38497e55c6..2a6c08a2f7 100644 --- a/applications/eas/values-tucson-teststand.yaml +++ b/applications/eas/values-tucson-teststand.yaml @@ -94,6 +94,22 @@ camera-ess111-sim: cpu: 190m memory: 900Mi +cleanroom-ess109-sim: + enabled: true + classifier: ess109 + image: + repository: ts-dockerhub.lsst.org/ess + pullPolicy: Always + env: + RUN_ARG: 109 --simulate + resources: + requests: + cpu: 19m + memory: 90Mi + limits: + cpu: 190m + memory: 900Mi + dimm1-sim: enabled: true image: From 85ac91f33ff6f424f4a30e55f9524b3de4ee2cb0 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Wed, 27 Nov 2024 15:36:45 -0800 Subject: [PATCH 556/567] Enable Gafaelfawr metrics on idfprod --- applications/gafaelfawr/values-idfprod.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/applications/gafaelfawr/values-idfprod.yaml b/applications/gafaelfawr/values-idfprod.yaml index 6e55cc5d89..dcbf813f34 100644 --- a/applications/gafaelfawr/values-idfprod.yaml +++ b/applications/gafaelfawr/values-idfprod.yaml @@ -36,6 +36,10 @@ config: - "dp0.2" - "dp0.3" + # Enable metrics reporting. + metrics: + enabled: true + # User quota settings for services. 
quota: default: From 8e060e1514647d4ca12054be1ea521dc68516b13 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Wed, 27 Nov 2024 16:05:48 -0800 Subject: [PATCH 557/567] Tag every GafaelfawrIngress with a service Add service information to every authenticated `GafaelfawrIngress` resource. This will be used for metrics reporting. --- .../charts/alert-database/templates/ingress.yaml | 1 + applications/argo-workflows/templates/ingress.yaml | 3 ++- .../butler/templates/ingress-authenticated.yaml | 1 + applications/checkerboard/templates/ingress.yaml | 1 + applications/cm-service/templates/ingress.yaml | 1 + applications/consdb/templates/ingress.yaml | 1 + applications/datalinker/templates/ingress-image.yaml | 1 + applications/datalinker/templates/ingress-tap.yaml | 1 + applications/exposurelog/templates/ingress.yaml | 1 + applications/fastapi-bootcamp/templates/ingress.yaml | 1 + .../ghostwriter/templates/ingress-toplevel.yaml | 1 + applications/ghostwriter/templates/ingress.yaml | 1 + applications/giftless/templates/ingress.yaml | 1 + applications/hips/templates/ingress.yaml | 1 + applications/jira-data-proxy/templates/ingress.yaml | 3 ++- applications/livetap/README.md | 3 ++- applications/livetap/values.yaml | 5 ++++- applications/mobu/templates/ingress.yaml | 3 ++- applications/narrativelog/templates/ingress.yaml | 1 + applications/nightreport/templates/ingress.yaml | 1 + applications/noteburst/templates/ingress.yaml | 3 ++- .../nublado/templates/controller-ingress-admin.yaml | 1 + .../nublado/templates/controller-ingress-files.yaml | 3 ++- .../nublado/templates/controller-ingress-hub.yaml | 1 + .../nublado/templates/controller-ingress-user.yaml | 1 + applications/nublado/templates/proxy-ingress.yaml | 3 ++- .../nublado/templates/proxy-spawn-ingress.yaml | 3 ++- .../charts/obsenv-ui/templates/ingress.yaml | 3 ++- applications/ook/templates/ingress.yaml | 3 ++- applications/plot-navigator/templates/ingress.yaml | 3 ++- applications/portal/templates/ingress-admin.yaml 
| 3 ++- applications/portal/templates/ingress.yaml | 3 ++- applications/ppdb-replication/templates/ingress.yaml | 1 + applications/production-tools/templates/ingress.yaml | 3 ++- applications/s3proxy/templates/ingress.yaml | 1 + .../schedview-snapshot/templates/ingress.yaml | 3 ++- applications/sia/templates/ingress.yaml | 1 + applications/siav2/templates/ingress.yaml | 11 ++++++----- .../squareone/templates/ingress-times-square.yaml | 3 ++- applications/ssotap/README.md | 5 +++-- applications/ssotap/values.yaml | 7 +++++-- applications/tap/README.md | 5 +++-- applications/tap/values.yaml | 7 +++++-- .../times-square/templates/ingress-templates.yaml | 3 ++- applications/times-square/templates/ingress.yaml | 3 ++- applications/vo-cutouts/templates/ingress.yaml | 1 + charts/cadc-tap/README.md | 1 + .../cadc-tap/templates/tap-ingress-authenticated.yaml | 5 +++-- charts/cadc-tap/values.yaml | 5 +++++ charts/rubintv/templates/ingress.yaml | 3 ++- starters/fastapi-safir-uws/templates/ingress.yaml | 1 + starters/fastapi-safir/templates/ingress.yaml | 1 + starters/web-service/templates/ingress.yaml | 1 + 53 files changed, 98 insertions(+), 35 deletions(-) diff --git a/applications/alert-stream-broker/charts/alert-database/templates/ingress.yaml b/applications/alert-stream-broker/charts/alert-database/templates/ingress.yaml index 774dadf5a1..2205bd50a7 100644 --- a/applications/alert-stream-broker/charts/alert-database/templates/ingress.yaml +++ b/applications/alert-stream-broker/charts/alert-database/templates/ingress.yaml @@ -10,6 +10,7 @@ config: scopes: all: - "read:alertdb" + service: "alert-stream-broker" template: metadata: name: {{ template "alertDatabase.fullname" . 
}} diff --git a/applications/argo-workflows/templates/ingress.yaml b/applications/argo-workflows/templates/ingress.yaml index 03a5c5ed65..66a2ec497c 100644 --- a/applications/argo-workflows/templates/ingress.yaml +++ b/applications/argo-workflows/templates/ingress.yaml @@ -4,9 +4,10 @@ metadata: name: argo-workflows config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: {{ .Values.ingress.scopes }} - loginRedirect: true + service: "argo-workflows" template: metadata: name: argo-workflows diff --git a/applications/butler/templates/ingress-authenticated.yaml b/applications/butler/templates/ingress-authenticated.yaml index bf7127de6e..8837ae054a 100644 --- a/applications/butler/templates/ingress-authenticated.yaml +++ b/applications/butler/templates/ingress-authenticated.yaml @@ -16,6 +16,7 @@ config: internal: service: "butler" scopes: [] + service: "butler" template: metadata: diff --git a/applications/checkerboard/templates/ingress.yaml b/applications/checkerboard/templates/ingress.yaml index edf033a11e..fb11ba4d0a 100644 --- a/applications/checkerboard/templates/ingress.yaml +++ b/applications/checkerboard/templates/ingress.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:checkerboard" + service: "checkerboard" template: metadata: name: {{ template "checkerboard.fullname" . 
}} diff --git a/applications/cm-service/templates/ingress.yaml b/applications/cm-service/templates/ingress.yaml index 882de320dc..0820003c40 100644 --- a/applications/cm-service/templates/ingress.yaml +++ b/applications/cm-service/templates/ingress.yaml @@ -10,6 +10,7 @@ config: scopes: all: - "exec:internal-tools" + service: "cm-service" template: metadata: name: "cm-service" diff --git a/applications/consdb/templates/ingress.yaml b/applications/consdb/templates/ingress.yaml index 18a6c605df..1b0dfc38bd 100644 --- a/applications/consdb/templates/ingress.yaml +++ b/applications/consdb/templates/ingress.yaml @@ -11,6 +11,7 @@ config: scopes: all: - "read:image" + service: "consdb" template: metadata: name: "consdb-pq" diff --git a/applications/datalinker/templates/ingress-image.yaml b/applications/datalinker/templates/ingress-image.yaml index 325bfb41e4..7c6c9a3c82 100644 --- a/applications/datalinker/templates/ingress-image.yaml +++ b/applications/datalinker/templates/ingress-image.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:image" + service: "datalinker" # Request a delegated token to use for making calls to Butler server with the # end-user's credentials. delegate: diff --git a/applications/datalinker/templates/ingress-tap.yaml b/applications/datalinker/templates/ingress-tap.yaml index 99245d239e..acd37d39aa 100644 --- a/applications/datalinker/templates/ingress-tap.yaml +++ b/applications/datalinker/templates/ingress-tap.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:tap" + service: "datalinker" template: metadata: name: {{ include "datalinker.fullname" . 
}}-tap diff --git a/applications/exposurelog/templates/ingress.yaml b/applications/exposurelog/templates/ingress.yaml index c5eba0a88a..eb85e73506 100644 --- a/applications/exposurelog/templates/ingress.yaml +++ b/applications/exposurelog/templates/ingress.yaml @@ -11,6 +11,7 @@ config: scopes: all: - "exec:internal-tools" + service: "exposurelog" {{- else }} scopes: anonymous: true diff --git a/applications/fastapi-bootcamp/templates/ingress.yaml b/applications/fastapi-bootcamp/templates/ingress.yaml index 8e3c72bf44..8ba9afa2a7 100644 --- a/applications/fastapi-bootcamp/templates/ingress.yaml +++ b/applications/fastapi-bootcamp/templates/ingress.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:image" + service: "fastapi-bootcamp" template: metadata: name: "fastapi-bootcamp" diff --git a/applications/ghostwriter/templates/ingress-toplevel.yaml b/applications/ghostwriter/templates/ingress-toplevel.yaml index fb8ebbf3b5..af4ef97d56 100644 --- a/applications/ghostwriter/templates/ingress-toplevel.yaml +++ b/applications/ghostwriter/templates/ingress-toplevel.yaml @@ -11,6 +11,7 @@ config: scopes: all: - "read:image" + service: "ghostwriter" delegate: notebook: {} template: diff --git a/applications/ghostwriter/templates/ingress.yaml b/applications/ghostwriter/templates/ingress.yaml index 1570a890a7..bac29c13a3 100644 --- a/applications/ghostwriter/templates/ingress.yaml +++ b/applications/ghostwriter/templates/ingress.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:image" + service: "ghostwriter" delegate: notebook: {} template: diff --git a/applications/giftless/templates/ingress.yaml b/applications/giftless/templates/ingress.yaml index 77e97b5227..bf291fcd7a 100644 --- a/applications/giftless/templates/ingress.yaml +++ b/applications/giftless/templates/ingress.yaml @@ -91,6 +91,7 @@ config: scopes: all: - "write:git-lfs" + service: "giftless" template: metadata: name: {{ template "giftless.fullname" . 
}}-rw diff --git a/applications/hips/templates/ingress.yaml b/applications/hips/templates/ingress.yaml index 78bfe06ee9..8f87d5e1ac 100644 --- a/applications/hips/templates/ingress.yaml +++ b/applications/hips/templates/ingress.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:image" + service: "hips" template: metadata: name: "hips" diff --git a/applications/jira-data-proxy/templates/ingress.yaml b/applications/jira-data-proxy/templates/ingress.yaml index 771d96fc5e..8d91d25dab 100644 --- a/applications/jira-data-proxy/templates/ingress.yaml +++ b/applications/jira-data-proxy/templates/ingress.yaml @@ -6,10 +6,11 @@ metadata: {{- include "jira-data-proxy.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: false # endpoint is for API use only scopes: all: - "exec:notebook" - loginRedirect: false # endpoint is for API use only + service: "jira-data-proxy" template: metadata: name: "jira-data-proxy" diff --git a/applications/livetap/README.md b/applications/livetap/README.md index 0884f22fe9..cbcac45566 100644 --- a/applications/livetap/README.md +++ b/applications/livetap/README.md @@ -16,7 +16,8 @@ IVOA TAP service | cadc-tap.config.pg.host | string | `"mock-pg:5432"` (the mock pg) | Postgres hostname:port to connect to | | cadc-tap.config.pg.username | string | `"rubin"` | Postgres username to use to connect | | cadc-tap.config.vaultSecretName | string | `"livetap"` | Vault secret name: the final key in the vault path | -| cadc-tap.ingress.path | string | `"live"` | | +| cadc-tap.ingress.path | string | `"live"` | Ingress path that should be routed to this service | +| cadc-tap.service | string | `"livetap"` | Name of the service from Gafaelfawr's perspective | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | diff --git 
a/applications/livetap/values.yaml b/applications/livetap/values.yaml index df18fd489a..3c5c686e26 100644 --- a/applications/livetap/values.yaml +++ b/applications/livetap/values.yaml @@ -1,6 +1,6 @@ cadc-tap: - # Settings for the ingress rules. ingress: + # -- Ingress path that should be routed to this service path: "live" config: @@ -21,6 +21,9 @@ cadc-tap: # -- Vault secret name: the final key in the vault path vaultSecretName: "livetap" + # -- Name of the service from Gafaelfawr's perspective + service: "livetap" + # The following will be set by parameters injected by Argo CD and should not # be set in the individual environment values files. global: diff --git a/applications/mobu/templates/ingress.yaml b/applications/mobu/templates/ingress.yaml index ce60afd027..73d69de2eb 100644 --- a/applications/mobu/templates/ingress.yaml +++ b/applications/mobu/templates/ingress.yaml @@ -6,10 +6,11 @@ metadata: {{- include "mobu.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - "exec:admin" - loginRedirect: true + service: "mobu" template: metadata: name: {{ template "mobu.fullname" . 
}} diff --git a/applications/narrativelog/templates/ingress.yaml b/applications/narrativelog/templates/ingress.yaml index 796e78fd1d..73ad20a4d8 100644 --- a/applications/narrativelog/templates/ingress.yaml +++ b/applications/narrativelog/templates/ingress.yaml @@ -11,6 +11,7 @@ config: scopes: all: - "exec:internal-tools" + service: "narrativelog" {{- else }} scopes: anonymous: true diff --git a/applications/nightreport/templates/ingress.yaml b/applications/nightreport/templates/ingress.yaml index 930d61eab8..a6600c25a5 100644 --- a/applications/nightreport/templates/ingress.yaml +++ b/applications/nightreport/templates/ingress.yaml @@ -11,6 +11,7 @@ config: scopes: all: - "exec:internal-tools" + service: "nightreport" {{- else }} scopes: anonymous: true diff --git a/applications/noteburst/templates/ingress.yaml b/applications/noteburst/templates/ingress.yaml index 2072a48326..c468e359dc 100644 --- a/applications/noteburst/templates/ingress.yaml +++ b/applications/noteburst/templates/ingress.yaml @@ -6,10 +6,11 @@ metadata: {{- include "noteburst.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - "exec:notebook" - loginRedirect: true + service: "noteburst" template: metadata: name: {{ template "noteburst.fullname" . 
}} diff --git a/applications/nublado/templates/controller-ingress-admin.yaml b/applications/nublado/templates/controller-ingress-admin.yaml index 43043b1cc4..1778da8f75 100644 --- a/applications/nublado/templates/controller-ingress-admin.yaml +++ b/applications/nublado/templates/controller-ingress-admin.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "exec:admin" + service: "nublado-controller" template: metadata: name: "controller-admin" diff --git a/applications/nublado/templates/controller-ingress-files.yaml b/applications/nublado/templates/controller-ingress-files.yaml index 19d7506b89..de85296010 100644 --- a/applications/nublado/templates/controller-ingress-files.yaml +++ b/applications/nublado/templates/controller-ingress-files.yaml @@ -10,9 +10,10 @@ config: scopes: all: - "write:files" + service: "nublado-files" delegate: internal: - service: "nublado" + service: "nublado-files" scopes: [] template: metadata: diff --git a/applications/nublado/templates/controller-ingress-hub.yaml b/applications/nublado/templates/controller-ingress-hub.yaml index 2c1c00c611..c25dbc96a7 100644 --- a/applications/nublado/templates/controller-ingress-hub.yaml +++ b/applications/nublado/templates/controller-ingress-hub.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "admin:jupyterlab" + service: "nublado-controller" template: metadata: name: "controller-hub" diff --git a/applications/nublado/templates/controller-ingress-user.yaml b/applications/nublado/templates/controller-ingress-user.yaml index 45549f4703..ca6b9d26d8 100644 --- a/applications/nublado/templates/controller-ingress-user.yaml +++ b/applications/nublado/templates/controller-ingress-user.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "exec:notebook" + service: "nublado-controller" delegate: notebook: {} template: diff --git a/applications/nublado/templates/proxy-ingress.yaml b/applications/nublado/templates/proxy-ingress.yaml index 4a7b13f7ce..96dc37c762 100644 --- a/applications/nublado/templates/proxy-ingress.yaml +++ 
b/applications/nublado/templates/proxy-ingress.yaml @@ -7,10 +7,11 @@ metadata: config: baseUrl: {{ .Values.global.baseUrl | quote }} authCacheDuration: "5m" + loginRedirect: true scopes: all: - "exec:notebook" - loginRedirect: true + service: "nublado" delegate: notebook: {} template: diff --git a/applications/nublado/templates/proxy-spawn-ingress.yaml b/applications/nublado/templates/proxy-spawn-ingress.yaml index 06af378237..54c4761c36 100644 --- a/applications/nublado/templates/proxy-spawn-ingress.yaml +++ b/applications/nublado/templates/proxy-spawn-ingress.yaml @@ -7,10 +7,11 @@ metadata: {{- include "nublado.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - "exec:notebook" - loginRedirect: true + service: "nublado" delegate: {{- if .Values.hub.minimumTokenLifetime }} minimumLifetime: {{ .Values.hub.minimumTokenLifetime }} diff --git a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml index de30d87046..1ad03831c5 100644 --- a/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml +++ b/applications/obsenv-management/charts/obsenv-ui/templates/ingress.yaml @@ -10,9 +10,10 @@ config: scopes: all: - "exec:internal-tools" + service: "obsenv-ui" delegate: internal: - service: "obsenv-api" + service: "obsenv-ui" scopes: [] template: metadata: diff --git a/applications/ook/templates/ingress.yaml b/applications/ook/templates/ingress.yaml index 41fe9578a7..d43bc0a839 100644 --- a/applications/ook/templates/ingress.yaml +++ b/applications/ook/templates/ingress.yaml @@ -6,10 +6,11 @@ metadata: {{- include "ook.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - "exec:admin" - loginRedirect: true + service: "ook" template: metadata: name: {{ template "ook.fullname" . 
}} diff --git a/applications/plot-navigator/templates/ingress.yaml b/applications/plot-navigator/templates/ingress.yaml index a0c1d19102..e32f7e32e5 100644 --- a/applications/plot-navigator/templates/ingress.yaml +++ b/applications/plot-navigator/templates/ingress.yaml @@ -6,10 +6,11 @@ metadata: {{- include "plot-navigator.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - "exec:portal" - loginRedirect: true + service: "plot-navigator" delegate: internal: scopes: [] diff --git a/applications/portal/templates/ingress-admin.yaml b/applications/portal/templates/ingress-admin.yaml index 2a107ab1f5..65fc675e29 100644 --- a/applications/portal/templates/ingress-admin.yaml +++ b/applications/portal/templates/ingress-admin.yaml @@ -6,10 +6,11 @@ metadata: {{- include "portal.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - "exec:admin" - loginRedirect: true + service: "portal" template: metadata: name: {{ include "portal.fullname" . 
}}-admin diff --git a/applications/portal/templates/ingress.yaml b/applications/portal/templates/ingress.yaml index 0d7d6fc957..b14411788a 100644 --- a/applications/portal/templates/ingress.yaml +++ b/applications/portal/templates/ingress.yaml @@ -7,10 +7,11 @@ metadata: config: baseUrl: {{ .Values.global.baseUrl | quote }} authCacheDuration: "5m" + loginRedirect: true scopes: all: - "exec:portal" - loginRedirect: true + service: "portal" delegate: internal: service: "portal" diff --git a/applications/ppdb-replication/templates/ingress.yaml b/applications/ppdb-replication/templates/ingress.yaml index 381bce084c..849ae8c296 100644 --- a/applications/ppdb-replication/templates/ingress.yaml +++ b/applications/ppdb-replication/templates/ingress.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:image" + service: "ppdb-replication" template: metadata: name: "ppdb-replication" diff --git a/applications/production-tools/templates/ingress.yaml b/applications/production-tools/templates/ingress.yaml index fbf1fb3bde..fab4f59fbf 100644 --- a/applications/production-tools/templates/ingress.yaml +++ b/applications/production-tools/templates/ingress.yaml @@ -6,10 +6,11 @@ metadata: {{- include "production-tools.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - "exec:portal" - loginRedirect: true + service: "production-tools" template: metadata: name: {{ template "production-tools.fullname" . 
}} diff --git a/applications/s3proxy/templates/ingress.yaml b/applications/s3proxy/templates/ingress.yaml index 61ce54a2ff..ed4bf7d933 100644 --- a/applications/s3proxy/templates/ingress.yaml +++ b/applications/s3proxy/templates/ingress.yaml @@ -13,6 +13,7 @@ config: scopes: all: - "read:image" + service: "s3proxy" template: metadata: name: "s3proxy" diff --git a/applications/schedview-snapshot/templates/ingress.yaml b/applications/schedview-snapshot/templates/ingress.yaml index b6cb8ae716..9aa158b2f5 100644 --- a/applications/schedview-snapshot/templates/ingress.yaml +++ b/applications/schedview-snapshot/templates/ingress.yaml @@ -6,10 +6,11 @@ metadata: {{- include "schedview-snapshot.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - "exec:portal" - loginRedirect: true + service: "schedview-snapshot" template: metadata: name: "schedview-snapshot" diff --git a/applications/sia/templates/ingress.yaml b/applications/sia/templates/ingress.yaml index bb9638b596..b548e371bb 100644 --- a/applications/sia/templates/ingress.yaml +++ b/applications/sia/templates/ingress.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:image" + service: "sia" delegate: internal: service: "sia" diff --git a/applications/siav2/templates/ingress.yaml b/applications/siav2/templates/ingress.yaml index 7f9fa4bd21..2af5227a0e 100644 --- a/applications/siav2/templates/ingress.yaml +++ b/applications/siav2/templates/ingress.yaml @@ -7,16 +7,17 @@ metadata: config: authType: "basic" baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: false + scopes: + all: + - "read:image" + service: "siav2" delegate: internal: scopes: - - read:tap + - "read:tap" service: "siav2" useAuthorization: true - loginRedirect: false - scopes: - all: - - read:image template: metadata: name: "siav2-authenticated" diff --git a/applications/squareone/templates/ingress-times-square.yaml b/applications/squareone/templates/ingress-times-square.yaml index 
e820c1b32e..3c5dad1ac0 100644 --- a/applications/squareone/templates/ingress-times-square.yaml +++ b/applications/squareone/templates/ingress-times-square.yaml @@ -6,10 +6,11 @@ metadata: {{- include "squareone.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - {{ .Values.ingress.timesSquareScope | quote }} - loginRedirect: true + service: "times-square" template: metadata: name: {{ template "squareone.fullname" . }}-times-square diff --git a/applications/ssotap/README.md b/applications/ssotap/README.md index 248e9b3cb4..7c2d741eef 100644 --- a/applications/ssotap/README.md +++ b/applications/ssotap/README.md @@ -16,8 +16,9 @@ IVOA TAP service for Solar System Objects | cadc-tap.config.pg.host | string | `"usdf-pg-catalogs.slac.stanford.edu:5432"` | Postgres hostname:port to connect to | | cadc-tap.config.pg.username | string | `"dp03"` | Postgres username to use to connect | | cadc-tap.config.vaultSecretName | string | `"ssotap"` | Vault secret name: the final key in the vault path | -| cadc-tap.ingress.path | string | `"ssotap"` | | -| cadc-tap.serviceAccount.name | string | `"ssotap"` | | +| cadc-tap.ingress.path | string | `"ssotap"` | Ingress path that should be routed to this service | +| cadc-tap.service | string | `"ssotap"` | Name of the service from Gafaelfawr's perspective | +| cadc-tap.serviceAccount.name | string | `"ssotap"` | Name of the Kubernetes `ServiceAccount`, used for CloudSQL access | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | diff --git a/applications/ssotap/values.yaml b/applications/ssotap/values.yaml index bf5c3b7b82..b22e4e2e99 100644 --- a/applications/ssotap/values.yaml +++ b/applications/ssotap/values.yaml @@ -1,6 +1,6 @@ cadc-tap: - # Settings for the ingress rules. 
ingress: + # -- Ingress path that should be routed to this service path: "ssotap" config: @@ -21,9 +21,12 @@ cadc-tap: vaultSecretName: "ssotap" serviceAccount: - # Name of Service Account + # -- Name of the Kubernetes `ServiceAccount`, used for CloudSQL access name: "ssotap" + # -- Name of the service from Gafaelfawr's perspective + service: "ssotap" + # The following will be set by parameters injected by Argo CD and should not # be set in the individual environment values files. global: diff --git a/applications/tap/README.md b/applications/tap/README.md index 62685520be..92ccd1b29e 100644 --- a/applications/tap/README.md +++ b/applications/tap/README.md @@ -13,8 +13,9 @@ IVOA TAP service |-----|------|---------|-------------| | cadc-tap.config.backend | string | `"qserv"` | What type of backend? | | cadc-tap.config.vaultSecretName | string | `"tap"` | Vault secret name: the final key in the vault path | -| cadc-tap.ingress.path | string | `"tap"` | | -| cadc-tap.serviceAccount.name | string | `"tap"` | | +| cadc-tap.ingress.path | string | `"tap"` | Ingress path that should be routed to this service | +| cadc-tap.serviceAccount.name | string | `"tap"` | Name of the Kubernetes `ServiceAccount`, used for CloudSQL access | +| cadc-tap.serviceName | string | `"tap"` | Name of the service from Gafaelfawr's perspective | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | diff --git a/applications/tap/values.yaml b/applications/tap/values.yaml index 21e256eeee..e7b34a6998 100644 --- a/applications/tap/values.yaml +++ b/applications/tap/values.yaml @@ -1,6 +1,6 @@ cadc-tap: - # Settings for the ingress rules. 
ingress: + # -- Ingress path that should be routed to this service path: "tap" config: @@ -11,9 +11,12 @@ cadc-tap: vaultSecretName: "tap" serviceAccount: - # Name of Service Account + # -- Name of the Kubernetes `ServiceAccount`, used for CloudSQL access name: "tap" + # -- Name of the service from Gafaelfawr's perspective + serviceName: "tap" + # The following will be set by parameters injected by Argo CD and should not # be set in the individual environment values files. global: diff --git a/applications/times-square/templates/ingress-templates.yaml b/applications/times-square/templates/ingress-templates.yaml index 28a4d8ad51..439a3d2cd8 100644 --- a/applications/times-square/templates/ingress-templates.yaml +++ b/applications/times-square/templates/ingress-templates.yaml @@ -6,10 +6,11 @@ metadata: {{- include "times-square.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - {{ .Values.ingress.templateApiScope | quote }} - loginRedirect: true + service: "times-square" template: metadata: name: "{{ template "times-square.fullname" . }}-templates" diff --git a/applications/times-square/templates/ingress.yaml b/applications/times-square/templates/ingress.yaml index 5f1a447ed5..5a74f8a68e 100644 --- a/applications/times-square/templates/ingress.yaml +++ b/applications/times-square/templates/ingress.yaml @@ -6,10 +6,11 @@ metadata: {{- include "times-square.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - {{ .Values.ingress.defaultScope }} - loginRedirect: true + service: "times-square" template: metadata: name: {{ template "times-square.fullname" . 
}} diff --git a/applications/vo-cutouts/templates/ingress.yaml b/applications/vo-cutouts/templates/ingress.yaml index 584bad30ec..cc11ae5341 100644 --- a/applications/vo-cutouts/templates/ingress.yaml +++ b/applications/vo-cutouts/templates/ingress.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:image" + service: "vo-cutouts" # Request a delegated token to use for making calls to Butler server with the # end-user's credentials. delegate: diff --git a/charts/cadc-tap/README.md b/charts/cadc-tap/README.md index 9e9367dc3d..1dcfc1198a 100644 --- a/charts/cadc-tap/README.md +++ b/charts/cadc-tap/README.md @@ -39,6 +39,7 @@ IVOA TAP service | config.qserv.image.tag | string | `"2.5.0"` | Tag of TAP image to use | | config.qserv.jdbcParams | string | `""` | Extra JDBC connection parameters | | config.qserv.passwordEnabled | bool | false | Whether the Qserv database is password protected | +| config.serviceName | string | None, must be set | Name of the service from Gafaelfawr's perspective, used for metrics reporting | | config.tapSchemaAddress | string | `"cadc-tap-schema-db:3306"` | Address to a MySQL database containing TAP schema data | | config.vaultSecretName | string | `""` | Vault secret name, this is appended to the global path to find the vault secrets associated with this deployment. | | fullnameOverride | string | `"cadc-tap"` | Override the full name for resources (includes the release name) | diff --git a/charts/cadc-tap/templates/tap-ingress-authenticated.yaml b/charts/cadc-tap/templates/tap-ingress-authenticated.yaml index c4c0f4aaea..933baaba60 100644 --- a/charts/cadc-tap/templates/tap-ingress-authenticated.yaml +++ b/charts/cadc-tap/templates/tap-ingress-authenticated.yaml @@ -6,14 +6,15 @@ metadata: {{- include "cadc-tap.labels" . 
| nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + authType: "basic" scopes: all: - "read:tap" - authType: "basic" + service: {{ required "config.serviceName must be set" .Values.config.serviceName | quote }} delegate: internal: scopes: [] - service: "tap" + service: {{ .Values.config.serviceName | quote }} useAuthorization: true template: metadata: diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index da92d70b04..8f043c830f 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -115,6 +115,11 @@ config: # maths. jvmMaxHeapSize: 31G + # -- Name of the service from Gafaelfawr's perspective, used for metrics + # reporting + # @default -- None, must be set + serviceName: "" + # -- Vault secret name, this is appended to the global path to find the # vault secrets associated with this deployment. vaultSecretName: "" diff --git a/charts/rubintv/templates/ingress.yaml b/charts/rubintv/templates/ingress.yaml index 9234160481..834447a46c 100644 --- a/charts/rubintv/templates/ingress.yaml +++ b/charts/rubintv/templates/ingress.yaml @@ -6,10 +6,11 @@ metadata: {{- include "rubintv.labels" . | nindent 4 }} config: baseUrl: {{ .Values.global.baseUrl | quote }} + loginRedirect: true scopes: all: - "read:image" - loginRedirect: true + service: "rubintv" template: metadata: name: {{ template "rubintv.fullname" . }} diff --git a/starters/fastapi-safir-uws/templates/ingress.yaml b/starters/fastapi-safir-uws/templates/ingress.yaml index 8b59497224..377d44ca08 100644 --- a/starters/fastapi-safir-uws/templates/ingress.yaml +++ b/starters/fastapi-safir-uws/templates/ingress.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:image" + service: "" # Request a delegated token to use for making calls to Butler server with the # end-user's credentials. 
delegate: diff --git a/starters/fastapi-safir/templates/ingress.yaml b/starters/fastapi-safir/templates/ingress.yaml index ddbe364182..8d326fb9f3 100644 --- a/starters/fastapi-safir/templates/ingress.yaml +++ b/starters/fastapi-safir/templates/ingress.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:image" + service: "" template: metadata: name: "" diff --git a/starters/web-service/templates/ingress.yaml b/starters/web-service/templates/ingress.yaml index b3a26d8bbf..e66ab1a80f 100644 --- a/starters/web-service/templates/ingress.yaml +++ b/starters/web-service/templates/ingress.yaml @@ -9,6 +9,7 @@ config: scopes: all: - "read:image" + service: "" template: metadata: name: "" From 62a010aa5a2009eb953f287aa904e4c0cd789e75 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 14:49:05 +0000 Subject: [PATCH 558/567] Update Helm release argo-cd to v7.7.7 --- applications/argocd/Chart.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/argocd/Chart.yaml b/applications/argocd/Chart.yaml index 5a010b856e..d7c73d2000 100644 --- a/applications/argocd/Chart.yaml +++ b/applications/argocd/Chart.yaml @@ -8,5 +8,5 @@ sources: - https://github.com/argoproj/argo-helm dependencies: - name: argo-cd - version: 7.7.5 + version: 7.7.7 repository: https://argoproj.github.io/argo-helm From bab2624e878cfb3364e37a646bd43c2e55db82e3 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 16:53:59 +0000 Subject: [PATCH 559/567] Update nginx Docker tag to v1.27.3 --- applications/love/charts/love-nginx/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/love/charts/love-nginx/values.yaml b/applications/love/charts/love-nginx/values.yaml index d4e96320fa..969d18791b 100644 --- a/applications/love/charts/love-nginx/values.yaml +++ b/applications/love/charts/love-nginx/values.yaml @@ -4,7 +4,7 @@ 
image: # -- The NGINX image to use repository: nginx # -- The tag to use for the NGINX image - tag: 1.27.2 + tag: 1.27.3 # -- The pull policy on the NGINX image pullPolicy: IfNotPresent # -- Service type specification From 3a8db1a40bbd1c7d5de2b519c3a39f31f9f74d44 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 2 Dec 2024 09:08:07 -0800 Subject: [PATCH 560/567] Update Helm docs --- applications/love/README.md | 2 +- applications/love/charts/love-nginx/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/love/README.md b/applications/love/README.md index d0e10912c7..2c4cc73cbe 100644 --- a/applications/love/README.md +++ b/applications/love/README.md @@ -146,7 +146,7 @@ Deployment for the LSST Operators Visualization Environment | love-nginx.affinity | object | `{}` | Affinity rules for the NGINX pod | | love-nginx.image.pullPolicy | string | `"IfNotPresent"` | The pull policy on the NGINX image | | love-nginx.image.repository | string | `"nginx"` | The NGINX image to use | -| love-nginx.image.tag | string | `"1.27.2"` | The tag to use for the NGINX image | +| love-nginx.image.tag | string | `"1.27.3"` | The tag to use for the NGINX image | | love-nginx.imagePullSecrets | list | `[]` | The list of pull secrets needed for the images. If this section is used, each object listed can have the following attributes defined: _name_ (The label identifying the pull-secret to use) | | love-nginx.ingress.annotations | object | `{}` | Annotations for the NGINX ingress | | love-nginx.ingress.className | string | `"nginx"` | Assign the Ingress class name | diff --git a/applications/love/charts/love-nginx/README.md b/applications/love/charts/love-nginx/README.md index 6a1289a87e..d0adb7822a 100644 --- a/applications/love/charts/love-nginx/README.md +++ b/applications/love/charts/love-nginx/README.md @@ -9,7 +9,7 @@ Helm chart for the LOVE Nginx server. 
| affinity | object | `{}` | Affinity rules for the NGINX pod | | image.pullPolicy | string | `"IfNotPresent"` | The pull policy on the NGINX image | | image.repository | string | `"nginx"` | The NGINX image to use | -| image.tag | string | `"1.27.2"` | The tag to use for the NGINX image | +| image.tag | string | `"1.27.3"` | The tag to use for the NGINX image | | imagePullSecrets | list | `[]` | The list of pull secrets needed for the images. If this section is used, each object listed can have the following attributes defined: _name_ (The label identifying the pull-secret to use) | | ingress.annotations | object | `{}` | Annotations for the NGINX ingress | | ingress.className | string | `"nginx"` | Assign the Ingress class name | From 7057440d414bad2cda9a34189edbeda2bf9b7a5c Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 2 Dec 2024 09:48:53 -0800 Subject: [PATCH 561/567] Fix GafaelfawrIngress for TAP services Move the setting of the service name to the correct location so that the GafaelfawrIngress resources are generated correctly for TAP services. 
--- applications/livetap/README.md | 2 +- applications/livetap/values.yaml | 6 +++--- applications/ssotap/README.md | 2 +- applications/ssotap/values.yaml | 6 +++--- applications/tap/README.md | 2 +- applications/tap/values.yaml | 6 +++--- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/applications/livetap/README.md b/applications/livetap/README.md index cbcac45566..d5a049e88c 100644 --- a/applications/livetap/README.md +++ b/applications/livetap/README.md @@ -15,9 +15,9 @@ IVOA TAP service | cadc-tap.config.pg.database | string | `"lsstdb1"` | Postgres database to connect to | | cadc-tap.config.pg.host | string | `"mock-pg:5432"` (the mock pg) | Postgres hostname:port to connect to | | cadc-tap.config.pg.username | string | `"rubin"` | Postgres username to use to connect | +| cadc-tap.config.service | string | `"livetap"` | Name of the service from Gafaelfawr's perspective | | cadc-tap.config.vaultSecretName | string | `"livetap"` | Vault secret name: the final key in the vault path | | cadc-tap.ingress.path | string | `"live"` | Ingress path that should be routed to this service | -| cadc-tap.service | string | `"livetap"` | Name of the service from Gafaelfawr's perspective | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | diff --git a/applications/livetap/values.yaml b/applications/livetap/values.yaml index 3c5c686e26..d397e4eb58 100644 --- a/applications/livetap/values.yaml +++ b/applications/livetap/values.yaml @@ -7,6 +7,9 @@ cadc-tap: # -- What type of backend? 
backend: "pg" + # -- Name of the service from Gafaelfawr's perspective + service: "livetap" + pg: # -- Postgres hostname:port to connect to # @default -- `"mock-pg:5432"` (the mock pg) @@ -21,9 +24,6 @@ cadc-tap: # -- Vault secret name: the final key in the vault path vaultSecretName: "livetap" - # -- Name of the service from Gafaelfawr's perspective - service: "livetap" - # The following will be set by parameters injected by Argo CD and should not # be set in the individual environment values files. global: diff --git a/applications/ssotap/README.md b/applications/ssotap/README.md index 7c2d741eef..1fb274fc57 100644 --- a/applications/ssotap/README.md +++ b/applications/ssotap/README.md @@ -15,9 +15,9 @@ IVOA TAP service for Solar System Objects | cadc-tap.config.pg.database | string | `"dp03_catalogs"` | Postgres database to connect to | | cadc-tap.config.pg.host | string | `"usdf-pg-catalogs.slac.stanford.edu:5432"` | Postgres hostname:port to connect to | | cadc-tap.config.pg.username | string | `"dp03"` | Postgres username to use to connect | +| cadc-tap.config.service | string | `"ssotap"` | Name of the service from Gafaelfawr's perspective | | cadc-tap.config.vaultSecretName | string | `"ssotap"` | Vault secret name: the final key in the vault path | | cadc-tap.ingress.path | string | `"ssotap"` | Ingress path that should be routed to this service | -| cadc-tap.service | string | `"ssotap"` | Name of the service from Gafaelfawr's perspective | | cadc-tap.serviceAccount.name | string | `"ssotap"` | Name of the Kubernetes `ServiceAccount`, used for CloudSQL access | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | diff --git a/applications/ssotap/values.yaml b/applications/ssotap/values.yaml index b22e4e2e99..2a9372f439 100644 --- a/applications/ssotap/values.yaml +++ b/applications/ssotap/values.yaml @@ -7,6 +7,9 @@ cadc-tap: # -- What type of backend? 
backend: "pg" + # -- Name of the service from Gafaelfawr's perspective + service: "ssotap" + pg: # -- Postgres hostname:port to connect to host: "usdf-pg-catalogs.slac.stanford.edu:5432" @@ -24,9 +27,6 @@ cadc-tap: # -- Name of the Kubernetes `ServiceAccount`, used for CloudSQL access name: "ssotap" - # -- Name of the service from Gafaelfawr's perspective - service: "ssotap" - # The following will be set by parameters injected by Argo CD and should not # be set in the individual environment values files. global: diff --git a/applications/tap/README.md b/applications/tap/README.md index 92ccd1b29e..b72ac23eb7 100644 --- a/applications/tap/README.md +++ b/applications/tap/README.md @@ -12,10 +12,10 @@ IVOA TAP service | Key | Type | Default | Description | |-----|------|---------|-------------| | cadc-tap.config.backend | string | `"qserv"` | What type of backend? | +| cadc-tap.config.serviceName | string | `"tap"` | Name of the service from Gafaelfawr's perspective | | cadc-tap.config.vaultSecretName | string | `"tap"` | Vault secret name: the final key in the vault path | | cadc-tap.ingress.path | string | `"tap"` | Ingress path that should be routed to this service | | cadc-tap.serviceAccount.name | string | `"tap"` | Name of the Kubernetes `ServiceAccount`, used for CloudSQL access | -| cadc-tap.serviceName | string | `"tap"` | Name of the service from Gafaelfawr's perspective | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | | global.host | string | Set by Argo CD | Host name for ingress | | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | diff --git a/applications/tap/values.yaml b/applications/tap/values.yaml index e7b34a6998..2e97e2b4fb 100644 --- a/applications/tap/values.yaml +++ b/applications/tap/values.yaml @@ -10,13 +10,13 @@ cadc-tap: # -- Vault secret name: the final key in the vault path vaultSecretName: "tap" + # -- Name of the service from Gafaelfawr's perspective + serviceName: "tap" + 
serviceAccount: # -- Name of the Kubernetes `ServiceAccount`, used for CloudSQL access name: "tap" - # -- Name of the service from Gafaelfawr's perspective - serviceName: "tap" - # The following will be set by parameters injected by Argo CD and should not # be set in the individual environment values files. global: From 943b17fbb09f60473f32cb8d99af390bfba711d6 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 15:15:47 +0000 Subject: [PATCH 562/567] Update confluentinc/cp-kafka-rest Docker tag to v7.8.0 --- applications/sasquatch/README.md | 2 +- applications/sasquatch/charts/rest-proxy/README.md | 2 +- applications/sasquatch/charts/rest-proxy/values.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/applications/sasquatch/README.md b/applications/sasquatch/README.md index 7ed157c4f2..f21d37bc5f 100644 --- a/applications/sasquatch/README.md +++ b/applications/sasquatch/README.md @@ -336,7 +336,7 @@ Rubin Observatory's telemetry service | rest-proxy.heapOptions | string | `"-Xms512M -Xmx512M"` | Kafka REST proxy JVM Heap Option | | rest-proxy.image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | rest-proxy.image.repository | string | `"confluentinc/cp-kafka-rest"` | Kafka REST proxy image repository | -| rest-proxy.image.tag | string | `"7.7.1"` | Kafka REST proxy image tag | +| rest-proxy.image.tag | string | `"7.8.0"` | Kafka REST proxy image tag | | rest-proxy.ingress.annotations | object | See `values.yaml` | Additional annotations to add to the ingress | | rest-proxy.ingress.enabled | bool | `false` | Whether to enable the ingress | | rest-proxy.ingress.hostname | string | None, must be set if ingress is enabled | Ingress hostname | diff --git a/applications/sasquatch/charts/rest-proxy/README.md b/applications/sasquatch/charts/rest-proxy/README.md index eea798d3ae..baeb2a47f0 100644 --- a/applications/sasquatch/charts/rest-proxy/README.md +++ 
b/applications/sasquatch/charts/rest-proxy/README.md @@ -16,7 +16,7 @@ A subchart to deploy Confluent REST proxy for Sasquatch. | heapOptions | string | `"-Xms512M -Xmx512M"` | Kafka REST proxy JVM Heap Option | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"confluentinc/cp-kafka-rest"` | Kafka REST proxy image repository | -| image.tag | string | `"7.7.1"` | Kafka REST proxy image tag | +| image.tag | string | `"7.8.0"` | Kafka REST proxy image tag | | ingress.annotations | object | See `values.yaml` | Additional annotations to add to the ingress | | ingress.enabled | bool | `false` | Whether to enable the ingress | | ingress.hostname | string | None, must be set if ingress is enabled | Ingress hostname | diff --git a/applications/sasquatch/charts/rest-proxy/values.yaml b/applications/sasquatch/charts/rest-proxy/values.yaml index ef0cd8cbac..b61fc87780 100644 --- a/applications/sasquatch/charts/rest-proxy/values.yaml +++ b/applications/sasquatch/charts/rest-proxy/values.yaml @@ -11,7 +11,7 @@ image: pullPolicy: IfNotPresent # -- Kafka REST proxy image tag - tag: 7.7.1 + tag: 7.8.0 service: # -- Kafka REST proxy service port From ddc3521760ef977993da8acf360b1b11aacabd79 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 2 Dec 2024 10:54:40 -0800 Subject: [PATCH 563/567] Fix Gafaelfawr service name for ssotap and livetap --- applications/livetap/README.md | 2 +- applications/livetap/values.yaml | 2 +- applications/ssotap/README.md | 2 +- applications/ssotap/values.yaml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/livetap/README.md b/applications/livetap/README.md index d5a049e88c..221ada8fcc 100644 --- a/applications/livetap/README.md +++ b/applications/livetap/README.md @@ -15,7 +15,7 @@ IVOA TAP service | cadc-tap.config.pg.database | string | `"lsstdb1"` | Postgres database to connect to | | cadc-tap.config.pg.host | string | `"mock-pg:5432"` (the mock pg) | Postgres 
hostname:port to connect to | | cadc-tap.config.pg.username | string | `"rubin"` | Postgres username to use to connect | -| cadc-tap.config.service | string | `"livetap"` | Name of the service from Gafaelfawr's perspective | +| cadc-tap.config.serviceName | string | `"livetap"` | Name of the service from Gafaelfawr's perspective | | cadc-tap.config.vaultSecretName | string | `"livetap"` | Vault secret name: the final key in the vault path | | cadc-tap.ingress.path | string | `"live"` | Ingress path that should be routed to this service | | global.baseUrl | string | Set by Argo CD | Base URL for the environment | diff --git a/applications/livetap/values.yaml b/applications/livetap/values.yaml index d397e4eb58..778b85d536 100644 --- a/applications/livetap/values.yaml +++ b/applications/livetap/values.yaml @@ -8,7 +8,7 @@ cadc-tap: backend: "pg" # -- Name of the service from Gafaelfawr's perspective - service: "livetap" + serviceName: "livetap" pg: # -- Postgres hostname:port to connect to diff --git a/applications/ssotap/README.md b/applications/ssotap/README.md index 1fb274fc57..c436610a22 100644 --- a/applications/ssotap/README.md +++ b/applications/ssotap/README.md @@ -15,7 +15,7 @@ IVOA TAP service for Solar System Objects | cadc-tap.config.pg.database | string | `"dp03_catalogs"` | Postgres database to connect to | | cadc-tap.config.pg.host | string | `"usdf-pg-catalogs.slac.stanford.edu:5432"` | Postgres hostname:port to connect to | | cadc-tap.config.pg.username | string | `"dp03"` | Postgres username to use to connect | -| cadc-tap.config.service | string | `"ssotap"` | Name of the service from Gafaelfawr's perspective | +| cadc-tap.config.serviceName | string | `"ssotap"` | Name of the service from Gafaelfawr's perspective | | cadc-tap.config.vaultSecretName | string | `"ssotap"` | Vault secret name: the final key in the vault path | | cadc-tap.ingress.path | string | `"ssotap"` | Ingress path that should be routed to this service | | 
cadc-tap.serviceAccount.name | string | `"ssotap"` | Name of the Kubernetes `ServiceAccount`, used for CloudSQL access | diff --git a/applications/ssotap/values.yaml b/applications/ssotap/values.yaml index 2a9372f439..e70295723c 100644 --- a/applications/ssotap/values.yaml +++ b/applications/ssotap/values.yaml @@ -8,7 +8,7 @@ cadc-tap: backend: "pg" # -- Name of the service from Gafaelfawr's perspective - service: "ssotap" + serviceName: "ssotap" pg: # -- Postgres hostname:port to connect to From f405684a5cf53f22fa8b45eccfea8dc377dd0840 Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 2 Dec 2024 10:56:12 -0800 Subject: [PATCH 564/567] Update Python and pre-commit dependencies --- .pre-commit-config.yaml | 4 +- requirements/dev.txt | 230 +++++++++++++++++++++------------------- requirements/main.txt | 12 +-- requirements/tox.txt | 44 ++++---- 4 files changed, 150 insertions(+), 140 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5a4d453fff..d65cf43ef0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,7 +14,7 @@ repos: - -c=.yamllint.yml - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.29.4 + rev: 0.30.0 hooks: - id: check-jsonschema files: ^applications/.*/secrets(-[^./-]+)?\.yaml @@ -46,7 +46,7 @@ repos: - --template-files=../helm-docs.md.gotmpl - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.0 + rev: v0.8.1 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/requirements/dev.txt b/requirements/dev.txt index 367b8ee354..36fff76eba 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -14,9 +14,9 @@ appnope==0.1.4 ; platform_system == 'Darwin' \ --hash=sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee \ --hash=sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c # via ipykernel -asttokens==2.4.1 \ - --hash=sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24 \ - 
--hash=sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0 +asttokens==3.0.0 \ + --hash=sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7 \ + --hash=sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2 # via stack-data attrs==24.2.0 \ --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ @@ -367,9 +367,9 @@ executing==2.1.0 \ --hash=sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf \ --hash=sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab # via stack-data -fastjsonschema==2.20.0 \ - --hash=sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23 \ - --hash=sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a +fastjsonschema==2.21.1 \ + --hash=sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4 \ + --hash=sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667 # via nbformat gitdb==4.0.11 \ --hash=sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4 \ @@ -487,9 +487,9 @@ ipykernel==6.29.5 \ --hash=sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5 \ --hash=sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215 # via myst-nb -ipython==8.29.0 \ - --hash=sha256:0188a1bd83267192123ccea7f4a8ed0a78910535dbaa3f37671dca76ebd429c8 \ - --hash=sha256:40b60e15b22591450eef73e40a027cf77bd652e757523eebc5bd7c7c498290eb +ipython==8.30.0 \ + --hash=sha256:85ec56a7e20f6c38fce7727dcca699ae4ffc85985aa7b23635a8008f918ae321 \ + --hash=sha256:cb0a405a306d2995a5cbb9901894d240784a9f341394c6ba3f4fe8c6eb89ff6e # via # ipykernel # myst-nb @@ -677,9 +677,9 @@ myst-parser==4.0.0 \ # via # documenteer # myst-nb -nbclient==0.10.0 \ - --hash=sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09 \ - --hash=sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f +nbclient==0.10.1 \ + 
--hash=sha256:3e93e348ab27e712acd46fccd809139e356eb9a31aab641d1a7991a6eb4e6f68 \ + --hash=sha256:949019b9240d66897e442888cfb618f69ef23dc71c01cb5fced8499c2cfc084d # via # jupyter-cache # myst-nb @@ -766,9 +766,9 @@ pycparser==2.22 ; implementation_name == 'pypy' \ # via # -c requirements/main.txt # cffi -pydantic==2.10.1 \ - --hash=sha256:a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560 \ - --hash=sha256:a8d20db84de64cf4a7d59e899c2caf0fe9d660c7cfc482528e7020d7dd189a7e +pydantic==2.10.2 \ + --hash=sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa \ + --hash=sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e # via # -c requirements/main.txt # autodoc-pydantic @@ -897,9 +897,9 @@ pygments==2.18.0 \ pylatexenc==2.10 \ --hash=sha256:3dd8fd84eb46dc30bee1e23eaab8d8fb5a7f507347b23e5f38ad9675c84f40d3 # via documenteer -pytest==8.3.3 \ - --hash=sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181 \ - --hash=sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2 +pytest==8.3.4 \ + --hash=sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6 \ + --hash=sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761 # via # -r requirements/dev.in # pytest-cov @@ -1132,97 +1132,108 @@ requests==2.32.3 \ # documenteer # sphinx # sphinxcontrib-youtube -rpds-py==0.21.0 \ - --hash=sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba \ - --hash=sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d \ - --hash=sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e \ - --hash=sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a \ - --hash=sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202 \ - --hash=sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271 \ - --hash=sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250 \ - 
--hash=sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d \ - --hash=sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928 \ - --hash=sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0 \ - --hash=sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d \ - --hash=sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333 \ - --hash=sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e \ - --hash=sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a \ - --hash=sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18 \ - --hash=sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044 \ - --hash=sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677 \ - --hash=sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664 \ - --hash=sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75 \ - --hash=sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89 \ - --hash=sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027 \ - --hash=sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9 \ - --hash=sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e \ - --hash=sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8 \ - --hash=sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44 \ - --hash=sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3 \ - --hash=sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95 \ - --hash=sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd \ - --hash=sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab \ - --hash=sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a \ - --hash=sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560 \ - 
--hash=sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035 \ - --hash=sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919 \ - --hash=sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c \ - --hash=sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266 \ - --hash=sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e \ - --hash=sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592 \ - --hash=sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9 \ - --hash=sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3 \ - --hash=sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624 \ - --hash=sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9 \ - --hash=sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b \ - --hash=sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f \ - --hash=sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca \ - --hash=sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1 \ - --hash=sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8 \ - --hash=sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590 \ - --hash=sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed \ - --hash=sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952 \ - --hash=sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11 \ - --hash=sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061 \ - --hash=sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c \ - --hash=sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74 \ - --hash=sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c \ - --hash=sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94 \ - 
--hash=sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c \ - --hash=sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8 \ - --hash=sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf \ - --hash=sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a \ - --hash=sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5 \ - --hash=sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6 \ - --hash=sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5 \ - --hash=sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3 \ - --hash=sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed \ - --hash=sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87 \ - --hash=sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b \ - --hash=sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72 \ - --hash=sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05 \ - --hash=sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed \ - --hash=sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f \ - --hash=sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c \ - --hash=sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153 \ - --hash=sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b \ - --hash=sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0 \ - --hash=sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d \ - --hash=sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d \ - --hash=sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e \ - --hash=sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e \ - --hash=sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd \ - 
--hash=sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682 \ - --hash=sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4 \ - --hash=sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db \ - --hash=sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976 \ - --hash=sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937 \ - --hash=sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1 \ - --hash=sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb \ - --hash=sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a \ - --hash=sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7 \ - --hash=sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356 \ - --hash=sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be +rpds-py==0.22.0 \ + --hash=sha256:034964ea0ea09645bdde13038b38abb14be0aa747f20fcfab6181207dd9e0483 \ + --hash=sha256:0686f2c16eafdc2c6b4ce6e86e5b3092e87db09ae64be2787616444eb35b9756 \ + --hash=sha256:0903ffdb5b9007e503203b6285e4ff0faf96d875c19f1d103b475acf7d9f7311 \ + --hash=sha256:1212cb231f2002934cd8d71a0d718fdd9d9a2dd671e0feef8501038df3508026 \ + --hash=sha256:1357c3092702078b7782b6ebd5ba9b22c1a291c34fbf9d8f1a48237466ac7758 \ + --hash=sha256:1a6cc4eb1e86364331928acafb2bb41d8ab735ca3caf2d6019b9f6dac3f4f65d \ + --hash=sha256:208ce1d8e3af138d1d9b21d7206356b7f29b96675e0113aea652cf024e4ddfdc \ + --hash=sha256:2498ff422823be087b48bc82710deb87ac34f6b7c8034ee39920647647de1e60 \ + --hash=sha256:24c28df05bd284879d0fac850ba697077d2a33b7ebcaea6318d6b6cdfdc86ddc \ + --hash=sha256:2a57300cc8b034c5707085249efd09f19116bb80278d0ec925d7f3710165c510 \ + --hash=sha256:2d2fc3ab021be3e0b5aec6d4164f2689d231b8bfc5185cc454314746aa4aee72 \ + --hash=sha256:2f513758e7cda8bc262e80299a8e3395d7ef7f4ae705be62632f229bc6c33208 \ + 
--hash=sha256:306da3dfa174b489a3fc63b0872e2226a5ddf94c59875a770d72aff945d5ed96 \ + --hash=sha256:326e42f2b49462e05f8527a1311ce98f9f97c484b3e443ec0ea4638bed3aebcf \ + --hash=sha256:32a0e24cab2daae0503b06666d516e90a080c1a95aff0406b9f03c6489177c4b \ + --hash=sha256:32de71c393f126d8203e9815557c7ff4d72ed1ad3aa3f52f6c7938413176750a \ + --hash=sha256:341a07a4b55126bfae68c9bf24220a73d456111e5eb3dcbdab9fd16de2341224 \ + --hash=sha256:38cacf1f378571450576f2c8ce87da6f3fddc59d744de5c12b37acc23285b1e1 \ + --hash=sha256:3b94b074dcce39976db22ea75c7aea8b22d95e6d3b62f76e20e1179a278521d8 \ + --hash=sha256:3dc7c64b56b82428894f056e9ff6e8ee917ff74fc26b65211a33602c2372e928 \ + --hash=sha256:3f7a048ec1ebc991331d709be4884dc318c9eaafa66dcde8be0933ac0e702149 \ + --hash=sha256:41f65a97bf2c4b161c9f8f89bc37058346bec9b36e373c8ad00a16c957bff625 \ + --hash=sha256:48c95997af9314f4034fe5ba2d837399e786586e220835a578d28fe8161e6ae5 \ + --hash=sha256:49e084d47a66027ac72844f9f52f13d347a9a1f05d4f84381b420e47f836a7fd \ + --hash=sha256:4b5d17d8f5b885ce50e0cda85f99c0719e365e98b587338535fa566a48375afb \ + --hash=sha256:4c0321bc03a1c513eca1837e3bba948b975bcf3a172aebc197ab3573207f137a \ + --hash=sha256:4e7c9aa2353eb0b0d845323857197daa036c2ff8624df990b0d886d22a8f665e \ + --hash=sha256:4fc4824e38c1e91a73bc820e7caacaf19d0acd557465aceef0420ca59489b390 \ + --hash=sha256:54d8f94dec5765a9edc19610fecf0fdf9cab36cbb9def1213188215f735a6f98 \ + --hash=sha256:574c5c94213bc9990805bfd7e4ba3826d3c098516cbc19f0d0ef0433ad93fa06 \ + --hash=sha256:59e63da174ff287db05ef7c21d75974a5bac727ed60452aeb3a14278477842a8 \ + --hash=sha256:5ae7927cd2b869ca4dc645169d8af5494a29c99afd0ea0f24dd00c811ab1d8b8 \ + --hash=sha256:5f21e1278c9456cd601832375c778ca44614d3433996488221a56572c223f04a \ + --hash=sha256:5fdf91a7c07f40e47b193f2acae0ed9da35d09325d7c3c3279f722b7cbf3d264 \ + --hash=sha256:62ab12fe03ffc49978d29de9c31bbb216610157f7e5ca8e172fed6642aead3be \ + --hash=sha256:632d2fdddd9fbe3ac8896a119fd18a71fc95ca9c4cbe5223096c142d8c4a2b1d \ + 
--hash=sha256:64a0c965a1e299c9b280006bdb15c276c427c45360aed676305dc36bcaa4d13c \ + --hash=sha256:67e013a17a3db4d98cc228fd5aeb36a51b0f5cf7330b9102a552060f1fe4e560 \ + --hash=sha256:6b639a19e1791b646d27f15d17530a51722cc728d43b2dff3aeb904f92d91bac \ + --hash=sha256:6b6e4bcfc32f831bfe3d6d8a5acedfbfd5e252a03c83fa24813b277a3a8a13ca \ + --hash=sha256:7539dbb8f705e13629ba6f23388976aad809e387f32a6e5c0712e4e8d9bfcce7 \ + --hash=sha256:758098b38c344d9a7f279baf0689261777e601f620078ef5afdc9bd3339965c3 \ + --hash=sha256:762206ba3bf1d6c8c9e0055871d3c0d5b074b7c3120193e6c067e7866f106ab1 \ + --hash=sha256:771c9a3851beaa617d8c8115d65f834a2b52490f42ee2b88b13f1fc5529e9e0c \ + --hash=sha256:81e7a27365b02fe70a77f1365376879917235b3fec551d19b4c91b51d0bc1d07 \ + --hash=sha256:8338db3c76833d02dc21c3e2c42534091341d26e4f7ba32c6032bb558a02e07b \ + --hash=sha256:8426f97117b914b9bfb2a7bd46edc148e8defda728a55a5df3a564abe70cd7a4 \ + --hash=sha256:842855bbb113a19c393c6de5aa6ed9a26c6b13c2fead5e49114d39f0d08b94d8 \ + --hash=sha256:87453d491369cd8018016d2714a13e8461975161703c18ee31eecf087a8ae5d4 \ + --hash=sha256:875fe8dffb43c20f68379ee098b035a7038d7903c795d46715f66575a7050b19 \ + --hash=sha256:8ad4dfda52e64af3202ceb2143a62deba97894b71c64a4405ee80f6b3ea77285 \ + --hash=sha256:8c48fc7458fe3a74dcdf56ba3534ff41bd421f69436df09ff3497fdaac18b431 \ + --hash=sha256:8cbb040fec8eddd5a6a75e737fd73c9ce37e51f94bacdd0b178d0174a4758395 \ + --hash=sha256:92d28a608127b357da47c99e0d0e0655ca2060286540fe9f2a25a2e8ac666e05 \ + --hash=sha256:931bf3d0705b2834fed29354f35170fa022fe22a95542b61b7c66aca5f8a224f \ + --hash=sha256:93bbd66f46dddc41e8c656130c97c0fb515e0fa44e1eebb2592769dbbd41b2f5 \ + --hash=sha256:9ad4640a409bc2b7d22b7921e7660f0db96c5c8c69fbb2e8f3261d4f71d33983 \ + --hash=sha256:a4366f264fa60d3c109f0b27af0cd9eb8d46746bd70bd3d9d425f035b6c7e286 \ + --hash=sha256:a73ed43d64209e853bba567a543170267a5cd64f359540b0ca2d597e329ba172 \ + --hash=sha256:a810a57ce5e8ecf8eac6ec4dab534ff80c34e5a2c31db60e992009cd20f58e0f \ + 
--hash=sha256:b4660943030406aaa40ec9f51960dd88049903d9536bc3c8ebb5cc4e1f119bbe \ + --hash=sha256:b8906f537978da3f7f0bd1ba37b69f6a877bb43312023b086582707d2835bf2f \ + --hash=sha256:b91bfef5daa2a5a4fe62f8d317fc91a626073639f951f851bd2cb252d01bc6c5 \ + --hash=sha256:ba1fc34d0b2f6fd53377a4c954116251eba6d076bf64f903311f4a7d27d10acd \ + --hash=sha256:ba235e00e0878ba1080b0f2a761f143b2a2d1c354f3d8e507fbf2f3de401bf18 \ + --hash=sha256:bb11809b0de643a292a82f728c494a2bbef0e30a7c42d37464abbd6bef7ca7b1 \ + --hash=sha256:c17b43fe9c6da16885e3fe28922bcd1a029e61631fb771c7d501019b40bcc904 \ + --hash=sha256:c1c21030ed494deb10226f90e2dbd84a012d59810c409832714a3dd576527be2 \ + --hash=sha256:c398a5a8e258dfdc5ea2aa4e5aa2ca3207f654a8eb268693dd1a76939074a588 \ + --hash=sha256:c637188b930175c256f13adbfc427b83ec7e64476d1ec9d6608f312bb84e06c3 \ + --hash=sha256:c7b4450093c0c909299770226fb0285be47b0a57545bae25b5c4e51566b0e587 \ + --hash=sha256:c8fd7a16f7a047e06c747cfcf2acef3ac316132df1c6077445b29ee6f3f3a70b \ + --hash=sha256:ca505fd3767a09a139737f3278bc8a485cb64043062da89bcba27e2f2ea78d33 \ + --hash=sha256:d1522025cda9e57329aade769f56e5793b2a5da7759a21914ee10e67e17e601e \ + --hash=sha256:d276280649305c1da6cdd84585d48ae1f0efa67434d8b10d2df95228e59a05bb \ + --hash=sha256:d33622dc63c295788eed09dbb1d11bed178909d3267b02d873116ee6be368244 \ + --hash=sha256:d4f2af3107fe4dc40c0d1a2409863f5249c6796398a1d83c1d99a0b3fa6cfb8d \ + --hash=sha256:d5469b347445d1c31105f33e7bfc9a8ba213d48e42641a610dda65bf9e3c83f5 \ + --hash=sha256:d80fd710b3307a3c63809048b72c536689b9b0b31a2518339c3f1a4d29c73d7a \ + --hash=sha256:d9bb9242b38a664f307b3b897f093896f7ed51ef4fe25a0502e5a368de9151ea \ + --hash=sha256:d9ceca96df54cb1675a0b7f52f1c6d5d1df62c5b40741ba211780f1b05a282a2 \ + --hash=sha256:dc2c00acdf68f1f69a476b770af311a7dc3955b7de228b04a40bcc51ac4d743b \ + --hash=sha256:dfdabdf8519c93908b2bf0f87c3f86f9e88bab279fb4acfd0907519ca5a1739f \ + --hash=sha256:e04919ffa9a728c446b27b6b625fa1d00ece221bdb9d633e978a7e0353a12c0e \ + 
--hash=sha256:e0abcce5e874474d3eab5ad53be03dae2abe651d248bdeaabe83708e82969e78 \ + --hash=sha256:e1c04fb380bc8efaae2fdf17ed6cd5d223da78a8b0b18a610f53d4c5d6e31dfd \ + --hash=sha256:e23dcdd4b2ff9c6b3317ea7921b210d39592f8ca1cdea58ada25b202c65c0a69 \ + --hash=sha256:e34a3e665d38d0749072e6565400c8ce9abae976e338919a0dfbfb0e1ba43068 \ + --hash=sha256:e6da2e0500742e0f157f005924a0589f2e2dcbfdd6cd0cc0abce367433e989be \ + --hash=sha256:e9aa4af6b879bb75a3c7766fbf49d77f4097dd12b548ecbbd8b3f85caa833281 \ + --hash=sha256:e9bbdba9e75b1a9ee1dd1335034dad998ef1acc08492226c6fd50aa773bdfa7d \ + --hash=sha256:e9d4293b21c69ee4f9e1a99ac4f772951d345611c614a0cfae2ec6b565279bc9 \ + --hash=sha256:eadd2417e83a77ce3ae4a0efd08cb0ebdfd317b6406d11020354a53ad458ec84 \ + --hash=sha256:ed0102146574e5e9f079b2e1a06e6b5b12a691f9c74a65b93b7f3d4feda566c6 \ + --hash=sha256:f0fb8efc9e579acf1e556fd86277fecec320c21ca9b5d39db96433ad8c45bc4a \ + --hash=sha256:f4e9946c8c7def17e4fcb5eddb14c4eb6ebc7f6f309075e6c8d23b133c104607 \ + --hash=sha256:f7649c8b8e4bd1ccc5fcbd51a855d57a617deeba19c66e3d04b1abecc61036b2 \ + --hash=sha256:f980a0640599a74f27fd9d50c84c293f1cb7afc2046c5c6d3efaf8ec7cdbc326 \ + --hash=sha256:f9dc2113e0cf0dd637751ca736186fca63664939ceb9f9f67e93ade88c69c0c9 \ + --hash=sha256:fde778947304e55fc732bc8ea5c6063e74244ac1808471cb498983a210aaf62c \ + --hash=sha256:fe23687924b25a2dee52fab15976fd6577ed8518072bcda9ff2e2b88ab1f168b # via # jsonschema # referencing @@ -1237,7 +1248,6 @@ six==1.16.0 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via # -c requirements/main.txt - # asttokens # pybtex # python-dateutil # sphinxcontrib-redoc diff --git a/requirements/main.txt b/requirements/main.txt index bbd62ceaae..714a655a93 100644 --- a/requirements/main.txt +++ b/requirements/main.txt @@ -378,9 +378,9 @@ pycparser==2.22 ; platform_python_implementation != 'PyPy' \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ 
--hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pydantic==2.10.1 \ - --hash=sha256:a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560 \ - --hash=sha256:a8d20db84de64cf4a7d59e899c2caf0fe9d660c7cfc482528e7020d7dd189a7e +pydantic==2.10.2 \ + --hash=sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa \ + --hash=sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e # via # phalanx (pyproject.toml) # fastapi @@ -489,9 +489,9 @@ pydantic-core==2.27.1 \ # via # pydantic # safir -pyjwt==2.10.0 \ - --hash=sha256:543b77207db656de204372350926bed5a86201c4cbff159f623f79c7bb487a15 \ - --hash=sha256:7628a7eb7938959ac1b26e819a1df0fd3259505627b575e4bad6d08f76db695c +pyjwt==2.10.1 \ + --hash=sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953 \ + --hash=sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb # via gidgethub python-dateutil==2.9.0.post0 \ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ diff --git a/requirements/tox.txt b/requirements/tox.txt index dcbc6bf87d..136300fdd7 100644 --- a/requirements/tox.txt +++ b/requirements/tox.txt @@ -60,27 +60,27 @@ tox-uv==1.16.0 \ --hash=sha256:71b2e2fa6c35c1360b91a302df1d65b3e5a1f656b321c5ebf7b84545804c9f01 \ --hash=sha256:e6f0b525a687e745ab878d07cbf5c7e85d582028d4a7c8935f95e84350651432 # via -r requirements/tox.in -uv==0.5.4 \ - --hash=sha256:05b45c7eefb178dcdab0d49cd642fb7487377d00727102a8d6d306cc034c0d83 \ - --hash=sha256:2118bb99cbc9787cb5e5cc4a507201e25a3fe88a9f389e8ffb84f242d96038c2 \ - --hash=sha256:30ce031e36c54d4ba791d743d992d0a4fd8d70480db781d30a2f6f5125f39194 \ - --hash=sha256:4432215deb8d5c1ccab17ee51cb80f5de1a20865ee02df47532f87442a3d6a58 \ - --hash=sha256:493aedc3c758bbaede83ecc8d5f7e6a9279ebec151c7f756aa9ea898c73f8ddb \ - --hash=sha256:69079e900bd26b0f65069ac6fa684c74662ed87121c076f2b1cbcf042539034c \ - 
--hash=sha256:8d7a4a3df943a7c16cd032ccbaab8ed21ff64f4cb090b3a0a15a8b7502ccd876 \ - --hash=sha256:928ed95fefe4e1338d0a7ad2f6b635de59e2ec92adaed4a267f7501a3b252263 \ - --hash=sha256:a79a0885df364b897da44aae308e6ed9cca3a189d455cf1c205bd6f7b03daafa \ - --hash=sha256:ca72e6a4c3c6b8b5605867e16a7f767f5c99b7f526de6bbb903c60eb44fd1e01 \ - --hash=sha256:cd7a5a3a36f975a7678f27849a2d49bafe7272143d938e9b6f3bf28392a3ba00 \ - --hash=sha256:dd2df2ba823e6684230ab4c581f2320be38d7f46de11ce21d2dbba631470d7b6 \ - --hash=sha256:df3cb58b7da91f4fc647d09c3e96006cd6c7bd424a81ce2308a58593c6887c39 \ - --hash=sha256:ed5659cde099f39995f4cb793fd939d2260b4a26e4e29412c91e7537f53d8d25 \ - --hash=sha256:f07e5e0df40a09154007da41b76932671333f9fecb0735c698b19da25aa08927 \ - --hash=sha256:f40c6c6c3a1b398b56d3a8b28f7b455ac1ce4cbb1469f8d35d3bbc804d83daa4 \ - --hash=sha256:f511faf719b797ef0f14688f1abe20b3fd126209cf58512354d1813249745119 \ - --hash=sha256:f806af0ee451a81099c449c4cff0e813056fdf7dd264f3d3a8fd321b17ff9efc +uv==0.5.5 \ + --hash=sha256:0314a4b9a25bf00afe4e5472c338c8c6bd34688c23d63ce1ad35462cf087b492 \ + --hash=sha256:0f7f04ae5a5430873d8610d8ea0a5d35df92e60bf701f80b3cf24857e0ac5e72 \ + --hash=sha256:29286cd6b9f8e040d02894a67c6b6304811ea393ca9dfade109e93cf4b3b842c \ + --hash=sha256:34e894c922ba29a59bbe812a458a7095a575f76b87dfc362e0c3f4f650d6f631 \ + --hash=sha256:365715e7247c2cd8ef661e8f96927b181248f689c07e48b076c9dbc78a4a0877 \ + --hash=sha256:3dee9517ebba13d07d8f139c439c5ff63e438d31ebda4d7eb0af8d0f0cc6a181 \ + --hash=sha256:553901e95cb5a4da1da19e288c29c5f886793f981750400e5cef48e3031b970b \ + --hash=sha256:59d53cce11718ce5d5367afc8c93ebcfc5e1cddfa4a44aedbf08d08d9b738381 \ + --hash=sha256:5a47345ccafc0105b2f0cc22fcb0bb05be4d0e60df67f5beea28069b0bb372c8 \ + --hash=sha256:69e15f24493d86c3a2da3764891e35a033ceda09404c1f9b386671d509db95f3 \ + --hash=sha256:7f8db4bdf7eaef6be271457c4b2a167f41ad115434944a09f5034018a29b4093 \ + --hash=sha256:9af7018430da1f0960eee1592c820c343e2619f2d71f66c3be62da330826c537 
\ + --hash=sha256:a4f0c7647187044056dc6f6f5d31b01f445d8695eb7d2f442b29fd5c9216a56f \ + --hash=sha256:b55d507bfe2bd2330c41680e4b0077972381f40975a59b53007254196abc4477 \ + --hash=sha256:d091e88a9c2c830169c3ccf95fd972759e0ab629dacc2d5eff525e5ba3583904 \ + --hash=sha256:f0bfc7ced2fe0c85b3070dfa219072a1406133e18aab2f2fe10b6455ede0f8b2 \ + --hash=sha256:f4e9ddcffc29f009f692cda699912b02f6a12089d741b71d2fcd0b181eb71c5d \ + --hash=sha256:f5569798fc8eaad58fbb4fb70ced8f09ebe607fbbfb95fa42c559f57bbe0cabd # via tox-uv -virtualenv==20.27.1 \ - --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ - --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 +virtualenv==20.28.0 \ + --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ + --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa # via tox From 2aa62a8ec08729e0d67ba9b2a2c627874054bdef Mon Sep 17 00:00:00 2001 From: Sebastian Aranda Sanchez Date: Mon, 2 Dec 2024 16:26:19 -0300 Subject: [PATCH 565/567] Update rubintv app version to v2.5.5 for summit and usdf production deployments. 
--- applications/rubintv/values-summit.yaml | 2 +- applications/rubintv/values-usdfprod.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/applications/rubintv/values-summit.yaml b/applications/rubintv/values-summit.yaml index 2f0cc03489..e0ae0f7e77 100644 --- a/applications/rubintv/values-summit.yaml +++ b/applications/rubintv/values-summit.yaml @@ -20,7 +20,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.5.4 + tag: v2.5.5 pullPolicy: Always workers: diff --git a/applications/rubintv/values-usdfprod.yaml b/applications/rubintv/values-usdfprod.yaml index c86a44f612..1d920e76cc 100644 --- a/applications/rubintv/values-usdfprod.yaml +++ b/applications/rubintv/values-usdfprod.yaml @@ -18,7 +18,7 @@ rubintv: - name: DDV_CLIENT_WS_ADDRESS value: "rubintv/ws/ddv" image: - tag: v2.5.4 + tag: v2.5.5 pullPolicy: Always workers: From 1cab9994243cb3d3a13e0cc5071bcf8ab10d785a Mon Sep 17 00:00:00 2001 From: Russ Allbery Date: Mon, 2 Dec 2024 15:35:31 -0800 Subject: [PATCH 566/567] Bump Unfurlbot to version 0.3.2 Reorganize `values.yaml` somewhat to match the current starter and remove the obsolete (unused) autoscaling configuration. If we later want to autoscale this service, we'll need to write a new configuration for the current API version anyway. Add labels to a few Kubernetes resources that didn't have them. 
--- applications/unfurlbot/Chart.yaml | 2 +- applications/unfurlbot/README.md | 6 +--- applications/unfurlbot/secrets.yaml | 3 +- .../unfurlbot/templates/deployment.yaml | 2 -- applications/unfurlbot/templates/hpa.yaml | 28 ------------------- .../unfurlbot/templates/kafkaaccess.yaml | 18 ++++++------ .../unfurlbot/templates/networkpolicy.yaml | 2 ++ applications/unfurlbot/values.yaml | 26 ++++------------- 8 files changed, 22 insertions(+), 65 deletions(-) delete mode 100644 applications/unfurlbot/templates/hpa.yaml diff --git a/applications/unfurlbot/Chart.yaml b/applications/unfurlbot/Chart.yaml index 9f739e437f..f6421a6498 100644 --- a/applications/unfurlbot/Chart.yaml +++ b/applications/unfurlbot/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v2 -appVersion: "0.3.1" +appVersion: "0.3.2" description: Squarebot backend that unfurls Jira issues. name: unfurlbot sources: diff --git a/applications/unfurlbot/README.md b/applications/unfurlbot/README.md index 4d615367d9..fb494f08bb 100644 --- a/applications/unfurlbot/README.md +++ b/applications/unfurlbot/README.md @@ -11,10 +11,6 @@ Squarebot backend that unfurls Jira issues. 
| Key | Type | Default | Description | |-----|------|---------|-------------| | affinity | object | `{}` | Affinity rules for the unfurlbot deployment pod | -| autoscaling.enabled | bool | `false` | Enable autoscaling of unfurlbot deployment | -| autoscaling.maxReplicas | int | `100` | Maximum number of unfurlbot deployment pods | -| autoscaling.minReplicas | int | `1` | Minimum number of unfurlbot deployment pods | -| autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization of unfurlbot deployment pods | | config.jiraProjects | string | See `values.yaml` | Names of Jira projects to unfurl (comma-separated) | | config.jiraUrl | string | `"https://rubinobs.atlassian.net/"` | Jira base URL | | config.logLevel | string | `"INFO"` | Logging level: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL" | @@ -29,7 +25,7 @@ Squarebot backend that unfurls Jira issues. | global.vaultSecretsPath | string | Set by Argo CD | Base path for Vault secrets | | image.pullPolicy | string | `"IfNotPresent"` | Pull policy for the unfurlbot image | | image.repository | string | `"ghcr.io/lsst-sqre/unfurlbot"` | Image to use in the unfurlbot deployment | -| image.tag | string | `""` | Overrides the image tag whose default is the chart appVersion. | +| image.tag | string | The appVersion of the chart | Image tag to use | | ingress.annotations | object | `{}` | Additional annotations for the ingress rule | | nodeSelector | object | `{}` | Node selection rules for the unfurlbot deployment pod | | podAnnotations | object | `{}` | Annotations for the unfurlbot deployment pod | diff --git a/applications/unfurlbot/secrets.yaml b/applications/unfurlbot/secrets.yaml index 1ccca0b2c9..da469635b6 100644 --- a/applications/unfurlbot/secrets.yaml +++ b/applications/unfurlbot/secrets.yaml @@ -20,7 +20,8 @@ UNFURLBOT_SLACK_APP_ID: key: SQUAREBOT_SLACK_APP_ID UNFURLBOT_SLACK_TOKEN: description: >- - The Slack bot user oauth token for the Slack App shared by all Squarebot services. 
+ The Slack bot user OAuth token for the Slack App shared by all Squarebot + services. copy: application: squarebot key: SQUAREBOT_SLACK_TOKEN diff --git a/applications/unfurlbot/templates/deployment.yaml b/applications/unfurlbot/templates/deployment.yaml index e511d62527..018fbb3c7f 100644 --- a/applications/unfurlbot/templates/deployment.yaml +++ b/applications/unfurlbot/templates/deployment.yaml @@ -7,9 +7,7 @@ metadata: app.kubernetes.io/component: "server" app.kubernetes.io/part-of: "unfurlbot" spec: - {{- if not .Values.autoscaling.enabled }} replicas: {{ .Values.replicaCount }} - {{- end }} selector: matchLabels: {{- include "unfurlbot.selectorLabels" . | nindent 6 }} diff --git a/applications/unfurlbot/templates/hpa.yaml b/applications/unfurlbot/templates/hpa.yaml deleted file mode 100644 index 01f98ad397..0000000000 --- a/applications/unfurlbot/templates/hpa.yaml +++ /dev/null @@ -1,28 +0,0 @@ -{{- if .Values.autoscaling.enabled }} -apiVersion: autoscaling/v2beta1 -kind: HorizontalPodAutoscaler -metadata: - name: "unfurlbot" - labels: - {{- include "unfurlbot.labels" . 
| nindent 4 }} -spec: - scaleTargetRef: - apiVersion: apps/v1 - kind: Deployment - name: "unfurlbot" - minReplicas: {{ .Values.autoscaling.minReplicas }} - maxReplicas: {{ .Values.autoscaling.maxReplicas }} - metrics: - {{- if .Values.autoscaling.targetCPUUtilizationPercentage }} - - type: Resource - resource: - name: "cpu" - targetAverageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }} - {{- end }} - {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }} - - type: Resource - resource: - name: "memory" - targetAverageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }} - {{- end }} -{{- end }} diff --git a/applications/unfurlbot/templates/kafkaaccess.yaml b/applications/unfurlbot/templates/kafkaaccess.yaml index 0c1db33801..322ad17ed6 100644 --- a/applications/unfurlbot/templates/kafkaaccess.yaml +++ b/applications/unfurlbot/templates/kafkaaccess.yaml @@ -1,14 +1,16 @@ apiVersion: access.strimzi.io/v1alpha1 kind: KafkaAccess metadata: - name: unfurlbot-kafka + name: "unfurlbot-kafka" + labels: + {{- include "unfurlbot.labels" . | nindent 4 }} spec: kafka: - name: sasquatch - namespace: sasquatch - listener: tls + name: "sasquatch" + namespace: "sasquatch" + listener: "tls" user: - kind: KafkaUser - apiGroup: kafka.strimzi.io - name: unfurlbot - namespace: sasquatch + kind: "KafkaUser" + apiGroup: "kafka.strimzi.io" + name: "unfurlbot" + namespace: "sasquatch" diff --git a/applications/unfurlbot/templates/networkpolicy.yaml b/applications/unfurlbot/templates/networkpolicy.yaml index 914cd78765..bfb84dd0b2 100644 --- a/applications/unfurlbot/templates/networkpolicy.yaml +++ b/applications/unfurlbot/templates/networkpolicy.yaml @@ -2,6 +2,8 @@ apiVersion: networking.k8s.io/v1 kind: NetworkPolicy metadata: name: "unfurlbot" + labels: + {{- include "unfurlbot.labels" . 
| nindent 4 }} spec: podSelector: matchLabels: diff --git a/applications/unfurlbot/values.yaml b/applications/unfurlbot/values.yaml index d27c557272..cb997a6f8e 100644 --- a/applications/unfurlbot/values.yaml +++ b/applications/unfurlbot/values.yaml @@ -12,7 +12,8 @@ image: # -- Pull policy for the unfurlbot image pullPolicy: "IfNotPresent" - # -- Overrides the image tag whose default is the chart appVersion. + # -- Image tag to use + # @default -- The appVersion of the chart tag: "" config: @@ -103,7 +104,6 @@ ingress: annotations: {} redis: - # -- Resource requests and limits for the redis pod # @default -- see `values.yaml` resources: @@ -114,19 +114,11 @@ redis: cpu: "2m" memory: "3Mi" -autoscaling: - # -- Enable autoscaling of unfurlbot deployment - enabled: false - - # -- Minimum number of unfurlbot deployment pods - minReplicas: 1 - - # -- Maximum number of unfurlbot deployment pods - maxReplicas: 100 +# -- Affinity rules for the unfurlbot deployment pod +affinity: {} - # -- Target CPU utilization of unfurlbot deployment pods - targetCPUUtilizationPercentage: 80 - # targetMemoryUtilizationPercentage: 80 +# -- Node selection rules for the unfurlbot deployment pod +nodeSelector: {} # -- Annotations for the unfurlbot deployment pod podAnnotations: {} @@ -141,15 +133,9 @@ resources: cpu: "9m" memory: "60Mi" -# -- Node selection rules for the unfurlbot deployment pod -nodeSelector: {} - # -- Tolerations for the unfurlbot deployment pod tolerations: [] -# -- Affinity rules for the unfurlbot deployment pod -affinity: {} - # The following will be set by parameters injected by Argo CD and should not # be set in the individual environment values files. 
global: From de1b8017d9331d23157465d98337dfa9a6dbebce Mon Sep 17 00:00:00 2001 From: Dave McKay Date: Tue, 3 Dec 2024 14:27:29 +0000 Subject: [PATCH 567/567] Update values.yaml --- charts/cadc-tap/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charts/cadc-tap/values.yaml b/charts/cadc-tap/values.yaml index 8f043c830f..89d9c34a22 100644 --- a/charts/cadc-tap/values.yaml +++ b/charts/cadc-tap/values.yaml @@ -167,7 +167,7 @@ tapSchema: pullPolicy: "IfNotPresent" # -- Tag of TAP schema image - tag: "v3.3.0" + tag: "2.3.0" # -- Resource limits and requests for the TAP schema database pod # @default -- See `values.yaml`