From 245821f2ceb83fea460cacf991c68107b01cfa3a Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Mon, 10 Jun 2024 14:00:09 -0500
Subject: [PATCH 01/23] Add logging statements

---
 panoptes_aggregation/batch_aggregation.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/panoptes_aggregation/batch_aggregation.py b/panoptes_aggregation/batch_aggregation.py
index 330b11f2..6b34f777 100644
--- a/panoptes_aggregation/batch_aggregation.py
+++ b/panoptes_aggregation/batch_aggregation.py
@@ -23,15 +23,20 @@ def run_aggregation(project_id, workflow_id, user_id):
     ba = BatchAggregator(project_id, workflow_id, user_id)

     if not ba.check_permission():
-        print(f'Batch Aggregation: Unauthorized attempt by user {user_id} to aggregate workflow {workflow_id}')
+        print(f'[Batch Aggregation] Unauthorized attempt by user {user_id} to aggregate workflow {workflow_id}')
         # Exit the task gracefully without retrying or erroring
         sys.exit()

+    print(f'[Batch Aggregation] Run beginning for workflow {workflow_id} by user {user_id}')
+
+    print(f'[Batch Aggregation] Saving exports for workflow {workflow_id})')
     ba.save_exports()

+    print(f'[Batch Aggregation] Processing exports for workflow {workflow_id})')
     ba.process_wf_export(ba.wf_csv)
     cls_df = ba.process_cls_export(ba.cls_csv)

+    print(f'[Batch Aggregation] Extacting workflow {workflow_id})')
     extractor_config = workflow_extractor_config(ba.tasks)
     extracted_data = batch_utils.batch_extract(cls_df, extractor_config)

@@ -40,6 +45,7 @@ def run_aggregation(project_id, workflow_id, user_id):
         'survey_extractor': ['survey_reducer']
     }

+    print(f'[Batch Aggregation] Reducing workflow {workflow_id})')
     for task_type, extract_df in extracted_data.items():
         extract_df.to_csv(f'{ba.output_path}/{ba.workflow_id}_{task_type}.csv')
         reducer_list = batch_standard_reducers[task_type]
@@ -55,14 +61,16 @@ def run_aggregation(project_id, workflow_id, user_id):
             reduced_data[reducer].to_csv(filename, mode='a')

     # Upload zip & reduction files to blob storage
+    print(f'[Batch Aggregation] Uploading results for {workflow_id})')
     ba.upload_files()

     # This could catch PanoptesAPIException, but what to do if it fails?
+    print(f'[Batch Aggregation] Updating Panoptes for {workflow_id})')
     success_attrs = {'uuid': ba.id, 'status': 'completed'}
     ba.update_panoptes(success_attrs)

     # STDOUT messages get printed to kubernetes logs
-    print(f'Batch Aggregation: Run successful for workflow {workflow_id} by user {user_id}')
+    print(f'[Batch Aggregation] Run successful for workflow {workflow_id} by user {user_id}')


 class BatchAggregator:

From e084ad8a485d22b3627f59d135fb817363f66bbe Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Mon, 10 Jun 2024 15:01:02 -0500
Subject: [PATCH 02/23] Update celery task namespace

---
 docker-compose.yml      | 4 ++--
 scripts/start-celery.sh | 2 +-
 scripts/start-flower.sh | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index b1c2acd5..5c1574f3 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -29,7 +29,7 @@ services:
       context: ./
       args:
         REVISION: fake-git-sha-id
-    command: celery --app panoptes_aggregation.tasks.celery worker --loglevel=info
+    command: celery --app panoptes_aggregation.batch_aggregation.celery worker --loglevel=info
     volumes:
       - ./:/usr/src/aggregation
     environment:
@@ -45,7 +45,7 @@ services:
   dashboard:
     build: .
-    command: celery --app panoptes_aggregation.tasks.celery flower --port=5555 --broker=redis://redis:6379/0
+    command: celery --app panoptes_aggregation.batch_aggregation.celery flower --port=5555 --broker=redis://redis:6379/0
     ports:
       - 5556:5555
     environment:
diff --git a/scripts/start-celery.sh b/scripts/start-celery.sh
index 082f6d72..3cd41639 100755
--- a/scripts/start-celery.sh
+++ b/scripts/start-celery.sh
@@ -1,3 +1,3 @@
 #!/bin/bash -e

-exec celery --app panoptes_aggregation.tasks.celery worker --loglevel=info
+exec celery --app panoptes_aggregation.batch_aggregation.celery worker --loglevel=info
diff --git a/scripts/start-flower.sh b/scripts/start-flower.sh
index 68789359..bb39483c 100755
--- a/scripts/start-flower.sh
+++ b/scripts/start-flower.sh
@@ -1,4 +1,4 @@
 #!/bin/bash -e

 BROKER=${CELERY_BROKER_URL:='redis://redis:6379/0'}
-exec celery --app panoptes_aggregation.tasks.celery flower --port=5555 --broker=$BROKER
+exec celery --app panoptes_aggregation.batch_aggregation.celery flower --port=5555 --broker=$BROKER

From 0e8c4297abc8ee4834837895d3ad6ee84b6728c7 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Mon, 10 Jun 2024 15:20:23 -0500
Subject: [PATCH 03/23] Add staging deployment template

---
 kubernetes/deployment-staging.yaml | 327 +++++++++++++++++++++++++++++
 1 file changed, 327 insertions(+)
 create mode 100644 kubernetes/deployment-staging.yaml

diff --git a/kubernetes/deployment-staging.yaml b/kubernetes/deployment-staging.yaml
new file mode 100644
index 00000000..1456dd8a
--- /dev/null
+++ b/kubernetes/deployment-staging.yaml
@@ -0,0 +1,327 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: aggregation-caesar-staging
+  labels:
+    app: aggregation-caesar-staging
+spec:
+  selector:
+    matchLabels:
+      app: aggregation-caesar-staging
+  template:
+    metadata:
+      labels:
+        app: aggregation-caesar-staging
+    spec:
+      containers:
+        - name: aggregation-caesar-app-staging
+          image: ghcr.io/zooniverse/aggregation-for-caesar:batch-aggregation-staging
+          ports:
+            - containerPort: 80
+          resources:
+            requests:
+              memory: "500Mi"
+              cpu: "500m"
+            limits:
+              memory: "1000Mi"
+              cpu: "1000m"
+          startupProbe:
+            httpGet:
+              path: /
+              port: 80
+            # wait 6 * 10 seconds(default periodSeconds) for the container to start
+            # after this succeeds once the liveness probe takes over
+            failureThreshold: 6
+          livenessProbe:
+            httpGet:
+              path: /
+              port: 80
+            # allow a longer response time than 1s
+            timeoutSeconds: 10
+          readinessProbe:
+            httpGet:
+              path: /
+              port: 80
+            # start checking for readiness after 20s (to serve traffic)
+            initialDelaySeconds: 20
+            # allow a longer response time than 1s
+            timeoutSeconds: 10
+          env:
+            - name: FLASK_ENV
+              value: production
+            - name: CELERY_BROKER_URL
+              value: redis://aggregation-staging-redis:6379/0
+            - name: CELERY_RESULT_BACKEND
+              value: redis://aggregation-staging-redis:6379/0
+            - name: PANOPTES_URL
+              value: https://panoptes.zooniverse.org/
+            - name: PANOPTES_CLIENT_ID
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: PANOPTES_CLIENT_ID
+            - name: PANOPTES_CLIENT_SECRET
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: PANOPTES_CLIENT_SECRET
+            - name: MAST_AUTH_TOKEN
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: MAST_AUTH_TOKEN
+            - name: MAST_PROD_TOKEN
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: MAST_PROD_TOKEN
+            - name: SENTRY_DSN
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: SENTRY_DSN
+            - name: NEW_RELIC_LICENSE_KEY
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: NEW_RELIC_LICENSE_KEY
+            - name: NEW_RELIC_APP_NAME
+              value: 'Aggregation Caesar Staging'
+          volumeMounts:
+            - name: aggregation-staging-volume
+              mountPath: /usr/src/aggregation/tmp
+      volumes:
+        - name: aggregation-staging-volume
+          persistentVolumeClaim:
+            claimName: aggregation-staging-data-storage
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: aggregation-caesar-staging
+spec:
+  selector:
+    app: aggregation-caesar-staging
+  ports:
+    - protocol: TCP
+      port: 80
+      targetPort: 80
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: aggregation-caesar-staging-celery
+  labels:
+    app: aggregation-caesar-staging-celery
+spec:
+  selector:
+    matchLabels:
+      app: aggregation-caesar-staging-celery
+  template:
+    metadata:
+      labels:
+        app: aggregation-caesar-staging-celery
+    spec:
+      containers:
+        - name: aggregation-caesar-app-staging-celery
+          image: ghcr.io/zooniverse/aggregation-for-caesar:batch-aggregation-staging
+          resources:
+            requests:
+              memory: "500Mi"
+              cpu: "500m"
+            limits:
+              memory: "1000Mi"
+              cpu: "1000m"
+          livenessProbe:
+            exec:
+              command:
+                - sh
+                - -c
+                - celery inspect ping -d celery@$(hostname) | grep -q OK
+            initialDelaySeconds: 30
+            periodSeconds: 15
+            failureThreshold: 3
+          readinessProbe:
+            exec:
+              command:
+                - sh
+                - -c
+                - celery inspect ping -d celery@$(hostname) | grep -q OK
+            initialDelaySeconds: 60
+            periodSeconds: 15
+            failureThreshold: 3
+          args: ["/usr/src/aggregation/scripts/start-celery.sh"]
+          env:
+            - name: FLASK_ENV
+              value: production
+            - name: CELERY_BROKER_URL
+              value: redis://aggregation-staging-redis:6379/0
+            - name: CELERY_RESULT_BACKEND
+              value: redis://aggregation-staging-redis:6379/0
+            - name: PANOPTES_URL
+              value: https://panoptes.zooniverse.org/
+            - name: PANOPTES_CLIENT_ID
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: PANOPTES_CLIENT_ID
+            - name: PANOPTES_CLIENT_SECRET
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: PANOPTES_CLIENT_SECRET
+            - name: MAST_AUTH_TOKEN
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: MAST_AUTH_TOKEN
+            - name: MAST_PROD_TOKEN
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: MAST_PROD_TOKEN
+            - name: SENTRY_DSN
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: SENTRY_DSN
+            - name: NEW_RELIC_LICENSE_KEY
+              valueFrom:
+                secretKeyRef:
+                  name: aggregation-for-caesar-environment
+                  key: NEW_RELIC_LICENSE_KEY
+            - name: NEW_RELIC_APP_NAME
+              value: 'Aggregation Caesar (Staging)'
+          volumeMounts:
+            - name: aggregation-staging-volume
+              mountPath: /usr/src/aggregation/tmp
+      volumes:
+        - name: aggregation-staging-volume
+          persistentVolumeClaim:
+            claimName: aggregation-staging-data-storage
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: aggregation-caesar-staging
+spec:
+  selector:
+    app: aggregation-caesar-staging
+  ports:
+    - protocol: TCP
+      port: 80
+      targetPort: 80
+---
+kind: PersistentVolumeClaim
+apiVersion: v1
+metadata:
+  name: aggregation-staging-redis
+spec:
+  accessModes:
+    - ReadWriteOnce
+  storageClassName: azurefile
+  resources:
+    requests:
+      storage: 1Gi
+---
+kind: PersistentVolumeClaim
+apiVersion: v1
+metadata:
+  name: aggregation-staging-data-storage
+spec:
+  accessModes:
+    - ReadWriteOnce
+  storageClassName: azurefile
+  resources:
+    requests:
+      storage: 20Gi
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: aggregation-staging-redis
+  labels:
+    app: aggregation-staging-redis
+spec:
+  replicas: 1
+  strategy:
+    type: Recreate
+  selector:
+    matchLabels:
+      app: aggregation-staging-redis
+  template:
+    metadata:
+      labels:
+        app: aggregation-staging-redis
+    spec:
+      tolerations:
+        - key: "servicelife"
+          operator: "Equal"
+          value: "longlife"
+          effect: "NoSchedule"
+      affinity:
+        nodeAffinity:
+          requiredDuringSchedulingIgnoredDuringExecution:
+            nodeSelectorTerms:
+              - matchExpressions:
+                  - key: servicelife
+                    operator: In
+                    values:
+                      - longlife
+      containers:
+        - name: aggregation-staging-redis
+          image: redis
+          resources:
+            requests:
+              memory: "100Mi"
+              cpu: "10m"
+            limits:
+              memory: "100Mi"
+              cpu: "500m"
+          volumeMounts:
+            - name: aggregation-staging-redis-data
+              mountPath: "/data"
+      volumes:
+        - name: aggregation-staging-redis-data
+          persistentVolumeClaim:
+            claimName: aggregation-staging-redis
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: aggregation-staging-redis
+spec:
+  selector:
+    app: aggregation-staging-redis
+  ports:
+    - protocol: TCP
+      port: 6379
+      targetPort: 6379
+  type: NodePort
+---
+apiVersion: networking.k8s.io/v1
+kind: Ingress
+metadata:
+  name: aggregation-staging-ingress
+  annotations:
+    ingressClassName: nginx
+    cert-manager.io/cluster-issuer: letsencrypt-prod
+    nginx.ingress.kubernetes.io/proxy-buffer-size: "128k"
+    nginx.ingress.kubernetes.io/proxy-body-size: 20m
+    nginx.ingress.kubernetes.io/set-real-ip-from: "10.0.0.0/8"
+spec:
+  tls:
+    - hosts:
+        - aggregation-staging.zooniverse.org
+      secretName: aggregation-staging-tls-secret
+  rules:
+    - host: aggregation-staging.zooniverse.org
+      http:
+        paths:
+          - pathType: Prefix
+            path: /
+            backend:
+              service:
+                name: aggregation-staging-app
+                port:
+                  number: 80
\ No newline at end of file

From 5a53178af3673afa52708c470ba43d835c3362c4 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Mon, 10 Jun 2024 15:33:49 -0500
Subject: [PATCH 04/23] Clean up new resource names

---
 kubernetes/deployment-staging.yaml | 28 ++++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/kubernetes/deployment-staging.yaml b/kubernetes/deployment-staging.yaml
index 1456dd8a..f1ae8ad7 100644
--- a/kubernetes/deployment-staging.yaml
+++ b/kubernetes/deployment-staging.yaml
@@ -1,20 +1,20 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: aggregation-caesar-staging
+  name: aggregation-staging
   labels:
-    app: aggregation-caesar-staging
+    app: aggregation-staging
 spec:
   selector:
     matchLabels:
-      app: aggregation-caesar-staging
+      app: aggregation-staging
   template:
     metadata:
       labels:
-        app: aggregation-caesar-staging
+        app: aggregation-staging
     spec:
       containers:
-        - name: aggregation-caesar-app-staging
+        - name: aggregation-staging-app
           image: ghcr.io/zooniverse/aggregation-for-caesar:batch-aggregation-staging
           ports:
             - containerPort: 80
@@ -98,10 +98,10 @@
 apiVersion: v1
 kind: Service
 metadata:
-  name: aggregation-caesar-staging
+  name: aggregation-staging
 spec:
   selector:
-    app: aggregation-caesar-staging
+    app: aggregation-staging
   ports:
     - protocol: TCP
       port: 80
@@ -110,20 +110,20 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: aggregation-caesar-staging-celery
+  name: aggregation-staging-celery
   labels:
-    app: aggregation-caesar-staging-celery
+    app: aggregation-staging-celery
 spec:
   selector:
     matchLabels:
-      app: aggregation-caesar-staging-celery
+      app: aggregation-staging-celery
   template:
     metadata:
       labels:
-        app: aggregation-caesar-staging-celery
+        app: aggregation-staging-celery
     spec:
       containers:
-        - name: aggregation-caesar-app-staging-celery
+        - name: aggregation-staging-celery
           image: ghcr.io/zooniverse/aggregation-for-caesar:batch-aggregation-staging
           resources:
            requests:
              memory: "500Mi"
              cpu: "500m"
            limits:
              memory: "1000Mi"
              cpu: "1000m"
          livenessProbe:
@@ -203,10 +203,10 @@
 apiVersion: v1
 kind: Service
 metadata:
-  name: aggregation-caesar-staging
+  name: aggregation-staging-celery
 spec:
   selector:
-    app: aggregation-caesar-staging
+    app: aggregation-staging-celery
   ports:
     - protocol: TCP
       port: 80

From 05baab33b7b4443ff5f5ef452bbc126d082acddb Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Thu, 13 Jun 2024 15:10:26 -0500
Subject: [PATCH 05/23] Build to a single docker image

---
 docker-compose.yml | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index 5c1574f3..414ad5ba 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,10 +1,11 @@
-version: '2'
+version: '3'
 services:
   aggregation:
     build:
       context: ./
       args:
         REVISION: fake-git-sha-id
+    image: aggregation-for-caesar:local
     volumes:
       - ./:/usr/src/aggregation
       - ~/.aws:/root/.aws
@@ -25,10 +26,7 @@ services:
       - redis:redis

   worker:
-    build:
-      context: ./
-      args:
-        REVISION: fake-git-sha-id
+    image: aggregation-for-caesar:local
     command: celery --app panoptes_aggregation.batch_aggregation.celery worker --loglevel=info
     volumes:
       - ./:/usr/src/aggregation
@@ -44,7 +42,7 @@ services:
       - redis

   dashboard:
-    build: .
+    image: aggregation-for-caesar:local
     command: celery --app panoptes_aggregation.batch_aggregation.celery flower --port=5555 --broker=redis://redis:6379/0
     ports:
       - 5556:5555
     environment:

From af57d8cc1cdf0274806dbf09eb489a6001df10d9 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Thu, 13 Jun 2024 15:11:54 -0500
Subject: [PATCH 06/23] Rename deployment & use Panoptes staging in staging
 deploy

---
 kubernetes/deployment-staging.yaml | 48 ++++++++++++++++--------------
 1 file changed, 25 insertions(+), 23 deletions(-)

diff --git a/kubernetes/deployment-staging.yaml b/kubernetes/deployment-staging.yaml
index f1ae8ad7..6cbdcfde 100644
--- a/kubernetes/deployment-staging.yaml
+++ b/kubernetes/deployment-staging.yaml
@@ -1,17 +1,17 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: aggregation-staging
+  name: aggregation-staging-app
   labels:
-    app: aggregation-staging
+    app: aggregation-staging-app
 spec:
   selector:
     matchLabels:
-      app: aggregation-staging
+      app: aggregation-staging-app
   template:
     metadata:
       labels:
-        app: aggregation-staging
+        app: aggregation-staging-app
     spec:
       containers:
         - name: aggregation-staging-app
           image: ghcr.io/zooniverse/aggregation-for-caesar:batch-aggregation-staging
           ports:
             - containerPort: 80
@@ -53,18 +53,18 @@
             value: redis://aggregation-staging-redis:6379/0
           - name: CELERY_RESULT_BACKEND
             value: redis://aggregation-staging-redis:6379/0
-          - name: PANOPTES_URL
-            value: https://panoptes.zooniverse.org/
-          - name: PANOPTES_CLIENT_ID
+          - name: PANOPTES_STAGING_URL
+            value: https://panoptes-staging.zooniverse.org/
+          - name: PANOPTES_STAGING_CLIENT_ID
             valueFrom:
               secretKeyRef:
                 name: aggregation-for-caesar-environment
-                key: PANOPTES_CLIENT_ID
-          - name: PANOPTES_CLIENT_SECRET
+                key: PANOPTES_STAGING_CLIENT_ID
+          - name: PANOPTES_STAGING_CLIENT_SECRET
             valueFrom:
               secretKeyRef:
                 name: aggregation-for-caesar-environment
-                key: PANOPTES_CLIENT_SECRET
+                key: PANOPTES_STAGING_CLIENT_SECRET
           - name: MAST_AUTH_TOKEN
             valueFrom:
               secretKeyRef:
                 name: aggregation-for-caesar-environment
                 key: MAST_AUTH_TOKEN
           - name: MAST_PROD_TOKEN
             valueFrom:
               secretKeyRef:
                 name: aggregation-for-caesar-environment
                 key: MAST_PROD_TOKEN
           - name: SENTRY_DSN
             valueFrom:
               secretKeyRef:
                 name: aggregation-for-caesar-environment
                 key: SENTRY_DSN
           - name: NEW_RELIC_LICENSE_KEY
             valueFrom:
               secretKeyRef:
                 name: aggregation-for-caesar-environment
                 key: NEW_RELIC_LICENSE_KEY
           - name: NEW_RELIC_APP_NAME
-            value: 'Aggregation Caesar Staging'
+            value: 'Aggregation Caesar (Staging)'
           volumeMounts:
             - name: aggregation-staging-volume
               mountPath: /usr/src/aggregation/tmp
       volumes:
         - name: aggregation-staging-volume
           persistentVolumeClaim:
             claimName: aggregation-staging-data-storage
---
@@ -98,10 +98,10 @@ spec:
 apiVersion: v1
 kind: Service
 metadata:
-  name: aggregation-staging
+  name: aggregation-staging-app
 spec:
   selector:
-    app: aggregation-staging
+    app: aggregation-staging-app
   ports:
     - protocol: TCP
       port: 80
       targetPort: 80
@@ -139,7 +139,8 @@
             - -c
             - celery inspect ping -d celery@$(hostname) | grep -q OK
           initialDelaySeconds: 30
-          periodSeconds: 15
+          periodSeconds: 30
+          timeoutSeconds: 10
           failureThreshold: 3
         readinessProbe:
           exec:
             command:
             - sh
             - -c
             - celery inspect ping -d celery@$(hostname) | grep -q OK
           initialDelaySeconds: 60
-          periodSeconds: 15
+          periodSeconds: 30
+          timeoutSeconds: 10
           failureThreshold: 3
         args: ["/usr/src/aggregation/scripts/start-celery.sh"]
         env:
           - name: FLASK_ENV
             value: production
           - name: CELERY_BROKER_URL
             value: redis://aggregation-staging-redis:6379/0
           - name: CELERY_RESULT_BACKEND
             value: redis://aggregation-staging-redis:6379/0
-          - name: PANOPTES_URL
-            value: https://panoptes.zooniverse.org/
-          - name: PANOPTES_CLIENT_ID
+          - name: PANOPTES_STAGING_URL
+            value: https://panoptes-staging.zooniverse.org/
+          - name: PANOPTES_STAGING_CLIENT_ID
             valueFrom:
               secretKeyRef:
                 name: aggregation-for-caesar-environment
-                key: PANOPTES_CLIENT_ID
-          - name: PANOPTES_CLIENT_SECRET
+                key: PANOPTES_STAGING_CLIENT_ID
+          - name: PANOPTES_STAGING_CLIENT_SECRET
             valueFrom:
               secretKeyRef:
                 name: aggregation-for-caesar-environment
-                key: PANOPTES_CLIENT_SECRET
+                key: PANOPTES_CLIENT_STAGING_SECRET
           - name: MAST_AUTH_TOKEN
             valueFrom:
               secretKeyRef:
@@ -304,7 +306,7 @@
 kind: Ingress
 metadata:
   name: aggregation-staging-ingress
   annotations:
-    ingressClassName: nginx
+    kubernetes.io/ingress.class: nginx
     cert-manager.io/cluster-issuer: letsencrypt-prod
     nginx.ingress.kubernetes.io/proxy-buffer-size: "128k"
     nginx.ingress.kubernetes.io/proxy-body-size: 20m
     nginx.ingress.kubernetes.io/set-real-ip-from: "10.0.0.0/8"
 spec:
   tls:
     - hosts:
         - aggregation-staging.zooniverse.org
       secretName: aggregation-staging-tls-secret
   rules:
     - host: aggregation-staging.zooniverse.org
       http:
         paths:
           - pathType: Prefix
             path: /
             backend:
               service:
                 name: aggregation-staging-app
                 port:
-                  number: 80
\ No newline at end of file
+                  number: 80

From 14ed70a7647fd08ceb589c628f2ddf5b51bf8438 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Thu, 13 Jun 2024 15:14:09 -0500
Subject: [PATCH 07/23] Fix secret name

---
 kubernetes/deployment-staging.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/kubernetes/deployment-staging.yaml b/kubernetes/deployment-staging.yaml
index 6cbdcfde..33908649 100644
--- a/kubernetes/deployment-staging.yaml
+++ b/kubernetes/deployment-staging.yaml
@@ -171,7 +171,7 @@ spec:
             valueFrom:
               secretKeyRef:
                 name: aggregation-for-caesar-environment
-                key: PANOPTES_CLIENT_STAGING_SECRET
+                key: PANOPTES_STAGING_CLIENT_SECRET
           - name: MAST_AUTH_TOKEN
             valueFrom:
               secretKeyRef:

From a50b9dcd14e39eda1ec186c090ad1765a3b285d6 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Thu, 13 Jun 2024 15:34:17 -0500
Subject: [PATCH 08/23] Sringify ID in comparison to value returned from
 Panoptes

---
 panoptes_aggregation/batch_aggregation.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/panoptes_aggregation/batch_aggregation.py b/panoptes_aggregation/batch_aggregation.py
index 6b34f777..2edff216 100644
--- a/panoptes_aggregation/batch_aggregation.py
+++ b/panoptes_aggregation/batch_aggregation.py
@@ -154,7 +154,7 @@ def check_permission(self):
         project = Project.find(self.project_id)
         permission = False
         for user in project.collaborators():
-            if user.id == self.user_id:
+            if user.id == str(self.user_id):
                 permission = True
         return permission

From 0d48f490429cc61bb328c6d3261553fe8b1e559c Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Thu, 13 Jun 2024 15:40:43 -0500
Subject: [PATCH 09/23] Update test

---
 .../tests/batch_aggregation/test_batch_aggregation.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
index d68ca476..73d6c3dc 100644
--- a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
+++ b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
@@ -103,7 +103,8 @@ def test_upload_file_to_storage(self):
     @patch("panoptes_aggregation.batch_aggregation.Project")
     def test_check_permission_success(self, mock_project):
         mock_user = MagicMock()
-        mock_user.id = 100
+        # Panoptes responses return strings
+        mock_user.id = '100'
         mock_project.find().collaborators.return_value = [mock_user]
         ba = batch_agg.BatchAggregator(1, 10, 100)

From e555e555f237cd828490ec31d25d6aee9abf2c0d Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Fri, 14 Jun 2024 13:04:24 -0500
Subject: [PATCH 10/23] Fix mock data type

---
 .../tests/batch_aggregation/test_batch_aggregation.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
index 73d6c3dc..de019d4b 100644
--- a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
+++ b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
@@ -118,7 +118,7 @@
         mock_user = MagicMock()
         # List of collaborators does not include initiating user
-        mock_user.id = 999
+        mock_user.id = '999'
         mock_project.find().collaborators.return_value = [mock_user]
         ba = batch_agg.BatchAggregator(1, 10, 100)

From 76ab07f1e5fdcb2042dde8e99126a54dd2e49736 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Fri, 14 Jun 2024 13:05:51 -0500
Subject: [PATCH 11/23] Use client's admin mode

---
 panoptes_aggregation/batch_aggregation.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/panoptes_aggregation/batch_aggregation.py b/panoptes_aggregation/batch_aggregation.py
index 2edff216..ce4a656d 100644
--- a/panoptes_aggregation/batch_aggregation.py
+++ b/panoptes_aggregation/batch_aggregation.py
@@ -177,5 +177,6 @@ def _connect_api_client(self):
         Panoptes.connect(
             endpoint=os.getenv('PANOPTES_URL', 'https://panoptes.zooniverse.org/'),
             client_id=os.getenv('PANOPTES_CLIENT_ID'),
-            client_secret=os.getenv('PANOPTES_CLIENT_SECRET')
+            client_secret=os.getenv('PANOPTES_CLIENT_SECRET'),
+            admin='true'
         )

From c4526239df14b0a562f2222a8355ea215aca2f82 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Fri, 14 Jun 2024 14:17:41 -0500
Subject: [PATCH 12/23] Fix a couple filepaths

---
 panoptes_aggregation/batch_aggregation.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/panoptes_aggregation/batch_aggregation.py b/panoptes_aggregation/batch_aggregation.py
index ce4a656d..b2f30bf6 100644
--- a/panoptes_aggregation/batch_aggregation.py
+++ b/panoptes_aggregation/batch_aggregation.py
@@ -47,7 +47,8 @@ def run_aggregation(project_id, workflow_id, user_id):
     print(f'[Batch Aggregation] Reducing workflow {workflow_id})')
     for task_type, extract_df in extracted_data.items():
-        extract_df.to_csv(f'{ba.output_path}/{ba.workflow_id}_{task_type}.csv')
+        csv_filepath = os.path.join(ba.output_path, f'{ba.workflow_id}_{task_type}.csv')
+        extract_df.to_csv(csv_filepath)
         reducer_list = batch_standard_reducers[task_type]
         reduced_data = {}
@@ -55,7 +58,7 @@ def run_aggregation(project_id, workflow_id, user_id):
             reducer_config = {'reducer_config': {reducer: {}}}
             reduced_data[reducer] = batch_utils.batch_reduce(extract_df, reducer_config)
             # filename = f'{ba.output_path}/{ba.workflow_id}_reductions.csv'
-            filename = os.path.join(ba.output_path, ba.workflow_id, '_reductions.csv')
+            filename = os.path.join(ba.output_path, f'{ba.workflow_id}_reductions.csv')
             reduced_data[reducer].to_csv(filename, mode='a')

     # Upload zip & reduction files to blob storage
@@ -87,7 +88,7 @@ def __init__(self, project_id, workflow_id, user_id):

     def save_exports(self):
         self.output_path = os.path.join('tmp', str(self.workflow_id))
-        os.mkdir(self.output_path)
+        os.makedirs(self.output_path, exist_ok=True)

         cls_export = Workflow(self.workflow_id).describe_export('classifications')
         full_cls_url = cls_export['media'][0]['src']

From eb66c5f0053099772ae95717c87250c89be3388e Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Fri, 14 Jun 2024 14:28:18 -0500
Subject: [PATCH 13/23] Use UUID as tmpdir path

---
 panoptes_aggregation/batch_aggregation.py | 4 ++--
 .../tests/batch_aggregation/test_batch_aggregation.py | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/panoptes_aggregation/batch_aggregation.py b/panoptes_aggregation/batch_aggregation.py
index b2f30bf6..f89aa022 100644
--- a/panoptes_aggregation/batch_aggregation.py
+++ b/panoptes_aggregation/batch_aggregation.py
@@ -87,8 +87,8 @@ def __init__(self, project_id, workflow_id, user_id):
         self._connect_api_client()

     def save_exports(self):
-        self.output_path = os.path.join('tmp', str(self.workflow_id))
-        os.makedirs(self.output_path, exist_ok=True)
+        self.output_path = os.path.join('tmp', str(self.id))
+        os.makedirs(self.output_path)

         cls_export = Workflow(self.workflow_id).describe_export('classifications')
         full_cls_url = cls_export['media'][0]['src']
diff --git a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
index de019d4b..9558e176 100644
--- a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
+++ b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
@@ -52,8 +52,9 @@ def test_save_exports(self, mock_project, mock_workflow, mock_mkdir):
         mock_project.return_value.describe_export.return_value = csv_dict
         mock_workflow.return_value.describe_export.return_value = csv_dict
         ba = batch_agg.BatchAggregator(1, 10, 100)
+        ba.id = 'asdf123asdf'
         batch_agg.BatchAggregator._download_export = MagicMock(side_effect=['./cls_export.csv', './wf_export.csv'])
-        expected_response = {'classifications': 'tmp/10/10_cls_export.csv', 'workflows': 'tmp/10/10_workflow_export.csv'}
+        expected_response = {'classifications': 'tmp/asdf123asdf/10_cls_export.csv', 'workflows': 'tmp/asdf123asdf/10_workflow_export.csv'}

         response = ba.save_exports()

From 0f2a33480e5a2c0c2b4536cec2bd71a0881edf5e Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Fri, 14 Jun 2024 16:10:52 -0500
Subject: [PATCH 14/23] Finish run if Panoptes is unupdateable

---
 panoptes_aggregation/batch_aggregation.py | 3 +++
 .../batch_aggregation/test_batch_aggregation.py | 12 +++++++++++-
 2 files changed, 14 insertions(+), 1 deletion(-)

diff --git a/panoptes_aggregation/batch_aggregation.py b/panoptes_aggregation/batch_aggregation.py
index f89aa022..0681803c 100644
--- a/panoptes_aggregation/batch_aggregation.py
+++ b/panoptes_aggregation/batch_aggregation.py
@@ -142,6 +142,9 @@ def update_panoptes(self, body_attributes):
         # An Aggregation class can be added to the python client to avoid doing this manually
         params = {'workflow_id': self.workflow_id}
         response = Panoptes.client().get('/aggregations', params=params)
+        if response[0] is None:
+            print(f'[Batch Aggregation] Panoptes Aggregation resource not found. Unable to update.')
+            return False
         agg_id = response[0]['aggregations'][0]['id']
         fresh_etag = response[1]
diff --git a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
index 9558e176..c96880ca 100644
--- a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
+++ b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
@@ -142,7 +142,7 @@ def test_update_panoptes_success(self, mock_get, mock_put):

     @patch("panoptes_aggregation.batch_aggregation.Panoptes.put")
     @patch("panoptes_aggregation.batch_aggregation.Panoptes.get")
-    def test_update_panoptes_failure(self, mock_get, mock_put):
+    def test_update_panoptes_put_failure(self, mock_get, mock_put):
         ba = batch_agg.BatchAggregator(1, 10, 100)
         mock_get.return_value = ({'aggregations': [{'id': 5555}]}, 'thisisanetag')
         body = {'status': 'failure'}
         ba.update_panoptes(body)
         mock_get.assert_called_with('/aggregations', params={'workflow_id': 10})
         mock_put.assert_called_with('/aggregations/5555', etag='thisisanetag', json={'aggregations': body})

+    @patch("panoptes_aggregation.batch_aggregation.Panoptes.put")
+    @patch("panoptes_aggregation.batch_aggregation.Panoptes.get")
+    def test_update_panoptes_get_failure(self, mock_get, mock_put):
+        ba = batch_agg.BatchAggregator(1, 10, 100)
+        mock_get.return_value = (None, None)
+        body = {'status': 'failure'}
+        ba.update_panoptes(body)
+        mock_get.assert_called_with('/aggregations', params={'workflow_id': 10})
+        mock_put.assert_not_called()
+
     @patch("panoptes_aggregation.batch_aggregation.BlobServiceClient")
     def test_connect_blob_storage(self, mock_client):
         ba = batch_agg.BatchAggregator(1, 10, 100)

From 8b6ad3d82724686575b032baff99a395c6afce3c Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Fri, 14 Jun 2024 16:46:02 -0500
Subject: [PATCH 15/23] When the update panoptes resource doesn't exist but the
 call is successful

---
 panoptes_aggregation/batch_aggregation.py | 2 +-
 .../tests/batch_aggregation/test_batch_aggregation.py | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/panoptes_aggregation/batch_aggregation.py b/panoptes_aggregation/batch_aggregation.py
index 0681803c..0835c2d2 100644
--- a/panoptes_aggregation/batch_aggregation.py
+++ b/panoptes_aggregation/batch_aggregation.py
@@ -142,7 +142,7 @@ def update_panoptes(self, body_attributes):
         # An Aggregation class can be added to the python client to avoid doing this manually
         params = {'workflow_id': self.workflow_id}
         response = Panoptes.client().get('/aggregations', params=params)
-        if response[0] is None:
+        if not response[0]['aggregations']:
             print(f'[Batch Aggregation] Panoptes Aggregation resource not found. Unable to update.')
             return False
         agg_id = response[0]['aggregations'][0]['id']
diff --git a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
index c96880ca..559e5a30 100644
--- a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
+++ b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
@@ -132,7 +132,7 @@ def test_check_permission_failure(self, mock_project):

     @patch("panoptes_aggregation.batch_aggregation.Panoptes.put")
     @patch("panoptes_aggregation.batch_aggregation.Panoptes.get")
-    def test_update_panoptes_success(self, mock_get, mock_put):
+    def test_update_panoptes_run_success(self, mock_get, mock_put):
         ba = batch_agg.BatchAggregator(1, 10, 100)
         mock_get.return_value = ({'aggregations': [{'id': 5555}]}, 'thisisanetag')
         body = {'uuid': ba.id, 'status': 'completed'}
@@ -142,7 +142,7 @@

     @patch("panoptes_aggregation.batch_aggregation.Panoptes.put")
     @patch("panoptes_aggregation.batch_aggregation.Panoptes.get")
-    def test_update_panoptes_put_failure(self, mock_get, mock_put):
+    def test_update_panoptes_run_failure(self, mock_get, mock_put):
         ba = batch_agg.BatchAggregator(1, 10, 100)
         mock_get.return_value = ({'aggregations': [{'id': 5555}]}, 'thisisanetag')
         body = {'status': 'failure'}
@@ -154,7 +154,7 @@
     @patch("panoptes_aggregation.batch_aggregation.Panoptes.get")
     def test_update_panoptes_get_failure(self, mock_get, mock_put):
         ba = batch_agg.BatchAggregator(1, 10, 100)
-        mock_get.return_value = (None, None)
+        mock_get.return_value = ({'aggregations': []}, 'etag')
         body = {'status': 'failure'}
         ba.update_panoptes(body)
         mock_get.assert_called_with('/aggregations', params={'workflow_id': 10})
         mock_put.assert_not_called()

From 7fa5e6f643c9e35e7def667e3013aa0f3c97d566 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Fri, 14 Jun 2024 19:36:51 -0500
Subject: [PATCH 16/23] Use jsonify to set mimetype

---
 panoptes_aggregation/routes.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/panoptes_aggregation/routes.py b/panoptes_aggregation/routes.py
index 4a8e9daf..9d73648e 100644
--- a/panoptes_aggregation/routes.py
+++ b/panoptes_aggregation/routes.py
@@ -124,7 +124,7 @@ def run_aggregation():
         workflow_id = content['workflow_id']
         user_id = content['user_id']
         task = batch_aggregation.run_aggregation.delay(project_id, workflow_id, user_id)
-        return json.dumps({"task_id": task.id}), 202
+        return jsonify({"task_id": task.id}), 202

     @application.route('/tasks/<task_id>', methods=['GET'])
     def get_status(task_id):

From 70bfe221330743272165100581ddc31083da71aa Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Fri, 14 Jun 2024 19:37:10 -0500
Subject: [PATCH 17/23] cast inputs to ints just in case

---
 panoptes_aggregation/batch_aggregation.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/panoptes_aggregation/batch_aggregation.py b/panoptes_aggregation/batch_aggregation.py
index 0835c2d2..ac6a2882 100644
--- a/panoptes_aggregation/batch_aggregation.py
+++ b/panoptes_aggregation/batch_aggregation.py
@@ -80,9 +80,9 @@ class BatchAggregator:
     """

     def __init__(self, project_id, workflow_id, user_id):
-        self.project_id = project_id
-        self.workflow_id = workflow_id
-        self.user_id = user_id
+        self.project_id = int(project_id)
+        self.workflow_id = int(workflow_id)
+        self.user_id = int(user_id)
         self._generate_uuid()
         self._connect_api_client()

From c9ef8d07b09ee4f4f871f7977c79548a808c755c Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Fri, 14 Jun 2024 20:33:11 -0500
Subject: [PATCH 18/23] Enable public access to new containers

---
 panoptes_aggregation/batch_aggregation.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/panoptes_aggregation/batch_aggregation.py b/panoptes_aggregation/batch_aggregation.py
index ac6a2882..bb2fb35d 100644
--- a/panoptes_aggregation/batch_aggregation.py
+++ b/panoptes_aggregation/batch_aggregation.py
@@ -122,7 +122,7 @@ def process_cls_export(self, cls_csv):
     def connect_blob_storage(self):
         connect_str = os.getenv('AZURE_STORAGE_CONNECTION_STRING')
         self.blob_service_client = BlobServiceClient.from_connection_string(connect_str)
-        self.blob_service_client.create_container(name=self.id)
+        self.blob_service_client.create_container(name=self.id, public_access='container')

     def upload_file_to_storage(self, container_name, filepath):
         blob = filepath.split('/')[-1]

From 87a756d1a74bab14f1d095b1e5c4f6d7c4cd43f4 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Mon, 1 Jul 2024 16:35:56 -0500
Subject: [PATCH 19/23] Deploy staging with action

---
 .github/workflows/deploy_staging.yml | 44 ++++++++++++++++++++++++++++
 1 file changed, 44 insertions(+)
 create mode 100644 .github/workflows/deploy_staging.yml

diff --git a/.github/workflows/deploy_staging.yml b/.github/workflows/deploy_staging.yml
new file mode 100644
index 00000000..5caf7666
--- /dev/null
+++ b/.github/workflows/deploy_staging.yml
@@ -0,0 +1,44 @@
+name: Deploy to Staging
+
+on:
+  push:
+    branches:
+      - master
+  workflow_dispatch:
+
+jobs:
+  build_and_push_image:
+    name: Build and Push Image
+    uses: zooniverse/ci-cd/.github/workflows/build_and_push_image.yaml@main
+    with:
+      repo_name: aggregation-for-caesar
+      commit_id: ${{ github.sha }}
+      latest: true
+      build_args: "REVISION=${{ github.sha }}"
+
+  deploy_staging:
+    name: Deploy to Staging
+    uses: zooniverse/ci-cd/.github/workflows/deploy_app.yaml@main
+    needs: build_and_push_image
+    with:
+      app_name: aggregation
+      repo_name: aggregation-for-caesar
+      commit_id: ${{ github.sha }}
+      environment: staging
+      deploy_check: false
+    secrets:
+      creds: ${{ secrets.AZURE_AKS }}
+
+  slack_notification:
+    name: Slack notification
+    uses: zooniverse/ci-cd/.github/workflows/slack_notification.yaml@main
+    needs: deploy_staging
+    if: always()
+    with:
+      commit_id: ${{ github.sha }}
+      job_name: Deploy to Staging / deploy_app
+      status: ${{ needs.deploy_staging.result }}
+      title: "Aggregation Staging deploy complete"
+      title_link: "https://aggregation-staging.zooniverse.org"
+    secrets:
+      slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}

From 5f15610cb583c2695f176bb7c2ff934399ebd895 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Mon, 1 Jul 2024 16:37:05 -0500
Subject: [PATCH 20/23] hound?

---
 panoptes_aggregation/batch_aggregation.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/panoptes_aggregation/batch_aggregation.py b/panoptes_aggregation/batch_aggregation.py
index bb2fb35d..fe8eae43 100644
--- a/panoptes_aggregation/batch_aggregation.py
+++ b/panoptes_aggregation/batch_aggregation.py
@@ -47,7 +47,7 @@ def run_aggregation(project_id, workflow_id, user_id):
     print(f'[Batch Aggregation] Reducing workflow {workflow_id})')
     for task_type, extract_df in extracted_data.items():
-        csv_filepath =  os.path.join(ba.output_path, f'{ba.workflow_id}_{task_type}.csv')
+        csv_filepath = os.path.join(ba.output_path, f'{ba.workflow_id}_{task_type}.csv')
         extract_df.to_csv(csv_filepath)
         reducer_list = batch_standard_reducers[task_type]
         reduced_data = {}

From c4e453254833ed0a5f8584b485f8e84a8eaca8e1 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Mon, 1 Jul 2024 16:51:31 -0500
Subject: [PATCH 21/23] test fixes

---
 .../tests/batch_aggregation/test_batch_aggregation.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
index 559e5a30..1ca25376 100644
--- a/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
+++ b/panoptes_aggregation/tests/batch_aggregation/test_batch_aggregation.py
@@ -43,10 +43,10 @@ def test_run_aggregation_success(self, mock_aggregator, mock_wf_ext_conf):
         mock_aggregator_instance.upload_files.assert_called_once()
         mock_aggregator_instance.update_panoptes.assert_called_once()

-    @patch("panoptes_aggregation.batch_aggregation.os.mkdir")
+    @patch("panoptes_aggregation.batch_aggregation.os.makedirs")
     @patch("panoptes_aggregation.batch_aggregation.Workflow")
     @patch("panoptes_aggregation.batch_aggregation.Project")
-    def test_save_exports(self, mock_project, mock_workflow, mock_mkdir):
+    def test_save_exports(self, mock_project, mock_workflow, mock_makedirs):
         # Test that Panoptes calls are made and files are saved
         csv_dict = {'media': [{'src': 'http://zooniverse.org/123.csv'}]}
         mock_project.return_value.describe_export.return_value = csv_dict
@@ -60,7 +60,7 @@ def test_save_exports(self, mock_project, mock_workflow, mock_makedirs):

         assert ba.id is not None
         self.assertEqual(response, expected_response)
-        mock_mkdir.assert_called_once()
+        mock_makedirs.assert_called_once()
         mock_project.assert_called_once_with(1)
         mock_workflow.assert_called_once_with(10)
         mock_project.return_value.describe_export.assert_called_once_with('workflows')
@@ -164,4 +164,4 @@ def test_update_panoptes_get_failure(self, mock_get, mock_put):
     def test_connect_blob_storage(self, mock_client):
         ba = batch_agg.BatchAggregator(1, 10, 100)
         ba.connect_blob_storage()
-        ba.blob_service_client.create_container.assert_called_once_with(name=ba.id)
+        ba.blob_service_client.create_container.assert_called_once_with(name=ba.id, public_access='container')

From 0122cf8b674650f8ecb3a13450af365cd4055fcb Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Mon, 1 Jul 2024 16:53:21 -0500
Subject: [PATCH 22/23] new hound

---
 panoptes_aggregation/batch_aggregation.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/panoptes_aggregation/batch_aggregation.py b/panoptes_aggregation/batch_aggregation.py
index fe8eae43..e7e3788b 100644
--- a/panoptes_aggregation/batch_aggregation.py
+++ b/panoptes_aggregation/batch_aggregation.py
@@ -143,7 +143,7 @@ def update_panoptes(self, body_attributes):
         params = {'workflow_id': self.workflow_id}
         response = Panoptes.client().get('/aggregations', params=params)
         if not response[0]['aggregations']:
-            print(f'[Batch Aggregation] Panoptes Aggregation resource not found. Unable to update.')
+            print('[Batch Aggregation] Panoptes Aggregation resource not found. Unable to update.')
             return False
         agg_id = response[0]['aggregations'][0]['id']
         fresh_etag = response[1]

From c70c121a907e4b5976d7260ea82427fb3af168f3 Mon Sep 17 00:00:00 2001
From: Zach Wolfenbarger
Date: Mon, 1 Jul 2024 17:42:47 -0500
Subject: [PATCH 23/23] Use correct k8s secret

---
 kubernetes/deployment-staging.yaml | 44 +++++++++++++++---------------
 1 file changed, 22 insertions(+), 22 deletions(-)

diff --git a/kubernetes/deployment-staging.yaml b/kubernetes/deployment-staging.yaml
index 33908649..659c14b1 100644
--- a/kubernetes/deployment-staging.yaml
+++ b/kubernetes/deployment-staging.yaml
@@ -53,37 +53,37 @@ spec:
             value: redis://aggregation-staging-redis:6379/0
           - name: CELERY_RESULT_BACKEND
             value: redis://aggregation-staging-redis:6379/0
-          - name: PANOPTES_STAGING_URL
+          - name: PANOPTES_URL
             value: https://panoptes-staging.zooniverse.org/
-          - name: PANOPTES_STAGING_CLIENT_ID
+          - name: PANOPTES_CLIENT_ID
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
-                key: PANOPTES_STAGING_CLIENT_ID
-          - name: PANOPTES_STAGING_CLIENT_SECRET
+                name: aggregation-staging-env
+                key: PANOPTES_CLIENT_ID
+          - name: PANOPTES_CLIENT_SECRET
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
-                key: PANOPTES_STAGING_CLIENT_SECRET
+                name: aggregation-staging-env
+                key: PANOPTES_CLIENT_SECRET
           - name: MAST_AUTH_TOKEN
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
+                name: aggregation-staging-env
                 key: MAST_AUTH_TOKEN
           - name: MAST_PROD_TOKEN
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
+                name: aggregation-staging-env
                 key: MAST_PROD_TOKEN
           - name: SENTRY_DSN
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
+                name: aggregation-staging-env
                 key: SENTRY_DSN
           - name: NEW_RELIC_LICENSE_KEY
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
+                name: aggregation-staging-env
                 key: NEW_RELIC_LICENSE_KEY
           - name: NEW_RELIC_APP_NAME
             value: 'Aggregation Caesar (Staging)'
@@ -160,37 +160,37 @@ spec:
             value: redis://aggregation-staging-redis:6379/0
           - name: CELERY_RESULT_BACKEND
             value: redis://aggregation-staging-redis:6379/0
-          - name: PANOPTES_STAGING_URL
+          - name: PANOPTES_URL
             value: https://panoptes-staging.zooniverse.org/
-          - name: PANOPTES_STAGING_CLIENT_ID
+          - name: PANOPTES_CLIENT_ID
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
-                key: PANOPTES_STAGING_CLIENT_ID
-          - name: PANOPTES_STAGING_CLIENT_SECRET
+                name: aggregation-staging-env
+                key: PANOPTES_CLIENT_ID
+          - name: PANOPTES_CLIENT_SECRET
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
-                key: PANOPTES_STAGING_CLIENT_SECRET
+                name: aggregation-staging-env
+                key: PANOPTES_CLIENT_SECRET
           - name: MAST_AUTH_TOKEN
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
+                name: aggregation-staging-env
                 key: MAST_AUTH_TOKEN
           - name: MAST_PROD_TOKEN
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
+                name: aggregation-staging-env
                 key: MAST_PROD_TOKEN
           - name: SENTRY_DSN
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
+                name: aggregation-staging-env
                 key: SENTRY_DSN
           - name: NEW_RELIC_LICENSE_KEY
             valueFrom:
               secretKeyRef:
-                name: aggregation-for-caesar-environment
+                name: aggregation-staging-env
                 key: NEW_RELIC_LICENSE_KEY
           - name: NEW_RELIC_APP_NAME
             value: 'Aggregation Caesar (Staging)'