diff --git a/.docker/docker_start_script.sh b/.docker/docker_start_script.sh
index f41528c0f..7debd03ba 100644
--- a/.docker/docker_start_script.sh
+++ b/.docker/docker_start_script.sh
@@ -1,27 +1,4 @@
 #!/usr/bin/env bash
-#Configure web server
-
-# cd /usr/src/app/e-mission-server
-
-#set database URL using environment variable
-echo ${DB_HOST}
-if [ -z ${DB_HOST} ] ; then
-    local_host=`hostname -i`
-    jq --arg db_host "$local_host" '.timeseries.url = $db_host' conf/storage/db.conf.sample > conf/storage/db.conf
-else
-    jq --arg db_host "$DB_HOST" '.timeseries.url = $db_host' conf/storage/db.conf.sample > conf/storage/db.conf
-fi
-cat conf/storage/db.conf
-
-#set Web Server host using environment variable
-echo ${WEB_SERVER_HOST}
-if [ -z ${WEB_SERVER_HOST} ] ; then
-    local_host=`hostname -i`
-    sed "s_localhost_${local_host}_" conf/net/api/webserver.conf.sample > conf/net/api/webserver.conf
-else
-    sed "s_localhost_${WEB_SERVER_HOST}_" conf/net/api/webserver.conf.sample > conf/net/api/webserver.conf
-fi
-cat conf/net/api/webserver.conf
 
 if [ -z ${LIVERELOAD_SRC} ] ; then
     echo "Live reload disabled, "
diff --git a/.github/workflows/image_build_push.yml b/.github/workflows/image_build_push.yml
index 3b991786a..46a22b630 100644
--- a/.github/workflows/image_build_push.yml
+++ b/.github/workflows/image_build_push.yml
@@ -1,29 +1,22 @@
-# This is a basic workflow to help you get started with Actions
-
 name: docker image
 
-# Controls when the action will run. Triggers the workflow on push or pull request
-# events but only for the master branch
 on:
   push:
     branches: [ master, gis-based-mode-detection ]
-
-# Env variable
+# Dockerhub credentials are set as environment variables
 env:
   DOCKER_USER: ${{secrets.DOCKER_USER}}
   DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}}
 
-# A workflow run is made up of one or more jobs that can run sequentially or in parallel
 jobs:
-  # This workflow contains a single job called "build"
   build:
-    # The type of runner that the job will run on
     runs-on: ubuntu-latest
 
-    # Steps represent a sequence of tasks that will be executed as part of the job
+    outputs:
+      date: ${{ steps.date.outputs.date }}
+
     steps:
-      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
       - uses: actions/checkout@v2
       - name: docker login
         run: | # log into docker hub account
@@ -46,3 +39,40 @@ jobs:
       - name: push docker image
         run: |
           docker push $DOCKER_USER/${GITHUB_REPOSITORY#*/}:${GITHUB_REF##*/}_${{ steps.date.outputs.date }}
+
+      - name: Create a text file
+        run: |
+          echo ${{ steps.date.outputs.date }} > tag_file.txt
+          echo "Created tag text file"
+
+      - name: Upload Artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: docker-image-tag
+          path: tag_file.txt
+          overwrite: true
+
+  dispatch:
+    needs: build
+    runs-on: ubuntu-latest
+
+    env:
+      DOCKER_IMAGE_TAG: ${{ needs.build.outputs.date }}
+
+    strategy:
+      matrix:
+        repo: ['e-mission/op-admin-dashboard', 'e-mission/em-public-dashboard']
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Trigger workflow in admin-dash, public-dash
+        # TODO: Create fine-grained token with "Actions: write" permissions
+        run: |
+          curl -L \
+            -X POST \
+            -H "Accept: application/vnd.github+json" \
+            -H "Authorization: Bearer ${{ secrets.GH_FG_PAT_TAGS }}" \
+            -H "X-GitHub-Api-Version: 2022-11-28" \
+            https://api.github.com/repos/${{ matrix.repo }}/actions/workflows/image_build_push.yml/dispatches \
+            -d '{"ref":"master", "inputs": {"docker_image_tag" : "${{ env.DOCKER_IMAGE_TAG }}"}}'
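Note: the `dispatch` job is just a matrix of authenticated `workflow_dispatch` calls. For testing the same call outside Actions, here is a minimal Python sketch; the tag value and the locally exported token variable are assumptions for illustration, and the token must be a fine-grained PAT with "Actions: write" permission on each target repo.

```python
# Minimal sketch of the dispatch call the workflow makes via curl.
# GH_FG_PAT_TAGS and the tag value are assumed/hypothetical here.
import os
import requests

token = os.environ["GH_FG_PAT_TAGS"]    # assumed to be exported locally
docker_image_tag = "2024-08-12--59-01"  # hypothetical tag for illustration

for repo in ["e-mission/op-admin-dashboard", "e-mission/em-public-dashboard"]:
    resp = requests.post(
        f"https://api.github.com/repos/{repo}/actions/workflows/image_build_push.yml/dispatches",
        headers={
            "Accept": "application/vnd.github+json",
            "Authorization": f"Bearer {token}",
            "X-GitHub-Api-Version": "2022-11-28",
        },
        json={"ref": "master", "inputs": {"docker_image_tag": docker_image_tag}},
    )
    resp.raise_for_status()  # the dispatches endpoint returns 204 on success
```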
diff --git a/.gitignore b/.gitignore
index 1b467ec07..0f5c8be38 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,6 +16,8 @@ CFC_DataCollector/moves_collect.log
 webapp/www/lib
 conf/**/*.json
 !conf/**/*.schema.json
+!conf/analysis/debug.conf.dev.json
+!conf/analysis/debug.conf.prod.json
 
 *.ipynb_checkpoints*
diff --git a/Dockerfile b/Dockerfile
index 7fa923ea0..e98dc2124 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -28,8 +28,8 @@ RUN chmod u+x ./.docker/setup_config.sh
 RUN bash -c "./.docker/setup_config.sh"
 
 # #declare environment variables
-ENV DB_HOST=''
-ENV WEB_SERVER_HOST=''
+ENV DB_HOST='db'
+ENV WEB_SERVER_HOST=0.0.0.0
 ENV LIVERELOAD_SRC=''
 ENV STUDY_CONFIG=''
diff --git a/conf/analysis/debug.conf.json.sample b/conf/analysis/debug.conf.dev.json
similarity index 89%
rename from conf/analysis/debug.conf.json.sample
rename to conf/analysis/debug.conf.dev.json
index 23c184aa7..b778a692b 100644
--- a/conf/analysis/debug.conf.json.sample
+++ b/conf/analysis/debug.conf.dev.json
@@ -10,5 +10,5 @@
     "section.startStopRadius": 150,
     "section.endStopRadius": 150,
     "analysis.result.section.key": "analysis/inferred_section",
-    "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/trip_user_input", "manual/place_user_input"]
+    "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/replaced_mode", "manual/trip_user_input", "manual/place_user_input"]
 }
diff --git a/conf/analysis/debug.conf.prod.json b/conf/analysis/debug.conf.prod.json
new file mode 100644
index 000000000..a234b1162
--- /dev/null
+++ b/conf/analysis/debug.conf.prod.json
@@ -0,0 +1,14 @@
+{
+    "intake.segmentation.section_segmentation.sectionValidityAssertions": true,
+    "intake.cleaning.clean_and_resample.speedDistanceAssertions": false,
+    "intake.cleaning.clean_and_resample.sectionValidityAssertions": false,
+    "intake.cleaning.filter_accuracy.enable": false,
+    "classification.inference.mode.useAdvancedFeatureIndices": true,
+    "classification.inference.mode.useBusTrainFeatureIndices": true,
+    "classification.validityAssertions": true,
+    "output.conversion.validityAssertions": true,
+    "section.startStopRadius": 150,
+    "section.endStopRadius": 150,
+    "analysis.result.section.key": "analysis/inferred_section",
+    "userinput.keylist": ["manual/mode_confirm", "manual/purpose_confirm", "manual/replaced_mode", "manual/trip_user_input", "manual/place_user_input"]
+}
diff --git a/emission/analysis/config.py b/emission/analysis/config.py
index 3f41d22f1..d484e5354 100644
--- a/emission/analysis/config.py
+++ b/emission/analysis/config.py
@@ -1,11 +1,17 @@
 import json
+import os
 
 def get_config_data():
     try:
+        print("Trying to open debug.conf.json")
         config_file = open('conf/analysis/debug.conf.json')
     except:
-        print("analysis.debug.conf.json not configured, falling back to sample, default configuration")
-        config_file = open('conf/analysis/debug.conf.json.sample')
+        if os.getenv("PROD_STAGE") == "TRUE":
+            print("In production environment, config not overridden, using default production debug.conf")
+            config_file = open('conf/analysis/debug.conf.prod.json')
+        else:
+            print("analysis.debug.conf.json not configured, falling back to sample, default configuration")
+            config_file = open('conf/analysis/debug.conf.dev.json')
     ret_val = json.load(config_file)
     config_file.close()
     return ret_val
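With the sample file split into dev and prod variants, the selection in `emission/analysis/config.py` becomes a three-way fallback. A condensed sketch of the equivalent decision follows; the real code uses try/except around `open()` rather than an existence check.

```python
import os

def pick_analysis_config_path():
    # 1. an explicit debug.conf.json override always wins
    if os.path.exists("conf/analysis/debug.conf.json"):
        return "conf/analysis/debug.conf.json"
    # 2. otherwise PROD_STAGE=TRUE selects the production defaults
    if os.getenv("PROD_STAGE") == "TRUE":
        return "conf/analysis/debug.conf.prod.json"
    # 3. everything else falls back to the dev defaults
    return "conf/analysis/debug.conf.dev.json"
```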
diff --git a/emission/core/backwards_compat_config.py b/emission/core/backwards_compat_config.py
new file mode 100644
index 000000000..afd793504
--- /dev/null
+++ b/emission/core/backwards_compat_config.py
@@ -0,0 +1,42 @@
+import json
+import logging
+import os
+import numpy as np
+import pandas as pd
+
+# if there is a config file and the environment variable is set, we need to
+# decide which one wins. I would argue for the environment variable, to allow
+# for a migration to the new model and for us to remove the obsolete code.
+# Although arguably, the converse will also work, since we can set the
+# variable while the file is present, and then remove the file in a second
+# round of changes. Let's keep the order unchanged for now for simplicity, and
+# modify as needed later.
+
+def get_config(config_file_name, var_path_mapping):
+    # Since a `config_data` field would be at the module level, and we want
+    # the module to be reusable, we are not going to cache the result. It is
+    # not clear that we need to cache the result anyway, given that we
+    # typically initialize the config variables at the beginning of the
+    # modules in which they are used. If we feel like this is an issue, we can
+    # switch to creating a class instead.
+    ret_val = {}
+    try:
+        config_file = open(config_file_name)
+        # we only have a single entry in the config json, not an array
+        # and there is no way for json_normalize to return a series
+        # so we will just take the first row of the dataframe
+        loaded_val = pd.json_normalize(json.load(config_file)).iloc[0]
+        for var, path in var_path_mapping.items():
+            ret_val[var] = loaded_val[path]
+            # Ensure that the returned values are regular ints
+            # https://github.com/e-mission/e-mission-server/pull/961#issuecomment-2282206511
+            if type(ret_val[var]) is np.int64:
+                ret_val[var] = int(ret_val[var])
+        config_file.close()
+    except Exception as e:
+        if isinstance(e, KeyError) or isinstance(e, json.decoder.JSONDecodeError):
+            logging.exception(e)
+        print("Config file not found, returning a copy of the environment variables instead...")
+        # https://github.com/e-mission/e-mission-server/pull/961#issuecomment-2282209006
+        ret_val = dict(os.environ)
+    return ret_val
diff --git a/emission/core/get_database.py b/emission/core/get_database.py
index 0939b41d9..4af873934 100644
--- a/emission/core/get_database.py
+++ b/emission/core/get_database.py
@@ -10,16 +10,14 @@
 import os
 import json
 
-try:
-    config_file = open('conf/storage/db.conf')
-except:
-    print("storage not configured, falling back to sample, default configuration")
-    config_file = open('conf/storage/db.conf.sample')
+import emission.core.backwards_compat_config as ecbc
+
+config = ecbc.get_config('conf/storage/db.conf',
+    {"DB_HOST": "timeseries.url", "DB_RESULT_LIMIT": "timeseries.result_limit"})
 
-config_data = json.load(config_file)
-url = config_data["timeseries"]["url"]
-result_limit = config_data["timeseries"]["result_limit"]
-config_file.close()
+print("Retrieved config %s" % config)
+url = config.get("DB_HOST", "localhost")
+result_limit = config.get("DB_RESULT_LIMIT", 250000)
 
 try:
     parsed=pymongo.uri_parser.parse_uri(url)
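The calling convention for the new module, as `get_database.py` above uses it: the second argument maps env-style names to dotted paths in the legacy JSON. If the file exists, values come from it under those names; if not, `get_config` returns a copy of `os.environ`, so the same names work as environment variables, and defaults are applied at the call site via `dict.get()`. One caveat worth noting: in the env-var fallback the values are strings, so numeric settings such as `DB_RESULT_LIMIT` may need an explicit cast.

```python
import emission.core.backwards_compat_config as ecbc

# Map env-style names to the dotted paths in the legacy db.conf
config = ecbc.get_config('conf/storage/db.conf',
    {"DB_HOST": "timeseries.url", "DB_RESULT_LIMIT": "timeseries.result_limit"})

url = config.get("DB_HOST", "localhost")  # file value, env var, or default
# env-var values arrive as strings, hence the cast in this sketch
result_limit = int(config.get("DB_RESULT_LIMIT", 250000))
```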
conda..." source setup/setup_conda.sh Linux-x86_64 @@ -25,4 +17,4 @@ echo "Adding permissions for the runIntegrationTests.sh script" chmod +x runIntegrationTests.sh echo "Permissions added for the runIntegrationTests.sh script" -./runIntegrationTests.sh \ No newline at end of file +./runIntegrationTests.sh diff --git a/emission/integrationTests/storageTests/TestMongodbAuth.py b/emission/integrationTests/storageTests/TestMongodbAuth.py index 13f89d9a3..fbb842841 100644 --- a/emission/integrationTests/storageTests/TestMongodbAuth.py +++ b/emission/integrationTests/storageTests/TestMongodbAuth.py @@ -47,10 +47,15 @@ def setUp(self): self.uuid = uuid.uuid4() self.testUserId = self.uuid self.db_conf_file = "conf/storage/db.conf" + self.originalDBEnvVars = {} self.createAdmin() def tearDown(self): self.admin_auth.command({"dropAllUsersFromDatabase": 1}) + logging.debug("Deleting test db environment variables") + ecc.restoreOriginalEnvVars(self.originalDBEnvVars, self.modifiedEnvVars) + logging.debug("Finished restoring original db environment variables") + logging.debug("Restored original values are = %s" % self.originalDBEnvVars) try: os.remove(self.db_conf_file) except FileNotFoundError as e: @@ -67,14 +72,19 @@ def createAdmin(self): self.admin_auth = pymongo.MongoClient(self.getURL(self.test_username, self.test_password)).admin def configureDB(self, url): - config = { - "timeseries": { - "url": url, - "result_limit": 250000 - } + self.testModifiedEnvVars = { + 'DB_HOST' : url } - with open(self.db_conf_file, "w") as fp: - json.dump(config, fp, indent=4) + + self.orginalDBEnvVars = dict(os.environ) + + for env_var_name, env_var_value in self.testModifiedEnvVars.items(): + # Setting db environment variables with test values + os.environ[env_var_name] = env_var_value + + logging.debug("Finished setting up test db environment variables") + logging.debug("Current original values are = %s" % self.originalDBEnvVars) + logging.debug("Current modified values are = %s" % self.testModifiedEnvVars) def getURL(self, username, password, dbname="admin"): return "mongodb://%s:%s@localhost/%s?authSource=admin&authMechanism=SCRAM-SHA-1" % (username, password, dbname) diff --git a/emission/net/api/cfc_webapp.py b/emission/net/api/cfc_webapp.py index e585d6a25..9e2eb68fa 100644 --- a/emission/net/api/cfc_webapp.py +++ b/emission/net/api/cfc_webapp.py @@ -51,27 +51,22 @@ import emission.storage.timeseries.cache_series as esdc import emission.core.timer as ect import emission.core.get_database as edb +import emission.core.backwards_compat_config as ecbc -try: - config_file = open('conf/net/api/webserver.conf') -except: - logging.debug("webserver not configured, falling back to sample, default configuration") - config_file = open('conf/net/api/webserver.conf.sample') - -OPENPATH_URL="https://www.nrel.gov/transportation/openpath.html" STUDY_CONFIG = os.getenv('STUDY_CONFIG', "stage-program") -config_data = json.load(config_file) -config_file.close() -static_path = config_data["paths"]["static_path"] -python_path = config_data["paths"]["python_path"] -server_host = config_data["server"]["host"] -server_port = config_data["server"]["port"] -socket_timeout = config_data["server"]["timeout"] -log_base_dir = config_data["paths"]["log_base_dir"] -auth_method = config_data["server"]["auth"] -aggregate_call_auth = config_data["server"]["aggregate_call_auth"] -not_found_redirect = config_data["paths"].get("404_redirect", OPENPATH_URL) +# Constants that we don't read from the configuration +WEBSERVER_STATIC_PATH="webapp/www" 
diff --git a/emission/net/api/cfc_webapp.py b/emission/net/api/cfc_webapp.py
index e585d6a25..9e2eb68fa 100644
--- a/emission/net/api/cfc_webapp.py
+++ b/emission/net/api/cfc_webapp.py
@@ -51,27 +51,22 @@
 import emission.storage.timeseries.cache_series as esdc
 import emission.core.timer as ect
 import emission.core.get_database as edb
+import emission.core.backwards_compat_config as ecbc
 
-try:
-    config_file = open('conf/net/api/webserver.conf')
-except:
-    logging.debug("webserver not configured, falling back to sample, default configuration")
-    config_file = open('conf/net/api/webserver.conf.sample')
-
-OPENPATH_URL="https://www.nrel.gov/transportation/openpath.html"
 STUDY_CONFIG = os.getenv('STUDY_CONFIG', "stage-program")
 
-config_data = json.load(config_file)
-config_file.close()
-static_path = config_data["paths"]["static_path"]
-python_path = config_data["paths"]["python_path"]
-server_host = config_data["server"]["host"]
-server_port = config_data["server"]["port"]
-socket_timeout = config_data["server"]["timeout"]
-log_base_dir = config_data["paths"]["log_base_dir"]
-auth_method = config_data["server"]["auth"]
-aggregate_call_auth = config_data["server"]["aggregate_call_auth"]
-not_found_redirect = config_data["paths"].get("404_redirect", OPENPATH_URL)
+# Constants that we don't read from the configuration
+WEBSERVER_STATIC_PATH="webapp/www"
+WEBSERVER_HOST="0.0.0.0"
+
+config = ecbc.get_config('conf/net/api/webserver.conf',
+    {"WEBSERVER_PORT": "server.port", "WEBSERVER_TIMEOUT": "server.timeout",
+     "WEBSERVER_AUTH": "server.auth", "WEBSERVER_AGGREGATE_CALL_AUTH": "server.aggregate_call_auth"})
+server_port = config.get("WEBSERVER_PORT", 8080)
+socket_timeout = config.get("WEBSERVER_TIMEOUT", 3600)
+auth_method = config.get("WEBSERVER_AUTH", "skip")
+aggregate_call_auth = config.get("WEBSERVER_AGGREGATE_CALL_AUTH", "no_auth")
+not_found_redirect = config.get("WEBSERVER_NOT_FOUND_REDIRECT", "https://nrel.gov/openpath")
 
 BaseRequest.MEMFILE_MAX = 1024 * 1024 * 1024 # Allow the request size to be 1G
 # to accomodate large section sizes
@@ -89,7 +84,7 @@
 #Simple path that serves up a static landing page with javascript in it
 @route('/')
 def index():
-  return static_file("index.html", static_path)
+  return static_file("index.html", WEBSERVER_STATIC_PATH)
 
 # Backward compat to handle older clients
 # Remove in 2023 after everybody has upgraded
@@ -558,6 +553,4 @@ def resolve_auth(auth_method):
     else:
         # Non SSL option for testing on localhost
         print("Running with HTTPS turned OFF - use a reverse proxy on production")
-        run(host=server_host, port=server_port, server='cheroot', debug=True)
-
-        # run(host="0.0.0.0", port=server_port, server='cherrypy', debug=True)
+        run(host=WEBSERVER_HOST, port=server_port, server='cheroot', debug=True)
diff --git a/emission/net/auth/secret.py b/emission/net/auth/secret.py
index 1593fc2fb..f96a060d1 100644
--- a/emission/net/auth/secret.py
+++ b/emission/net/auth/secret.py
@@ -4,7 +4,11 @@
 class SecretMethod(object):
     def __init__(self):
-        key_file = open('conf/net/auth/secret_list.json')
+        try:
+            key_file = open('conf/net/auth/secret_list.json')
+        except:
+            print("secret_list.json not configured, falling back to sample, default configuration")
+            key_file = open('conf/net/auth/secret_list.json.sample')
         key_data = json.load(key_file)
         key_file.close()
         self.client_secret_list = key_data["client_secret_list"]
diff --git a/emission/net/ext_service/push/notify_interface.py b/emission/net/ext_service/push/notify_interface.py
index 6b94857f6..8363011e1 100644
--- a/emission/net/ext_service/push/notify_interface.py
+++ b/emission/net/ext_service/push/notify_interface.py
@@ -11,22 +11,26 @@
 import logging
 import importlib
 
+import emission.core.backwards_compat_config as ecbc
+
 # Note that the URL is hardcoded because the API endpoints are not standardized.
 # If we change a push provider, we will need to modify to match their endpoints.
 # Hardcoding will remind us of this :)
 # We can revisit this if push providers eventually decide to standardize...
 
+push_config = ecbc.get_config('conf/net/ext_service/push.json',
+    {"PUSH_PROVIDER": "provider", "PUSH_SERVER_AUTH_TOKEN": "server_auth_token",
+     "PUSH_APP_PACKAGE_NAME": "app_package_name", "PUSH_IOS_TOKEN_FORMAT": "ios_token_format"})
+
 try:
-    push_config_file = open('conf/net/ext_service/push.json')
-    push_config = json.load(push_config_file)
-    push_config_file.close()
+    logging.info(f"Push configured for app {push_config.get('PUSH_APP_PACKAGE_NAME')} using platform {push_config.get('PUSH_PROVIDER')} with token {push_config.get('PUSH_SERVER_AUTH_TOKEN')[:10]}... of length {len(push_config.get('PUSH_SERVER_AUTH_TOKEN'))}")
 except:
     logging.warning("push service not configured, push notifications not supported")
 
 class NotifyInterfaceFactory(object):
     @staticmethod
     def getDefaultNotifyInterface():
-        return NotifyInterfaceFactory.getNotifyInterface(push_config["provider"])
+        return NotifyInterfaceFactory.getNotifyInterface(push_config.get("PUSH_PROVIDER"))
 
     @staticmethod
     def getNotifyInterface(pushProvider):
diff --git a/emission/net/ext_service/push/notify_interface_impl/firebase.py b/emission/net/ext_service/push/notify_interface_impl/firebase.py
index 34593f82f..a33824349 100644
--- a/emission/net/ext_service/push/notify_interface_impl/firebase.py
+++ b/emission/net/ext_service/push/notify_interface_impl/firebase.py
@@ -21,13 +21,13 @@ def get_interface(push_config):
 
 class FirebasePush(pni.NotifyInterface):
     def __init__(self, push_config):
-        self.server_auth_token = push_config["server_auth_token"]
-        if "app_package_name" in push_config:
-            self.app_package_name = push_config["app_package_name"]
+        self.server_auth_token = push_config.get("PUSH_SERVER_AUTH_TOKEN")
+        if "PUSH_APP_PACKAGE_NAME" in push_config:
+            self.app_package_name = push_config.get("PUSH_APP_PACKAGE_NAME")
         else:
             logging.warning("No package name specified, defaulting to embase")
             self.app_package_name = "edu.berkeley.eecs.embase"
-        self.is_fcm_format = push_config["ios_token_format"] == "fcm"
+        self.is_fcm_format = push_config.get("PUSH_IOS_TOKEN_FORMAT") == "fcm"
 
     def get_and_invalidate_entries(self):
         # Need to figure out how to do this on firebase
diff --git a/emission/tests/common.py b/emission/tests/common.py
index 122b2fc7f..baae6053c 100644
--- a/emission/tests/common.py
+++ b/emission/tests/common.py
@@ -10,6 +10,7 @@
 import logging
 from datetime import datetime, timedelta
 import json
+import os
 import emission.storage.json_wrappers as esj
 import uuid
 import pymongo
@@ -171,6 +172,13 @@ def setupIncomingEntries():
 
     return (entry_list, ios_entry_list)
 
+def restoreOriginalEnvVars(originalEnvVars, modifiedEnvVars):
+    for env_var_name, env_var_value in modifiedEnvVars.items():
+        del os.environ[env_var_name]
+    # Restoring original environment variables
+    for env_var_name, env_var_value in originalEnvVars.items():
+        os.environ[env_var_name] = env_var_value
+
 def runIntakePipeline(uuid):
     # Move these imports here so that we don't inadvertently load the modules,
     # and any related config modules, before we want to
@@ -259,7 +267,7 @@ def set_analysis_config(key, value):
     import shutil
     analysis_conf_path = "conf/analysis/debug.conf.json"
-    shutil.copyfile("%s.sample" % analysis_conf_path,
+    shutil.copyfile("conf/analysis/debug.conf.dev.json",
                     analysis_conf_path)
     with open(analysis_conf_path) as fd:
         curr_config = json.load(fd)
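Round-trip usage of the new `restoreOriginalEnvVars` helper, mirroring how TestMongodbAuth and TestWebserver call it: snapshot the environment, apply the overrides, then hand both dicts back in `tearDown`. The override value below is a placeholder.

```python
import os
import emission.tests.common as etc

original_env = dict(os.environ)                    # snapshot before modification
modified_env = {"DB_HOST": "mongodb://localhost"}  # placeholder override
for name, value in modified_env.items():
    os.environ[name] = value

# ... exercise code that reads DB_HOST ...

# deletes the overrides, then re-applies the snapshot
etc.restoreOriginalEnvVars(original_env, modified_env)
```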
logging.debug("Current values are %s" % json.load(fd)) + self.originalPushEnvVars = {} + self.testModifiedEnvVars = { + 'PUSH_PROVIDER' : "firebase", + 'PUSH_SERVER_AUTH_TOKEN' : "firebase_api_key", + 'PUSH_IOS_TOKEN_FORMAT' : "apns" + } + + for env_var_name, env_var_value in self.testModifiedEnvVars.items(): + if os.getenv(env_var_name) is not None: + # Storing original push environment variables before modification + self.originalPushEnvVars[env_var_name] = os.getenv(env_var_name) + # Setting push environment variables with test values + os.environ[env_var_name] = env_var_value + + logging.debug("Finished setting up test push environment variables") + logging.debug("Current original values are = %s" % self.originalPushEnvVars) + logging.debug("Current modified values are = %s" % self.testModifiedEnvVars) def tearDown(self): - os.remove(self.push_conf_path) + logging.debug("Deleting test push environment variables") + for env_var_name, env_var_value in self.testModifiedEnvVars.items(): + del os.environ[env_var_name] + # Restoring original push environment variables + for env_var_name, env_var_value in self.originalPushEnvVars.items(): + os.environ[env_var_name] = env_var_value + logging.debug("Finished restoring original push environment variables") + logging.debug("Restored original values are = %s" % self.originalPushEnvVars) def testGetInterface(self): import emission.net.ext_service.push.notify_interface as pni @@ -110,7 +121,7 @@ def testFcmMapping(self): logging.debug("test token map = %s" % self.test_token_map) try: - fcm_instance = pnif.get_interface({"server_auth_token": "firebase_api_key", "ios_token_format": "apns"}) + fcm_instance = pnif.get_interface({"PUSH_SERVER_AUTH_TOKEN": "firebase_api_key", "PUSH_IOS_TOKEN_FORMAT": "apns"}) (mapped_token_map, unmapped_token_list) = fcm_instance.map_existing_fcm_tokens(self.test_token_map) # At this point, there is nothing in the database, so no iOS tokens will be mapped self.assertEqual(len(mapped_token_map["ios"]), 0) @@ -165,7 +176,7 @@ def testFcmNoMapping(self): "android": self.test_token_list_android} logging.debug("test token map = %s" % self.test_token_map) - fcm_instance = pnif.get_interface({"server_auth_token": "firebase_api_key", "ios_token_format": "fcm"}) + fcm_instance = pnif.get_interface({"PUSH_SERVER_AUTH_TOKEN": "firebase_api_key", "PUSH_IOS_TOKEN_FORMAT": "fcm"}) (mapped_token_map, unmapped_token_list) = fcm_instance.map_existing_fcm_tokens(self.test_token_map) # These are assumed to be FCM tokens directly, so no mapping required self.assertEqual(len(mapped_token_map["ios"]), 10) diff --git a/emission/tests/netTests/TestWebserver.py b/emission/tests/netTests/TestWebserver.py index 4316365df..fc91e5ab1 100644 --- a/emission/tests/netTests/TestWebserver.py +++ b/emission/tests/netTests/TestWebserver.py @@ -23,39 +23,27 @@ class TestWebserver(unittest.TestCase): def setUp(self): - import shutil - - self.webserver_conf_path = "conf/net/api/webserver.conf" - shutil.copyfile( - "%s.sample" % self.webserver_conf_path, self.webserver_conf_path - ) - with open(self.webserver_conf_path, "w") as fd: - fd.write( - json.dumps( - { - "paths": { - "static_path": "webapp/www", - "python_path": "main", - "log_base_dir": ".", - "log_file": "debug.log", - "404_redirect": "http://somewhere.else", - }, - "server": { - "host": "0.0.0.0", - "port": "8080", - "timeout": "3600", - "auth": "skip", - "aggregate_call_auth": "no_auth", - }, - } - ) - ) - logging.debug("Finished setting up %s" % self.webserver_conf_path) - with 
diff --git a/emission/tests/netTests/TestWebserver.py b/emission/tests/netTests/TestWebserver.py
index 4316365df..fc91e5ab1 100644
--- a/emission/tests/netTests/TestWebserver.py
+++ b/emission/tests/netTests/TestWebserver.py
@@ -23,39 +23,27 @@
 class TestWebserver(unittest.TestCase):
     def setUp(self):
-        import shutil
-
-        self.webserver_conf_path = "conf/net/api/webserver.conf"
-        shutil.copyfile(
-            "%s.sample" % self.webserver_conf_path, self.webserver_conf_path
-        )
-        with open(self.webserver_conf_path, "w") as fd:
-            fd.write(
-                json.dumps(
-                    {
-                        "paths": {
-                            "static_path": "webapp/www",
-                            "python_path": "main",
-                            "log_base_dir": ".",
-                            "log_file": "debug.log",
-                            "404_redirect": "http://somewhere.else",
-                        },
-                        "server": {
-                            "host": "0.0.0.0",
-                            "port": "8080",
-                            "timeout": "3600",
-                            "auth": "skip",
-                            "aggregate_call_auth": "no_auth",
-                        },
-                    }
-                )
-            )
-        logging.debug("Finished setting up %s" % self.webserver_conf_path)
-        with open(self.webserver_conf_path) as fd:
-            logging.debug("Current values are %s" % json.load(fd))
+        self.originalWebserverEnvVars = {}
+        self.testModifiedEnvVars = {
+            'WEBSERVER_NOT_FOUND_REDIRECT' : "http://somewhere.else"
+        }
+
+        self.originalWebserverEnvVars = dict(os.environ)
+
+        for env_var_name, env_var_value in self.testModifiedEnvVars.items():
+            # Setting webserver environment variables with test values
+            os.environ[env_var_name] = env_var_value
+
+        logging.debug("Finished setting up test webserver environment variables")
+        logging.debug("Current original values are = %s" % self.originalWebserverEnvVars)
+        logging.debug("Current modified values are = %s" % self.testModifiedEnvVars)
 
     def tearDown(self):
-        os.remove(self.webserver_conf_path)
+        logging.debug("Deleting test webserver environment variables")
+        etc.restoreOriginalEnvVars(self.originalWebserverEnvVars,
+                                   self.testModifiedEnvVars)
+        logging.debug("Finished restoring original webserver environment variables")
+        logging.debug("Restored original values are = %s" % self.originalWebserverEnvVars)
 
     def test404Redirect(self):
         from emission.net.api.bottle import response
diff --git a/emission/tests/storageTests/TestTokenQueries.py b/emission/tests/storageTests/TestTokenQueries.py
index 23f57ae23..0200ca694 100644
--- a/emission/tests/storageTests/TestTokenQueries.py
+++ b/emission/tests/storageTests/TestTokenQueries.py
@@ -2,11 +2,13 @@
 import logging
 import uuid
 import json
+import os
 
 #changed all script runs from os() to subprocess.run() for consistency
 #TODO clean up commented out os() lines
 # import os
 import subprocess
+import importlib
 
 import emission.core.get_database as edb
 
@@ -15,6 +17,16 @@
 
 class TestTokenQueries(unittest.TestCase):
 
+    def setUp(self):
+        # Delete irrelevant environment variables so that they don't mess up
+        # the expected comparison with the ground truth
+        # https://github.com/e-mission/e-mission-server/pull/961#issuecomment-2284668743
+        for var_name in list(os.environ.keys()):
+            if not var_name.startswith("DB") and \
+               var_name not in ["PATH", "PYTHONPATH"]:
+                logging.debug("Deleting environment variable %s with value %s" % (var_name, os.environ.get(var_name)))
+                del os.environ[var_name]
+        importlib.reload(edb)
 
     def tearDown(self):
         #All tests insert tokens of length one. Delete them once the test is done.
@@ -157,19 +169,23 @@ def test_run_script_show(self):
         esdt.insert({'token':'z'})
         sp = subprocess.run(["python3", "bin/auth/insert_tokens.py", "--show"], capture_output=True)
         # The first message is displayed when we run tests locally
-        # The second is displayed when we run in the docker CI, since the `DB_HOST` is set to `db`
+        # The second is displayed when we run in the CI/CD, but with the local install
+        # The third is displayed when we run in the docker CI since the `DB_HOST` is set to `db`
         self.assertIn(sp.stdout,
-            [b'storage not configured, falling back to sample, default configuration\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nx\ny\nz\n',
-            b'URL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nx\ny\nz\n'
+            [b'Retrieved config {\'DB_HOST\': \'localhost\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nx\ny\nz\n',
+            b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'PATH\': \'/home/runner/miniconda-23.5.2/envs/emissiontest/bin:/home/runner/miniconda-23.5.2/condabin:/snap/bin:/home/runner/.local/bin:/opt/pipx_bin:/home/runner/.cargo/bin:/home/runner/.config/composer/vendor/bin:/usr/local/.ghcup/bin:/home/runner/.dotnet/tools:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/home/runner/.dotnet/tools\', \'LC_CTYPE\': \'C.UTF-8\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nx\ny\nz\n',
+            b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'DB_HOST\': \'db\', \'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', \'LC_CTYPE\': \'C.UTF-8\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nx\ny\nz\n'
             ])
 
     def test_run_script_empty(self):
         sp = subprocess.run(["python3", "bin/auth/insert_tokens.py"], capture_output=True)
         # The first message is displayed when we run tests locally
-        # The second is displayed when we run in the docker CI, since the `DB_HOST` is set to `db`
+        # The second is displayed when we run in the CI/CD, but with the local install
+        # The third is displayed when we run in the docker CI since the `DB_HOST` is set to `db`
         self.assertIn(sp.stdout,
-            [b'storage not configured, falling back to sample, default configuration\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nPlease provide the script with an argument. Use the "--help" option for more details\n',
-            b'URL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nPlease provide the script with an argument. Use the "--help" option for more details\n'
+            [b'Retrieved config {\'DB_HOST\': \'localhost\', \'DB_RESULT_LIMIT\': 250000}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nPlease provide the script with an argument. Use the "--help" option for more details\n',
+            b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'PATH\': \'/home/runner/miniconda-23.5.2/envs/emissiontest/bin:/home/runner/miniconda-23.5.2/condabin:/snap/bin:/home/runner/.local/bin:/opt/pipx_bin:/home/runner/.cargo/bin:/home/runner/.config/composer/vendor/bin:/usr/local/.ghcup/bin:/home/runner/.dotnet/tools:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/home/runner/.dotnet/tools\', \'LC_CTYPE\': \'C.UTF-8\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nPlease provide the script with an argument. Use the "--help" option for more details\n',
+            b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'DB_HOST\': \'db\', \'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', \'LC_CTYPE\': \'C.UTF-8\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nPlease provide the script with an argument. Use the "--help" option for more details\n'
             ])
 
 #test that no two options can be used together
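The `importlib.reload(edb)` in `setUp` is needed because the module-level config in `get_database` is captured when the module is first imported; mutating `os.environ` afterwards has no effect until the module body is re-executed. The general pattern:

```python
import importlib
import os

import emission.core.get_database as edb  # config captured at first import

os.environ["DB_HOST"] = "localhost"  # later changes are not seen...
importlib.reload(edb)                # ...until the module is reloaded
```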
Use the "--help" option for more details\n', + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'PATH\': \'/home/runner/miniconda-23.5.2/envs/emissiontest/bin:/home/runner/miniconda-23.5.2/condabin:/snap/bin:/home/runner/.local/bin:/opt/pipx_bin:/home/runner/.cargo/bin:/home/runner/.config/composer/vendor/bin:/usr/local/.ghcup/bin:/home/runner/.dotnet/tools:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin:/home/runner/.dotnet/tools\', \'LC_CTYPE\': \'C.UTF-8\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL localhost\nPlease provide the script with an argument. Use the "--help" option for more details\n', + b'Config file not found, returning a copy of the environment variables instead...\nRetrieved config {\'PYTHONPATH\': \'.\', \'DB_HOST\': \'db\', \'PATH\': \'/root/miniconda-23.5.2/envs/emissiontest/bin:/root/miniconda-23.5.2/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\', \'LC_CTYPE\': \'C.UTF-8\'}\nURL not formatted, defaulting to "Stage_database"\nConnecting to database URL db\nPlease provide the script with an argument. Use the "--help" option for more details\n' ]) #test that no two options can be used together diff --git a/setup/tests/start_script.sh b/setup/tests/start_script.sh index b76478da9..2f4516ef9 100644 --- a/setup/tests/start_script.sh +++ b/setup/tests/start_script.sh @@ -4,13 +4,6 @@ cd /src/e-mission-server #set database URL using environment variable echo ${DB_HOST} -if [ -z ${DB_HOST} ] ; then - local_host=`hostname -i` - sed "s_localhost_${local_host}_" conf/storage/db.conf.sample > conf/storage/db.conf -else - sed "s_localhost_${DB_HOST}_" conf/storage/db.conf.sample > conf/storage/db.conf -fi -cat conf/storage/db.conf echo "Setting up conda..." source setup/setup_conda.sh Linux-x86_64