From 19c6f4aeeec3b1524606618ca42bdce8ee5c3136 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Wed, 16 Oct 2024 16:31:03 +0200 Subject: [PATCH 01/24] add decorator for deprecated locations --- src/actinia_core/endpoints.py | 8 +- .../rest/base/deprecated_locations.py | 101 ++++++++++++++++++ src/actinia_core/rest/base/resource_base.py | 9 +- 3 files changed, 115 insertions(+), 3 deletions(-) create mode 100644 src/actinia_core/rest/base/deprecated_locations.py diff --git a/src/actinia_core/endpoints.py b/src/actinia_core/endpoints.py index 1bc2ea1e8..620a11260 100644 --- a/src/actinia_core/endpoints.py +++ b/src/actinia_core/endpoints.py @@ -135,8 +135,12 @@ def create_core_endpoints(): - # Locationmanagement - flask_api.add_resource(ListLocationsResource, "/locations") + + # Project management + flask_api.add_resource(ListLocationsResource, "/locations", endpoint="/locations") + + # Deprectated locationmanagement + flask_api.add_resource(ListLocationsResource, "/projects", endpoint="/projects") flask_api.add_resource( LocationManagementResourceUser, "/locations//info", diff --git a/src/actinia_core/rest/base/deprecated_locations.py b/src/actinia_core/rest/base/deprecated_locations.py new file mode 100644 index 000000000..98db5e49d --- /dev/null +++ b/src/actinia_core/rest/base/deprecated_locations.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +####### +# actinia-core - an open source REST API for scalable, distributed, high +# performance processing of geographical data that uses GRASS GIS for +# computational tasks. For details, see https://actinia.mundialis.de/ +# +# Copyright (c) 2024 mundialis GmbH & Co. KG +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +####### + +""" +Deprecated location decorator +""" + + +from flask import jsonify, make_response, request +from functools import wraps + +from actinia_core.models.response_models import SimpleResponseModel +from actinia_core.version import G_VERSION + + +__license__ = "GPLv3" +__author__ = "Anika Weinmann" +__copyright__ = "Copyright 2024, mundialis GmbH & Co. KG" +__maintainer__ = "mundialis GmbH & Co. KG" + + +def location_deprecated_decorator(func): + """Add deprecation for location to headers. 
+ Requesting GRASS GIS version to add: + * deprecation warning inside headers for GRASS >= 8.4 + * permanently moved page error for GRASS >= 9.0 + * error if projects is used for GRASS < 8.4 + * else no addition to result + + Args: + func (function): The function to wrap + + Returns: + function: The decorator functions + """ + + @wraps(func) + def decorator(*args, **kwargs): + current_url = request.base_url + project_url = current_url.replace("locations", "projects") + # get current GRASS GIS version + grass_version_s = G_VERSION["version"] + grass_version = [int(item) for item in grass_version_s.split(".")[:2]] + if ( + "locations" in current_url and grass_version >= [8, 4] + and grass_version < [9, 0] + ): + # deprecation warning inside headers for GRASS >= 8.4 + result = func(*args, **kwargs) + result.headers.set("Deprecation", "With GRASS GIS 8.4") + result.headers.set("Sunset", "With GRASS GIS 9.0") + result.headers.set("Location", project_url) + elif "locations" in current_url and grass_version >= [9, 0]: + # permanently moved page error for GRASS >= 9.0 + result = make_response( + jsonify( + SimpleResponseModel( + status="error", + message=f"Moved Permanently to {project_url}.", + ) + ), + 301, + ) + elif "projects" in current_url and grass_version < [8, 4]: + # error if projects is used for GRASS < 8.4 + result = make_response( + jsonify( + SimpleResponseModel( + status="error", + message="Not Found. The requested URL is only " + "available from GRASS GIS version 8.4." + ) + ), + 404, + ) + else: + # else no addition to result + result = func(*args, **kwargs) + return result + + return decorator diff --git a/src/actinia_core/rest/base/resource_base.py b/src/actinia_core/rest/base/resource_base.py index 9c8436a5d..8756d29a1 100644 --- a/src/actinia_core/rest/base/resource_base.py +++ b/src/actinia_core/rest/base/resource_base.py @@ -33,6 +33,9 @@ from flask import request, g from flask.json import loads as json_loads from flask_restful_swagger_2 import Resource +from actinia_core.rest.base.deprecated_locations import ( + location_deprecated_decorator, +) from actinia_core.rest.base.user_auth import check_user_permissions from actinia_core.rest.base.user_auth import create_dummy_user from actinia_core.core.common.app import auth @@ -50,10 +53,11 @@ from actinia_core.rest.resource_streamer import RequestStreamerResource from actinia_core.rest.resource_management import ResourceManager + __license__ = "GPLv3" __author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. 
KG" ) __maintainer__ = "mundialis" @@ -71,6 +75,9 @@ class ResourceBase(Resource): decorators = [] + # Add decorators for deprecated GRASS GIS locations + decorators.append(location_deprecated_decorator) + if global_config.LOG_API_CALL is True: decorators.append(log_api_call) From 0b1e445e66f3c78105a8e90a2ff42259da44a3bd Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 17 Oct 2024 07:51:39 +0200 Subject: [PATCH 02/24] linting --- src/actinia_core/endpoints.py | 8 ++++++-- src/actinia_core/rest/base/deprecated_locations.py | 9 +++++---- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/src/actinia_core/endpoints.py b/src/actinia_core/endpoints.py index 620a11260..a52204d57 100644 --- a/src/actinia_core/endpoints.py +++ b/src/actinia_core/endpoints.py @@ -137,10 +137,14 @@ def create_core_endpoints(): # Project management - flask_api.add_resource(ListLocationsResource, "/locations", endpoint="/locations") + flask_api.add_resource( + ListLocationsResource, "/locations", endpoint="/locations" + ) # Deprectated locationmanagement - flask_api.add_resource(ListLocationsResource, "/projects", endpoint="/projects") + flask_api.add_resource( + ListLocationsResource, "/projects", endpoint="/projects" + ) flask_api.add_resource( LocationManagementResourceUser, "/locations//info", diff --git a/src/actinia_core/rest/base/deprecated_locations.py b/src/actinia_core/rest/base/deprecated_locations.py index 98db5e49d..765054086 100644 --- a/src/actinia_core/rest/base/deprecated_locations.py +++ b/src/actinia_core/rest/base/deprecated_locations.py @@ -62,9 +62,10 @@ def decorator(*args, **kwargs): grass_version_s = G_VERSION["version"] grass_version = [int(item) for item in grass_version_s.split(".")[:2]] if ( - "locations" in current_url and grass_version >= [8, 4] - and grass_version < [9, 0] - ): + "locations" in current_url + and grass_version >= [8, 4] + and grass_version < [9, 0] + ): # deprecation warning inside headers for GRASS >= 8.4 result = func(*args, **kwargs) result.headers.set("Deprecation", "With GRASS GIS 8.4") @@ -88,7 +89,7 @@ def decorator(*args, **kwargs): SimpleResponseModel( status="error", message="Not Found. The requested URL is only " - "available from GRASS GIS version 8.4." 
+ "available from GRASS GIS version 8.4.", ) ), 404, From a327f279c60a97d6d7ea82f3bec921c13992d51f Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 17 Oct 2024 11:41:45 +0200 Subject: [PATCH 03/24] add project endpoints --- docker/actinia-core-dev/endpoints.csv | 4 +- src/actinia_core/endpoints.py | 197 ++++++++++++++++++-------- 2 files changed, 143 insertions(+), 58 deletions(-) diff --git a/docker/actinia-core-dev/endpoints.csv b/docker/actinia-core-dev/endpoints.csv index ebcafdbec..b133bee81 100644 --- a/docker/actinia-core-dev/endpoints.csv +++ b/docker/actinia-core-dev/endpoints.csv @@ -1,5 +1,5 @@ -ListLocationsResource;GET -LocationManagementResourceUser;GET +ListProjectsResource;GET +ProjectManagementResourceUser;GET ListMapsetsResource;GET MapsetManagementResourceUser;GET RasterLayersResource;GET diff --git a/src/actinia_core/endpoints.py b/src/actinia_core/endpoints.py index a52204d57..b513771b0 100644 --- a/src/actinia_core/endpoints.py +++ b/src/actinia_core/endpoints.py @@ -32,11 +32,11 @@ from actinia_core.core.common.config import global_config from actinia_core.core.logging_interface import log from actinia_core.rest.location_management import ( - ListLocationsResource, - LocationManagementResourceUser, + ListProjectsResource, + ProjectManagementResourceUser, ) from actinia_core.rest.location_management import ( - LocationManagementResourceAdminUser, + ProjectManagementResourceAdminUser, ) from actinia_core.rest.mapsets import AllMapsetsListingResourceAdmin from actinia_core.rest.mapset_management import ( @@ -129,169 +129,254 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2021, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" -def create_core_endpoints(): +def create_project_endpoints(projects_url_part="projects"): + """Function to add resources with "project" inside the endpoint url. 
- # Project management - flask_api.add_resource( - ListLocationsResource, "/locations", endpoint="/locations" - ) + Args: + projects_url_part (str): The name of the projects inside the endpoint + URL; to add deprecated location endpoints set + it to "locations" + """ - # Deprectated locationmanagement + # Project management flask_api.add_resource( - ListLocationsResource, "/projects", endpoint="/projects" + ListProjectsResource, + f"/{projects_url_part}", + endpoint=f"/{projects_url_part}", ) flask_api.add_resource( - LocationManagementResourceUser, - "/locations//info", + ProjectManagementResourceUser, + f"/{projects_url_part}//info", + endpoint=f"/{projects_url_part}//info", ) flask_api.add_resource( - LocationManagementResourceAdminUser, - "/locations/", + ProjectManagementResourceAdminUser, + f"/{projects_url_part}/", + endpoint=f"/{projects_url_part}/", ) + # Mapset management flask_api.add_resource( - ListMapsetsResource, "/locations//mapsets" + ListMapsetsResource, + f"/{projects_url_part}//mapsets", + endpoint=f"/{projects_url_part}//mapsets", ) flask_api.add_resource( MapsetManagementResourceUser, - "/locations//mapsets//info", + f"/{projects_url_part}//mapsets/" + "/info", + endpoint=f"/{projects_url_part}//mapsets/" + "/info", ) flask_api.add_resource( MapsetManagementResourceAdmin, - "/locations//mapsets/", + f"/{projects_url_part}//mapsets/" + "", + endpoint=f"/{projects_url_part}//mapsets/" + "", ) flask_api.add_resource( MapsetLockManagementResource, - "/locations//mapsets//lock", + f"/{projects_url_part}//mapsets/" + "/lock", + endpoint=f"/{projects_url_part}//mapsets/" + "/lock", ) # Raster management flask_api.add_resource( RasterLayersResource, - "/locations//mapsets/" + f"/{projects_url_part}//mapsets/" + "/raster_layers", + endpoint=f"/{projects_url_part}//mapsets/" "/raster_layers", ) flask_api.add_resource( RasterLayerResource, - "/locations//mapsets/" + f"/{projects_url_part}//mapsets/" + "/raster_layers/", + endpoint=f"/{projects_url_part}//mapsets/" "/raster_layers/", ) flask_api.add_resource( SyncEphemeralRasterLegendResource, - "/locations//mapsets//" - "raster_layers//legend", + f"/{projects_url_part}//mapsets/" + "/raster_layers//legend", + endpoint=f"/{projects_url_part}//mapsets/" + "/raster_layers//legend", ) flask_api.add_resource( SyncPersistentRasterColorsResource, - "/locations//mapsets//" - "raster_layers//colors", + f"/{projects_url_part}//mapsets/" + "/raster_layers//colors", + endpoint=f"/{projects_url_part}//mapsets/" + "/raster_layers//colors", ) flask_api.add_resource( SyncEphemeralRasterRendererResource, - "/locations//mapsets//" - "raster_layers//render", + f"/{projects_url_part}//mapsets/" + "/raster_layers//render", + endpoint=f"/{projects_url_part}//mapsets/" + "/raster_layers//render", ) flask_api.add_resource( SyncEphemeralRasterRGBRendererResource, - "/locations//mapsets//" - "render_rgb", + f"/{projects_url_part}//mapsets/" + "/render_rgb", + endpoint=f"/{projects_url_part}//mapsets/" + "/render_rgb", ) flask_api.add_resource( SyncEphemeralRasterShapeRendererResource, - "/locations//mapsets//" - "render_shade", + f"/{projects_url_part}//mapsets/" + "/render_shade", + endpoint=f"/{projects_url_part}//mapsets/" + "/render_shade", ) # STRDS management flask_api.add_resource( SyncSTRDSListerResource, - "/locations//mapsets//strds", + f"/{projects_url_part}//mapsets/" + "/strds", + endpoint=f"/{projects_url_part}//mapsets/" + "/strds", ) flask_api.add_resource( STRDSManagementResource, - "/locations//mapsets//strds/" - "", + 
f"/{projects_url_part}//mapsets/" + "/strds/", + endpoint=f"/{projects_url_part}//mapsets/" + "/strds/", ) flask_api.add_resource( STRDSRasterManagement, - "/locations//mapsets/" + f"/{projects_url_part}//mapsets/" + "/strds//raster_layers", + endpoint=f"/{projects_url_part}//mapsets/" "/strds//raster_layers", ) # Vector management flask_api.add_resource( VectorLayersResource, - "/locations//mapsets/" + f"/{projects_url_part}//mapsets/" + "/vector_layers", + endpoint=f"/{projects_url_part}//mapsets/" "/vector_layers", ) flask_api.add_resource( VectorLayerResource, - "/locations//mapsets/" + f"/{projects_url_part}//mapsets/" + "/vector_layers/", + endpoint=f"/{projects_url_part}//mapsets/" "/vector_layers/", ) flask_api.add_resource( SyncEphemeralVectorRendererResource, - "/locations//mapsets//" - "vector_layers//render", + f"/{projects_url_part}//mapsets/" + "/vector_layers//render", + endpoint=f"/{projects_url_part}//mapsets/" + "/vector_layers//render", ) flask_api.add_resource( SyncEphemeralSTRDSRendererResource, - "/locations//mapsets//" - "strds//render", + f"/{projects_url_part}//mapsets/" + "/strds//render", + endpoint=f"/{projects_url_part}//mapsets/" + "/strds//render", ) # Validation flask_api.add_resource( AsyncProcessValidationResource, - "/locations//process_chain_validation_async", + f"/{projects_url_part}//" + "process_chain_validation_async", + endpoint=f"/{projects_url_part}//" + "process_chain_validation_async", ) flask_api.add_resource( SyncProcessValidationResource, - "/locations//process_chain_validation_sync", + f"/{projects_url_part}//" + "process_chain_validation_sync", + endpoint=f"/{projects_url_part}//" + "process_chain_validation_sync", ) + # Async processing - flask_api.add_resource( - AsyncEphemeralCustomResource, "/custom_process/" - ) flask_api.add_resource( AsyncEphemeralResource, - "/locations//processing_async", + f"/{projects_url_part}//processing_async", + endpoint=f"/{projects_url_part}//" + "processing_async", ) flask_api.add_resource( AsyncEphemeralExportResource, - "/locations//processing_async_export", + f"/{projects_url_part}//processing_async_export", + endpoint=f"/{projects_url_part}//" + "processing_async_export", ) flask_api.add_resource( AsyncEphemeralExportS3Resource, - "/locations//processing_async_export_s3", + f"/{projects_url_part}//" + "processing_async_export_s3", + endpoint=f"/{projects_url_part}//" + "processing_async_export_s3", ) flask_api.add_resource( AsyncEphemeralExportGCSResource, - "/locations//processing_async_export_gcs", + f"/{projects_url_part}//" + "processing_async_export_gcs", + endpoint=f"/{projects_url_part}//" + "processing_async_export_gcs", ) flask_api.add_resource( AsyncPersistentResource, - "/locations//mapsets/" + f"/{projects_url_part}//mapsets/" + "/processing_async", + endpoint=f"/{projects_url_part}//mapsets/" "/processing_async", ) flask_api.add_resource( AsyncPersistentMapsetMergerResource, - "/locations//mapsets/" + f"/{projects_url_part}//mapsets/" + "/merging_async", + endpoint=f"/{projects_url_part}//mapsets/" "/merging_async", ) flask_api.add_resource( AsyncEphemeralRasterLayerExporterResource, - "/locations//mapsets//" - "raster_layers//geotiff_async", + f"/{projects_url_part}//mapsets/" + "/raster_layers//" + "geotiff_async", + endpoint=f"/{projects_url_part}//mapsets/" + "/raster_layers//" + "geotiff_async", ) flask_api.add_resource( AsyncEphemeralRasterLayerRegionExporterResource, - "/locations//mapsets/" - "/raster_layers//geotiff_async_orig", + f"/{projects_url_part}//mapsets/" + 
"/raster_layers//" + "geotiff_async_orig", + endpoint=f"/{projects_url_part}//mapsets/" + "/raster_layers//" + "geotiff_async_orig", + ) + +def create_core_endpoints(): + + # Endpoints for project/location, mapset, raster, STRDS, vector management + # and processing including validation + create_project_endpoints() + create_project_endpoints(projects_url_part="locations") + + # Async processing + flask_api.add_resource( + AsyncEphemeralCustomResource, "/custom_process/" ) # all mapsets across all locations listing From 3ea49a950b3cc98757412f0ecc7dbc11aec0ebd7 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 17 Oct 2024 11:48:08 +0200 Subject: [PATCH 04/24] endpoints for projects --- src/actinia_core/endpoints.py | 1 + src/actinia_core/rest/location_management.py | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/actinia_core/endpoints.py b/src/actinia_core/endpoints.py index b513771b0..623f6a736 100644 --- a/src/actinia_core/endpoints.py +++ b/src/actinia_core/endpoints.py @@ -367,6 +367,7 @@ def create_project_endpoints(projects_url_part="projects"): "geotiff_async_orig", ) + def create_core_endpoints(): # Endpoints for project/location, mapset, raster, STRDS, vector management diff --git a/src/actinia_core/rest/location_management.py b/src/actinia_core/rest/location_management.py index 56bd9c633..0334af414 100644 --- a/src/actinia_core/rest/location_management.py +++ b/src/actinia_core/rest/location_management.py @@ -62,7 +62,7 @@ __maintainer__ = "mundialis" -class ListLocationsResource(ResourceBase): +class ListProjectsResource(ResourceBase): """This resource represents GRASS GIS database directory that contains locations. """ @@ -142,7 +142,7 @@ def get(self): ) -class LocationManagementResourceUser(ResourceBase): +class ProjectManagementResourceUser(ResourceBase): """This class returns information about a specific location""" def __init__(self): @@ -175,7 +175,7 @@ def get(self, location_name): return make_response(jsonify(response_model), http_code) -class LocationManagementResourceAdminUser(ResourceBase): +class ProjectManagementResourceAdminUser(ResourceBase): """This class manages the creation, deletion and modification of locations This is only allowed for administrators and users From 32e9034c278be2b4815edc2658a95b39bec9b927 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 17 Oct 2024 13:27:21 +0200 Subject: [PATCH 05/24] rename location --- src/actinia_core/core/common/process_queue.py | 10 +- src/actinia_core/core/common/user_base.py | 46 ++--- src/actinia_core/core/grass_init.py | 32 ++-- .../core/resource_data_container.py | 16 +- src/actinia_core/endpoints.py | 128 +++++++------ .../ephemeral/ephemeral_custom_processing.py | 10 +- .../ephemeral/persistent_processing.py | 90 ++++----- .../ephemeral_processing.py | 174 +++++++++--------- .../persistent/location_management.py | 23 +-- .../persistent/mapset_management.py | 22 +-- .../persistent/persistent_mapset_merger.py | 10 +- src/actinia_core/rest/base/resource_base.py | 8 +- src/actinia_core/rest/base/user_auth.py | 62 +++---- src/actinia_core/rest/ephemeral_processing.py | 10 +- .../rest/ephemeral_processing_with_export.py | 24 +-- src/actinia_core/rest/map_layer_management.py | 60 +++--- src/actinia_core/rest/mapset_management.py | 52 +++--- .../rest/persistent_mapset_merger.py | 12 +- .../rest/persistent_processing.py | 10 +- src/actinia_core/rest/process_validation.py | 14 +- src/actinia_core/rest/raster_colors.py | 18 +- src/actinia_core/rest/raster_export.py | 18 +- 
src/actinia_core/rest/raster_layer.py | 16 +- src/actinia_core/rest/raster_legend.py | 10 +- src/actinia_core/rest/raster_renderer.py | 18 +- src/actinia_core/rest/resource_management.py | 19 +- src/actinia_core/rest/strds_management.py | 34 ++-- .../rest/strds_raster_management.py | 22 +-- src/actinia_core/rest/strds_renderer.py | 10 +- src/actinia_core/rest/vector_layer.py | 16 +- src/actinia_core/rest/vector_renderer.py | 10 +- src/actinia_core/testsuite.py | 32 ++-- tests/test_process_queue.py | 8 +- tests/test_raster_import_pixellimit.py | 14 +- tests/test_raster_upload.py | 16 +- tests/test_strds_management.py | 38 ++-- tests/test_strds_raster_management.py | 44 ++--- tests/test_strds_raster_renderer.py | 38 ++-- tests/test_vector_upload.py | 22 +-- 39 files changed, 608 insertions(+), 608 deletions(-) diff --git a/src/actinia_core/core/common/process_queue.py b/src/actinia_core/core/common/process_queue.py index 29affd7c6..5fe52e0cc 100644 --- a/src/actinia_core/core/common/process_queue.py +++ b/src/actinia_core/core/common/process_queue.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -55,7 +55,7 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika" +__author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = ( "Copyright 2016-present, Sören Gebbert and mundialis GmbH & Co. KG" ) @@ -102,7 +102,7 @@ def enqueue_job(timeout, func, *args): # # for debugging in ephemeral_processing.py (see also grass_init.py) # # only uncomment ONE of the following endpoints: - # # for '/locations//processing_async' + # # for '/projects//processing_async' # from ...rest.ephemeral_processing import \ # AsyncEphemeralResource # from ...processing.common.ephemeral_processing import start_job @@ -111,13 +111,13 @@ def enqueue_job(timeout, func, *args): # processing = EphemeralProcessing(*args) # processing.run() - # # for '/locations//processing_async_export' + # # for '/projects//processing_async_export' # from ...processing.actinia_processing.ephemeral.\ # ephemeral_processing_with_export import EphemeralProcessingWithExport # processing = EphemeralProcessingWithExport(*args) # processing.run() - # # for /locations/{location_name}/mapsets/{mapset_name}/processing_async + # # for /projects/{project_name}/mapsets/{mapset_name}/processing_async # from ...processing.actinia_processing.ephemeral.persistent_processing \ # import PersistentProcessing # processing = PersistentProcessing(*args) diff --git a/src/actinia_core/core/common/user_base.py b/src/actinia_core/core/common/user_base.py index 6d231f5a3..099488cc0 100644 --- a/src/actinia_core/core/common/user_base.py +++ b/src/actinia_core/core/common/user_base.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -29,7 +29,7 @@ __author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis GmbH & Co. KG" @@ -77,7 +77,7 @@ def __init__( unique user_group (str): The group of the user user_role (str): The user role (superadmin, admin, user, guest) - accessible_datasets (dict): Dict of location:mapset lists + accessible_datasets (dict): Dict of project:mapset lists accessible_modules (list): A list of modules that are allowed to use cell_limit (int): Maximum number of cells to process @@ -187,19 +187,19 @@ def set_accessible_datasets(self, accessible_datasets): """ self.accessible_datasets = accessible_datasets - def add_accessible_dataset(self, location_name, mapset_list): + def add_accessible_dataset(self, project_name, mapset_list): """Add a dataset to the accessible datasets If the dataset exists, the mapsets will be extended by the provided list Args: - location_name (str): Location name + project_name (str): Location name mapset_list (list): List of mapset names Example:: - location_name="nc_spm_08" + project_name="nc_spm_08" mapset_list = ["PERMANENT", "user1", @@ -207,47 +207,47 @@ def add_accessible_dataset(self, location_name, mapset_list): """ - if location_name not in self.accessible_datasets: - self.accessible_datasets[location_name] = mapset_list + if project_name not in self.accessible_datasets: + self.accessible_datasets[project_name] = mapset_list else: for mapset in mapset_list: - if mapset not in self.accessible_datasets[location_name]: - self.accessible_datasets[location_name].append(mapset) + if mapset not in self.accessible_datasets[project_name]: + self.accessible_datasets[project_name].append(mapset) - def remove_mapsets_from_location(self, location_name, mapset_list): - """Remove mapsets from an existing location + def remove_mapsets_from_project(self, project_name, mapset_list): + """Remove mapsets from an existing project Args: - location_name (str): Location name + project_name (str): Location name mapset_list (list): List of mapset names that should be removed Example:: - location_name="nc_spm_08" + project_name="nc_spm_08" mapset_list = ["landsat",] """ - if location_name in self.accessible_datasets: + if project_name in self.accessible_datasets: for mapset in mapset_list: - if mapset in self.accessible_datasets[location_name]: - self.accessible_datasets[location_name].remove(mapset) + if mapset in self.accessible_datasets[project_name]: + self.accessible_datasets[project_name].remove(mapset) - def remove_location(self, location_name): - """Remove a location from the accessible datasets + def remove_location(self, project_name): + """Remove a project from the accessible datasets Args: - location_name (str): Location name + project_name (str): Location name Example:: - location_name="nc_spm_08" + project_name="nc_spm_08" """ - if location_name in self.accessible_datasets: - self.accessible_datasets.pop(location_name) + if project_name in self.accessible_datasets: + self.accessible_datasets.pop(project_name) def set_accessible_modules(self, accessible_modules): """Set the accessible modules diff --git a/src/actinia_core/core/grass_init.py b/src/actinia_core/core/grass_init.py index ff3bfbb7f..074d0f931 100644 --- a/src/actinia_core/core/grass_init.py +++ 
b/src/actinia_core/core/grass_init.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -34,9 +34,9 @@ from .messages_logger import MessageLogger __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2023, Sören Gebbert, Anika Weinmann and mundialis GmbH & " + "Copyright 2016-2024, Sören Gebbert, Anika Weinmann and mundialis GmbH & " "Co. KG" ) __maintainer__ = "mundialis" @@ -190,9 +190,9 @@ class GrassGisRC(ProcessLogging): to allow map registering from none-current mapsets """ - def __init__(self, gisdbase, location_name, mapset): + def __init__(self, gisdbase, project_name, mapset): ProcessLogging.__init__(self) - self.location_name = location_name + self.project_name = project_name self.mapset = mapset self.gisdbase = gisdbase self.__gisrc_ile = "" @@ -223,7 +223,7 @@ def print_gisrc(self): def __write(self): try: gisrc = open(self.__gisrc_ile, "w") - gisrc.write("LOCATION_NAME: %s\n" % self.location_name) + gisrc.write("project_name: %s\n" % self.project_name) gisrc.write("MAPSET: %s\n" % self.mapset) gisrc.write("DIGITIZER: none\n") gisrc.write("GISDBASE: %s\n" % self.gisdbase) @@ -246,12 +246,12 @@ class GrassWindFile(ProcessLogging): """This class takes care of the correct creation of grass WIND and DEFAULT_WIND files using a dummy region""" - def __init__(self, gisdbase, location, mapset): + def __init__(self, gisdbase, project, mapset): """ Args: gisdbase (str): The GRASS database - location (str): The location name + project (str): The project name mapset (str): The name of the mapset Raises: @@ -267,11 +267,11 @@ def __init__(self, gisdbase, location, mapset): # If PERMANENT is used as mapset, the DEFAULT_WIND file will be # created too self.__windFile = os.path.join( - gisdbase, location, mapset, "DEFAULT_WIND" + gisdbase, project, mapset, "DEFAULT_WIND" ) self.__write() - self.__windFile = os.path.join(gisdbase, location, mapset, "WIND") + self.__windFile = os.path.join(gisdbase, project, mapset, "WIND") self.__write() try: @@ -470,7 +470,7 @@ def __init__( self, grass_data_base, grass_base_dir, - location_name, + project_name, mapset_name, grass_addon_path="", config=None, @@ -484,9 +484,9 @@ def __init__( Args: grass_data_base (str): GRASS GIS database root directory that - contains locations + contains projects grass_base_dir (str): The installation directory of GRASS GIS - location_name (str): The name of the location to work in + project_name (str): The name of the project to work in mapset_name (str): The name of the mapset to work in grass_addon_path (str): The path to GRASS GIS addons @@ -497,7 +497,7 @@ def __init__( self.gisrc_path = None self.grass_data_base = grass_data_base self.grass_base_dir = grass_base_dir - self.location_name = location_name + self.project_name = project_name self.mapset_name = mapset_name self.grass_addon_path = grass_addon_path self.has_temp_region = False @@ -510,7 +510,7 @@ def initialize(self): self.gisrc_path = tempfile.mkdtemp() self.mapset_path = os.path.join( - self.grass_data_base, self.location_name, self.mapset_name + self.grass_data_base, self.project_name, 
self.mapset_name ) # Generate a temporary region name @@ -522,7 +522,7 @@ def initialize(self): ) self.gisrc = GrassGisRC( - self.grass_data_base, self.location_name, self.mapset_name + self.grass_data_base, self.project_name, self.mapset_name ) self.gisrc.write(self.gisrc_path) diff --git a/src/actinia_core/core/resource_data_container.py b/src/actinia_core/core/resource_data_container.py index e00172884..61cb04319 100644 --- a/src/actinia_core/core/resource_data_container.py +++ b/src/actinia_core/core/resource_data_container.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -29,9 +29,9 @@ from .storage_interface_gcs import ResourceStorageGCS __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "Sören Gebbert" __email__ = "soerengebbert@googlemail.com" @@ -59,7 +59,7 @@ def __init__( orig_datetime, user_credentials, config, - location_name, + project_name, mapset_name, map_name, ): @@ -67,9 +67,9 @@ def __init__( Args: grass_data_base (str): GRASS GIS database root directory that - contains global locations + contains global projects grass_user_data_base (str): GRASS GIS database user directory that - contains group specific locations + contains group specific projects grass_base_dir (str): The installation directory of GRASS GIS request_data (dict): The module chain dictionary user_id (str): The user id @@ -84,7 +84,7 @@ def __init__( user_credentials (dict): The user credentials dict config (actinia_core.core.config.Configuration): The actinia configuration - location_name (str): The name of the location to work in + project_name (str): The name of the project to work in mapset_name (str): The name of the target mapset in which the computation should be performed map_name: The name of the map or other resource (raster, vector, @@ -105,7 +105,7 @@ def __init__( self.resource_url_base = resource_url_base self.user_credentials = user_credentials self.config = config - self.location_name = location_name + self.project_name = project_name self.mapset_name = mapset_name self.map_name = map_name self.orig_time = orig_time diff --git a/src/actinia_core/endpoints.py b/src/actinia_core/endpoints.py index 623f6a736..dc61f1bda 100644 --- a/src/actinia_core/endpoints.py +++ b/src/actinia_core/endpoints.py @@ -5,7 +5,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -33,10 +33,8 @@ from actinia_core.core.logging_interface import log from actinia_core.rest.location_management import ( ListProjectsResource, - ProjectManagementResourceUser, -) -from actinia_core.rest.location_management import ( ProjectManagementResourceAdminUser, + ProjectManagementResourceUser, ) from actinia_core.rest.mapsets import AllMapsetsListingResourceAdmin from actinia_core.rest.mapset_management import ( @@ -151,218 +149,218 @@ def create_project_endpoints(projects_url_part="projects"): ) flask_api.add_resource( ProjectManagementResourceUser, - f"/{projects_url_part}//info", - endpoint=f"/{projects_url_part}//info", + f"/{projects_url_part}//info", + endpoint=f"/{projects_url_part}//info", ) flask_api.add_resource( ProjectManagementResourceAdminUser, - f"/{projects_url_part}/", - endpoint=f"/{projects_url_part}/", + f"/{projects_url_part}/", + endpoint=f"/{projects_url_part}/", ) # Mapset management flask_api.add_resource( ListMapsetsResource, - f"/{projects_url_part}//mapsets", - endpoint=f"/{projects_url_part}//mapsets", + f"/{projects_url_part}//mapsets", + endpoint=f"/{projects_url_part}//mapsets", ) flask_api.add_resource( MapsetManagementResourceUser, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/info", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/info", ) flask_api.add_resource( MapsetManagementResourceAdmin, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "", ) flask_api.add_resource( MapsetLockManagementResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/lock", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/lock", ) # Raster management flask_api.add_resource( RasterLayersResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/raster_layers", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/raster_layers", ) flask_api.add_resource( RasterLayerResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/raster_layers/", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/raster_layers/", ) flask_api.add_resource( SyncEphemeralRasterLegendResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/raster_layers//legend", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/raster_layers//legend", ) flask_api.add_resource( SyncPersistentRasterColorsResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/raster_layers//colors", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/raster_layers//colors", ) flask_api.add_resource( SyncEphemeralRasterRendererResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/raster_layers//render", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/raster_layers//render", ) flask_api.add_resource( SyncEphemeralRasterRGBRendererResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/render_rgb", - endpoint=f"/{projects_url_part}//mapsets/" + 
endpoint=f"/{projects_url_part}//mapsets/" "/render_rgb", ) flask_api.add_resource( SyncEphemeralRasterShapeRendererResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/render_shade", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/render_shade", ) # STRDS management flask_api.add_resource( SyncSTRDSListerResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/strds", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/strds", ) flask_api.add_resource( STRDSManagementResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/strds/", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/strds/", ) flask_api.add_resource( STRDSRasterManagement, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/strds//raster_layers", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/strds//raster_layers", ) # Vector management flask_api.add_resource( VectorLayersResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/vector_layers", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/vector_layers", ) flask_api.add_resource( VectorLayerResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/vector_layers/", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/vector_layers/", ) flask_api.add_resource( SyncEphemeralVectorRendererResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/vector_layers//render", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/vector_layers//render", ) flask_api.add_resource( SyncEphemeralSTRDSRendererResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/strds//render", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/strds//render", ) # Validation flask_api.add_resource( AsyncProcessValidationResource, - f"/{projects_url_part}//" + f"/{projects_url_part}//" "process_chain_validation_async", - endpoint=f"/{projects_url_part}//" + endpoint=f"/{projects_url_part}//" "process_chain_validation_async", ) flask_api.add_resource( SyncProcessValidationResource, - f"/{projects_url_part}//" + f"/{projects_url_part}//" "process_chain_validation_sync", - endpoint=f"/{projects_url_part}//" + endpoint=f"/{projects_url_part}//" "process_chain_validation_sync", ) # Async processing flask_api.add_resource( AsyncEphemeralResource, - f"/{projects_url_part}//processing_async", - endpoint=f"/{projects_url_part}//" + f"/{projects_url_part}//processing_async", + endpoint=f"/{projects_url_part}//" "processing_async", ) flask_api.add_resource( AsyncEphemeralExportResource, - f"/{projects_url_part}//processing_async_export", - endpoint=f"/{projects_url_part}//" + f"/{projects_url_part}//processing_async_export", + endpoint=f"/{projects_url_part}//" "processing_async_export", ) flask_api.add_resource( AsyncEphemeralExportS3Resource, - f"/{projects_url_part}//" + f"/{projects_url_part}//" "processing_async_export_s3", - endpoint=f"/{projects_url_part}//" + endpoint=f"/{projects_url_part}//" "processing_async_export_s3", ) flask_api.add_resource( AsyncEphemeralExportGCSResource, - f"/{projects_url_part}//" + f"/{projects_url_part}//" "processing_async_export_gcs", - 
endpoint=f"/{projects_url_part}//" + endpoint=f"/{projects_url_part}//" "processing_async_export_gcs", ) flask_api.add_resource( AsyncPersistentResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/processing_async", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/processing_async", ) flask_api.add_resource( AsyncPersistentMapsetMergerResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/merging_async", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/merging_async", ) flask_api.add_resource( AsyncEphemeralRasterLayerExporterResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/raster_layers//" "geotiff_async", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/raster_layers//" "geotiff_async", ) flask_api.add_resource( AsyncEphemeralRasterLayerRegionExporterResource, - f"/{projects_url_part}//mapsets/" + f"/{projects_url_part}//mapsets/" "/raster_layers//" "geotiff_async_orig", - endpoint=f"/{projects_url_part}//mapsets/" + endpoint=f"/{projects_url_part}//mapsets/" "/raster_layers//" "geotiff_async_orig", ) @@ -380,7 +378,7 @@ def create_core_endpoints(): AsyncEphemeralCustomResource, "/custom_process/" ) - # all mapsets across all locations listing + # all mapsets across all projects listing flask_api.add_resource(AllMapsetsListingResourceAdmin, "/mapsets") # User management diff --git a/src/actinia_core/processing/actinia_processing/ephemeral/ephemeral_custom_processing.py b/src/actinia_core/processing/actinia_processing/ephemeral/ephemeral_custom_processing.py index c05ecdd16..212a0d35b 100644 --- a/src/actinia_core/processing/actinia_processing/ephemeral/ephemeral_custom_processing.py +++ b/src/actinia_core/processing/actinia_processing/ephemeral/ephemeral_custom_processing.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -29,14 +29,14 @@ ) from actinia_core.core.common.exceptions import AsyncProcessError from actinia_core.rest.base.user_auth import ( - check_location_mapset_module_access, + check_project_mapset_module_access, ) from actinia_core.core.common.process_object import Process __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. 
KG" ) __maintainer__ = "mundialis" @@ -68,7 +68,7 @@ def _execute(self, skip_permission_check=False): self._setup(init_grass=False) # Check if the user has access to the required process executable - resp = check_location_mapset_module_access( + resp = check_project_mapset_module_access( self.user_credentials, self.config, module_name=self.executable ) diff --git a/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py b/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py index bb510c9bf..e0185fe5d 100644 --- a/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py +++ b/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -38,7 +38,7 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Guido Riembauer, Anika Weinmann, Lina Krisztian" __copyright__ = ( - "Copyright 2016-2023, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -48,7 +48,7 @@ class PersistentProcessing(EphemeralProcessing): This class is designed to run GRASS modules that are specified in a process chain in a temporary mapset that later on is copied into the - original location or merged into an existing mapset. + original project or merged into an existing mapset. Locking concept: @@ -69,7 +69,7 @@ class PersistentProcessing(EphemeralProcessing): If target mapset does not exists: - After processing finished successfully, copy the - temporary mapset to the original user group specific location using + temporary mapset to the original user group specific project using the target mapset name - Unlock the two mapsets after processing is finished, terminated or raised an error @@ -96,26 +96,26 @@ def __init__(self, rdc): # We have two mapset lock ids. The target mapset and the temporary # mapset self.target_mapset_lock_id = self._generate_mapset_lock_id( - self.user_group, self.location_name, self.target_mapset_name + self.user_group, self.project_name, self.target_mapset_name ) self.temp_mapset_lock_id = self._generate_mapset_lock_id( - self.user_group, self.location_name, self.temp_mapset_name + self.user_group, self.project_name, self.temp_mapset_name ) self.temp_mapset_lock_set = False - def _generate_mapset_lock_id(self, user_group, location_name, mapset_name): + def _generate_mapset_lock_id(self, user_group, project_name, mapset_name): """Generate a unique id to lock a mapset in the redis database Locations are user group specific. Hence different user groups may have - locations with the same names and with equal mapset names. + projects with the same names and with equal mapset names. - In the same user group, a location/mapset must be locked to grant + In the same user group, a project/mapset must be locked to grant exclusive access rights. 
Args: user_group: The user group used for locking - location_name: The location name in which the mapset is located + project_name: The project name in which the mapset is located for locking mapset_name: The mapset name that should be locked @@ -123,7 +123,7 @@ def _generate_mapset_lock_id(self, user_group, location_name, mapset_name): The lock id """ - return "%s/%s/%s" % (user_group, location_name, mapset_name) + return "%s/%s/%s" % (user_group, project_name, mapset_name) def _lock_temp_mapset(self): """Lock the temporary mapset @@ -153,9 +153,9 @@ def _check_mapset(self, mapset): """Check if the target mapset exists This method will check if the target mapset exists in the global and - user group locations. + user group projects. If the mapset is in the global database, then an AsyncProcessError - will be raised, since global location/mapsets can not be modified. + will be raised, since global project/mapsets can not be modified. This method sets in case of success: @@ -168,20 +168,20 @@ def _check_mapset(self, mapset): """ mapset_exists = False - # Check if the global location is accessible and that the target mapset + # Check if the global project is accessible and that the target mapset # does not exist if self.is_global_database is True: # Break if the target mapset exists in the global database if ( - os.path.exists(self.global_location_path) - and os.path.isdir(self.global_location_path) + os.path.exists(self.global_project_path) + and os.path.isdir(self.global_project_path) and os.access( - self.global_location_path, os.R_OK | os.X_OK | os.W_OK + self.global_project_path, os.R_OK | os.X_OK | os.W_OK ) is True ): self.orig_mapset_path = os.path.join( - self.global_location_path, mapset + self.global_project_path, mapset ) if os.path.exists(self.orig_mapset_path) is True: @@ -197,20 +197,20 @@ def _check_mapset(self, mapset): ) else: raise AsyncProcessError( - "Unable to access global location <%s>" - % self.location_name + "Unable to access global project <%s>" + % self.project_name ) # Always check if the target mapset already exists and set the flag # accordingly if ( - os.path.exists(self.user_location_path) - and os.path.isdir(self.user_location_path) - and os.access(self.user_location_path, os.R_OK | os.X_OK | os.W_OK) + os.path.exists(self.user_project_path) + and os.path.isdir(self.user_project_path) + and os.access(self.user_project_path, os.R_OK | os.X_OK | os.W_OK) is True ): self.orig_mapset_path = os.path.join( - self.user_location_path, mapset + self.user_project_path, mapset ) if os.path.exists(self.orig_mapset_path) is True: @@ -233,7 +233,7 @@ def _check_mapset(self, mapset): mapset_exists = False else: raise AsyncProcessError( - "Unable to access user location <%s>" % self.location_name + "Unable to access user project <%s>" % self.project_name ) return mapset_exists @@ -242,7 +242,7 @@ def _check_target_mapset_exists(self): """Check if the target mapset exists This method will check if the target mapset exists in the global and - user location. + user project. If the mapset is in the global database, then an AsyncProcessError will be raised, since global mapsets can not be modified. @@ -261,7 +261,7 @@ def _check_lock_target_mapset(self): mapset This method will check if the target mapset exists in the global and - user location. + user project. If the mapset is in the global database, then an AsyncProcessError will be raised, since global mapsets can not be modified. 
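# A minimal, illustrative sketch of the per-group mapset locking convention
# described in the docstrings above: a lock id is simply
# "user_group/project_name/mapset_name", and the lock is always released
# again whether processing succeeds or fails. The lock interface object and
# its lock()/unlock() signatures below are assumptions for illustration only,
# not taken from this patch.
def generate_mapset_lock_id(user_group, project_name, mapset_name):
    # Locks are scoped by user group, so identical project/mapset names in
    # different user groups never collide.
    return "%s/%s/%s" % (user_group, project_name, mapset_name)


def run_with_mapset_lock(
    lock_interface, user_group, project_name, mapset_name, work, expiration=600
):
    """Acquire the per-mapset lock, run the work, always release the lock."""
    lock_id = generate_mapset_lock_id(user_group, project_name, mapset_name)
    if lock_interface.lock(resource_id=lock_id, expiration=expiration) == 0:
        # Mirrors the "resource is already locked" error raised below
        raise RuntimeError(
            "Unable to lock project/mapset <%s/%s>, resource is already "
            "locked" % (project_name, mapset_name)
        )
    try:
        return work()
    finally:
        lock_interface.unlock(lock_id)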
@@ -293,13 +293,13 @@ def _lock_target_mapset(self): if ret == 0: raise AsyncProcessError( - "Unable to lock location/mapset <%s/%s>, " + "Unable to lock project/mapset <%s/%s>, " "resource is already locked" - % (self.location_name, self.target_mapset_name) + % (self.project_name, self.target_mapset_name) ) self.message_logger.info( - "location/mapset <%s/%s> locked" - % (self.location_name, self.target_mapset_name) + "project/mapset <%s/%s> locked" + % (self.project_name, self.target_mapset_name) ) # if we manage to come here, the lock was correctly set @@ -335,9 +335,9 @@ def _merge_mapset_into_target(self, source_mapset, target_mapset): for directory in directories: source_path = os.path.join( - self.user_location_path, source_mapset, directory + self.user_project_path, source_mapset, directory ) - target_path = os.path.join(self.user_location_path, target_mapset) + target_path = os.path.join(self.user_project_path, target_mapset) change_mapsetname( source_path, @@ -370,7 +370,7 @@ def _merge_mapset_into_target(self, source_mapset, target_mapset): ) def _copy_merge_tmp_mapset_to_target_mapset(self): - """Copy the temporary mapset into the original location + """Copy the temporary mapset into the original project In case the mapset does not exists, then use the target mapset name, otherwise use the temporary mapset name for copying which is later on @@ -402,7 +402,7 @@ def _copy_merge_tmp_mapset_to_target_mapset(self): "Copy temporary mapset from %s to %s" % ( self.temp_mapset_path, - os.path.join(self.user_location_path, self.target_mapset_name), + os.path.join(self.user_project_path, self.target_mapset_name), ) ) @@ -412,18 +412,18 @@ def _copy_merge_tmp_mapset_to_target_mapset(self): # otherwise use the temporary mapset name for copying which is later # on merged into the target mapset and then removed if self.target_mapset_exists is True: - target_path = self.user_location_path + "/." + target_path = self.user_project_path + "/." message = ( - "Copy temporary mapset <%s> to target location " - "<%s>" % (self.temp_mapset_name, self.location_name) + "Copy temporary mapset <%s> to target project " + "<%s>" % (self.temp_mapset_name, self.project_name) ) else: target_path = os.path.join( - self.user_location_path, self.target_mapset_name + self.user_project_path, self.target_mapset_name ) message = ( - "Copy temporary mapset <%s> to target location " - "<%s>" % (self.target_mapset_name, self.location_name) + "Copy temporary mapset <%s> to target project " + "<%s>" % (self.target_mapset_name, self.project_name) ) self._send_resource_update(message) @@ -440,14 +440,14 @@ def _copy_merge_tmp_mapset_to_target_mapset(self): if p.returncode != 0: raise AsyncProcessError( "Unable to copy temporary mapset to " - "original location. Copy error " + "original project. Copy error " "stdout: %s stderr: %s returncode: %i" % (stdout_buff, stderr_buff, p.returncode) ) except Exception as e: raise AsyncProcessError( "Unable to copy temporary mapset to " - "original location. Exception %s" % str(e) + "original project. 
Exception %s" % str(e) ) # Merge the temp mapset into the target mapset in case the target @@ -457,7 +457,7 @@ def _copy_merge_tmp_mapset_to_target_mapset(self): self.temp_mapset_name, self.target_mapset_name ) shutil.rmtree( - os.path.join(self.user_location_path, self.temp_mapset_name) + os.path.join(self.user_project_path, self.temp_mapset_name) ) # remove interim results if self.interim_result.saving_interim_results is True: @@ -588,7 +588,7 @@ def _execute(self, skip_permission_check=False): # Execute the process list self._execute_process_list(process_list) - # Copy local mapset to original location, merge mapsets + # Copy local mapset to original project, merge mapsets # if necessary self._copy_merge_tmp_mapset_to_target_mapset() # Parse the module sdtout outputs and create the results diff --git a/src/actinia_core/processing/actinia_processing/ephemeral_processing.py b/src/actinia_core/processing/actinia_processing/ephemeral_processing.py index 3a7f9878c..cc8deb625 100644 --- a/src/actinia_core/processing/actinia_processing/ephemeral_processing.py +++ b/src/actinia_core/processing/actinia_processing/ephemeral_processing.py @@ -68,7 +68,7 @@ ) from actinia_core.core.interim_results import InterimResult, get_directory_size from actinia_core.rest.base.user_auth import ( - check_location_mapset_module_access, + check_project_mapset_module_access, ) __license__ = "GPLv3" @@ -92,23 +92,23 @@ class EphemeralProcessing(object): e.g: /tmp/soeren_disdbase - 2. Create the required location directory + 2. Create the required project directory e.g: /tmp/soeren_temp_gisdbase/ECAD 3. Softlink the PERMANENT and all required mapsets into the - new location directory from the original location, + new project directory from the original project, check the input parameter of the module for which mapsets must be linked - e.g: /mount/groups/[user group]/locations/ECAD/PERMANENT + e.g: /mount/groups/[user group]/projects/ECAD/PERMANENT -> /tmp/soeren_temp_gisdbase/ECAD/PERMANENT - e.g: /mount/groups/[user group]/locations/ECAD/Temperature + e.g: /mount/groups/[user group]/projects/ECAD/Temperature -> /tmp/soeren_temp_gisdbase/ECAD/Temperature 4. Set the GRASS GIS environmental variables to point to the new gisdbase, - location and PERMANENT mapset + project and PERMANENT mapset - 5. Create a new mapset with g.mapset in the temporary location directory + 5. 
Create a new mapset with g.mapset in the temporary project directory e.g: /tmp/soeren_temp_gisdbase/ECAD/MyMapset @@ -178,7 +178,7 @@ def __init__(self, rdc): # extended with the user group name in the setup self.grass_base_dir = self.rdc.grass_base_dir - self.location_name = self.rdc.location_name + self.project_name = self.rdc.project_name self.mapset_name = self.rdc.mapset_name # Set this True if the work is performed based on global database self.is_global_database = False @@ -204,12 +204,12 @@ def __init__(self, rdc): self.temp_grass_data_base = ( None # Path to the temporary grass database ) - self.temp_location_path = None # Path to the temporary location + self.temp_project_path = None # Path to the temporary project self.temp_file_path = None # The path to store temporary created files - self.global_location_path = ( - None # The path to the global location to link + self.global_project_path = ( + None # The path to the global project to link ) - self.user_location_path = None # The path to the user location to link + self.user_project_path = None # The path to the user project to link # List of resources that should be created self.resource_export_list = list() @@ -239,7 +239,7 @@ def __init__(self, rdc): list() ) # The process chain analysis will provide # a list of required mapsets that must be - # linked in the temporary location + # linked in the temporary project # The module that was called in the process chain, to detect g.region # calls and check for correct region settings @@ -675,7 +675,7 @@ def _validate_process_chain( if process.exec_type == "grass" or process.exec_type == "exec": if skip_permission_check is False: if process.skip_permission_check is False: - resp = check_location_mapset_module_access( + resp = check_project_mapset_module_access( user_credentials=self.user_credentials, config=self.config, module_name=process.executable, @@ -715,23 +715,23 @@ def _setup(self, init_grass=True): - Create the redis lock interface for resource locking - Set cell limit, process number limit and process time limit from user credentials. - - Create all required paths to original and temporary location and + - Create all required paths to original and temporary project and mapsets. 
- - temp_location_path - - global_location_path + - temp_project_path + - global_project_path - grass_user_data_base <- This path will be created if it does not exist - - user_location_path <- This path will be created if it does not + - user_project_path <- This path will be created if it does not exist - temp_grass_data_base <- This path will be created - temp_file_path <- This path will be created - - Check if the current working location is in a persistent (global) + - Check if the current working project is in a persistent (global) GRASS GIS database (is_global_database) - Create the process chain to process list converter Args: init_grass (bool): Set true to initialize the user credentials - and the temporary database and location paths + and the temporary database and project paths """ # The setup should only be executed once @@ -776,7 +776,7 @@ def _setup(self, init_grass=True): ) # Check and create all required paths to global, user and temporary - # locations + # projects if init_grass is True: self._setup_paths() @@ -806,12 +806,12 @@ def _setup_paths(self): ) self.temp_file_path = os.path.join(self.temp_grass_data_base, ".tmp") - if self.location_name: - self.temp_location_path = os.path.join( - self.temp_grass_data_base, self.location_name + if self.project_name: + self.temp_project_path = os.path.join( + self.temp_grass_data_base, self.project_name ) - self.global_location_path = os.path.join( - self.grass_data_base, self.location_name + self.global_project_path = os.path.join( + self.grass_data_base, self.project_name ) # Create the user database path if it does not exist if not os.path.exists(self.grass_user_data_base): @@ -823,48 +823,48 @@ def _setup_paths(self): ) if not os.path.exists(self.grass_user_data_base): os.mkdir(self.grass_user_data_base) - # Create the user group specific location path, if it does not + # Create the user group specific project path, if it does not # exist - self.user_location_path = os.path.join( - self.grass_user_data_base, self.location_name + self.user_project_path = os.path.join( + self.grass_user_data_base, self.project_name ) - if not os.path.exists(self.user_location_path): - os.mkdir(self.user_location_path) - # Check if the location is located in the global database + if not os.path.exists(self.user_project_path): + os.mkdir(self.user_project_path) + # Check if the project is located in the global database self.is_global_database = False - location = os.path.join(self.grass_data_base, self.location_name) - if os.path.isdir(location): + project = os.path.join(self.grass_data_base, self.project_name) + if os.path.isdir(project): self.is_global_database = True - # Create the database, location and temporary file directories + # Create the database, project and temporary file directories os.mkdir(self.temp_grass_data_base) os.mkdir(self.temp_file_path) def _create_temp_database(self, mapsets=None): - """Create a temporary gis database with location and mapsets + """Create a temporary gis database with project and mapsets from the global and user group database for processing. IMPORTANT: All processing and mapaste management is performed within a temporary database! - Link the required existing mapsets of global and user group locations - into the temporary location directory. + Link the required existing mapsets of global and user group projects + into the temporary project directory. Linking is performed in two steps: - 1.) 
If the location is a global location, then the mapsets from the - global location are linked in the temporary locations - 2.) Then link all required mapsets from the user group location - into the temporary location + 1.) If the project is a global project, then the mapsets from the + global project are linked in the temporary projects + 2.) Then link all required mapsets from the user group project + into the temporary project - Only mapsets from the global location are linked into the temporary - location to which the user group has access. + Only mapsets from the global project are linked into the temporary + project to which the user group has access. It checks for access in the global database but not in the user group database. The user can always access its own data of its group. Args: mapsets: A list of mapset names that should be linked into - the temporary location. If the list is empty, all + the temporary project. If the list is empty, all available user accessible mapsets of the global - and user group specific location will be linked. + and user group specific project will be linked. Raises: This function raises AsyncProcessError in case of an error. @@ -875,8 +875,8 @@ def _create_temp_database(self, mapsets=None): mapsets = [] try: - # Create the temporary location directory - os.mkdir(self.temp_location_path) + # Create the temporary project directory + os.mkdir(self.temp_project_path) # Always link the PERMANENT mapset if len(mapsets) > 0 and "PERMANENT" not in mapsets: @@ -887,7 +887,7 @@ def _create_temp_database(self, mapsets=None): if not mapsets: check_all_mapsets = True - # User and global location mapset linking + # User and global project mapset linking self._link_mapsets(mapsets, mapsets_to_link, check_all_mapsets) # Check if we missed some of the required mapsets @@ -900,23 +900,23 @@ def _create_temp_database(self, mapsets=None): if mapset not in mapset_list: raise AsyncProcessError( "Unable to link all required mapsets into " - "temporary location. Missing or un-accessible " - f"mapset <{mapset}> in location " - f"<{self.location_name}>" + "temporary project. 
Missing or un-accessible " + f"mapset <{mapset}> in project " + f"<{self.project_name}>" ) # Link the original mapsets from global and user database into the - # temporary location + # temporary project for mapset_path, mapset in mapsets_to_link: if ( os.path.isdir( - os.path.join(self.temp_location_path, mapset) + os.path.join(self.temp_project_path, mapset) ) is False ): os.symlink( mapset_path, - os.path.join(self.temp_location_path, mapset), + os.path.join(self.temp_project_path, mapset), ) except Exception as e: @@ -926,23 +926,23 @@ def _create_temp_database(self, mapsets=None): ) def _link_mapsets(self, mapsets, mapsets_to_link, check_all_mapsets): - """Helper method to link locations mapsets + """Helper method to link projects mapsets Args: - mapsets (list): List of mapsets in location + mapsets (list): List of mapsets in project mapsets_to_link (list): List of mapsets paths to link check_all_mapsets (bool): If set True, the mapsets list is created - with all locations on location_path + with all projects on project_path Returns: - mapsets (list): List of mapsets in location + mapsets (list): List of mapsets in project mapsets_to_link (list): List of mapsets paths to link """ - # Global location mapset linking + # Global project mapset linking if self.is_global_database is True: - # List all available mapsets in the global location + # List all available mapsets in the global project mapsets, mapsets_to_link = self._list_all_available_mapsets( - self.global_location_path, + self.global_project_path, mapsets, check_all_mapsets, mapsets_to_link, @@ -953,9 +953,9 @@ def _link_mapsets(self, mapsets, mapsets_to_link, check_all_mapsets): for mapset in mapsets: if mapset not in mapsets_to_link: left_over_mapsets.append(mapset) - # List all available mapsets in the user location + # List all available mapsets in the user project mapsets, mapsets_to_link = self._list_all_available_mapsets( - self.user_location_path, + self.user_project_path, left_over_mapsets, check_all_mapsets, mapsets_to_link, @@ -965,7 +965,7 @@ def _link_mapsets(self, mapsets, mapsets_to_link, check_all_mapsets): def _list_all_available_mapsets( self, - location_path, + project_path, mapsets, check_all_mapsets, mapsets_to_link, @@ -975,26 +975,26 @@ def _list_all_available_mapsets( it is checked if the mapset can be accessed. Args: - location_path (str): Path to location (global or user) + project_path (str): Path to project (global or user) mapsets (list): List of mapsets names to link. 
The mapsets list can be empty, if check_all_mapsets is True the list is filled with all mapsets from - the location_path + the project_path check_all_mapsets (bool): If set True, the mapsets list is created - with all locations on location_path + with all projects on project_path mapsets_to_link (list): List of mapset paths to link - global_db (bool): If set True, the location/mapset access is + global_db (bool): If set True, the project/mapset access is checked Returns: - mapsets (list): List of mapsets in location + mapsets (list): List of mapsets in project mapsets_to_link (list): List of mapsets paths to link """ - if os.path.isdir(location_path): + if os.path.isdir(project_path): if check_all_mapsets is True: - mapsets = os.listdir(location_path) + mapsets = os.listdir(project_path) for mapset in mapsets: - mapset_path = os.path.join(location_path, mapset) + mapset_path = os.path.join(project_path, mapset) if os.path.isdir(mapset_path) and os.access( mapset_path, os.R_OK & os.X_OK ): @@ -1006,10 +1006,10 @@ def _list_all_available_mapsets( if mapset not in mapsets_to_link and global_db is True: # Link the mapset from the global database # only if it can be accessed - resp = check_location_mapset_module_access( + resp = check_project_mapset_module_access( user_credentials=self.user_credentials, config=self.config, - location_name=self.location_name, + project_name=self.project_name, mapset_name=mapset, ) if resp is None: @@ -1021,18 +1021,18 @@ def _list_all_available_mapsets( mapsets_to_link.append((mapset_path, mapset)) else: raise AsyncProcessError( - "Invalid mapset <%s> in location <%s>" - % (mapset, self.location_name) + "Invalid mapset <%s> in project <%s>" + % (mapset, self.project_name) ) else: if global_db is True: msg = ( - "Unable to access global location <%s>" - % self.location_name + "Unable to access global project <%s>" + % self.project_name ) else: msg = ( - "Unable to access user location <%s>" % self.location_name + "Unable to access user project <%s>" % self.project_name ) raise AsyncProcessError(msg) return mapsets, mapsets_to_link @@ -1052,14 +1052,14 @@ def _create_grass_environment( """ self.message_logger.info( - "Initlialize GRASS grass_data_base: %s; location: %s; mapset: %s" - % (grass_data_base, self.location_name, mapset_name) + "Initlialize GRASS grass_data_base: %s; project: %s; mapset: %s" + % (grass_data_base, self.project_name, mapset_name) ) self.ginit = GrassInitializer( grass_data_base=grass_data_base, grass_base_dir=self.config.GRASS_GIS_BASE, - location_name=self.location_name, + project_name=self.project_name, mapset_name=mapset_name, config=self.config, grass_addon_path=self.config.GRASS_ADDON_PATH, @@ -1110,7 +1110,7 @@ def _create_temporary_mapset( """ self.temp_mapset_path = os.path.join( - self.temp_location_path, temp_mapset_name + self.temp_project_path, temp_mapset_name ) # if interim_result_mapset is set copy the mapset from the interim @@ -1174,7 +1174,7 @@ def _create_temporary_mapset( "db.connect", [ "driver=sqlite", - "database=$GISDBASE/$LOCATION_NAME/$MAPSET/vector/$MAP/" + "database=$GISDBASE/$project_name/$MAPSET/vector/$MAP/" "sqlite.db", ], ) @@ -1185,7 +1185,7 @@ def _create_temporary_mapset( # to the temporary mapset if source_mapset_name is not None and interim_result_mapset is None: source_mapset_path = os.path.join( - self.temp_location_path, source_mapset_name + self.temp_project_path, source_mapset_name ) if os.path.exists(os.path.join(source_mapset_path, "WIND")): shutil.copyfile( @@ -1830,7 +1830,7 @@ def 
_create_temporary_grass_environment( 3. Create temporary mapset This method will link the required mapsets that are - defined in *self.required_mapsets* into the location. + defined in *self.required_mapsets* into the project. The mapsets may be from the global and/or user database. Args: diff --git a/src/actinia_core/processing/actinia_processing/persistent/location_management.py b/src/actinia_core/processing/actinia_processing/persistent/location_management.py index d39ba1ba2..e4ef4466d 100644 --- a/src/actinia_core/processing/actinia_processing/persistent/location_management.py +++ b/src/actinia_core/processing/actinia_processing/persistent/location_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,23 +35,23 @@ from actinia_core.core.common.exceptions import AsyncProcessError __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika" +__author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" class PersistentLocationCreator(PersistentProcessing): - """Create a new location based on EPSG code""" + """Create a new project based on EPSG code""" def __init__(self, *args): PersistentProcessing.__init__(self, *args) def _execute(self): - new_location = self.location_name + new_project = self.project_name - self.location_name = self.config.GRASS_DEFAULT_LOCATION + self.project_name = self.config.GRASS_DEFAULT_LOCATION self._setup() @@ -59,10 +59,11 @@ def _execute(self): self._create_temp_database() + # TODO replace old PC style pc = { "1": { "module": "g.proj", - "inputs": {"epsg": epsg_code, "location": new_location}, + "inputs": {"epsg": epsg_code, "project": new_project}, "flags": "t", } } @@ -78,17 +79,17 @@ def _execute(self): self._execute_process_list(process_list) if os.path.isdir( - os.path.join(self.temp_grass_data_base, new_location) + os.path.join(self.temp_grass_data_base, new_project) ): shutil.move( - os.path.join(self.temp_grass_data_base, new_location), + os.path.join(self.temp_grass_data_base, new_project), self.grass_user_data_base, ) else: raise AsyncProcessError( - "Unable to create location <%s>" % new_location + "Unable to create project <%s>" % new_project ) self.finish_message = ( - "Location <%s> successfully created" % new_location + "Project <%s> successfully created" % new_project ) diff --git a/src/actinia_core/processing/actinia_processing/persistent/mapset_management.py b/src/actinia_core/processing/actinia_processing/persistent/mapset_management.py index 3e3e6ab8c..10e58f648 100644 --- a/src/actinia_core/processing/actinia_processing/persistent/mapset_management.py +++ b/src/actinia_core/processing/actinia_processing/persistent/mapset_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -41,15 +41,15 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika" +__author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" class PersistentMapsetLister(PersistentProcessing): - """List all mapsets in a location""" + """List all mapsets in a project""" def __init__(self, *args): PersistentProcessing.__init__(self, *args) @@ -162,7 +162,7 @@ def _execute(self): class PersistentMapsetCreator(PersistentProcessing): - """Create a mapset in an existing location""" + """Create a mapset in an existing project""" def __init__(self, *args): PersistentProcessing.__init__(self, *args) @@ -195,7 +195,7 @@ def _execute(self): ) # Create the new temporary mapset and merge it into the user database - # location + # project self._check_lock_target_mapset() self.required_mapsets = ["PERMANENT"] self._create_temporary_mapset(temp_mapset_name=self.temp_mapset_name) @@ -207,7 +207,7 @@ def _execute(self): class PersistentMapsetDeleter(PersistentProcessing): - """Delete a mapset from a location + """Delete a mapset from a project 1. Create temporary database 2. Check if PERMANENT mapset or global mapset which are not allowed to be @@ -227,7 +227,7 @@ def _execute(self): if "PERMANENT" == self.target_mapset_name: raise AsyncProcessError( "The PERMANENT mapset can not be deleted. " - "You must remove the location to get rid of it." + "You must remove the project to get rid of it." ) # Delete existing mapset @@ -258,7 +258,7 @@ def _execute(self): if self.target_mapset_exists is False: raise AsyncProcessError( f"Unable to get lock status of mapset <{self.mapset_name}>" - f" in location <{self.location_name}>: Mapset does not " + f" in project <{self.project_name}>: Mapset does not " "exist" ) else: @@ -309,10 +309,10 @@ def _execute(self): if self.target_mapset_exists is False: raise AsyncProcessError( ( - "Unable to unlock mapset <%s> in location <%s>:" + "Unable to unlock mapset <%s> in project <%s>:" " Mapset does not exist" ) - % (self.mapset_name, self.location_name) + % (self.mapset_name, self.project_name) ) else: self.lock_interface.unlock(self.target_mapset_lock_id) diff --git a/src/actinia_core/processing/actinia_processing/persistent/persistent_mapset_merger.py b/src/actinia_core/processing/actinia_processing/persistent/persistent_mapset_merger.py index a8233d215..ad6889040 100644 --- a/src/actinia_core/processing/actinia_processing/persistent/persistent_mapset_merger.py +++ b/src/actinia_core/processing/actinia_processing/persistent/persistent_mapset_merger.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -33,9 +33,9 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. 
KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -45,7 +45,7 @@ class PersistentMapsetMerger(PersistentProcessing): This class is designed to run GRASS modules that are specified in a process chain in a temporary mapset that later on is copied into the original - location. + project. If the processing should be run in an existing mapset, the original mapset will be used for processing. @@ -90,7 +90,7 @@ def _check_lock_mapset(self, mapset_name): # maximum lock_id = "%s/%s/%s" % ( self.user_group, - self.location_name, + self.project_name, mapset_name, ) ret = self.lock_interface.lock( diff --git a/src/actinia_core/rest/base/resource_base.py b/src/actinia_core/rest/base/resource_base.py index 8756d29a1..f12678f52 100644 --- a/src/actinia_core/rest/base/resource_base.py +++ b/src/actinia_core/rest/base/resource_base.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -306,7 +306,7 @@ def preprocess( self, has_json=True, has_xml=False, - location_name=None, + project_name=None, mapset_name=None, map_name=None, process_chain_list=None, @@ -324,7 +324,7 @@ def preprocess( otherwise has_xml (bool): Set True if the request has XML data, False otherwise - location_name (str): The name of the location to work in + project_name (str): The name of the project to work in mapset_name (str): The name of the target mapset in which the computation should be performed map_name: The name of the map or other resource (raster, vector, @@ -423,7 +423,7 @@ def preprocess( orig_time=self.orig_time, orig_datetime=self.orig_datetime, config=global_config, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=map_name, ) diff --git a/src/actinia_core/rest/base/user_auth.py b/src/actinia_core/rest/base/user_auth.py index 668069265..7aa93f314 100644 --- a/src/actinia_core/rest/base/user_auth.py +++ b/src/actinia_core/rest/base/user_auth.py @@ -5,7 +5,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2023 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -39,7 +39,7 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Julia Haas, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2023, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -263,11 +263,11 @@ def check_user_permissions(f): """Check the user permissions This decorator function verifies the user permissions - to access locations, mapsets and modules. + to access projects, mapsets and modules. 
The function arguments are checked if they contain: - - location_name + - project_name - mapset_name - module_name @@ -283,12 +283,12 @@ def check_user_permissions(f): @wraps(f) def decorated_function(*args, **kwargs): - location_name = None + project_name = None mapset_name = None module_name = None - if "location_name" in kwargs: - location_name = kwargs["location_name"] + if "project_name" in kwargs: + project_name = kwargs["project_name"] if "mapset_name" in kwargs: mapset_name = kwargs["mapset_name"] @@ -296,10 +296,10 @@ def decorated_function(*args, **kwargs): if "module_name" in kwargs: module_name = kwargs["module_name"] - ret = check_location_mapset_module_access( + ret = check_project_mapset_module_access( user_credentials=g.user.get_credentials(), config=global_config, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, module_name=module_name, ) @@ -313,14 +313,14 @@ def decorated_function(*args, **kwargs): return decorated_function -def check_location_mapset_module_access( +def check_project_mapset_module_access( user_credentials, config, - location_name=None, + project_name=None, mapset_name=None, module_name=None, ): - """Check the user permissions to access locations, mapsets and modules. + """Check the user permissions to access projects, mapsets and modules. If the user has an admin or superadmin role, the tests are skipped. @@ -328,7 +328,7 @@ def check_location_mapset_module_access( user_credentials (dict): The user credentials dictionary config (actinia_core.core.common.config.Configuration): The actinia configuration - location_name (str): Name of the location to access + project_name (str): Name of the project to access mapset_name (str): Name of the mapset to access module_name (str): Name of the module to access @@ -347,28 +347,28 @@ def check_location_mapset_module_access( ): return None - # Mapset without location results in error - if location_name is None and mapset_name is not None: + # Mapset without project results in error + if project_name is None and mapset_name is not None: resp = { "Status": "error", - "Messages": "Internal error, mapset definition without location", + "Messages": "Internal error, mapset definition without project", } return (500, resp) - if location_name: - # Check if the location exists in the global database, if not return + if project_name: + # Check if the project exists in the global database, if not return grass_data_base = config.GRASS_DATABASE - location_path = os.path.join(grass_data_base, location_name) + project_path = os.path.join(grass_data_base, project_name) if ( - os.path.exists(location_path) is False - or os.path.isdir(location_path) is False - or os.access(location_path, os.R_OK & os.X_OK) is False + os.path.exists(project_path) is False + or os.path.isdir(project_path) is False + or os.access(project_path, os.R_OK & os.X_OK) is False ): return None - # Check if the mapset exists in the global location, if not return + # Check if the mapset exists in the global project, if not return if mapset_name: - mapset_path = os.path.join(location_path, mapset_name) + mapset_path = os.path.join(project_path, mapset_name) if ( os.path.exists(mapset_path) is False @@ -377,16 +377,16 @@ def check_location_mapset_module_access( ): return None - # Check permissions to the global database locations and mapsets + # Check permissions to the global database projects and mapsets accessible_datasets = user_credentials["permissions"][ "accessible_datasets" ] - if location_name not in accessible_datasets: + 
if project_name not in accessible_datasets: resp = { "Status": "error", - "Messages": "Unauthorized access to location <%s>" - % location_name, + "Messages": "Unauthorized access to project <%s>" + % project_name, } return (401, resp) @@ -394,13 +394,13 @@ def check_location_mapset_module_access( if mapset_name: # Check if the mapset exists in the global database if ( - not accessible_datasets[location_name] - or mapset_name not in accessible_datasets[location_name] + not accessible_datasets[project_name] + or mapset_name not in accessible_datasets[project_name] ): resp = { "Status": "error", "Messages": "Unauthorized access to mapset " - "<%s> in location <%s>" % (mapset_name, location_name), + "<%s> in project <%s>" % (mapset_name, project_name), } return (401, resp) diff --git a/src/actinia_core/rest/ephemeral_processing.py b/src/actinia_core/rest/ephemeral_processing.py index d29870a64..aea24e8a7 100644 --- a/src/actinia_core/rest/ephemeral_processing.py +++ b/src/actinia_core/rest/ephemeral_processing.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -34,7 +34,7 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -47,7 +47,7 @@ class AsyncEphemeralResource(ResourceBase): def __init__(self, resource_id=None, iteration=None, post_url=None): ResourceBase.__init__(self, resource_id, iteration, post_url) - def post(self, location_name): + def post(self, project_name): """Start an async GRASS processing task, that is completely temporary. The generated mapset is only created temporally, all created resources will be deleted after the processing finished. @@ -57,7 +57,7 @@ def post(self, location_name): and tests! Args: - location_name (str): The name of the location + project_name (str): The name of the project Returns: flask.Response: @@ -169,7 +169,7 @@ def post(self, location_name): """ # Preprocess the post call - rdc = self.preprocess(location_name=location_name) + rdc = self.preprocess(project_name=project_name) if rdc: enqueue_job(self.job_timeout, start_job, rdc) diff --git a/src/actinia_core/rest/ephemeral_processing_with_export.py b/src/actinia_core/rest/ephemeral_processing_with_export.py index c172f9af8..b03a6cd8b 100644 --- a/src/actinia_core/rest/ephemeral_processing_with_export.py +++ b/src/actinia_core/rest/ephemeral_processing_with_export.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -44,9 +44,9 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. 
KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" __email__ = "info@mundialis.de" @@ -65,11 +65,11 @@ def __init__(self, resource_id=None, iteration=None, post_url=None): @swagger.doc( check_endpoint("post", ephemeral_processing_with_export.post_doc) ) - def post(self, location_name): - """Execute a user defined process chain in an ephemeral location/mapset + def post(self, project_name): + """Execute a user defined process chain in an ephemeral project/mapset and store the processing results for download. """ - rdc = self.preprocess(has_json=True, location_name=location_name) + rdc = self.preprocess(has_json=True, project_name=project_name) if rdc: rdc.set_storage_model_to_file() @@ -93,11 +93,11 @@ def __init__(self): @swagger.doc( check_endpoint("post", ephemeral_processing_with_export.post_doc) ) - def post(self, location_name): - """Execute a user defined process chain in an ephemeral location/mapset + def post(self, project_name): + """Execute a user defined process chain in an ephemeral project/mapset and store the processing result in an Amazon S3 bucket """ - rdc = self.preprocess(has_json=True, location_name=location_name) + rdc = self.preprocess(has_json=True, project_name=project_name) rdc.set_storage_model_to_s3() enqueue_job(self.job_timeout, start_job, rdc) @@ -120,11 +120,11 @@ def __init__(self): @swagger.doc( check_endpoint("post", ephemeral_processing_with_export.post_doc) ) - def post(self, location_name): - """Execute a user defined process chain in an ephemeral location/mapset + def post(self, project_name): + """Execute a user defined process chain in an ephemeral project/mapset and store the processing result in an Google cloud storage bucket """ - rdc = self.preprocess(has_json=True, location_name=location_name) + rdc = self.preprocess(has_json=True, project_name=project_name) rdc.set_storage_model_to_gcs() enqueue_job(self.job_timeout, start_job, rdc) diff --git a/src/actinia_core/rest/map_layer_management.py b/src/actinia_core/rest/map_layer_management.py index 9b4cc3471..26acd3a9f 100644 --- a/src/actinia_core/rest/map_layer_management.py +++ b/src/actinia_core/rest/map_layer_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -43,9 +43,9 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -57,7 +57,7 @@ def __init__(self, layer_type): ResourceBase.__init__(self) self.layer_type = layer_type - def _get(self, location_name, mapset_name): + def _get(self, project_name, mapset_name): """Return a collection of all available layers in the provided mapset. 
@@ -66,7 +66,7 @@ def _get(self, location_name, mapset_name): http://?pattern="*" Args: - location_name (str): The name of the location + project_name (str): The name of the project mapset_name (str): The name of the mapset Return: @@ -93,7 +93,7 @@ def _get(self, location_name, mapset_name): rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) @@ -112,7 +112,7 @@ def _get(self, location_name, mapset_name): return make_response(jsonify(response_model), http_code) - def _delete(self, location_name, mapset_name): + def _delete(self, project_name, mapset_name): """Remove a list of layers identified by a pattern The g.remove "pattern" parameters must be provided:: @@ -120,7 +120,7 @@ def _delete(self, location_name, mapset_name): http://?pattern="*" Args: - location_name (str): The name of the location + project_name (str): The name of the project mapset_name (str): The name of the mapset Return: @@ -130,7 +130,7 @@ def _delete(self, location_name, mapset_name): rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) @@ -149,7 +149,7 @@ def _delete(self, location_name, mapset_name): return make_response(jsonify(response_model), http_code) - def _put(self, location_name, mapset_name): + def _put(self, project_name, mapset_name): """Rename a list of layers The old names and new names must be provided as a @@ -158,7 +158,7 @@ def _put(self, location_name, mapset_name): [(a, a_new),(b, b_new),(c, c_new), ...] Args: - location_name (str): The name of the location + project_name (str): The name of the project mapset_name (str): The name of the mapset Return: @@ -168,7 +168,7 @@ def _put(self, location_name, mapset_name): rdc = self.preprocess( has_json=True, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) @@ -214,29 +214,29 @@ def __init__(self): @endpoint_decorator() @swagger.doc(check_endpoint("get", map_layer_management.raster_get_doc)) - def get(self, location_name, mapset_name): + def get(self, project_name, mapset_name): """Get a list of raster map layer names that are located in a specific - location/mapset + project/mapset """ - return self._get(location_name, mapset_name) + return self._get(project_name, mapset_name) @endpoint_decorator() @swagger.doc(check_endpoint("put", map_layer_management.raster_put_doc)) - def put(self, location_name, mapset_name): + def put(self, project_name, mapset_name): """Rename a single raster map layer or a list of raster map layers that - are located in a specific location/mapset + are located in a specific project/mapset """ - return self._put(location_name, mapset_name) + return self._put(project_name, mapset_name) @endpoint_decorator() @swagger.doc( check_endpoint("delete", map_layer_management.raster_delete_doc) ) - def delete(self, location_name, mapset_name): + def delete(self, project_name, mapset_name): """Delete a single raster map layer or a list of raster map layer names - that are located in a specific location/mapset + that are located in a specific project/mapset """ - return self._delete(location_name, mapset_name) + return self._delete(project_name, mapset_name) class VectorLayersResource(MapsetLayersResource): @@ -247,26 +247,26 @@ def __init__(self): @endpoint_decorator() @swagger.doc(check_endpoint("get", map_layer_management.vector_get_doc)) - def get(self, location_name, mapset_name): + def get(self, project_name, 
mapset_name): """Get a list of vector map layer names that are located in a specific - location/mapset + project/mapset """ - return self._get(location_name, mapset_name) + return self._get(project_name, mapset_name) @endpoint_decorator() @swagger.doc(check_endpoint("put", map_layer_management.vector_put_doc)) - def put(self, location_name, mapset_name): + def put(self, project_name, mapset_name): """Rename a single vector map layer or a list of vector map layers that - are located in a specific location/mapset + are located in a specific project/mapset """ - return self._put(location_name, mapset_name) + return self._put(project_name, mapset_name) @endpoint_decorator() @swagger.doc( check_endpoint("delete", map_layer_management.vector_delete_doc) ) - def delete(self, location_name, mapset_name): + def delete(self, project_name, mapset_name): """Delete a single vector map layer or a list of vector map layer names - that are located in a specific location/mapset + that are located in a specific project/mapset """ - return self._delete(location_name, mapset_name) + return self._delete(project_name, mapset_name) diff --git a/src/actinia_core/rest/mapset_management.py b/src/actinia_core/rest/mapset_management.py index 846e5c730..af6c913ca 100644 --- a/src/actinia_core/rest/mapset_management.py +++ b/src/actinia_core/rest/mapset_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -55,29 +55,29 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika, Julia Haas" +__author__ = "Sören Gebbert, Carmen Tawalika, Julia Haas, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" class ListMapsetsResource(ResourceBase): - """List all mapsets in a location""" + """List all mapsets in a project""" layer_type = None # @check_queue_type_overwrite() @endpoint_decorator() @swagger.doc(check_endpoint("get", mapset_management.get_doc)) - def get(self, location_name): + def get(self, project_name): """ - Get a list of all mapsets that are located in a specific location. + Get a list of all mapsets that are located in a specific project. """ rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name="PERMANENT", ) if rdc: @@ -102,15 +102,15 @@ def __init__(self): @endpoint_decorator() @swagger.doc(check_endpoint("get", mapset_management.get_user_doc)) - def get(self, location_name, mapset_name): + def get(self, project_name, mapset_name): """ Get the current computational region of the mapset and the projection - of the location as WKT string. + of the project as WKT string. 
""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) @@ -142,12 +142,12 @@ def __init__(self): @endpoint_decorator() @swagger.doc(check_endpoint("post", mapset_management.post_user_doc)) - def post(self, location_name, mapset_name): - """Create a new mapset in an existing location.""" + def post(self, project_name, mapset_name): + """Create a new mapset in an existing project.""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) @@ -155,13 +155,13 @@ def post(self, location_name, mapset_name): http_code, response_model = self.wait_until_finish() return make_response(jsonify(response_model), http_code) - def put(self, location_name, mapset_name): + def put(self, project_name, mapset_name): """Modify the region of a mapset TODO: Implement region setting Args: - location_name (str): Name of the location + project_name (str): Name of the project mapset_name (str): Name of the mapset Returns: @@ -173,12 +173,12 @@ def put(self, location_name, mapset_name): @endpoint_decorator() @swagger.doc(check_endpoint("delete", mapset_management.delete_user_doc)) - def delete(self, location_name, mapset_name): + def delete(self, project_name, mapset_name): """Delete an existing mapset""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) @@ -201,12 +201,12 @@ class MapsetLockManagementResource(ResourceBase): @endpoint_decorator() @swagger.doc(check_endpoint("get", mapset_management.get_lock_doc)) - def get(self, location_name, mapset_name): - """Get the location/mapset lock status.""" + def get(self, project_name, mapset_name): + """Get the project/mapset lock status.""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) @@ -218,12 +218,12 @@ def get(self, location_name, mapset_name): @endpoint_decorator() @swagger.doc(check_endpoint("post", mapset_management.post_lock_doc)) - def post(self, location_name, mapset_name): - """Create a location/mapset lock.""" + def post(self, project_name, mapset_name): + """Create a project/mapset lock.""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) @@ -233,12 +233,12 @@ def post(self, location_name, mapset_name): @endpoint_decorator() @swagger.doc(check_endpoint("delete", mapset_management.delete_lock_doc)) - def delete(self, location_name, mapset_name): - """Delete a location/mapset lock.""" + def delete(self, project_name, mapset_name): + """Delete a project/mapset lock.""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) diff --git a/src/actinia_core/rest/persistent_mapset_merger.py b/src/actinia_core/rest/persistent_mapset_merger.py index 741b5eacf..e01d0ea75 100644 --- a/src/actinia_core/rest/persistent_mapset_merger.py +++ b/src/actinia_core/rest/persistent_mapset_merger.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -32,9 +32,9 @@ from actinia_core.processing.common.persistent_mapset_merger import start_job __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -43,13 +43,13 @@ class AsyncPersistentMapsetMergerResource(ResourceBase): def __init__(self): ResourceBase.__init__(self) - def post(self, location_name, mapset_name): + def post(self, project_name, mapset_name): """Merge several existing mapsets into a single one. All mapsets that should be merged and the target mapset will be locked for the processing. Args: - location_name (str): The name of the location + project_name (str): The name of the project target_mapset_name (str): The name of the target mapset, into other mapsets should be merged @@ -82,7 +82,7 @@ def post(self, location_name, mapset_name): """ # Preprocess the post call rdc = self.preprocess( - has_json=True, location_name=location_name, mapset_name=mapset_name + has_json=True, project_name=project_name, mapset_name=mapset_name ) if rdc: diff --git a/src/actinia_core/rest/persistent_processing.py b/src/actinia_core/rest/persistent_processing.py index 49611719f..1395960a8 100644 --- a/src/actinia_core/rest/persistent_processing.py +++ b/src/actinia_core/rest/persistent_processing.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -41,7 +41,7 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Guido Riembauer, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -52,7 +52,7 @@ def __init__(self, resource_id=None, iteration=None, post_url=None): @endpoint_decorator() @swagger.doc(check_endpoint("post", persistent_processing.post_doc)) - def post(self, location_name, mapset_name): + def post(self, project_name, mapset_name): """Execute a user defined process chain that creates a new mapset or runs in an existing one. @@ -60,7 +60,7 @@ def post(self, location_name, mapset_name): executed must be provided as JSON payload of the POST request. 
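# A minimal usage sketch (an assumption, not part of this patch) of a POST
# against the renamed persistent-processing endpoint. The host, API prefix,
# credentials, project, mapset and raster names are illustrative only; the
# numbered-dict process chain follows the same style as the g.proj call used
# elsewhere in this patch.
import requests

process_chain = {
    "1": {
        "module": "r.info",
        "inputs": {"map": "elevation"},
        "flags": "g",
    },
}

resp = requests.post(
    "http://localhost:8088/api/v3/projects/nc_spm_08/mapsets/"
    "new_user_mapset/processing_async",
    json=process_chain,
    auth=("actinia-user", "actinia-password"),
)
# The asynchronous call returns a resource id that can then be polled via
# the resource management endpoints.
print(resp.status_code, resp.json().get("resource_id"))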
Args: - location_name (str): The name of the location + project_name (str): The name of the project mapset_name (str): The name of the mapset Returns: @@ -179,7 +179,7 @@ def post(self, location_name, mapset_name): """ # Preprocess the post call rdc = self.preprocess( - has_json=True, location_name=location_name, mapset_name=mapset_name + has_json=True, project_name=project_name, mapset_name=mapset_name ) if rdc: diff --git a/src/actinia_core/rest/process_validation.py b/src/actinia_core/rest/process_validation.py index 827d0e875..521c7e62b 100644 --- a/src/actinia_core/rest/process_validation.py +++ b/src/actinia_core/rest/process_validation.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -42,9 +42,9 @@ from actinia_core.processing.common.process_validation import start_job __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -54,12 +54,12 @@ class AsyncProcessValidationResource(ResourceBase): @endpoint_decorator() @swagger.doc(check_endpoint("post", process_validation.post_doc)) - def post(self, location_name): + def post(self, project_name): """Validate a process chain asynchronously, check the provided sources and the mapsets.""" rdc = self.preprocess( - has_json=True, has_xml=True, location_name=location_name + has_json=True, has_xml=True, project_name=project_name ) if rdc: @@ -75,12 +75,12 @@ class SyncProcessValidationResource(ResourceBase): @endpoint_decorator() @swagger.doc(check_endpoint("post", process_validation.post_doc)) - def post(self, location_name): + def post(self, project_name): """Validate a process chain synchronously, check the provided sources and the mapsets.""" rdc = self.preprocess( - has_json=True, has_xml=True, location_name=location_name + has_json=True, has_xml=True, project_name=project_name ) if rdc: diff --git a/src/actinia_core/rest/raster_colors.py b/src/actinia_core/rest/raster_colors.py index 1c4669e18..2f7626a84 100644 --- a/src/actinia_core/rest/raster_colors.py +++ b/src/actinia_core/rest/raster_colors.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -41,9 +41,9 @@ from actinia_core.processing.common.raster_colors import start_job_from_rules __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika" +__author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. 
KG" ) __maintainer__ = "mundialis" @@ -53,11 +53,11 @@ class SyncPersistentRasterColorsResource(ResourceBase): @endpoint_decorator() @swagger.doc(check_endpoint("get", raster_colors.get_doc)) - def get(self, location_name, mapset_name, raster_name): + def get(self, project_name, mapset_name, raster_name): """Get the color definition of an existing raster map layer. Args: - location_name: Name of the location + project_name: Name of the project mapset_name: Name of the mapset raster_name: name of the raster map @@ -65,7 +65,7 @@ def get(self, location_name, mapset_name, raster_name): rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=raster_name, ) @@ -81,7 +81,7 @@ def get(self, location_name, mapset_name, raster_name): @endpoint_decorator() @swagger.doc(check_endpoint("post", raster_colors.post_doc)) - def post(self, location_name, mapset_name, raster_name): + def post(self, project_name, mapset_name, raster_name): """Set the color definition for an existing raster map layer. The JSON input should contain the color rules, a predefined color table @@ -93,7 +93,7 @@ def post(self, location_name, mapset_name, raster_name): Args: - location_name: Name of the location + project_name: Name of the project mapset_name: Name of the mapset raster_name: name of the raster map @@ -104,7 +104,7 @@ def post(self, location_name, mapset_name, raster_name): rdc = self.preprocess( has_json=True, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=raster_name, ) diff --git a/src/actinia_core/rest/raster_export.py b/src/actinia_core/rest/raster_export.py index 6f328edca..d864bbd52 100644 --- a/src/actinia_core/rest/raster_export.py +++ b/src/actinia_core/rest/raster_export.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -41,9 +41,9 @@ from actinia_core.processing.common.raster_export import start_job __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. 
KG" ) __maintainer__ = "mundialis" @@ -60,16 +60,16 @@ def __init__(self): @endpoint_decorator() @swagger.doc(check_endpoint("post", raster_export.post_doc)) - def post(self, location_name, mapset_name, raster_name): + def post(self, project_name, mapset_name, raster_name): """Export an existing raster map layer as GeoTiff.""" - return self._execute(location_name, mapset_name, raster_name, False) + return self._execute(project_name, mapset_name, raster_name, False) def _execute( - self, location_name, mapset_name, raster_name, use_raster_region + self, project_name, mapset_name, raster_name, use_raster_region ): rdc = self.preprocess( has_json=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=raster_name, ) @@ -97,8 +97,8 @@ def __init__(self): @endpoint_decorator() @swagger.doc(check_endpoint("post", raster_export.region_post_doc)) - def post(self, location_name, mapset_name, raster_name): + def post(self, project_name, mapset_name, raster_name): """Export an existing raster map layer as GeoTiff using the raster map specific region. """ - return self._execute(location_name, mapset_name, raster_name, True) + return self._execute(project_name, mapset_name, raster_name, True) diff --git a/src/actinia_core/rest/raster_layer.py b/src/actinia_core/rest/raster_layer.py index 3306fc416..5d6fb9dc2 100644 --- a/src/actinia_core/rest/raster_layer.py +++ b/src/actinia_core/rest/raster_layer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -49,7 +49,7 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Carmen Tawalika, Guido Riembauer, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. 
KG" ) __maintainer__ = "mundialis" @@ -59,12 +59,12 @@ class RasterLayerResource(MapLayerRegionResourceBase): @endpoint_decorator() @swagger.doc(check_endpoint("get", raster_layer.get_doc)) - def get(self, location_name, mapset_name, raster_name): + def get(self, project_name, mapset_name, raster_name): """Get information about an existing raster map layer.""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=raster_name, ) @@ -83,12 +83,12 @@ def get(self, location_name, mapset_name, raster_name): @endpoint_decorator() @swagger.doc(check_endpoint("delete", raster_layer.delete_doc)) - def delete(self, location_name, mapset_name, raster_name): + def delete(self, project_name, mapset_name, raster_name): """Delete an existing raster map layer.""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=raster_name, ) @@ -107,7 +107,7 @@ def delete(self, location_name, mapset_name, raster_name): @endpoint_decorator() @swagger.doc(check_endpoint("post", raster_layer.post_doc)) - def post(self, location_name, mapset_name, raster_name): + def post(self, project_name, mapset_name, raster_name): """Create a new raster layer by uploading a GeoTIFF""" allowed_extensions = ["tif", "tiff"] @@ -175,7 +175,7 @@ def post(self, location_name, mapset_name, raster_name): rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=raster_name, ) diff --git a/src/actinia_core/rest/raster_legend.py b/src/actinia_core/rest/raster_legend.py index 57345ad0a..20452c332 100644 --- a/src/actinia_core/rest/raster_legend.py +++ b/src/actinia_core/rest/raster_legend.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -41,9 +41,9 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -154,7 +154,7 @@ def create_parser_options(self, args): @endpoint_decorator() @swagger.doc(check_endpoint("get", raster_legend.get_doc)) - def get(self, location_name, mapset_name, raster_name): + def get(self, project_name, mapset_name, raster_name): """Render the legend of a raster map layer as a PNG image.""" parser = self.create_parser() args = parser.parse_args() @@ -166,7 +166,7 @@ def get(self, location_name, mapset_name, raster_name): rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=raster_name, ) diff --git a/src/actinia_core/rest/raster_renderer.py b/src/actinia_core/rest/raster_renderer.py index 3741076c6..64b32dfaa 100644 --- a/src/actinia_core/rest/raster_renderer.py +++ b/src/actinia_core/rest/raster_renderer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. 
For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -43,9 +43,9 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -55,7 +55,7 @@ class SyncEphemeralRasterRendererResource(RendererBaseResource): @endpoint_decorator() @swagger.doc(check_endpoint("get", raster_renderer.raster_render_get_doc)) - def get(self, location_name, mapset_name, raster_name): + def get(self, project_name, mapset_name, raster_name): """Render a raster map layer as a PNG image.""" parser = self.create_parser() args = parser.parse_args() @@ -67,7 +67,7 @@ def get(self, location_name, mapset_name, raster_name): rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=raster_name, ) @@ -137,7 +137,7 @@ def extract_rgb_maps(self, args, mapset_name): @swagger.doc( check_endpoint("get", raster_renderer.raster_rgb_render_get_doc) ) - def get(self, location_name, mapset_name): + def get(self, project_name, mapset_name): """Render three raster map layer as composed RGB PNG image.""" parser = self.create_parser() @@ -173,7 +173,7 @@ def get(self, location_name, mapset_name): rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) @@ -235,7 +235,7 @@ def extract_shade_maps(self, args, mapset_name): @swagger.doc( check_endpoint("get", raster_renderer.raster_shade_render_get_doc) ) - def get(self, location_name, mapset_name): + def get(self, project_name, mapset_name): """Render two raster layers as a composed shade PNG image""" parser = self.create_parser() parser.add_argument( @@ -262,7 +262,7 @@ def get(self, location_name, mapset_name): rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) diff --git a/src/actinia_core/rest/resource_management.py b/src/actinia_core/rest/resource_management.py index 974367121..6f11979bc 100644 --- a/src/actinia_core/rest/resource_management.py +++ b/src/actinia_core/rest/resource_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -58,7 +58,7 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis GmbH & Co. 
KG" @@ -352,10 +352,11 @@ def _create_ResourceDataContainer_for_resumption( is None ): return None, None, None - location = re.findall(r"locations\/(.*?)\/", post_url)[0] + # TODO project location + project = re.findall(r"locations\/(.*?)\/", post_url)[0] processing_class = global_config.INTERIM_SAVING_ENDPOINTS[endpoint] if processing_class == "AsyncEphemeralResource": - # /locations//processing_async + # /projects//processing_async from .ephemeral_processing import AsyncEphemeralResource from ..processing.common.ephemeral_processing import start_job @@ -363,10 +364,10 @@ def _create_ResourceDataContainer_for_resumption( resource_id, iteration, post_url ) rdc = processing_resource.preprocess( - location_name=location, **preprocess_kwargs + project_name=project, **preprocess_kwargs ) elif processing_class == "AsyncPersistentResource": - # /locations/{location_name}/mapsets/{mapset_name}/processing_async + # /projects/{project_name}/mapsets/{mapset_name}/processing_async from .persistent_processing import AsyncPersistentResource from ..processing.common.persistent_processing import start_job @@ -375,11 +376,11 @@ def _create_ResourceDataContainer_for_resumption( ) mapset = re.findall(r"mapsets\/(.*?)\/", post_url)[0] rdc = processing_resource.preprocess( - location_name=location, mapset_name=mapset, **preprocess_kwargs + project_name=project, mapset_name=mapset, **preprocess_kwargs ) elif processing_class == "AsyncEphemeralExportResource": - # /locations/{location_name}/processing_async_export + # /projects/{project_name}/processing_async_export from .ephemeral_processing_with_export import ( AsyncEphemeralExportResource, ) @@ -391,7 +392,7 @@ def _create_ResourceDataContainer_for_resumption( resource_id, iteration, post_url ) rdc = processing_resource.preprocess( - location_name=location, **preprocess_kwargs + project_name=project, **preprocess_kwargs ) else: return make_response( diff --git a/src/actinia_core/rest/strds_management.py b/src/actinia_core/rest/strds_management.py index 5665385fe..420798eaf 100644 --- a/src/actinia_core/rest/strds_management.py +++ b/src/actinia_core/rest/strds_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -47,28 +47,28 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika" +__author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" class SyncSTRDSListerResource(ResourceBase): - """List all STRDS in a location/mapset""" + """List all STRDS in a project/mapset""" layer_type = None @endpoint_decorator() @swagger.doc(check_endpoint("get", strds_management.list_get_doc)) - def get(self, location_name, mapset_name): + def get(self, project_name, mapset_name): """ - Get a list of all STRDS that are located in a specific location/mapset. + Get a list of all STRDS that are located in a specific project/mapset. 
""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, ) @@ -104,19 +104,19 @@ def get(self, location_name, mapset_name): class STRDSManagementResource(ResourceBase): - """List all STRDS in a location/mapset""" + """List all STRDS in a project/mapset""" @endpoint_decorator() @swagger.doc(check_endpoint("get", strds_management.get_doc)) - def get(self, location_name, mapset_name, strds_name): + def get(self, project_name, mapset_name, strds_name): """ Get information about a STRDS that is located in a specific - location/mapset. + project/mapset. """ rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=strds_name, ) @@ -132,12 +132,12 @@ def get(self, location_name, mapset_name, strds_name): @endpoint_decorator() @swagger.doc(check_endpoint("delete", strds_management.delete_doc)) - def delete(self, location_name, mapset_name, strds_name): - """Delete a STRDS that is located in a specific location/mapset.""" + def delete(self, project_name, mapset_name, strds_name): + """Delete a STRDS that is located in a specific project/mapset.""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=strds_name, ) @@ -157,12 +157,12 @@ def delete(self, location_name, mapset_name, strds_name): @endpoint_decorator() @swagger.doc(check_endpoint("post", strds_management.post_doc)) - def post(self, location_name, mapset_name, strds_name): - """Create a new STRDS in a specific location/mapset.""" + def post(self, project_name, mapset_name, strds_name): + """Create a new STRDS in a specific project/mapset.""" rdc = self.preprocess( has_json=True, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=strds_name, ) diff --git a/src/actinia_core/rest/strds_raster_management.py b/src/actinia_core/rest/strds_raster_management.py index 78e1ca3a7..ee39b4889 100644 --- a/src/actinia_core/rest/strds_raster_management.py +++ b/src/actinia_core/rest/strds_raster_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -45,9 +45,9 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika" +__author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. 
KG" ) __maintainer__ = "mundialis" @@ -57,14 +57,14 @@ class STRDSRasterManagement(ResourceBase): @endpoint_decorator() @swagger.doc(check_endpoint("get", strds_raster_management.get_doc)) - def get(self, location_name, mapset_name, strds_name): + def get(self, project_name, mapset_name, strds_name): """ Get a list of all raster map layers that are registered in a STRDS """ rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=strds_name, ) @@ -87,14 +87,14 @@ def get(self, location_name, mapset_name, strds_name): @endpoint_decorator() @swagger.doc(check_endpoint("put", strds_raster_management.put_doc)) - def put(self, location_name, mapset_name, strds_name): + def put(self, project_name, mapset_name, strds_name): """Register raster map layers in a STRDS located in a specific - location/mapset. + project/mapset. """ rdc = self.preprocess( has_json=True, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=strds_name, ) @@ -109,15 +109,15 @@ def put(self, location_name, mapset_name, strds_name): @endpoint_decorator() @swagger.doc(check_endpoint("delete", strds_raster_management.delete_doc)) - def delete(self, location_name, mapset_name, strds_name): + def delete(self, project_name, mapset_name, strds_name): """ Unregister raster map layers from a STRDS located in a specific - location/mapset. + project/mapset. """ rdc = self.preprocess( has_json=True, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=strds_name, ) diff --git a/src/actinia_core/rest/strds_renderer.py b/src/actinia_core/rest/strds_renderer.py index 1a05458dc..66085877c 100644 --- a/src/actinia_core/rest/strds_renderer.py +++ b/src/actinia_core/rest/strds_renderer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -38,9 +38,9 @@ from actinia_core.processing.common.strds_renderer import start_job __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -48,7 +48,7 @@ class SyncEphemeralSTRDSRendererResource(RendererBaseResource): @endpoint_decorator() @swagger.doc(check_endpoint("get", strds_renderer.get_doc)) - def get(self, location_name, mapset_name, strds_name): + def get(self, project_name, mapset_name, strds_name): """ Render the raster map layers of a specific STRDS as a single image. 
""" @@ -62,7 +62,7 @@ def get(self, location_name, mapset_name, strds_name): rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=strds_name, ) diff --git a/src/actinia_core/rest/vector_layer.py b/src/actinia_core/rest/vector_layer.py index cfcb28bfe..beae48d74 100644 --- a/src/actinia_core/rest/vector_layer.py +++ b/src/actinia_core/rest/vector_layer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -50,7 +50,7 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Carmen Tawalika, Guido Riembauer, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -60,12 +60,12 @@ class VectorLayerResource(MapLayerRegionResourceBase): @endpoint_decorator() @swagger.doc(check_endpoint("get", vector_layer.get_doc)) - def get(self, location_name, mapset_name, vector_name): + def get(self, project_name, mapset_name, vector_name): """Get information about an existing vector map layer.""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=vector_name, ) @@ -85,12 +85,12 @@ def get(self, location_name, mapset_name, vector_name): @endpoint_decorator() @swagger.doc(check_endpoint("delete", vector_layer.delete_dop)) - def delete(self, location_name, mapset_name, vector_name): + def delete(self, project_name, mapset_name, vector_name): """Delete an existing vector map layer.""" rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=vector_name, ) @@ -110,7 +110,7 @@ def delete(self, location_name, mapset_name, vector_name): @endpoint_decorator() @swagger.doc(check_endpoint("post", vector_layer.post_doc)) - def post(self, location_name, mapset_name, vector_name): + def post(self, project_name, mapset_name, vector_name): """Create a new vector layer by uploading a GPKG, zipped Shapefile, or GeoJSON. """ @@ -216,7 +216,7 @@ def post(self, location_name, mapset_name, vector_name): rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=vector_name, ) diff --git a/src/actinia_core/rest/vector_renderer.py b/src/actinia_core/rest/vector_renderer.py index dc8f4bf1d..6601f1d33 100644 --- a/src/actinia_core/rest/vector_renderer.py +++ b/src/actinia_core/rest/vector_renderer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -40,9 +40,9 @@ from actinia_core.processing.common.vector_renderer import start_job __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" @@ -52,7 +52,7 @@ class SyncEphemeralVectorRendererResource(RendererBaseResource): @endpoint_decorator() @swagger.doc(check_endpoint("get", vector_renderer.get_doc)) - def get(self, location_name, mapset_name, vector_name): + def get(self, project_name, mapset_name, vector_name): """Render a single vector map layer""" parser = self.create_parser() args = parser.parse_args() @@ -64,7 +64,7 @@ def get(self, location_name, mapset_name, vector_name): rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name=mapset_name, map_name=vector_name, ) diff --git a/src/actinia_core/testsuite.py b/src/actinia_core/testsuite.py index e3f1f99e2..ee7eb689e 100644 --- a/src/actinia_core/testsuite.py +++ b/src/actinia_core/testsuite.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2019 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -43,9 +43,9 @@ from actinia_core.core.common.process_queue import create_process_queue __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. 
KG" ) __maintainer__ = "mundialis" @@ -339,9 +339,9 @@ def waitAsyncStatusAssertHTTP( time.sleep(0.4) return resp_data - def assertRasterInfo(self, location, mapset, raster, ref_info, header): + def assertRasterInfo(self, project, mapset, raster, ref_info, header): url = ( - f"{URL_PREFIX}/locations/{location}/mapsets/{mapset}/" + f"{URL_PREFIX}/projects/{project}/mapsets/{mapset}/" f"raster_layers/{raster}" ) rv = self.server.get(url, headers=header) @@ -360,9 +360,9 @@ def assertRasterInfo(self, location, mapset, raster, ref_info, header): ), ) - def assertVectorInfo(self, location, mapset, vector, ref_info, header): + def assertVectorInfo(self, project, mapset, vector, ref_info, header): url = ( - f"{URL_PREFIX}/locations/{location}/mapsets/{mapset}/" + f"{URL_PREFIX}/projects/{project}/mapsets/{mapset}/" f"vector_layers/{vector}" ) rv = self.server.get(url, headers=header) @@ -381,34 +381,34 @@ def assertVectorInfo(self, location, mapset, vector, ref_info, header): ), ) - def create_new_mapset(self, mapset_name, location_name="nc_spm_08"): - self.delete_mapset(mapset_name, location_name) + def create_new_mapset(self, mapset_name, project_name="nc_spm_08"): + self.delete_mapset(mapset_name, project_name) # Create new mapset self.server.post( URL_PREFIX - + "/locations/%s/mapsets/%s" % (location_name, mapset_name), + + "/projects/%s/mapsets/%s" % (project_name, mapset_name), headers=self.admin_auth_header, ) - def delete_mapset(self, mapset_name, location_name="nc_spm_08"): + def delete_mapset(self, mapset_name, project_name="nc_spm_08"): # Unlock mapset for deletion self.server.delete( URL_PREFIX - + "/locations/%s/mapsets/%s/lock" % (location_name, mapset_name), + + "/projects/%s/mapsets/%s/lock" % (project_name, mapset_name), headers=self.admin_auth_header, ) # Delete existing mapset self.server.delete( URL_PREFIX - + "/locations/%s/mapsets/%s" % (location_name, mapset_name), + + "/projects/%s/mapsets/%s" % (project_name, mapset_name), headers=self.admin_auth_header, ) - def create_vector_layer(self, location, mapset, vector, region, parameter): + def create_vector_layer(self, project, mapset, vector, region, parameter): # Remove potentially existing vector layer url = ( - f"{URL_PREFIX}/locations/{location}/mapsets/{mapset}/" + f"{URL_PREFIX}/projects/{project}/mapsets/{mapset}/" f"vector_layers/{vector}" ) rv = self.server.delete(url, headers=self.user_auth_header) @@ -440,7 +440,7 @@ def create_vector_layer(self, location, mapset, vector, region, parameter): "version": "1", } url = ( - f"{URL_PREFIX}/locations/{location}/mapsets/{mapset}/" + f"{URL_PREFIX}/projects/{project}/mapsets/{mapset}/" f"processing_async" ) rv = self.server.post( diff --git a/tests/test_process_queue.py b/tests/test_process_queue.py index 07f981942..0f704d06f 100644 --- a/tests/test_process_queue.py +++ b/tests/test_process_queue.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -42,9 +42,9 @@ from test_resource_base import global_config __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. 
KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "Sören Gebbert" __email__ = "soerengebbert@googlemail.com" @@ -98,7 +98,7 @@ def setUp(self): orig_time=time.time(), orig_datetime=datetime.datetime.now(), config=global_config, - location_name="location_name", + project_name="project_name", mapset_name="mapset_name", map_name="map_name", ) diff --git a/tests/test_raster_import_pixellimit.py b/tests/test_raster_import_pixellimit.py index 779364d36..678b881d6 100644 --- a/tests/test_raster_import_pixellimit.py +++ b/tests/test_raster_import_pixellimit.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2023 Lina Krisztian and mundialis GmbH & Co. KG +# Copyright (c) 2023-2024 mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -41,15 +41,15 @@ ) __license__ = "GPLv3" -__author__ = "Lina Krisztian" -__copyright__ = "Copyright 2023, mundialis GmbH & Co. KG" +__author__ = "Lina Krisztian, Anika Weinmann" +__copyright__ = "Copyright 2023 - 2024, mundialis GmbH & Co. KG" __maintainer__ = "mundialis GmbH & Co. KG" class ImportRasterLayerPixellimitTestCase(ActiniaResourceTestCaseBase): - location = "nc_spm_08" + project = "nc_spm_08" tmp_mapset = "mapset_rasterimport_pixellimit" - endpoint = f"/locations/{location}/mapsets/{tmp_mapset}/processing_async" + endpoint = f"/projects/{project}/mapsets/{tmp_mapset}/processing_async" rimport_inp = "elevation" # import resolution with which the process should fail: rimport_res_fail = 0.1 @@ -57,11 +57,11 @@ class ImportRasterLayerPixellimitTestCase(ActiniaResourceTestCaseBase): def setUp(self): # create new temp mapset super(ImportRasterLayerPixellimitTestCase, self).setUp() - self.create_new_mapset(self.tmp_mapset, location_name=self.location) + self.create_new_mapset(self.tmp_mapset, project_name=self.project) def tearDown(self): # delete mapset - self.delete_mapset(self.tmp_mapset, location_name=self.location) + self.delete_mapset(self.tmp_mapset, project_name=self.project) super(ImportRasterLayerPixellimitTestCase, self).tearDown() def test_pixellimit_allowed(self): diff --git a/tests/test_raster_upload.py b/tests/test_raster_upload.py index db56fcb4a..5d3808549 100644 --- a/tests/test_raster_upload.py +++ b/tests/test_raster_upload.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -43,12 +43,12 @@ __license__ = "GPLv3" __author__ = "Anika Weinmann, Guido Riembauer" -__copyright__ = "Copyright 2016-2021, mundialis GmbH & Co. KG" +__copyright__ = "Copyright 2016-2024, mundialis GmbH & Co. KG" __maintainer__ = "mundialis GmbH & Co. 
KG" class UploadRasterLayerTestCase(ActiniaResourceTestCaseBase): - location = "nc_spm_08" + project = "nc_spm_08" mapset = "PERMANENT" tmp_mapset = "mapset_upload" raster = "elev_ned_30m" @@ -89,11 +89,11 @@ def tearDownClass(cls): def setUp(self): # create new temp mapset super(UploadRasterLayerTestCase, self).setUp() - self.create_new_mapset(self.tmp_mapset, location_name=self.location) + self.create_new_mapset(self.tmp_mapset, project_name=self.project) def tearDown(self): # delete mapset - self.delete_mapset(self.tmp_mapset, location_name=self.location) + self.delete_mapset(self.tmp_mapset, project_name=self.project) super(UploadRasterLayerTestCase, self).tearDown() def test_upload_raster_userdb(self): @@ -101,7 +101,7 @@ def test_upload_raster_userdb(self): Test successful GeoTIFF upload and check against reference raster info """ url = ( - f"{URL_PREFIX}/locations/{self.location}/mapsets/{self.tmp_mapset}" + f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.tmp_mapset}" f"/raster_layers/{self.raster}" ) multipart_form_data = {"file": open(self.local_raster, "rb")} @@ -120,7 +120,7 @@ def test_upload_raster_userdb(self): ) self.assertRasterInfo( - self.location, + self.project, self.tmp_mapset, self.raster, self.ref_info, @@ -130,7 +130,7 @@ def test_upload_raster_userdb(self): def test_upload_raster_globaldb_error(self): """Test Error if raster is uploaded to global DB""" url = ( - f"{URL_PREFIX}/locations/{self.location}/mapsets/{self.mapset}/" + f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.mapset}/" f"raster_layers/{self.raster}" ) multipart_form_data = {"file": open(self.local_raster, "rb")} diff --git a/tests/test_strds_management.py b/tests/test_strds_management.py index a3e2b86e7..f009cbca0 100644 --- a/tests/test_strds_management.py +++ b/tests/test_strds_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -39,17 +39,17 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. 
KG" ) __maintainer__ = "Sören Gebbert" __email__ = "soerengebbert@googlemail.com" -location = "nc_spm_08" +project = "nc_spm_08" strds_mapset = "modis_lst" -strds_url = URL_PREFIX + "/locations/%(location)s/mapsets/%(mapset)s/strds" % { - "location": location, +strds_url = URL_PREFIX + "/projects/%(project)s/mapsets/%(mapset)s/strds" % { + "project": project, "mapset": strds_mapset, } strds_data = "LST_Day_monthly" @@ -128,13 +128,13 @@ def test_strds_info(self): def test_strds_create_remove(self): new_mapset = "strds_test" - self.create_new_mapset(mapset_name=new_mapset, location_name=location) + self.create_new_mapset(mapset_name=new_mapset, project_name=project) # Create success rv = self.server.post( URL_PREFIX - + "/locations/%s/mapsets/%s/strds/test_strds" - % (location, new_mapset), + + "/projects/%s/mapsets/%s/strds/test_strds" + % (project, new_mapset), headers=self.admin_auth_header, data=json_dumps( { @@ -158,8 +158,8 @@ def test_strds_create_remove(self): # Create failure since the strds already exists rv = self.server.post( URL_PREFIX - + "/locations/%s/mapsets/%s/strds/test_strds" - % (location, new_mapset), + + "/projects/%s/mapsets/%s/strds/test_strds" + % (project, new_mapset), headers=self.admin_auth_header, data=json_dumps( { @@ -182,8 +182,8 @@ def test_strds_create_remove(self): # Read/check information of the new strds rv = self.server.get( URL_PREFIX - + "/locations/%s/mapsets/%s/strds/test_strds" - % (location, new_mapset), + + "/projects/%s/mapsets/%s/strds/test_strds" + % (project, new_mapset), headers=self.user_auth_header, ) print(rv.data) @@ -202,8 +202,8 @@ def test_strds_create_remove(self): # Delete the strds rv = self.server.delete( URL_PREFIX - + "/locations/%s/mapsets/%s/strds/test_strds" - % (location, new_mapset), + + "/projects/%s/mapsets/%s/strds/test_strds" + % (project, new_mapset), headers=self.admin_auth_header, ) print(rv.data) @@ -218,8 +218,8 @@ def test_strds_create_remove(self): # Try to delete the strds again to produce an error rv = self.server.delete( URL_PREFIX - + "/locations/%s/mapsets/%s/strds/test_strds" - % (location, new_mapset), + + "/projects/%s/mapsets/%s/strds/test_strds" + % (project, new_mapset), headers=self.admin_auth_header, ) print(rv.data) @@ -234,8 +234,8 @@ def test_strds_create_remove(self): rv = self.server.get( URL_PREFIX - + "/locations/%s/mapsets/%s/strds/test_strds" - % (location, new_mapset), + + "/projects/%s/mapsets/%s/strds/test_strds" + % (project, new_mapset), headers=self.user_auth_header, ) print(rv.data) diff --git a/tests/test_strds_raster_management.py b/tests/test_strds_raster_management.py index 9152af145..347c12697 100644 --- a/tests/test_strds_raster_management.py +++ b/tests/test_strds_raster_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -34,17 +34,17 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. 
KG" ) __maintainer__ = "Sören Gebbert" __email__ = "soerengebbert@googlemail.com" -location = "nc_spm_08" +project = "nc_spm_08" strds_mapset = "modis_lst" -strds_url = URL_PREFIX + "/locations/%(location)s/mapsets/%(mapset)s/strds" % { - "location": location, +strds_url = URL_PREFIX + "/projects/%(project)s/mapsets/%(mapset)s/strds" % { + "project": project, "mapset": strds_mapset, } strds_data = "LST_Day_monthly" @@ -53,13 +53,13 @@ class STRDSTestCase(ActiniaResourceTestCaseBase): def create_raster_layer( - self, location_name, mapset_name, raster_name, val + self, project_name, mapset_name, raster_name, val ): # Remove potentially existing raster layer rv = self.server.delete( URL_PREFIX - + "/locations/%s/mapsets/%s/raster_layers/%s" - % (location_name, mapset_name, raster_name), + + "/projects/%s/mapsets/%s/raster_layers/%s" + % (project_name, mapset_name, raster_name), headers=self.admin_auth_header, ) # print(rv.data) @@ -93,8 +93,8 @@ def create_raster_layer( } rv = self.server.post( URL_PREFIX - + "/locations/%s/mapsets/%s/processing_async" - % (location_name, mapset_name), + + "/projects/%s/mapsets/%s/processing_async" + % (project_name, mapset_name), headers=self.admin_auth_header, data=json_dumps(postbody), content_type="application/json", @@ -118,7 +118,7 @@ def create_raster_layer( def test_strds_creation_error(self): # This must fail, global mapsets are not allowed to modify rv = self.server.post( - f"{URL_PREFIX}/locations/{location}/mapsets/{strds_mapset}/strds/" + f"{URL_PREFIX}/projects/{project}/mapsets/{strds_mapset}/strds/" "test_strds_register", headers=self.admin_auth_header, data=json_dumps( @@ -141,11 +141,11 @@ def test_strds_creation_error(self): ) def test_strds_create_register_unregister_1(self): - self.create_new_mapset(new_mapset, location) + self.create_new_mapset(new_mapset, project) # Create success rv = self.server.post( - f"{URL_PREFIX}/locations/{location}/mapsets/{new_mapset}/strds/" + f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" "test_strds_register", headers=self.admin_auth_header, data=json_dumps( @@ -168,9 +168,9 @@ def test_strds_create_register_unregister_1(self): ) # Create the raster layer - self.create_raster_layer(location, new_mapset, "test_layer_1", 1) - self.create_raster_layer(location, new_mapset, "test_layer_2", 2) - self.create_raster_layer(location, new_mapset, "test_layer_3", 3) + self.create_raster_layer(project, new_mapset, "test_layer_1", 1) + self.create_raster_layer(project, new_mapset, "test_layer_2", 2) + self.create_raster_layer(project, new_mapset, "test_layer_3", 3) raster_layers = [ { @@ -191,7 +191,7 @@ def test_strds_create_register_unregister_1(self): ] rv = self.server.put( - f"{URL_PREFIX}/locations/{location}/mapsets/{new_mapset}/strds/" + f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" "test_strds_register/raster_layers", data=json_dumps(raster_layers), content_type="application/json", @@ -209,7 +209,7 @@ def test_strds_create_register_unregister_1(self): # Check strds rv = self.server.get( - f"{URL_PREFIX}/locations/{location}/mapsets/{new_mapset}/strds/" + f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" "test_strds_register", headers=self.admin_auth_header, ) @@ -234,7 +234,7 @@ def test_strds_create_register_unregister_1(self): raster_layers = ["test_layer_1", "test_layer_2", "test_layer_3"] rv = self.server.delete( - f"{URL_PREFIX}/locations/{location}/mapsets/{new_mapset}/strds/" + f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" 
"test_strds_register/raster_layers", data=json_dumps(raster_layers), content_type="application/json", @@ -252,7 +252,7 @@ def test_strds_create_register_unregister_1(self): # Check strds rv = self.server.get( - f"{URL_PREFIX}/locations/{location}/mapsets/{new_mapset}/strds/" + f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" "test_strds_register", headers=self.user_auth_header, ) @@ -275,7 +275,7 @@ def test_strds_create_register_unregister_1(self): # Delete the strds rv = self.server.delete( - f"{URL_PREFIX}/locations/{location}/mapsets/{new_mapset}/strds/" + f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" "test_strds_register", headers=self.user_auth_header, ) diff --git a/tests/test_strds_raster_renderer.py b/tests/test_strds_raster_renderer.py index 2b60a4ff9..28443ffb8 100644 --- a/tests/test_strds_raster_renderer.py +++ b/tests/test_strds_raster_renderer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -34,17 +34,17 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "Soeren Gebbert" __email__ = "soerengebbert@googlemail.com" -location = "nc_spm_08" +project = "nc_spm_08" strds_mapset = "modis_lst" -strds_url = URL_PREFIX + "/locations/%(location)s/mapsets/%(mapset)s/strds" % { - "location": location, +strds_url = URL_PREFIX + "/projects/%(project)s/mapsets/%(mapset)s/strds" % { + "project": project, "mapset": strds_mapset, } strds_data = "LST_Day_monthly" @@ -52,13 +52,13 @@ class STRDSRenderTestCase(ActiniaResourceTestCaseBase): def create_raster_layer( - self, location_name, mapset_name, raster_name, val + self, project_name, mapset_name, raster_name, val ): # Remove potentially existing raster layer rv = self.server.delete( URL_PREFIX - + "/locations/%s/mapsets/%s/raster_layers/%s" - % (location_name, mapset_name, raster_name), + + "/projects/%s/mapsets/%s/raster_layers/%s" + % (project_name, mapset_name, raster_name), headers=self.admin_auth_header, ) # print(rv.data) @@ -92,8 +92,8 @@ def create_raster_layer( } rv = self.server.post( URL_PREFIX - + "/locations/%s/mapsets/%s/processing_async" - % (location_name, mapset_name), + + "/projects/%s/mapsets/%s/processing_async" + % (project_name, mapset_name), headers=self.admin_auth_header, data=json_dumps(postbody), content_type="application/json", @@ -116,11 +116,11 @@ def create_raster_layer( def test_strds_render_1(self): new_mapset = "strds_render_test" - self.create_new_mapset(new_mapset, location) + self.create_new_mapset(new_mapset, project) # Create success rv = self.server.post( - f"{URL_PREFIX}/locations/{location}/mapsets/{new_mapset}/strds/" + f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" "test_strds_register", headers=self.admin_auth_header, data=json_dumps( @@ -143,9 +143,9 @@ def test_strds_render_1(self): ) # Create the raster layer - self.create_raster_layer(location, new_mapset, "test_layer_1", 1) - self.create_raster_layer(location, new_mapset, "test_layer_2", 2) 
- self.create_raster_layer(location, new_mapset, "test_layer_3", 3) + self.create_raster_layer(project, new_mapset, "test_layer_1", 1) + self.create_raster_layer(project, new_mapset, "test_layer_2", 2) + self.create_raster_layer(project, new_mapset, "test_layer_3", 3) raster_layers = [ { @@ -166,7 +166,7 @@ def test_strds_render_1(self): ] rv = self.server.put( - f"{URL_PREFIX}/locations/{location}/mapsets/{new_mapset}/strds/" + f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" "test_strds_register/raster_layers", data=json_dumps(raster_layers), content_type="application/json", @@ -184,7 +184,7 @@ def test_strds_render_1(self): # Check strds rv = self.server.get( - f"{URL_PREFIX}/locations/{location}/mapsets/{new_mapset}/strds/" + f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" "test_strds_register/render?width=100&height=100", headers=self.admin_auth_header, ) @@ -200,7 +200,7 @@ def test_strds_render_1(self): # Check strds rv = self.server.get( - f"{URL_PREFIX}/locations/{location}/mapsets/{new_mapset}/strds/" + f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" "test_strds_register/render?width=100&height=100&" "start_time=2000-01-01 00:00:00&end_time=2000-01-02 00:00:00", headers=self.admin_auth_header, diff --git a/tests/test_vector_upload.py b/tests/test_vector_upload.py index 56c4eec63..c07c6a531 100644 --- a/tests/test_vector_upload.py +++ b/tests/test_vector_upload.py @@ -34,7 +34,7 @@ __license__ = "GPLv3" __author__ = "Anika Weinmann, Guido Riembauer" -__copyright__ = "Copyright 2016-2021, mundialis GmbH & Co. KG" +__copyright__ = "Copyright 2016-2024, mundialis GmbH & Co. KG" __maintainer__ = "mundialis GmbH & Co. KG" geojson_data = """{ @@ -80,7 +80,7 @@ class UploadVectorLayerTestCase(ActiniaResourceTestCaseBase): - location = "nc_spm_08" + project = "nc_spm_08" mapset = "PERMANENT" tmp_mapset = "mapset_upload" vector = "testvector" @@ -126,11 +126,11 @@ def tearDownClass(cls): def setUp(self): # create new temp mapset super(UploadVectorLayerTestCase, self).setUp() - self.create_new_mapset(self.tmp_mapset, location_name=self.location) + self.create_new_mapset(self.tmp_mapset, project_name=self.project) def tearDown(self): # delete mapset - self.delete_mapset(self.tmp_mapset, location_name=self.location) + self.delete_mapset(self.tmp_mapset, project_name=self.project) super(UploadVectorLayerTestCase, self).tearDown() def test_upload_vector_geojson_userdb(self): @@ -138,7 +138,7 @@ def test_upload_vector_geojson_userdb(self): Test successful GeoJSON upload and check against reference vector info """ url = ( - f"{URL_PREFIX}/locations/{self.location}/mapsets/{self.tmp_mapset}" + f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.tmp_mapset}" f"/vector_layers/{self.vector}" ) multipart_form_data = {"file": open(self.local_geojson, "rb")} @@ -156,7 +156,7 @@ def test_upload_vector_geojson_userdb(self): ) self.assertVectorInfo( - self.location, + self.project, self.tmp_mapset, self.vector, self.ref_info, @@ -168,7 +168,7 @@ def test_upload_vector_gpkg_userdb(self): Test successful GPKG upload and check against reference vector info """ url = ( - f"{URL_PREFIX}/locations/{self.location}/mapsets/{self.tmp_mapset}" + f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.tmp_mapset}" f"/vector_layers/{self.vector}" ) multipart_form_data = {"file": open(self.gpkg_file, "rb")} @@ -187,7 +187,7 @@ def test_upload_vector_gpkg_userdb(self): ) self.assertVectorInfo( - self.location, + self.project, self.tmp_mapset, self.vector, self.ref_info, @@ 
-200,7 +200,7 @@ def test_upload_vector_zipped_shp_userdb(self): vector info """ url = ( - f"{URL_PREFIX}/locations/{self.location}/mapsets/{self.tmp_mapset}" + f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.tmp_mapset}" f"/vector_layers/{self.vector}" ) multipart_form_data = {"file": open(self.zipped_shp_file, "rb")} @@ -224,7 +224,7 @@ def test_upload_vector_zipped_shp_userdb(self): {"column": "fid", "type": "DOUBLE PRECISION"}, ] self.assertVectorInfo( - self.location, + self.project, self.tmp_mapset, self.vector, shp_ref_info, @@ -234,7 +234,7 @@ def test_upload_vector_zipped_shp_userdb(self): def test_upload_vector_globaldb_error(self): """Test Error if vector (GPKG) is uploaded to global DB""" url = ( - f"{URL_PREFIX}/locations/{self.location}/mapsets/{self.mapset}/" + f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.mapset}/" f"vector_layers/{self.vector}" ) multipart_form_data = {"file": open(self.gpkg_file, "rb")} From fd11517cd0d9c657e2a41b087e2322904506dbc9 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 17 Oct 2024 14:50:28 +0200 Subject: [PATCH 06/24] rename location --- README.md | 14 +-- docker/README.md | 4 +- docker/actinia-core-alpine/Dockerfile | 4 +- docker/actinia-core-alpine/start.sh | 2 +- docker/actinia-core-tests/Dockerfile | 2 +- docs/docs/actinia_concepts.md | 16 ++-- docs/docs/index.md | 2 +- docs/docs/installation.md | 8 +- docs/docs/introduction.md | 24 ++--- docs/docs/tutorial_data_access.md | 74 +++++++------- docs/docs/tutorial_landsat_ndvi.md | 2 +- docs/docs/tutorial_process_chain.md | 20 ++-- docs/docs/tutorial_sentinel2_ndvi.md | 6 +- docs/docs/tutorial_strds_sampling.md | 8 +- logging.md | 32 +++---- scripts/actinia-algebra | 20 ++-- scripts/actinia-bench | 14 +-- scripts/curl_commands.sh | 92 +++++++++--------- src/actinia_core/cli/actinia_user.py | 39 ++++---- src/actinia_core/core/common/api_logger.py | 11 ++- src/actinia_core/core/common/app.py | 26 ++--- src/actinia_core/core/common/config.py | 22 +++-- src/actinia_core/core/common/keycloak_user.py | 19 ++-- src/actinia_core/core/common/user.py | 11 ++- src/actinia_core/core/common/user_base.py | 10 +- .../core/geodata_download_importer.py | 9 +- src/actinia_core/core/grass_modules_list.py | 14 +-- src/actinia_core/core/redis_lock.py | 30 +++--- src/actinia_core/core/redis_user.py | 16 ++-- src/actinia_core/endpoints.py | 5 +- src/actinia_core/models/process_chain.py | 14 +-- src/actinia_core/models/response_models.py | 35 +++---- .../ephemeral_processing_with_export.py | 6 +- .../ephemeral/persistent_processing.py | 5 +- ...on_management.py => project_management.py} | 6 +- .../persistent/strds_raster_management.py | 11 ++- .../persistent/vector_layer.py | 9 +- ...on_management.py => project_management.py} | 18 ++-- src/actinia_core/rest/mapsets.py | 19 ++-- ...on_management.py => project_management.py} | 4 +- src/actinia_core/rest/resource_management.py | 5 +- src/actinia_core/version.py | 11 ++- tests/test_async_mapset_merging.py | 44 ++++----- tests/test_async_mapset_merging_strds.py | 31 +++--- ...est_async_process_postgis_import_export.py | 14 +-- tests/test_async_process_validation.py | 17 ++-- tests/test_async_process_validation_errors.py | 38 ++++---- tests/test_async_processing.py | 34 +++---- tests/test_async_processing_2.py | 22 ++--- tests/test_async_processing_export.py | 46 ++++----- tests/test_async_processing_export_file.py | 22 ++--- ...test_async_processing_export_to_storage.py | 20 ++-- tests/test_async_processing_export_vector.py | 16 ++-- 
tests/test_async_processing_import_export.py | 50 +++++----- tests/test_async_processing_mapset.py | 42 ++++---- ...async_processing_stdin_parameter_parser.py | 5 +- tests/test_async_processing_stdout_parser.py | 14 +-- tests/test_async_raster_export.py | 16 ++-- tests/test_job_resumption.py | 22 ++--- tests/test_mapset_management.py | 52 +++++----- tests/test_mapsets.py | 23 ++--- tests/test_noauth.py | 28 +++--- ...nagement.py => test_project_management.py} | 96 +++++++++---------- tests/test_raster_colors.py | 42 ++++---- tests/test_raster_layer.py | 14 +-- tests/test_raster_layers.py | 32 +++---- tests/test_raster_legend.py | 34 +++---- tests/test_raster_renderer.py | 54 +++++------ tests/test_user_management.py | 14 +-- tests/test_vector_layer.py | 17 ++-- tests/test_vector_layers.py | 16 ++-- tests/test_vector_renderer.py | 23 +++-- tests/test_webhook.py | 11 ++- 73 files changed, 814 insertions(+), 794 deletions(-) rename src/actinia_core/processing/actinia_processing/persistent/{location_management.py => project_management.py} (95%) rename src/actinia_core/processing/common/{location_management.py => project_management.py} (79%) rename src/actinia_core/rest/{location_management.py => project_management.py} (98%) rename tests/{test_location_management.py => test_project_management.py} (72%) diff --git a/README.md b/README.md index e120751e4..3b71f3f07 100644 --- a/README.md +++ b/README.md @@ -63,28 +63,28 @@ Actinia is also available on [OSGeoLive](https://live.osgeo.org/en/overview/acti ### Data management -- List all locations that are available in the actinia persistent database: +- List all projects that are available in the actinia persistent database: ```bash -curl -u 'demouser:gu3st!pa55w0rd' -X GET "https://actinia.mundialis.de/api/v3/locations" +curl -u 'demouser:gu3st!pa55w0rd' -X GET "https://actinia.mundialis.de/api/v3/projects" ``` -- List all mapsets in the location latlong_wgs84: +- List all mapsets in the project latlong_wgs84: ```bash -curl -u 'demouser:gu3st!pa55w0rd' -X GET "https://actinia.mundialis.de/api/v3/locations/latlong_wgs84/mapsets" +curl -u 'demouser:gu3st!pa55w0rd' -X GET "https://actinia.mundialis.de/api/v3/projects/latlong_wgs84/mapsets" ``` -- List all space-time raster datasets (STRDS) in location latlong_wgs84 and mapset Sentinel_timeseries: +- List all space-time raster datasets (STRDS) in project latlong_wgs84 and mapset Sentinel_timeseries: ```bash -curl -u 'demouser:gu3st!pa55w0rd' -X GET "https://actinia.mundialis.de/api/v3/locations/latlong_wgs84/mapsets/modis_ndvi_global/strds" +curl -u 'demouser:gu3st!pa55w0rd' -X GET "https://actinia.mundialis.de/api/v3/projects/latlong_wgs84/mapsets/modis_ndvi_global/strds" ``` - List all raster map layers of the STRDS: ```bash -curl -u 'demouser:gu3st!pa55w0rd' -X GET "https://actinia.mundialis.de/api/v3/locations/latlong_wgs84/mapsets/modis_ndvi_global/strds/ndvi_16_5600m/raster_layers" +curl -u 'demouser:gu3st!pa55w0rd' -X GET "https://actinia.mundialis.de/api/v3/projects/latlong_wgs84/mapsets/modis_ndvi_global/strds/ndvi_16_5600m/raster_layers" ``` ### Landsat and Sentinel-2 NDVI computation diff --git a/docker/README.md b/docker/README.md index 9029436ad..e5b92f38d 100644 --- a/docker/README.md +++ b/docker/README.md @@ -20,7 +20,7 @@ curl http://127.0.0.1:8088/api/v3/version - Want to __start developing__? Look for [Local dev-setup with docker](#local-dev-setup) below. - For __production deployment__, see [Production deployment](#production-deployment) below. 
-On startup, some GRASS GIS locations are created by default but they are still empty. How to get some geodata to start processing, see in [Testing GRASS GIS inside a container](#grass-gis) below. +On startup, some GRASS GIS projects are created by default but they are still empty. How to get some geodata to start processing, see in [Testing GRASS GIS inside a container](#grass-gis) below. ## Adding a user @@ -192,7 +192,7 @@ You now have some data which you can access through actinia. To get information via API, start actinia with gunicorn and run ```bash -curl -u actinia-gdi:actinia-gdi http://127.0.0.1:8088/api/v3/locations/nc_spm_08/mapsets +curl -u actinia-gdi:actinia-gdi http://127.0.0.1:8088/api/v3/projects/nc_spm_08/mapsets ``` The folder where you downloaded the data into (`/actinia_core/grassdb`) is mounted into your docker container via the compose file, so all data is kept, even if your docker container restarts. diff --git a/docker/actinia-core-alpine/Dockerfile b/docker/actinia-core-alpine/Dockerfile index e59df0ec5..cf281ecb2 100644 --- a/docker/actinia-core-alpine/Dockerfile +++ b/docker/actinia-core-alpine/Dockerfile @@ -16,12 +16,12 @@ ENV GISBASE "" COPY --from=grass /usr/local/bin/grass /usr/local/bin/grass COPY --from=grass /usr/local/grass* /usr/local/grass/ RUN ln -s /usr/local/grass "$(grass --config path)" -RUN grass --tmp-location EPSG:4326 --exec g.version -rge && \ +RUN grass --tmp-project EPSG:4326 --exec g.version -rge && \ pdal --version && \ python --version # Install GRASS GIS addon d.rast.multi (needed for STRDS render endpoint) -RUN grass --tmp-location EPSG:4326 --exec g.extension -s \ +RUN grass --tmp-project EPSG:4326 --exec g.extension -s \ extension=d.rast.multi url=https://github.com/mundialis/d_rast_multi diff --git a/docker/actinia-core-alpine/start.sh b/docker/actinia-core-alpine/start.sh index d0cf56263..638337bde 100755 --- a/docker/actinia-core-alpine/start.sh +++ b/docker/actinia-core-alpine/start.sh @@ -7,7 +7,7 @@ mkdir -p /actinia_core/workspace/temp_db mkdir -p /actinia_core/workspace/tmp mkdir -p /actinia_core/resources -# Create default location in mounted (!) directory +# Create default project in mounted (!) directory [ ! -d "/actinia_core/grassdb/nc_spm_08" ] && grass -e -c 'EPSG:3358' /actinia_core/grassdb/nc_spm_08 actinia-user create -u actinia-gdi -w actinia-gdi -r superadmin -g superadmin -c 100000000000 -n 1000 -t 31536000 diff --git a/docker/actinia-core-tests/Dockerfile b/docker/actinia-core-tests/Dockerfile index e8a7939ce..0c487a98c 100644 --- a/docker/actinia-core-tests/Dockerfile +++ b/docker/actinia-core-tests/Dockerfile @@ -21,7 +21,7 @@ RUN wget --quiet https://grass.osgeo.org/sampledata/north_carolina/nc_spm_mapset RUN chown -R 1001:1001 nc_spm_08/modis_lst && chmod -R g+w nc_spm_08/modis_lst # install GRASS addons required for tests -RUN grass --tmp-location EPSG:4326 --exec g.extension -s extension=r.colors.out_sld +RUN grass --tmp-project EPSG:4326 --exec g.extension -s extension=r.colors.out_sld # install things only for tests # DL3013 # Pin versions in pip diff --git a/docs/docs/actinia_concepts.md b/docs/docs/actinia_concepts.md index 9332c2f99..d773bc0cc 100644 --- a/docs/docs/actinia_concepts.md +++ b/docs/docs/actinia_concepts.md @@ -58,7 +58,7 @@ The following user-roles are supported: ### 3. 
user - Can run computational tasks in ephemeral and user specific databases -- Can create, modify and delete locations in a user specific +- Can create, modify and delete projects in a user specific database - Can create, modify and delete mapsets in user specific databases @@ -81,7 +81,7 @@ Overview table: |------|------------|-------|------|-------|------| | amount raster cells is unlimited | y | y | limited, selected via redis | limited, selected via redis | - | | database access is unlimited | y | only to persistent databases that were granted by a superadmin | limited, defined in redis | limited, defined in redis | - | -| location/mapset access is unlimited | y | y | can create, modify and delete locations/mapsets in user specific databases, defined in redis | has access to persistent databases that were granted by a superadmin, defined in redis | - | +| project/mapset access is unlimited | y | y | can create, modify and delete projects/mapsets in user specific databases, defined in redis | has access to persistent databases that were granted by a superadmin, defined in redis | - | |module access is unlimited | y | y | can run computational tasks in ephemeral and user specific databases | has very limited access to API calls | - | | get, create, delete a single user | y | users with the maximum user-role user of the same user group | n | n | Only normal users (role=user can be created) | @@ -97,7 +97,7 @@ number_of_workers = 3 ## The Actinia databases -Actinia manages GRASS GIS locations in its *persistent database*. User +Actinia manages GRASS GIS projects in its *persistent database*. User are not permitted to modify data in the actinia persistent database, but can access all data read-only for processing and visualization. Data in the persistent database can only be accessed via HTTP GET API calls. @@ -113,7 +113,7 @@ database. **Summary** ### 1. Persistent database -- Read only database with locations and mapsets that can be +- Read only database with projects and mapsets that can be used as processing environment and data source - Data can only be accessed using HTTP GET API calls @@ -124,16 +124,16 @@ database. removed after the processing is finished - Ephemeral databases use persistent databases as processing environments to access required data from mapsets in - persistent locations + persistent projects ### 3. User specific databases - Persistent databases that can be created and modified by a specific user group -- The base for a location in a user specific database can be - a location from a persistent database, however mapsets +- The base for a project in a user specific database can be + a project from a persistent database, however mapsets names must be unique. 
- A user group can only access a single database with any - number of locations + number of projects **Footnotes** diff --git a/docs/docs/index.md b/docs/docs/index.md index e027ca2cb..a62a2d2d1 100644 --- a/docs/docs/index.md +++ b/docs/docs/index.md @@ -38,7 +38,7 @@ The source code is available here: - Installation - Actinia database access - Using curl for HTTP requests - - Access to locations and mapsets in the persistent database + - Access to projects and mapsets in the persistent database - Access to raster layers in the persistent database - Access to raster time-series in the persistent database - Time-series sampling diff --git a/docs/docs/installation.md b/docs/docs/installation.md index 8690316f3..c3dfdf0a5 100644 --- a/docs/docs/installation.md +++ b/docs/docs/installation.md @@ -51,7 +51,7 @@ sudo update-alternatives --install /usr/bin/python python /usr/bin/python3 1 3. Compile and install GRASS GIS and additional modules: ```bash -# or some other location of your choice, /tmp will not keep data between reboots +# or some other project of your choice, /tmp will not keep data between reboots cd /tmp # fetch weekly generated latest snapshot of GRASS GIS stable @@ -110,7 +110,7 @@ cd ../.. sudo make install ``` -4. Download the test datasets ("locations") and place them into a +4. Download the test datasets ("projects") and place them into a specific directory that will be used by actinia as a persistent database: ```bash @@ -179,13 +179,13 @@ ACTINIA_VERSION="v3" export ACTINIA_URL="http://localhost:5000/api/${ACTINIA_VERSION}" export AUTH='-u superadmin:abcdefgh' -curl ${AUTH} -X GET ${ACTINIA_URL}/locations +curl ${AUTH} -X GET ${ACTINIA_URL}/projects ``` The `curl` command call should report back: ```bash -{"locations":["nc_spm_08","ECAD","latlong_wgs84"],"status":"success"} +{"projects":["nc_spm_08","ECAD","latlong_wgs84"],"status":"success"} ``` Success and welcome to actinia! diff --git a/docs/docs/introduction.md b/docs/docs/introduction.md index 0dc3a4df8..4a7a3035c 100644 --- a/docs/docs/introduction.md +++ b/docs/docs/introduction.md @@ -17,9 +17,9 @@ provide problem specific services like Sentinel-2 and Landsat NDVI computation, spatio-temporal statistical analysis and many more. To use actinia the user must have an understanding of the GRASS GIS -concept[^3] of location, mapsets, raster maps, space-time datasets and +concept[^3] of project, mapsets, raster maps, space-time datasets and modules. The URLs that provide access to the GRASS database reflect -these concepts. Hence, the location, the mapset and the required raster map +these concepts. Hence, the project, the mapset and the required raster map are part of the URL to access the service. ## What is REST? 
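As an illustration of how these concepts appear in the REST interface, a single raster layer is addressed by combining project, mapset and map name in the request path. This is a minimal sketch, assuming the `ACTINIA_URL` and `AUTH` variables defined in this tutorial and the `nc_spm_08` sample project with an `elevation` raster in its `PERMANENT` mapset:

```bash
# query the metadata of one raster map layer:
# /projects/{project}/mapsets/{mapset}/raster_layers/{raster}
curl ${AUTH} -X GET "${ACTINIA_URL}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers/elevation"
```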
@@ -48,35 +48,35 @@ export AUTH='-u demouser:gu3st!pa55w0rd' **Data management** -List all locations that are available in the actinia persistent database: +List all projects that are available in the actinia persistent database: ```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects" ``` -List all mapsets in the location latlong_wgs84: +List all mapsets in the project latlong_wgs84: ```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/latlong_wgs84/mapsets" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/latlong_wgs84/mapsets" ``` -List all raster layers in location latlong_wgs84 and mapset Sentinel2A: +List all raster layers in project latlong_wgs84 and mapset Sentinel2A: ```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/latlong_wgs84/mapsets/Sentinel2A/raster_layers" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/latlong_wgs84/mapsets/Sentinel2A/raster_layers" ``` -List all space-time raster datasets (STRDS) in location +List all space-time raster datasets (STRDS) in project ECAD and mapset PERMANENT: ```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/ECAD/mapsets/PERMANENT/strds" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/ECAD/mapsets/PERMANENT/strds" ``` List all raster map layers of the STRDS precipitation_1950_2013_yearly_mm: ```bash -curl ${AUTH} -X GET "${ACTINIA_URL}/locations/ECAD/mapsets/PERMANENT/strds/precipitation_1950_2013_yearly_mm/raster_layers" +curl ${AUTH} -X GET "${ACTINIA_URL}/projects/ECAD/mapsets/PERMANENT/strds/precipitation_1950_2013_yearly_mm/raster_layers" ``` **Landsat and Sentinel-2A NDVI computation** @@ -102,7 +102,7 @@ file in a cloud storage for download. [^1]: https://grass.osgeo.org/ [^2]: https://en.wikipedia.org/wiki/Representational_State_Transfer -[^3]: https://grass.osgeo.org/grass-stable/manuals/helptext.html#2.-background:-grass-gis-location-structure +[^3]: https://grass.osgeo.org/grass-stable/manuals/helptext.html#2.-background:-grass-gis-project-structure [^4]: https://en.wikipedia.org/wiki/Representational_State_Transfer [^5]: https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol [^6]: https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Request_methods diff --git a/docs/docs/tutorial_data_access.md b/docs/docs/tutorial_data_access.md index b86d91f56..c99ff2e30 100644 --- a/docs/docs/tutorial_data_access.md +++ b/docs/docs/tutorial_data_access.md @@ -25,20 +25,20 @@ export AUTH='-u demouser:gu3st!pa55w0rd' # other user credentials can be provided in the same way ``` -## Access to locations and mapsets in the persistent database +## Access to projects and mapsets in the persistent database -The following API call lists all available locations in the actinia +The following API call lists all available projects in the actinia persistent database (the `-i` includes the HTTP response headers): ```bash - curl ${AUTH} -X GET -i "${ACTINIA_URL}/locations" + curl ${AUTH} -X GET -i "${ACTINIA_URL}/projects" ``` The output should look similar to this: ```json { - "locations": [ + "projects": [ "latlong_wgs84", "ECAD", "nc_spm_08" @@ -48,10 +48,10 @@ The output should look similar to this: ``` To show the region settings and the projection of the GRASS GIS standard -location *nc_spm_08* the following REST call must be used: +project *nc_spm_08* the following REST call must be used: ```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/nc_spm_08/info" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/nc_spm_08/info" ``` The JSON response is the standard response of the actinia REST 
API. Most @@ -70,10 +70,10 @@ The output should look similar to this then: "accept_datetime": "2019-08-01 20:30:05.717499", "accept_timestamp": 1564691405.7174985, "api_info": { - "endpoint": "locationmanagementresourceuser", + "endpoint": "projectmanagementresourceuser", "method": "GET", - "path": "/api/v3/locations/nc_spm_08/info", - "request_url": "http://actinia.mundialis.de/api/v3/locations/nc_spm_08/info" + "path": "/api/v3/projects/nc_spm_08/info", + "request_url": "http://actinia.mundialis.de/api/v3/projects/nc_spm_08/info" }, "datetime": "2019-08-01 20:30:05.881138", "http_code": 200, @@ -157,14 +157,14 @@ The output should look similar to this then: } ``` -To list all mapsets located in the location *nc_spm_08* the following +To list all mapsets located in the project *nc_spm_08* the following API call is used: ```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/nc_spm_08/mapsets" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/nc_spm_08/mapsets" ``` -The response of this synchronous call lists all mapsets of the location +The response of this synchronous call lists all mapsets of the project in the *process_results* section: ```json @@ -174,8 +174,8 @@ in the *process_results* section: "api_info": { "endpoint": "listmapsetsresource", "method": "GET", - "path": "/api/v3/locations/nc_spm_08/mapsets", - "request_url": "http://actinia.mundialis.de/api/v3/locations/nc_spm_08/mapsets" + "path": "/api/v3/projects/nc_spm_08/mapsets", + "request_url": "http://actinia.mundialis.de/api/v3/projects/nc_spm_08/mapsets" }, "datetime": "2019-08-01 20:31:11.430294", "http_code": 200, @@ -232,11 +232,11 @@ Using the following API call will show all information about the mapset *PERMANENT*: ```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/nc_spm_08/mapsets/PERMANENT/info" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/nc_spm_08/mapsets/PERMANENT/info" ``` The response shows the region of the mapset and the projection of the -location in the *process_results* section: +project in the *process_results* section: ```json { @@ -245,8 +245,8 @@ location in the *process_results* section: "api_info": { "endpoint": "mapsetmanagementresourceuser", "method": "GET", - "path": "/api/v3/locations/nc_spm_08/mapsets/PERMANENT/info", - "request_url": "http://actinia.mundialis.de/api/v3/locations/nc_spm_08/mapsets/PERMANENT/info" + "path": "/api/v3/projects/nc_spm_08/mapsets/PERMANENT/info", + "request_url": "http://actinia.mundialis.de/api/v3/projects/nc_spm_08/mapsets/PERMANENT/info" }, "datetime": "2019-08-01 20:31:51.810266", "http_code": 200, @@ -332,12 +332,12 @@ location in the *process_results* section: ## Access to raster layers in the persistent database -The location ECAD contains yearly climate data (precipitation and +The project ECAD contains yearly climate data (precipitation and temperature) of Europe for 60 years. 
We list all raster layers of the -location ECAD in mapset *PERMANENT*: +project ECAD in mapset *PERMANENT*: ```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/ECAD/mapsets/PERMANENT/raster_layers" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/ECAD/mapsets/PERMANENT/raster_layers" ``` The response lists all raster layers of the mapset in the @@ -350,8 +350,8 @@ The response lists all raster layers of the mapset in the "api_info": { "endpoint": "rasterlayersresource", "method": "GET", - "path": "/api/v3/locations/ECAD/mapsets/PERMANENT/raster_layers", - "request_url": "http://actinia.mundialis.de/api/v3/locations/ECAD/mapsets/PERMANENT/raster_layers" + "path": "/api/v3/projects/ECAD/mapsets/PERMANENT/raster_layers", + "request_url": "http://actinia.mundialis.de/api/v3/projects/ECAD/mapsets/PERMANENT/raster_layers" }, "datetime": "2018-05-30 09:13:51.745702", "http_code": 200, @@ -414,7 +414,7 @@ Show info about the raster layer *temperature_mean_yearly_celsius_60*: ```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/ECAD/mapsets/PERMANENT/raster_layers/temperature_mean_yearly_celsius_60" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/ECAD/mapsets/PERMANENT/raster_layers/temperature_mean_yearly_celsius_60" ``` The response lists information about the raster layer @@ -428,8 +428,8 @@ section: "api_info": { "endpoint": "rasterlayerresource", "method": "GET", - "path": "/api/v3/locations/ECAD/mapsets/PERMANENT/raster_layers/temperature_mean_yearly_celsius_60", - "request_url": "http://actinia.mundialis.de/api/v3/locations/ECAD/mapsets/PERMANENT/raster_layers/temperature_mean_yearly_celsius_60" + "path": "/api/v3/projects/ECAD/mapsets/PERMANENT/raster_layers/temperature_mean_yearly_celsius_60", + "request_url": "http://actinia.mundialis.de/api/v3/projects/ECAD/mapsets/PERMANENT/raster_layers/temperature_mean_yearly_celsius_60" }, "datetime": "2018-05-30 09:17:15.437797", "http_code": 200, @@ -471,7 +471,7 @@ section: "description": "\"generated by r.in.gdal\"", "east": "75.5", "ewres": "0.25", - "location": "ECAD", + "project": "ECAD", "map": "temperature_mean_yearly_celsius_60", "mapset": "PERMANENT", "max": "29.406963562753", @@ -509,14 +509,14 @@ section: Actinia supports the analysis of time-series data based on the temporal framework of GRASS GIS[^3], [^4]. A time-series datatype is located in -location *ECAD* with mapsets *PERMANENT*. The time-series datatype is +project *ECAD* with mapsets *PERMANENT*. The time-series datatype is called space-time raster dataset (strds) and represents a time-stamped series of yearly temperature and precipitation data for Europe. We list all strds with the following API call: ```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/ECAD/mapsets/PERMANENT/strds" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/ECAD/mapsets/PERMANENT/strds" ``` We receive two strds in the *process_results* section of the JSON @@ -529,8 +529,8 @@ response: "api_info": { "endpoint": "syncstrdslisterresource", "method": "GET", - "path": "api/v3/locations/ECAD/mapsets/PERMANENT/strds", - "request_url": "http://actinia.mundialis.de/api/v3/locations/ECAD/mapsets/PERMANENT/strds" + "path": "api/v3/projects/ECAD/mapsets/PERMANENT/strds", + "request_url": "http://actinia.mundialis.de/api/v3/projects/ECAD/mapsets/PERMANENT/strds" }, "datetime": "2018-05-30 09:18:17.351918", "http_code": 200, @@ -589,7 +589,7 @@ Use the following API call to receive information about the strds *temperature_mean_1950_2013_yearly_celsius*. 
```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius" ``` All relevant information about strds @@ -603,8 +603,8 @@ All relevant information about strds "api_info": { "endpoint": "strdsmanagementresource", "method": "GET", - "path": "/api/v3/locations/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius", - "request_url": "http://actinia.mundialis.de/api/v3/locations/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius" + "path": "/api/v3/projects/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius", + "request_url": "http://actinia.mundialis.de/api/v3/projects/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius" }, "datetime": "2018-05-30 09:19:25.519419", "http_code": 200, @@ -688,7 +688,7 @@ List all raster layers that are registered in the strds *temperature_mean_1950_2013_yearly_celsius* with time-stamps: ```bash - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/raster_layers" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/raster_layers" ``` A list of about 60 raster layers with minimum, maximum values, @@ -702,8 +702,8 @@ section of the JSON response: "api_info": { "endpoint": "strdsrastermanagement", "method": "GET", - "path": "/api/v3/locations/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/raster_layers", - "request_url": "http://actinia.mundialis.de/api/v3/locations/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/raster_layers" + "path": "/api/v3/projects/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/raster_layers", + "request_url": "http://actinia.mundialis.de/api/v3/projects/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/raster_layers" }, "datetime": "2018-05-30 09:20:31.197637", "http_code": 200, diff --git a/docs/docs/tutorial_landsat_ndvi.md b/docs/docs/tutorial_landsat_ndvi.md index d12824393..1b5db4e79 100644 --- a/docs/docs/tutorial_landsat_ndvi.md +++ b/docs/docs/tutorial_landsat_ndvi.md @@ -339,7 +339,7 @@ as well as univariate statistics of the computed NDVI scene. "run_time": 0.15161657333374023, "stderr": [ "Default locale settings are missing. GRASS running with C locale.WARNING: Searched for a web browser, but none found", - "Creating new GRASS GIS location/mapset...", + "Creating new GRASS GIS project/mapset...", "Cleaning up temporary files...", "" ], diff --git a/docs/docs/tutorial_process_chain.md b/docs/docs/tutorial_process_chain.md index c1a83d7bb..bf4ca16e6 100644 --- a/docs/docs/tutorial_process_chain.md +++ b/docs/docs/tutorial_process_chain.md @@ -6,7 +6,7 @@ Actinia provides the **process chain** approach to specify import, processing and export of geodata using the actinia GRASS GIS processing system. The process chain must be formulated in JSON. The processing is always performed in an ephemeral database. The computational environment -is based on locations in the persistent database. If required, the +is based on projects in the persistent database. If required, the ephemeral database can be moved into the persistent user database, so that the computational results can be used in further processing steps or visualized using the actinia rendering REST calls. 
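To make the process chain concept concrete before the endpoint walkthrough that follows, here is a minimal, illustrative sketch. It assumes the demo credentials (`${AUTH}`, `${ACTINIA_URL}`) and the `nc_spm_08` sample project with its `elevation` raster from the installation section; the choice of `g.region` and `r.univar` is purely for illustration and follows the process chain layout used later in this tutorial:

```bash
# Minimal process chain: set the computational region to the elevation
# raster of the nc_spm_08 sample project and compute univariate statistics.
JSON='{
  "list": [
    {
      "id": "set_region",
      "module": "g.region",
      "inputs": [{"param": "raster", "value": "elevation@PERMANENT"}],
      "flags": "p"
    },
    {
      "id": "elevation_stats",
      "module": "r.univar",
      "inputs": [{"param": "map", "value": "elevation@PERMANENT"}],
      "flags": "g"
    }
  ],
  "version": "1"
}'

curl ${AUTH} -X POST "${ACTINIA_URL}/projects/nc_spm_08/processing_async" \
     -H "accept: application/json" -H "content-type: application/json" -d "$JSON"
```

As with the export variant shown further down, the call returns immediately; the status URL contained in the response must be polled until the resource reports that processing has finished.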
@@ -15,7 +15,7 @@ The ephemeral database will be removed after computation. However, all raster and vector data that was generated during the processing can be exported using gdal/ogr specific datatypes and stored in an object storage, outside the actinia environment. Within a process chain we have -read only access to all raster maps of the persistent database location +read only access to all raster maps of the persistent database project that is used as computational environment. A process chain is a list of GRASS GIS modules[^1] that will be executed in @@ -195,8 +195,8 @@ response: "api_info": { "endpoint": "asyncephemeralresource", "method": "POST", - "path": "/api/v3/locations/nc_spm_08/processing_async", - "request_url": "http://localhost/api/v3/locations/nc_spm_08/processing_async" + "path": "/api/v3/projects/nc_spm_08/processing_async", + "request_url": "http://localhost/api/v3/projects/nc_spm_08/processing_async" }, "datetime": "2018-06-28 14:11:03.878996", "http_code": 200, @@ -609,7 +609,7 @@ export AUTH='-u demouser:gu3st!pa55w0rd' We create a process chain that computes the NDVI from a Sentinel-2A scene based on the bands 8 and 4 with the GRASS GIS module *r.mapcalc*. We -use the North Carolina sample location **nc_spm_08** as processing +use the North Carolina sample project **nc_spm_08** as processing environment and the computational region of sentinel band B04 for the NDVI processing. Then we calculate univariate statistics for the Sentinel-2A scene. The computed NDVI raster layer will be exported as @@ -701,7 +701,7 @@ Run the process chain asynchronously: ], "version": "1"}' -curl ${AUTH} -X POST "${ACTINIA_URL}/locations/nc_spm_08/processing_async_export" -H "accept: application/json" -H "content-type: application/json" -d "$JSON" +curl ${AUTH} -X POST "${ACTINIA_URL}/projects/nc_spm_08/processing_async_export" -H "accept: application/json" -H "content-type: application/json" -d "$JSON" ``` The response requires the polling of the status URL, since the API call @@ -714,8 +714,8 @@ works asynchronously: "api_info": { "endpoint": "asyncephemeralexportresource", "method": "POST", - "path": "/api/v3/locations/nc_spm_08/processing_async_export", - "request_url": "http://actinia.mundialis.de/api/v3/locations/nc_spm_08/processing_async_export" + "path": "/api/v3/projects/nc_spm_08/processing_async_export", + "request_url": "http://actinia.mundialis.de/api/v3/projects/nc_spm_08/processing_async_export" }, "datetime": "2022-07-28 14:07:21.580321", "http_code": 200, @@ -751,8 +751,8 @@ The finished response should look like this: "api_info": { "endpoint": "asyncephemeralexportresource", "method": "POST", - "path": "/api/v3/locations/nc_spm_08/processing_async_export", - "request_url": "http://actinia.mundialis.de/api/v3/locations/nc_spm_08/processing_async_export" + "path": "/api/v3/projects/nc_spm_08/processing_async_export", + "request_url": "http://actinia.mundialis.de/api/v3/projects/nc_spm_08/processing_async_export" }, "datetime": "2022-07-28 14:14:42.004376", "http_code": 200, diff --git a/docs/docs/tutorial_sentinel2_ndvi.md b/docs/docs/tutorial_sentinel2_ndvi.md index 04fe3ea3b..7ae82718d 100644 --- a/docs/docs/tutorial_sentinel2_ndvi.md +++ b/docs/docs/tutorial_sentinel2_ndvi.md @@ -193,7 +193,7 @@ as well as univariate statistics of the computed NDVI scene. "run_time": 0.36118006706237793, "stderr": [ "Default locale settings are missing. 
GRASS running with C locale.WARNING: Searched for a web browser, but none found", - "Creating new GRASS GIS location/mapset...", + "Creating new GRASS GIS project/mapset...", "Cleaning up temporary files...", "" ], @@ -209,9 +209,9 @@ as well as univariate statistics of the computed NDVI scene. "return_code": 0, "run_time": 0.3551313877105713, "stderr": [ - "WARNING: Projection of dataset does not appear to match current location.", + "WARNING: Projection of dataset does not appear to match current project.", "", - "Location PROJ_INFO is:", + "Project PROJ_INFO is:", "name: WGS 84 / UTM zone 50N", "datum: wgs84", "ellps: wgs84", diff --git a/docs/docs/tutorial_strds_sampling.md b/docs/docs/tutorial_strds_sampling.md index 5f0e6b709..4ee5d67bb 100644 --- a/docs/docs/tutorial_strds_sampling.md +++ b/docs/docs/tutorial_strds_sampling.md @@ -5,7 +5,7 @@ The actinia REST API provides an API call to sample a space-time raster dataset (STRDS) at coordinate points. The coordinates must be specified in JSON and must use the same coordinate reference system as the -location that contains the STRDS. +project that contains the STRDS. We will use the Unix shell and curl to access the REST API. First open a shell of choice (we use bash here) and setup the login information, the IP address and the port on which the actinia service is running, so you can simply change the IP and port if your server uses a different address: @@ -25,7 +25,7 @@ different coordinates. ["b", "9.9681980", "51.666166"], ["c", "24.859647", "52.699099"]]}' - curl ${AUTH} -X POST "${ACTINIA_URL}/locations/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/sampling_sync" -H "accept: application/json" -H "content-type: application/json" -d "$JSON" + curl ${AUTH} -X POST "${ACTINIA_URL}/projects/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/sampling_sync" -H "accept: application/json" -H "content-type: application/json" -d "$JSON" ``` The result of the sampling is a list of time stamped values, that are @@ -38,8 +38,8 @@ located in the *process_results* section of the JSON response: "api_info": { "endpoint": "syncephemeralstrdssamplingresource", "method": "POST", - "path": "/api/v3/locations/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/sampling_sync", - "request_url": "http://actinia.mundialis.de/api/v3/locations/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/sampling_sync" + "path": "/api/v3/projects/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/sampling_sync", + "request_url": "http://actinia.mundialis.de/api/v3/projects/ECAD/mapsets/PERMANENT/strds/temperature_mean_1950_2013_yearly_celsius/sampling_sync" }, "datetime": "2018-05-30 10:33:24.503297", "http_code": 200, diff --git a/logging.md b/logging.md index 419c9f36e..920bcce27 100644 --- a/logging.md +++ b/logging.md @@ -77,7 +77,7 @@ __Simple log__ "level": "INFO", "component": "actinia-core", "module": "process_queue", - "message": "Run process: {endpoint: asyncephemeralexportresource, method: POST, path: /api/v3/locations/latlong_wgs84/processing_async_export, request_url: http://127.0.0.1:8088/api/v3/locations/latlong_wgs84/processing_async_export}", + "message": "Run process: {endpoint: asyncephemeralexportresource, method: POST, path: /api/v3/projects/latlong_wgs84/processing_async_export, request_url: http://127.0.0.1:8088/api/v3/projects/latlong_wgs84/processing_async_export}", "pathname": 
"/usr/lib/python3.8/site-packages/actinia_core-0.99.5.post0.dev18+g4d82691.dirty-py3.8.egg/actinia_core/resources/common/process_queue.py", "lineno": 355, "processName": "Process-1", @@ -86,7 +86,7 @@ __Simple log__ ``` ```text -[2020-05-08 09:19:48,430] INFO : actinia-core.process_queue -Run process: {'endpoint': 'asyncephemeralexportresource', 'method': 'POST', 'path': '/api/v3/locations/latlong_wgs84/processing_async_export', 'request_url': 'http://127.0.0.1:8088/api/v3/locations/latlong_wgs84/processing_async_export'} [in /usr/lib/python3.8/site-packages/actinia_core-0.99.5.post0.dev20+gacc075c-py3.8.egg/actinia_core/resources/common/process_queue.py:355] +[2020-05-08 09:19:48,430] INFO : actinia-core.process_queue -Run process: {'endpoint': 'asyncephemeralexportresource', 'method': 'POST', 'path': '/api/v3/projects/latlong_wgs84/processing_async_export', 'request_url': 'http://127.0.0.1:8088/api/v3/projects/latlong_wgs84/processing_async_export'} [in /usr/lib/python3.8/site-packages/actinia_core-0.99.5.post0.dev20+gacc075c-py3.8.egg/actinia_core/resources/common/process_queue.py:355] ``` __Gunicorn log__ @@ -97,7 +97,7 @@ __Gunicorn log__ "level": "INFO", "component": "gunicorn.access", "module": "glogging", - "message": "172.20.0.1 - actinia-gdi [06/May/2020:09:07:34 +0000] \"POST /api/v3/locations/latlong_wgs84/processing_async_export HTTP/1.1\" 200 756 \"-\" \"curl/7.58.0\"", + "message": "172.20.0.1 - actinia-gdi [06/May/2020:09:07:34 +0000] \"POST /api/v3/projects/latlong_wgs84/processing_async_export HTTP/1.1\" 200 756 \"-\" \"curl/7.58.0\"", "pathname": "/usr/lib/python3.8/site-packages/gunicorn/glogging.py", "lineno": 344, "processName": "MainProcess", @@ -106,7 +106,7 @@ __Gunicorn log__ ``` ```text -[2020-05-08 09:19:48,426] INFO : gunicorn.access.glogging -172.20.0.1 - actinia-gdi [08/May/2020:09:19:48 +0000] "POST /api/v3/locations/latlong_wgs84/processing_async_export HTTP/1.1" 200 755 "-" "curl/7.58.0" [in /usr/lib/python3.8/site-packages/gunicorn/glogging.py:344] +[2020-05-08 09:19:48,426] INFO : gunicorn.access.glogging -172.20.0.1 - actinia-gdi [08/May/2020:09:19:48 +0000] "POST /api/v3/projects/latlong_wgs84/processing_async_export HTTP/1.1" 200 755 "-" "curl/7.58.0" [in /usr/lib/python3.8/site-packages/gunicorn/glogging.py:344] ``` __messages_logger INFO__ @@ -153,10 +153,10 @@ __api_logger__ "api_info": { "endpoint": "asyncephemeralexportresource", "method": "POST", - "path": "/api/v3/locations/latlong_wgs84/processing_async_export", - "request_url": "http://127.0.0.1:8088/api/v3/locations/latlong_wgs84/processing_async_export" + "path": "/api/v3/projects/latlong_wgs84/processing_async_export", + "request_url": "http://127.0.0.1:8088/api/v3/projects/latlong_wgs84/processing_async_export" }, - "request_str": "", + "request_str": "", "user_id": "actinia-gdi", "status": "api_call", "logger": "api_logger" @@ -164,7 +164,7 @@ __api_logger__ ``` ```text -[2020-05-08 09:19:48,419] INFO : actinia-core.redis_fluentd_logger_base -{'time_stamp': '2020-05-08 09:19:48.419294', 'node': '3f3e57b7ab83', 'api_info': {'endpoint': 'asyncephemeralexportresource', 'method': 'POST', 'path': '/api/v3/locations/latlong_wgs84/processing_async_export', 'request_url': 'http://127.0.0.1:8088/api/v3/locations/latlong_wgs84/processing_async_export'}, 'request_str': "", 'user_id': 'actinia-gdi', 'status': 'api_call', 'logger': 'api_logger'} [in /usr/lib/python3.8/site-packages/actinia_core-0.99.5.post0.dev20+gacc075c-py3.8.egg/actinia_core/resources/common/redis_fluentd_logger_base.py:94] 
+[2020-05-08 09:19:48,419] INFO : actinia-core.redis_fluentd_logger_base -{'time_stamp': '2020-05-08 09:19:48.419294', 'node': '3f3e57b7ab83', 'api_info': {'endpoint': 'asyncephemeralexportresource', 'method': 'POST', 'path': '/api/v3/projects/latlong_wgs84/processing_async_export', 'request_url': 'http://127.0.0.1:8088/api/v3/projects/latlong_wgs84/processing_async_export'}, 'request_str': "", 'user_id': 'actinia-gdi', 'status': 'api_call', 'logger': 'api_logger'} [in /usr/lib/python3.8/site-packages/actinia_core-0.99.5.post0.dev20+gacc075c-py3.8.egg/actinia_core/resources/common/redis_fluentd_logger_base.py:94] ``` __resources_logger "accepted"__ @@ -198,15 +198,15 @@ __resources_logger "accepted"__ "api_info": { "endpoint": "asyncephemeralexportresource", "method": "POST", - "path": "/api/v3/locations/latlong_wgs84/processing_async_export", - "request_url": "http://127.0.0.1:8088/api/v3/locations/latlong_wgs84/processing_async_export" + "path": "/api/v3/projects/latlong_wgs84/processing_async_export", + "request_url": "http://127.0.0.1:8088/api/v3/projects/latlong_wgs84/processing_async_export" }, "logger": "resources_logger" } ``` ```text -[2020-05-08 09:19:48,424] INFO : actinia-core.redis_fluentd_logger_base -{'status': 'accepted', 'user_id': 'actinia-gdi', 'resource_id': 'resource_id-53e2a5ae-d4c0-4b9d-8d24-03030a4b314a', 'accept_timestamp': 1588929588.421158, 'accept_datetime': '2020-05-08 09:19:48.421166', 'timestamp': 1588929588.4238856, 'datetime': '2020-05-08 09:19:48.423887', 'message': 'Resource accepted', 'time_delta': 0.002741098403930664, 'process_results': {}, 'process_chain_list': [], 'http_code': 200, 'urls': {'resources': [], 'status': 'http://127.0.0.1:8088/api/v3/resources/actinia-gdi/resource_id-53e2a5ae-d4c0-4b9d-8d24-03030a4b314a'}, 'api_info': {'endpoint': 'asyncephemeralexportresource', 'method': 'POST', 'path': '/api/v3/locations/latlong_wgs84/processing_async_export', 'request_url': 'http://127.0.0.1:8088/api/v3/locations/latlong_wgs84/processing_async_export'}, 'logger': 'resources_logger'} [in /usr/lib/python3.8/site-packages/actinia_core-0.99.5.post0.dev20+gacc075c-py3.8.egg/actinia_core/resources/common/redis_fluentd_logger_base.py:91] +[2020-05-08 09:19:48,424] INFO : actinia-core.redis_fluentd_logger_base -{'status': 'accepted', 'user_id': 'actinia-gdi', 'resource_id': 'resource_id-53e2a5ae-d4c0-4b9d-8d24-03030a4b314a', 'accept_timestamp': 1588929588.421158, 'accept_datetime': '2020-05-08 09:19:48.421166', 'timestamp': 1588929588.4238856, 'datetime': '2020-05-08 09:19:48.423887', 'message': 'Resource accepted', 'time_delta': 0.002741098403930664, 'process_results': {}, 'process_chain_list': [], 'http_code': 200, 'urls': {'resources': [], 'status': 'http://127.0.0.1:8088/api/v3/resources/actinia-gdi/resource_id-53e2a5ae-d4c0-4b9d-8d24-03030a4b314a'}, 'api_info': {'endpoint': 'asyncephemeralexportresource', 'method': 'POST', 'path': '/api/v3/projects/latlong_wgs84/processing_async_export', 'request_url': 'http://127.0.0.1:8088/api/v3/projects/latlong_wgs84/processing_async_export'}, 'logger': 'resources_logger'} [in /usr/lib/python3.8/site-packages/actinia_core-0.99.5.post0.dev20+gacc075c-py3.8.egg/actinia_core/resources/common/redis_fluentd_logger_base.py:91] ``` __resources_logger "running"__ @@ -243,15 +243,15 @@ __resources_logger "running"__ "api_info": { "endpoint": "asyncephemeralexportresource", "method": "POST", - "path": "/api/v3/locations/latlong_wgs84/processing_async_export", - "request_url": 
"http://127.0.0.1:8088/api/v3/locations/latlong_wgs84/processing_async_export" + "path": "/api/v3/projects/latlong_wgs84/processing_async_export", + "request_url": "http://127.0.0.1:8088/api/v3/projects/latlong_wgs84/processing_async_export" }, "logger": "resources_logger" } ``` ```text -[2020-05-08 09:19:48,514] INFO : actinia-core.redis_fluentd_logger_base -{'status': 'running', 'user_id': 'actinia-gdi', 'resource_id': 'resource_id-53e2a5ae-d4c0-4b9d-8d24-03030a4b314a', 'accept_timestamp': 1588929588.421158, 'accept_datetime': '2020-05-08 09:19:48.421166', 'timestamp': 1588929588.5140734, 'datetime': '2020-05-08 09:19:48.514093', 'message': 'Checking access to URL: https://raw.githubusercontent.com/mmacata/pagestest/gh-pages/bonn.geojson', 'time_delta': 0.09295296669006348, 'progress': {'step': 0, 'num_of_steps': 0}, 'process_chain_list': [], 'http_code': 200, 'urls': {'resources': [], 'status': 'http://127.0.0.1:8088/api/v3/resources/actinia-gdi/resource_id-53e2a5ae-d4c0-4b9d-8d24-03030a4b314a'}, 'api_info': {'endpoint': 'asyncephemeralexportresource', 'method': 'POST', 'path': '/api/v3/locations/latlong_wgs84/processing_async_export', 'request_url': 'http://127.0.0.1:8088/api/v3/locations/latlong_wgs84/processing_async_export'}, 'logger': 'resources_logger'} [in /usr/lib/python3.8/site-packages/actinia_core-0.99.5.post0.dev20+gacc075c-py3.8.egg/actinia_core/resources/common/redis_fluentd_logger_base.py:91] +[2020-05-08 09:19:48,514] INFO : actinia-core.redis_fluentd_logger_base -{'status': 'running', 'user_id': 'actinia-gdi', 'resource_id': 'resource_id-53e2a5ae-d4c0-4b9d-8d24-03030a4b314a', 'accept_timestamp': 1588929588.421158, 'accept_datetime': '2020-05-08 09:19:48.421166', 'timestamp': 1588929588.5140734, 'datetime': '2020-05-08 09:19:48.514093', 'message': 'Checking access to URL: https://raw.githubusercontent.com/mmacata/pagestest/gh-pages/bonn.geojson', 'time_delta': 0.09295296669006348, 'progress': {'step': 0, 'num_of_steps': 0}, 'process_chain_list': [], 'http_code': 200, 'urls': {'resources': [], 'status': 'http://127.0.0.1:8088/api/v3/resources/actinia-gdi/resource_id-53e2a5ae-d4c0-4b9d-8d24-03030a4b314a'}, 'api_info': {'endpoint': 'asyncephemeralexportresource', 'method': 'POST', 'path': '/api/v3/projects/latlong_wgs84/processing_async_export', 'request_url': 'http://127.0.0.1:8088/api/v3/projects/latlong_wgs84/processing_async_export'}, 'logger': 'resources_logger'} [in /usr/lib/python3.8/site-packages/actinia_core-0.99.5.post0.dev20+gacc075c-py3.8.egg/actinia_core/resources/common/redis_fluentd_logger_base.py:91] ``` __resources_logger "error" shortened__ @@ -314,8 +314,8 @@ __resources_logger "error" shortened__ "api_info": { "endpoint": "asyncephemeralexportresource", "method": "POST", - "path": "/api/v3/locations/latlong_wgs84/processing_async_export", - "request_url": "http://127.0.0.1:8088/api/v3/locations/latlong_wgs84/processing_async_export" + "path": "/api/v3/projects/latlong_wgs84/processing_async_export", + "request_url": "http://127.0.0.1:8088/api/v3/projects/latlong_wgs84/processing_async_export" }, "logger": "resources_logger" } diff --git a/scripts/actinia-algebra b/scripts/actinia-algebra index 3f44a7ecc..d784c67c7 100644 --- a/scripts/actinia-algebra +++ b/scripts/actinia-algebra @@ -79,9 +79,9 @@ def main(): 'parallel on a actinia Service', formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument("location", + parser.add_argument("project", type=str, - help="The name of the location to be used for processing") + help="The name of 
the project to be used for processing") parser.add_argument("mapset", type=str, @@ -159,7 +159,7 @@ def main(): pc[1]["inputs"]["expression"] = "%s"%args.expression pc[1]["inputs"]["basename"] = "%s"%args.basename - url = args.server + "/locations/" + args.location + "/processing_async" + url = args.server + "/projects/" + args.project + "/processing_async" param=[url, auth, q, 1, pc] p = Process(target=start_async_processing, args=param) p.start() @@ -245,7 +245,7 @@ def main(): # 3. Create new mapset start = time.time() - url = args.server + "/locations/" + args.location + "/mapsets/" + args.mapset + url = args.server + "/projects/" + args.project + "/mapsets/" + args.mapset print("Create mapset", url) r = requests.post(url, auth=auth) if r.status_code != 200: @@ -262,7 +262,7 @@ def main(): # 4. Merge source mapsets in target mapsets start = time.time() - url = args.server + "/locations/" + args.location + "/mapsets/" + args.mapset + "/merging_async" + url = args.server + "/projects/" + args.project + "/mapsets/" + args.mapset + "/merging_async" print("Merge mapsets %s into <%s> using URL %s"%(str(mapset_list), args.mapset, url)) param=[url, auth, q, count + 1, mapset_list] p = Process(target=start_async_processing, @@ -285,7 +285,7 @@ def main(): # Create the new strds start = time.time() - url = args.server + "/locations/" + args.location + "/mapsets/" + args.mapset + "/strds/" + threads_list["STDS"]["name"] + \ + url = args.server + "/projects/" + args.project + "/mapsets/" + args.mapset + "/strds/" + threads_list["STDS"]["name"] + \ "?temporaltype=%s&title=title&description=description"%threads_list["STDS"]["temporal_type"] r = requests.post(url, auth=auth) if r.status_code != 200: @@ -293,7 +293,7 @@ def main(): else: print("Message: " + str(r.text)) - url = args.server + "/locations/" + args.location + "/mapsets/" + args.mapset + "/strds/" + threads_list["STDS"]["name"] + \ + url = args.server + "/projects/" + args.project + "/mapsets/" + args.mapset + "/strds/" + threads_list["STDS"]["name"] + \ "/raster_layers" r = requests.put(url, auth=auth, data=simplejson.dumps(threads_list["register"])) if r.status_code != 200: @@ -310,7 +310,7 @@ def main(): # 6. 
List all maps from the STRDS start = time.time() - url = args.server + "/locations/" + args.location + "/mapsets/" + args.mapset + "/strds/" + threads_list["STDS"]["name"] + \ + url = args.server + "/projects/" + args.project + "/mapsets/" + args.mapset + "/strds/" + threads_list["STDS"]["name"] + \ "/raster_layers" r = requests.get(url, auth=auth) if r.status_code != 200: @@ -335,7 +335,7 @@ def main(): if mapset_list: for mapset_name in mapset_list: print("Remove temporary mapset", mapset_name) - url = args.server + "/locations/" + args.location + "/mapsets/" + mapset_name + url = args.server + "/projects/" + args.project + "/mapsets/" + mapset_name r = requests.delete(url, auth=auth) if r.status_code != 200: print("Error code: " + str(r.status_code) + " message: " + str(r.text)) @@ -371,7 +371,7 @@ def mapcal_request(args, count, auth, threads_list, mapset_name = args.mapset + "_%s"%id_ mapset_list.append(mapset_name) - url = args.server + "/locations/" + args.location + "/mapsets/" + mapset_name + "/processing_async" + url = args.server + "/projects/" + args.project + "/mapsets/" + mapset_name + "/processing_async" pchain = {} shift = 0 diff --git a/scripts/actinia-bench b/scripts/actinia-bench index eb9944619..5390d1c9c 100755 --- a/scripts/actinia-bench +++ b/scripts/actinia-bench @@ -371,25 +371,25 @@ def start_query_processing_async_export(base_url, auth, q, id, type, polling): start = time.time() if type == "long_export": - url = base_url + "/locations/nc_spm_08/processing_async_export" + url = base_url + "/projects/nc_spm_08/processing_async_export" pc = PROCESS_CHAIN_LONG elif type == "long": - url = base_url + "/locations/nc_spm_08/processing_async" + url = base_url + "/projects/nc_spm_08/processing_async" pc = PROCESS_CHAIN_LONG elif type == "short_export": - url = base_url + "/locations/nc_spm_08/processing_async_export" + url = base_url + "/projects/nc_spm_08/processing_async_export" pc = PROCESS_CHAIN_SHORT elif type == "short": - url = base_url + "/locations/nc_spm_08/processing_async" + url = base_url + "/projects/nc_spm_08/processing_async" pc = PROCESS_CHAIN_SHORT elif type == "ndvi_sent": - url = base_url + "/locations/utm32N/processing_async" + url = base_url + "/projects/utm32N/processing_async" pc = SENTINEL_NDVI elif type == "ndvi_sent_export": - url = base_url + "/locations/utm32N/processing_async_export" + url = base_url + "/projects/utm32N/processing_async_export" pc = SENTINEL_NDVI elif type == "error": - url = base_url + "/locations/utm32N/processing_async" + url = base_url + "/projects/utm32N/processing_async" pc = PROCESS_CHAIN_ERROR try: diff --git a/scripts/curl_commands.sh b/scripts/curl_commands.sh index a12644bdb..9c23344d8 100755 --- a/scripts/curl_commands.sh +++ b/scripts/curl_commands.sh @@ -16,54 +16,54 @@ export AUTH='-u demouser:gu3st!pa55w0rd' # https://actinia.mundialis.de/api/v3/version | https://actinia.mundialis.de/latest/version ##### -# Show all locations in the GRASS database -curl ${AUTH} -X GET -i ${actinia_url}/locations_wgs84 +# Show all projects in the GRASS database +curl ${AUTH} -X GET -i ${actinia_url}/projects_wgs84 -# Create a new location based on an EPSG code +# Create a new project based on an EPSG code echo '{ "epsg": "4326" -}' > /tmp/pc_location_epsg4326.json +}' > /tmp/pc_project_epsg4326.json -curl ${AUTH} -H "Content-Type: application/json" -X POST "${actinia_url}/locations/latlong_TEST" -d @/tmp/pc_location_epsg4326.json +curl ${AUTH} -H "Content-Type: application/json" -X POST "${actinia_url}/projects/latlong_TEST" -d 
@/tmp/pc_project_epsg4326.json -# Check if the location is listed -curl ${AUTH} -X GET ${actinia_url}/locations_wgs84 +# Check if the project is listed +curl ${AUTH} -X GET ${actinia_url}/projects_wgs84 -# Delete the new location, careful! -curl ${AUTH} -X DELETE ${actinia_url}/locations/latlong_TEST -rm -f /tmp/pc_location_epsg4326.json +# Delete the new project, careful! +curl ${AUTH} -X DELETE ${actinia_url}/projects/latlong_TEST +rm -f /tmp/pc_project_epsg4326.json -# Get information of the NC location -curl ${AUTH} -X GET -i ${actinia_url}/locations/nc_spm_08/info +# Get information of the NC project +curl ${AUTH} -X GET -i ${actinia_url}/projects/nc_spm_08/info -# List all mapsets in that location -curl ${AUTH} -X GET -i ${actinia_url}/locations/nc_spm_08/mapsets +# List all mapsets in that project +curl ${AUTH} -X GET -i ${actinia_url}/projects/nc_spm_08/mapsets # Create a new mapset -curl ${AUTH} -X POST -i ${actinia_url}/locations/nc_spm_08/mapsets/temporary # Success +curl ${AUTH} -X POST -i ${actinia_url}/projects/nc_spm_08/mapsets/temporary # Success # Check if the new mapset is listed -curl ${AUTH} -X GET -i ${actinia_url}/locations/nc_spm_08/mapsets +curl ${AUTH} -X GET -i ${actinia_url}/projects/nc_spm_08/mapsets -# Get a list of all raster layers in the PERMANENT location -curl ${AUTH} -X GET -i ${actinia_url}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers +# Get a list of all raster layers in the PERMANENT project +curl ${AUTH} -X GET -i ${actinia_url}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers # Get a list of all raster layer using a g.list pattern -curl ${AUTH} -X GET -i "${actinia_url}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers?pattern=lsat*" +curl ${AUTH} -X GET -i "${actinia_url}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers?pattern=lsat*" # Get the information about the elevation -curl ${AUTH} -X GET -i ${actinia_url}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers/elevation +curl ${AUTH} -X GET -i ${actinia_url}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers/elevation # Render an image of the elevation raster layer curl ${AUTH} -d "n=228500&s=215000&w=630000&e=645000&ewres=50&nsres=50" \ - -X GET ${actinia_url}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers/elevation/render > elevation_NC.png + -X GET ${actinia_url}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers/elevation/render > elevation_NC.png # Get information about the new created raster layer -curl ${AUTH} -X GET -i ${actinia_url}/locations/nc_spm_08/mapsets/temporary/raster_layers/my_slope -curl ${AUTH} -X GET -i ${actinia_url}/locations/nc_spm_08/mapsets/temporary/raster_layers/my_aspect +curl ${AUTH} -X GET -i ${actinia_url}/projects/nc_spm_08/mapsets/temporary/raster_layers/my_slope +curl ${AUTH} -X GET -i ${actinia_url}/projects/nc_spm_08/mapsets/temporary/raster_layers/my_aspect # Delete the temporary mapset -curl ${AUTH} -X DELETE -i ${actinia_url}/locations/nc_spm_08/mapsets/temporary # Success +curl ${AUTH} -X DELETE -i ${actinia_url}/projects/nc_spm_08/mapsets/temporary # Success ############################################################################### @@ -71,13 +71,13 @@ curl ${AUTH} -X DELETE -i ${actinia_url}/locations/nc_spm_08/mapsets/temporary # ############################################################################### # Get a list or raster layers from a STRDS -curl ${AUTH} -X GET -i "${actinia_url}/locations/ECAD/mapsets/PERMANENT/strds/precipitation_1950_2013_yearly_mm/raster_layers?where=start_time>2013-05-01" +curl ${AUTH} -X GET -i 
"${actinia_url}/projects/ECAD/mapsets/PERMANENT/strds/precipitation_1950_2013_yearly_mm/raster_layers?where=start_time>2013-05-01" ############################################################################### ############### Vector ###################################################### ############################################################################### -curl ${AUTH} -X GET -i "${actinia_url}/locations/nc_spm_08/mapsets/PERMANENT/vector_layers/geology" +curl ${AUTH} -X GET -i "${actinia_url}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers/geology" ############################################################################### @@ -145,11 +145,11 @@ PROCESS_CHAIN='{ # Validation of process chain (using sync call) curl ${AUTH} -H "Content-Type: application/json" -X POST \ - -d "${PROCESS_CHAIN}" ${actinia_url}/locations/nc_spm_08/process_chain_validation_sync + -d "${PROCESS_CHAIN}" ${actinia_url}/projects/nc_spm_08/process_chain_validation_sync # Start the module r.slope.aspect (using async call) curl ${AUTH} -H "Content-Type: application/json" -X POST \ - -d "${PROCESS_CHAIN}" ${actinia_url}/locations/nc_spm_08/processing_async_export + -d "${PROCESS_CHAIN}" ${actinia_url}/projects/nc_spm_08/processing_async_export # Get status (add resource URL) # curl ${AUTH} -X GET -i @@ -252,23 +252,23 @@ PROCESS_CHAIN_LONG='{ # Validation of process chain (using sync call) curl ${AUTH} -H "Content-Type: application/json" -X POST \ - -d "${PROCESS_CHAIN_LONG}" ${actinia_url}/locations/nc_spm_08/process_chain_validation_sync + -d "${PROCESS_CHAIN_LONG}" ${actinia_url}/projects/nc_spm_08/process_chain_validation_sync # Start the module r.slope.aspect (using async call) curl ${AUTH} -H "Content-Type: application/json" -X POST \ - -d "${PROCESS_CHAIN_LONG}" ${actinia_url}/locations/nc_spm_08/mapsets/test_mapset/processing_async + -d "${PROCESS_CHAIN_LONG}" ${actinia_url}/projects/nc_spm_08/mapsets/test_mapset/processing_async # Get status (add resource URL) # curl ${AUTH} -X GET -i # List all raster layer in the new mapset -curl ${AUTH} -X GET -i ${actinia_url}/locations/nc_spm_08/mapsets/test_mapset/raster_layers +curl ${AUTH} -X GET -i ${actinia_url}/projects/nc_spm_08/mapsets/test_mapset/raster_layers # Info about my_accumulation -curl ${AUTH} -X GET -i ${actinia_url}/locations/nc_spm_08/mapsets/test_mapset/raster_layers/my_accumulation +curl ${AUTH} -X GET -i ${actinia_url}/projects/nc_spm_08/mapsets/test_mapset/raster_layers/my_accumulation # Remove the new mapset -curl ${AUTH} -X DELETE -i ${actinia_url}/locations/nc_spm_08/mapsets/test_mapset +curl ${AUTH} -X DELETE -i ${actinia_url}/projects/nc_spm_08/mapsets/test_mapset # Parallel computation LIST="1 @@ -278,7 +278,7 @@ LIST="1 for i in ${LIST} ; do echo "Run local process ${i}" curl ${AUTH} -H "Content-Type: application/json" -X POST \ - -d "${PROCESS_CHAIN_LONG}" ${actinia_url}/locations/nc_spm_08/processing_async + -d "${PROCESS_CHAIN_LONG}" ${actinia_url}/projects/nc_spm_08/processing_async done # Export Parallel computation @@ -291,7 +291,7 @@ LIST="1 for i in ${LIST} ; do echo "Run export process ${i}" curl ${AUTH} -H "Content-Type: application/json" -X POST \ - -d "${PROCESS_CHAIN}" ${actinia_url}/locations/nc_spm_08/processing_async_export + -d "${PROCESS_CHAIN}" ${actinia_url}/projects/nc_spm_08/processing_async_export done # New mapsets in massive parallel computation @@ -314,22 +314,22 @@ LIST="1 for i in ${LIST} ; do echo "Run new mapset process ${i}" curl ${AUTH} -H "Content-Type: application/json" -X POST \ - -d 
"${PROCESS_CHAIN_LONG}" ${actinia_url}/locations/nc_spm_08/mapsets/test_mapset_${i}/processing_async + -d "${PROCESS_CHAIN_LONG}" ${actinia_url}/projects/nc_spm_08/mapsets/test_mapset_${i}/processing_async done -curl ${AUTH} -X GET -i ${actinia}/locations/nc_spm_08/mapsets +curl ${AUTH} -X GET -i ${actinia}/projects/nc_spm_08/mapsets for i in ${LIST} ; do echo "Run ${i}" - curl ${AUTH} -X GET -i ${actinia_url}/locations/nc_spm_08/mapsets/test_mapset_${i}/raster_layers - curl ${AUTH} -X GET -i ${actinia}/locations/nc_spm_08/mapsets/test_mapset_${i}/raster_layers/my_accumulation - curl ${AUTH} -X DELETE -i ${actinia_url}/locations/nc_spm_08/mapsets/test_mapset_${i} + curl ${AUTH} -X GET -i ${actinia_url}/projects/nc_spm_08/mapsets/test_mapset_${i}/raster_layers + curl ${AUTH} -X GET -i ${actinia}/projects/nc_spm_08/mapsets/test_mapset_${i}/raster_layers/my_accumulation + curl ${AUTH} -X DELETE -i ${actinia_url}/projects/nc_spm_08/mapsets/test_mapset_${i} done # Spatio-Temporal sampling curl ${AUTH} -H "Content-Type: application/json" -X POST \ -d '[["a", 10.5, 52.5], ["b", 10, 52], ["c", 11, 53]]' \ - ${actinia_url}/locations/ECAD/mapsets/PERMANENT/strds/P_sum_yearly_mm/sampling_sync + ${actinia_url}/projects/ECAD/mapsets/PERMANENT/strds/P_sum_yearly_mm/sampling_sync JSON='{"bands":["B04", "B08"], @@ -343,7 +343,7 @@ JSON='{"bands":["B04", "B08"], "S2B_MSIL1C_20170711T102029_N0205_R065_T32UPC_20170711T102309", "S2A_MSIL1C_20170706T102021_N0205_R065_T32UPC_20170706T102301"]}' -curl ${AUTH} -H "Content-Type: application/json" -X POST -d "${JSON}" ${actinia_url}/locations/latlong_wgs84/mapsets/Sentinel2A/sentinel2_import +curl ${AUTH} -H "Content-Type: application/json" -X POST -d "${JSON}" ${actinia_url}/projects/latlong_wgs84/mapsets/Sentinel2A/sentinel2_import JSON='{"bands": ["B04", "B08"], @@ -378,7 +378,7 @@ JSON='{"bands":["B04", "B08"], "S2A_MSIL1C_20170621T110651_N0205_R137_T30SUJ_20170621T111222", "S2A_MSIL1C_20170412T110621_N0204_R137_T30SUJ_20170412T111708"]}' -curl ${AUTH} -H "Content-Type: application/json" -X POST -d "${JSON}" ${actinia_url}/locations/latlong_wgs84/mapsets/sentinel2A_openeo_subset/sentinel2_import +curl ${AUTH} -H "Content-Type: application/json" -X POST -d "${JSON}" ${actinia_url}/projects/latlong_wgs84/mapsets/sentinel2A_openeo_subset/sentinel2_import JSON='{ "list": [ @@ -406,7 +406,7 @@ JSON='{ "version": "1" }' -curl ${AUTH} -H "Content-Type: application/json" -X POST -d "${JSON}" ${actinia_url}/locations/latlong_wgs84/mapsets/sentinel2A_openeo_subset_ndvi/processing_sync +curl ${AUTH} -H "Content-Type: application/json" -X POST -d "${JSON}" ${actinia_url}/projects/latlong_wgs84/mapsets/sentinel2A_openeo_subset_ndvi/processing_sync # shellcheck disable=SC2016 JSON='{ @@ -437,7 +437,7 @@ JSON='{ ' -curl ${AUTH} -H "Content-Type: application/json" -X POST -d "${JSON}" ${actinia_url}/locations/latlong_wgs84/mapsets/sentinel2A_openeo_subset_ndvi/processing_async +curl ${AUTH} -H "Content-Type: application/json" -X POST -d "${JSON}" ${actinia_url}/projects/latlong_wgs84/mapsets/sentinel2A_openeo_subset_ndvi/processing_async JSON='{"list": [{ @@ -484,4 +484,4 @@ JSON='{"list": [{ "version": "1"} ' -curl ${AUTH} -H "Content-Type: application/json" -X POST -d "${JSON}" ${actinia_url}/locations/latlong_wgs84/process_chain_validation_sync +curl ${AUTH} -H "Content-Type: application/json" -X POST -d "${JSON}" ${actinia_url}/projects/latlong_wgs84/process_chain_validation_sync diff --git a/src/actinia_core/cli/actinia_user.py b/src/actinia_core/cli/actinia_user.py index 
4ff6e73f9..d3f458202 100755 --- a/src/actinia_core/cli/actinia_user.py +++ b/src/actinia_core/cli/actinia_user.py @@ -5,7 +5,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Soeren Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Soeren Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,11 +35,12 @@ import sys __license__ = "GPLv3" -__author__ = "Soeren Gebbert" +__author__ = "Soeren Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2023, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" def verify_password(username_or_token, password): @@ -74,7 +75,7 @@ def set_user_credentials(user, args, method): user (actinia.core.user.ActiniaUser()): The user object args: The command line arguments method (str): "create", "update", "update_add", - "update_rm", "update_rm_location" + "update_rm", "update_rm_project" """ @@ -95,19 +96,19 @@ def set_user_credentials(user, args, method): for lm in lm_list: if "/" not in lm: - sys.stderr.write("Wrong location mapset format for datasets\n") + sys.stderr.write("Wrong project mapset format for datasets\n") return if "/" in lm: - location, mapset = lm.split("/") + project, mapset = lm.split("/") else: - location = lm + project = lm mapset = None - if location not in datasets: - datasets[location] = [] + if project not in datasets: + datasets[project] = [] - datasets[location].append(mapset) + datasets[project].append(mapset) if method == "update" or method == "create": user.set_accessible_datasets(datasets) @@ -116,10 +117,10 @@ def set_user_credentials(user, args, method): user.add_accessible_dataset(dataset, datasets[dataset]) if method == "update_rm": for dataset in datasets: - user.remove_mapsets_from_location(dataset, datasets[dataset]) - if method == "update_rm_location": + user.remove_mapsets_from_project(dataset, datasets[dataset]) + if method == "update_rm_project": for dataset in datasets: - user.remove_location(dataset) + user.remove_project(dataset) if args.modules: modules = args.modules.split(",") @@ -150,7 +151,7 @@ def main(): "update", "update_add", "update_rm", - "update_rm_location", + "update_rm_project", "pwcheck", ], help="The action that should be performed:" @@ -162,8 +163,8 @@ def main(): " and add datasets or/and modules" " * update_rm: Update a user with the provided parameters" " and remove mapsets or/and modules" - " * update_rm_location: Update a user with the provided parameters" - " and remove locations or/and modules" + " * update_rm_project: Update a user with the provided parameters" + " and remove projects or/and modules" " * pwcheck: Check the password of the user", ) parser.add_argument( @@ -247,7 +248,7 @@ def main(): type=str, required=False, help="The datasets the user is allowed to access." 
- " Format: location/mapset,location/mapset", + " Format: project/mapset,project/mapset", ) parser.add_argument( "-m", @@ -330,7 +331,7 @@ def main(): args.action == "update" or args.action == "update_add" or args.action == "update_rm" - or args.action == "update_rm_location" + or args.action == "update_rm_project" ): if args.user_id is None: sys.stderr.write("You need to provide a user id\n") diff --git a/src/actinia_core/core/common/api_logger.py b/src/actinia_core/core/common/api_logger.py index 1d3a124d9..f24b33b28 100644 --- a/src/actinia_core/core/common/api_logger.py +++ b/src/actinia_core/core/common/api_logger.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -41,11 +41,12 @@ has_fluent = False __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika" +__author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = ( "Copyright 2016-present, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" def log_api_call(f): @@ -97,8 +98,8 @@ def add_entry(self, user_id, http_request): example = { "endpoint": "asyncephemeralresource", "method": "POST", - "path": "/locations/nc_spm_08/processing_async", - "request_url": "http://localhost/locations/nc_spm_08/" + "path": "/projects/nc_spm_08/processing_async", + "request_url": "http://localhost/projects/nc_spm_08/" "processing_async" } diff --git a/src/actinia_core/core/common/app.py b/src/actinia_core/core/common/app.py index 7a6f37d8e..4c5932d86 100644 --- a/src/actinia_core/core/common/app.py +++ b/src/actinia_core/core/common/app.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2023 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -62,29 +62,29 @@ **Data management** -- List all locations that are available in the actinia persistent database: +- List all projects that are available in the actinia persistent database: - curl ${AUTH} -X GET "${ACTINIA_URL}/locations" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects" -- List all mapsets in the location latlong_wgs84: +- List all mapsets in the project latlong_wgs84: - curl ${AUTH} -X GET "${ACTINIA_URL}/locations/latlong_wgs84/mapsets" + curl ${AUTH} -X GET "${ACTINIA_URL}/projects/latlong_wgs84/mapsets" -- List all raster layers in location latlong_wgs84 and mapset Sentinel2A +- List all raster layers in project latlong_wgs84 and mapset Sentinel2A curl ${AUTH} -X GET \ - "${ACTINIA_URL}/locations/latlong_wgs84/mapsets/Sentinel2A/raster_layers" + "${ACTINIA_URL}/projects/latlong_wgs84/mapsets/Sentinel2A/raster_layers" -- List all space-time raster datasets (STRDS) in location ECAD and mapset +- List all space-time raster datasets (STRDS) in project ECAD and mapset PERMANENT: curl ${AUTH} -X GET \ - "${ACTINIA_URL}/locations/ECAD/mapsets/PERMANENT/raster_layers" + "${ACTINIA_URL}/projects/ECAD/mapsets/PERMANENT/raster_layers" - List all raster map layers of the STRDS precipitation_1950_2013_yearly_mm: curl ${AUTH} -X GET \ - "${ACTINIA_URL}/locations/ECAD/mapsets/PERMANENT/strds/precipitation_\ + "${ACTINIA_URL}/projects/ECAD/mapsets/PERMANENT/strds/precipitation_\ 1950_2013_yearly_mm/raster_layers" **Landsat and Sentinel2A NDVI computation** @@ -118,10 +118,10 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Julia Haas, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2023, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" - +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" flask_app = Flask(__name__) # allows endpoints with and without trailing slashes diff --git a/src/actinia_core/core/common/config.py b/src/actinia_core/core/common/config.py index cf0dae6f9..c6e560a4f 100644 --- a/src/actinia_core/core/common/config.py +++ b/src/actinia_core/core/common/config.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2023 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -34,9 +34,10 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2023, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" if os.environ.get("DEFAULT_CONFIG_PATH"): DEFAULT_CONFIG_PATH = os.environ["DEFAULT_CONFIG_PATH"] @@ -260,15 +261,15 @@ def __init__(self): """GRASS""" # The GRASS global database - # The GRASS database in which the user locations + # The GRASS database in which the user projects # are stored. 
This is the base path, der user group # will be added on runtime self.GRASS_DATABASE = "%s/actinia/grassdb" % home - # The default GRASS location in the global database that - # is used for location generation + # The default GRASS project in the global database that + # is used for project generation self.GRASS_USER_DATABASE = "%s/actinia/userdata" % home # The directory to store temporary GRASS databases - self.GRASS_DEFAULT_LOCATION = "nc_spm_08" + self.GRASS_DEFAULT_PROJECT = "nc_spm_08" # Directory to store exported resources self.GRASS_TMP_DATABASE = "%s/actinia/workspace/temp_db" % home self.GRASS_RESOURCE_DIR = "%s/actinia/resources" % home @@ -532,7 +533,7 @@ def write(self, path=DEFAULT_CONFIG_PATH): config.set("GRASS", "GRASS_DATABASE", self.GRASS_DATABASE) config.set("GRASS", "GRASS_USER_DATABASE", self.GRASS_USER_DATABASE) config.set( - "GRASS", "GRASS_DEFAULT_LOCATION", self.GRASS_DEFAULT_LOCATION + "GRASS", "GRASS_DEFAULT_PROJECT", self.GRASS_DEFAULT_PROJECT ) config.set("GRASS", "GRASS_TMP_DATABASE", self.GRASS_TMP_DATABASE) config.set("GRASS", "GRASS_RESOURCE_DIR", self.GRASS_RESOURCE_DIR) @@ -727,10 +728,15 @@ def read(self, path=DEFAULT_CONFIG_PATH): self.GRASS_USER_DATABASE = config.get( "GRASS", "GRASS_USER_DATABASE" ) + # Deprecated location if config.has_option("GRASS", "GRASS_DEFAULT_LOCATION"): - self.GRASS_DEFAULT_LOCATION = config.get( + self.GRASS_DEFAULT_PROJECT = config.get( "GRASS", "GRASS_DEFAULT_LOCATION" ) + elif config.has_option("GRASS", "GRASS_DEFAULT_PROJECT"): + self.GRASS_DEFAULT_PROJECT = config.get( + "GRASS", "GRASS_DEFAULT_PROJECT" + ) if config.has_option("GRASS", "GRASS_TMP_DATABASE"): self.GRASS_TMP_DATABASE = config.get( "GRASS", "GRASS_TMP_DATABASE" diff --git a/src/actinia_core/core/common/keycloak_user.py b/src/actinia_core/core/common/keycloak_user.py index 662d203ef..42ec1c642 100644 --- a/src/actinia_core/core/common/keycloak_user.py +++ b/src/actinia_core/core/common/keycloak_user.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -37,8 +37,9 @@ from actinia_core.core.common.config import global_config __author__ = "Anika Weinmann" -__copyright__ = "Copyright 2022, mundialis GmbH & Co. KG" +__copyright__ = "Copyright 2024, mundialis GmbH & Co. KG" __maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" def create_user_from_tokeninfo(token_info): @@ -119,13 +120,13 @@ def __init__( lm_list = accessible_datasets.split(",") for lm in lm_list: if "/" in lm: - location, mapset = lm.split("/") + project, mapset = lm.split("/") else: - location = lm + project = lm mapset = None - if location not in datasets: - datasets[location] = [] - datasets[location].append(mapset) + if project not in datasets: + datasets[project] = [] + datasets[project].append(mapset) else: datasets = accessible_datasets if isinstance(accessible_modules, str): @@ -239,11 +240,11 @@ def get_credentials(self): return credentials def get_accessible_datasets(self): - """Return a dictionary of location:mapset list entries + """Return a dictionary of project:mapset list entries Returns: dict: - Return a dictionary of location:mapset list entries + Return a dictionary of project:mapset list entries """ self._generate_permission_dict() diff --git a/src/actinia_core/core/common/user.py b/src/actinia_core/core/common/user.py index fe0628f27..686a0968d 100644 --- a/src/actinia_core/core/common/user.py +++ b/src/actinia_core/core/common/user.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -37,8 +37,9 @@ ) __author__ = "Sören Gebbert, Anika Weinmann" -__copyright__ = "Copyright 2016-2022, mundialis GmbH & Co. KG" +__copyright__ = "Copyright 2016-2024, mundialis GmbH & Co. KG" __maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class ActiniaUser(ActiniaUserBase): @@ -129,11 +130,11 @@ def get_credentials(self): return self.db.get_credentials(self.user_id) def get_accessible_datasets(self): - """Return a dictionary of location:mapset list entries + """Return a dictionary of project:mapset list entries Returns: dict: - Return a dictionary of location:mapset list entries + Return a dictionary of project:mapset list entries """ self.permissions = self.db.get_credentials(self.user_id)["permissions"] @@ -413,7 +414,7 @@ def create_user( user_group (str): The group of the user user_role (str): The password accessible_datasets (dict): The user role (admin, user, guest) - accessible_modules (list): Dict of location:mapset lists + accessible_modules (list): Dict of project:mapset lists cell_limit (int): Maximum number of cells to process process_num_limit (int): The maximum number of processes the user is allowed to run in a single chain diff --git a/src/actinia_core/core/common/user_base.py b/src/actinia_core/core/common/user_base.py index 099488cc0..780f24f33 100644 --- a/src/actinia_core/core/common/user_base.py +++ b/src/actinia_core/core/common/user_base.py @@ -32,7 +32,7 @@ "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis GmbH & Co. 
KG" - +__email__ = "info@mundialis.de" USER_ROLES = ["superadmin", "admin", "user", "guest"] @@ -194,7 +194,7 @@ def add_accessible_dataset(self, project_name, mapset_list): list Args: - project_name (str): Location name + project_name (str): Project name mapset_list (list): List of mapset names Example:: @@ -218,7 +218,7 @@ def remove_mapsets_from_project(self, project_name, mapset_list): """Remove mapsets from an existing project Args: - project_name (str): Location name + project_name (str): Project name mapset_list (list): List of mapset names that should be removed Example:: @@ -234,11 +234,11 @@ def remove_mapsets_from_project(self, project_name, mapset_list): if mapset in self.accessible_datasets[project_name]: self.accessible_datasets[project_name].remove(mapset) - def remove_location(self, project_name): + def remove_project(self, project_name): """Remove a project from the accessible datasets Args: - project_name (str): Location name + project_name (str): Project name Example:: diff --git a/src/actinia_core/core/geodata_download_importer.py b/src/actinia_core/core/geodata_download_importer.py index 5ae042742..026576fac 100644 --- a/src/actinia_core/core/geodata_download_importer.py +++ b/src/actinia_core/core/geodata_download_importer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -37,9 +37,10 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Julia Haas, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" # Mimetypes supported for download SUPPORTED_MIMETYPES = [ @@ -179,7 +180,7 @@ def _check_urls(self): def get_download_process_list(self): """Create the process list to download, import and preprocess - geodata location on a remote location + geodata project on a remote project The downloaded files will be stored in a temporary directory. 
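A minimal sketch of this first step, assuming a plain urllib-based download helper (the function name, signature and use of urllib are illustrative assumptions for this document, not the actinia implementation):

    import os
    import urllib.request

    def download_to_temp(urls, temp_dir):
        # Fetch every remote file into the temporary directory so that a
        # later step can import it into the target mapset.
        local_files = []
        for url in urls:
            dest = os.path.join(temp_dir, os.path.basename(url))
            urllib.request.urlretrieve(url, dest)
            local_files.append(dest)
        return local_files
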
After the download of all files completes, the downloaded files will be moved to diff --git a/src/actinia_core/core/grass_modules_list.py b/src/actinia_core/core/grass_modules_list.py index 77caa827f..856c0820a 100644 --- a/src/actinia_core/core/grass_modules_list.py +++ b/src/actinia_core/core/grass_modules_list.py @@ -5,7 +5,7 @@ "keywords": "raster,cost surface,cumulative costs,cost " "allocation", "description": "Creates a raster map showing the cumulative " - "cost of moving between different geographic locations on an " + "cost of moving between different geographic projects on an " "input raster map whose cell category values represent" " cost.", "module": "r.cost", @@ -15,7 +15,7 @@ "allocation", "description": "Creates a raster map showing the anisotropic " "cumulative cost of moving between different geographic " - "locations on an input raster map whose cell category values " + "projects on an input raster map whose cell category values " "represent cost.", "module": "r.walk", }, @@ -23,8 +23,8 @@ "projection": { "r.proj": { "keywords": "raster,projection,transformation,import", - "description": "Re-projects a raster map from given location " - "to the current location.", + "description": "Re-projects a raster map from given project " + "to the current project.", "module": "r.proj", } }, @@ -916,7 +916,7 @@ "keywords": "general,settings", "description": "Changes/reports current mapset. Optionally " "create new mapset or list available mapsets in given " - "location.", + "project.", "module": "g.mapset", }, "g.region": { @@ -935,10 +935,10 @@ }, "projection": { "g.proj": { - "keywords": "general,projection,create location", + "keywords": "general,projection,create project", "description": "Prints or modifies GRASS projection " "information files (in various coordinate system descriptions" - "). Can also be used to create new GRASS locations.", + "). Can also be used to create new GRASS projects.", "module": "g.proj", } }, diff --git a/src/actinia_core/core/redis_lock.py b/src/actinia_core/core/redis_lock.py index 1a1c88992..10991443c 100644 --- a/src/actinia_core/core/redis_lock.py +++ b/src/actinia_core/core/redis_lock.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -28,16 +28,12 @@ import redis __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" -__credits__ = [ - "Thünen Institutes of Climate-Smart Agriculture", - "https://www.ti.bund.de/en/ak/", -] +__maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" class RedisLockingInterface(object): @@ -48,7 +44,7 @@ class RedisLockingInterface(object): # Redis LUA script to lock e resource # Two keys must be provided, the name of the resource and the expiration # time in seconds - # lock_resource("location/mapset", 30) + # lock_resource("project/mapset", 30) # Return 1 for success and 0 for unable to acquire lock because # resource-lock already exists lua_lock_resource = """ @@ -63,7 +59,7 @@ class RedisLockingInterface(object): # LUA script to extend the lock valid time # Two keys must be provided, the name of the resource and the expiration # time in seconds - # extend_resource_lock("user/location/mapset", 30) + # extend_resource_lock("user/project/mapset", 30) # Return 1 for success, 0 for resource does not exists lua_extend_resource_lock = """ local value_exists = redis.call('EXISTS', KEYS[1]) @@ -137,7 +133,7 @@ def disconnect(self): The lock mechanism can be used to avoid concurrent access to GRASS GIS mapsets by several processes. A mapset has a unique id: - location/mapset + project/mapset That can be used to create a resource lock. Locking and lock checking are atomic operations. Hence, it is guaranteed that only a single @@ -150,7 +146,7 @@ def get(self, resource_id, expiration=30): Args: resource_id (str): Name of the resource to lock, for example - "location/mapset" + "project/mapset" Returns: bool: @@ -170,7 +166,7 @@ def lock(self, resource_id, expiration=30): Args: resource_id (str): Name of the resource to lock, for example - "location/mapset" + "project/mapset" expiration (int): The time in seconds for which the lock is acquired @@ -193,7 +189,7 @@ def extend(self, resource_id, expiration=30): Args: resource_id (str): Name of the resource to extent the lock, for - example "location/mapset" + example "project/mapset" expiration (int): The time in seconds for which the lock is acquired @@ -217,7 +213,7 @@ def unlock(self, resource_id): Args: resource_id (str): Name of the resource to remove the lock, for - example "location/mapset" + example "project/mapset" Returns: int: @@ -236,7 +232,7 @@ def unlock(self, resource_id): def test_locking(r): - resource = "location/mapset" + resource = "project/mapset" # Remove the lock if its present r.unlock(resource) diff --git a/src/actinia_core/core/redis_user.py b/src/actinia_core/core/redis_user.py index e5740910b..e859e2bbc 100644 --- a/src/actinia_core/core/redis_user.py +++ b/src/actinia_core/core/redis_user.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -29,12 +29,12 @@ import pickle __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" class RedisUserInterface(RedisBaseInterface): @@ -318,7 +318,7 @@ def test_management(r): password_hash = "hash" user_role = "admin" permissions = { - "locations": { + "projects": { "NC": {"mapsets": ["PERMANWENT", "user1"]}, "ECAD": {"mapsets": ["Temp", "Prec"]}, }, @@ -380,7 +380,7 @@ def test_management(r): password_hash="yellow", user_role="user", permissions={ - "locations": {"utm32n": {"mapsets": ["PERMANWENT"]}}, + "projects": {"utm32n": {"mapsets": ["PERMANWENT"]}}, "modules": [ "i.vi", ], @@ -398,7 +398,7 @@ def test_management(r): raise Exception("update does not work") if user_creds["user_role"] != "user": raise Exception("update does not work") - if "utm32n" not in user_creds["permissions"]["locations"]: + if "utm32n" not in user_creds["permissions"]["projects"]: raise Exception("update does not work") user_ids = r.list_all_ids() diff --git a/src/actinia_core/endpoints.py b/src/actinia_core/endpoints.py index dc61f1bda..fdad67c92 100644 --- a/src/actinia_core/endpoints.py +++ b/src/actinia_core/endpoints.py @@ -31,7 +31,7 @@ from actinia_core.core.common.app import flask_api from actinia_core.core.common.config import global_config from actinia_core.core.logging_interface import log -from actinia_core.rest.location_management import ( +from actinia_core.rest.project_management import ( ListProjectsResource, ProjectManagementResourceAdminUser, ProjectManagementResourceUser, @@ -129,7 +129,8 @@ __copyright__ = ( "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" def create_project_endpoints(projects_url_part="projects"): diff --git a/src/actinia_core/models/process_chain.py b/src/actinia_core/models/process_chain.py index 9e995f617..c8c6b6d89 100644 --- a/src/actinia_core/models/process_chain.py +++ b/src/actinia_core/models/process_chain.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2021 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -29,12 +29,12 @@ from actinia_api import URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika" +__author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2021, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" - +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" SUPPORTED_EXPORT_FORMATS = [ "COG", @@ -116,7 +116,7 @@ class InputParameter(IOParameterBase): " scenes the scene name and the band must be provided. " "The Landsat approach is different.

" "In case a Landsat scene is requested, all " - "bands will be download, in the target location imported" + "bands will be download, in the target project imported" " and an atmospheric correction is applied. The " "atmospheric correction must be specified. The resulting" " raster map layers have a specific name scheme, that " @@ -474,7 +474,7 @@ class GrassModule(Schema): required = ["id", "module"] description = ( "The definition of a single GRASS GIS module and its inputs, outputs " - "and flags. This module will be run in a location/mapset environment " + "and flags. This module will be run in a project/mapset environment " "and is part of a process chain. The stdout and stderr output of " "modules that were run before this module in the process chain can be " "used as stdin for this module. The stdout of a module can be " diff --git a/src/actinia_core/models/response_models.py b/src/actinia_core/models/response_models.py index edbe78e9c..9b7b9b98c 100644 --- a/src/actinia_core/models/response_models.py +++ b/src/actinia_core/models/response_models.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,11 +35,12 @@ from actinia_core.core.common.process_chain import GrassModule __license__ = "GPLv3" -__author__ = "Sören Gebbert, Julia Haas, Guido Riembauer" +__author__ = "Sören Gebbert, Julia Haas, Guido Riembauer, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2021, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" class ProgressInfoModel(Schema): @@ -211,7 +212,7 @@ class MapsetListResponseModel(Schema): "items": {"type": "string"}, "description": ( "The names of all available mapsets with" - " corresponding locations in the global database" + " corresponding projects in the global database" ), }, } @@ -283,8 +284,8 @@ class ApiInfoModel(Schema): example = { "endpoint": "asyncephemeralresource", "method": "POST", - "path": f"{URL_PREFIX}/locations/nc_spm_08/processing_async", - "request_url": f"http://localhost{URL_PREFIX}/locations/nc_spm_08/" + "path": f"{URL_PREFIX}/projects/nc_spm_08/processing_async", + "request_url": f"http://localhost{URL_PREFIX}/projects/nc_spm_08/" "processing_async", } @@ -420,8 +421,8 @@ class ProcessingResponseModel(Schema): "api_info": { "endpoint": "asyncephemeralresource", "method": "POST", - "path": "/locations/nc_spm_08/processing_async", - "request_url": f"http://localhost{URL_PREFIX}/locations/nc_spm_08/" + "path": "/projects/nc_spm_08/processing_async", + "request_url": f"http://localhost{URL_PREFIX}/projects/nc_spm_08/" "processing_async", }, "datetime": "2017-05-24 22:37:21.608717", @@ -455,8 +456,8 @@ class ProcessingErrorResponseModel(ProcessingResponseModel): "api_info": { "endpoint": "mapsetmanagementresourceuser", "method": "GET", - "path": "/locations/nc_spm_08/mapsets/PERMANE/info", - "request_url": f"http://localhost{URL_PREFIX}/locations/nc_spm_08/" + "path": "/projects/nc_spm_08/mapsets/PERMANE/info", + "request_url": f"http://localhost{URL_PREFIX}/projects/nc_spm_08/" "mapsets/PERMANENT/info", }, "datetime": "2018-05-06 22:02:14.398927", @@ -810,7 +811,7 @@ class MapsetInfoModel(Schema): properties = { "projection": { "type": "string", - "description": "The location projection WKT string", + "description": "The project projection WKT string", }, "region": RegionModel, } @@ -874,8 +875,8 @@ class MapsetInfoResponseModel(ProcessingResponseModel): "api_info": { "endpoint": "mapsetmanagementresourceuser", "method": "GET", - "path": f"{URL_PREFIX}/locations/ECAD/mapsets/PERMANENT/info", - "request_url": f"http://localhost{URL_PREFIX}/locations/ECAD/" + "path": f"{URL_PREFIX}/projects/ECAD/mapsets/PERMANENT/info", + "request_url": f"http://localhost{URL_PREFIX}/projects/ECAD/" "mapsets/PERMANENT/info", }, "datetime": "2018-05-02 10:53:20.392509", @@ -1136,8 +1137,8 @@ class StringListProcessingResultResponseModel(ProcessingResponseModel): "api_info": { "endpoint": "listmapsetsresource", "method": "GET", - "path": "/locations/nc_spm_08/mapsets", - "request_url": "http://localhost:5000/locations/nc_spm_08/mapsets", + "path": "/projects/nc_spm_08/mapsets", + "request_url": "http://localhost:5000/projects/nc_spm_08/mapsets", }, "datetime": "2018-05-02 12:02:20.861017", "http_code": 200, @@ -1351,7 +1352,7 @@ class UserInfoResponseModel(Schema): "properties": {}, "description": "The persistent GRASS GIS databases the " "user is allowed to use. Contains one object for each " - "location name with an array of strings containing " + "project name with an array of strings containing " "all allowed mapset names. 
See example for more " "information.", }, diff --git a/src/actinia_core/processing/actinia_processing/ephemeral/ephemeral_processing_with_export.py b/src/actinia_core/processing/actinia_processing/ephemeral/ephemeral_processing_with_export.py index 2d71cf319..2caf174ee 100644 --- a/src/actinia_core/processing/actinia_processing/ephemeral/ephemeral_processing_with_export.py +++ b/src/actinia_core/processing/actinia_processing/ephemeral/ephemeral_processing_with_export.py @@ -34,7 +34,7 @@ from actinia_core.core.stac_exporter_interface import STACExporter __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) @@ -45,10 +45,10 @@ class EphemeralProcessingWithExport(EphemeralProcessing): """ This class processes GRASS data on the local machine in an temporary mapset - and copies the exported results to a dedicated storage location. + and copies the exported results to a dedicated storage project. The temporary mapset will be removed by this class when the processing - finished and the results are stored in the dedicated storage location. + finished and the results are stored in the dedicated storage project. TODO: Implement the export of arbitrary files that were generated in the processing of the process chain diff --git a/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py b/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py index e0185fe5d..f913dab43 100644 --- a/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py +++ b/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py @@ -40,7 +40,8 @@ __copyright__ = ( "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class PersistentProcessing(EphemeralProcessing): @@ -107,7 +108,7 @@ def __init__(self, rdc): def _generate_mapset_lock_id(self, user_group, project_name, mapset_name): """Generate a unique id to lock a mapset in the redis database - Locations are user group specific. Hence different user groups may have + Projects are user group specific. Hence different user groups may have projects with the same names and with equal mapset names. 
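A minimal sketch of such a lock id, assuming it simply concatenates the user group, project and mapset names in the "user_group/project/mapset" pattern documented by the Redis locking interface (the helper below is an illustrative assumption, not necessarily the actual method body):

    def generate_mapset_lock_id(user_group, project_name, mapset_name):
        # One lock id per (user group, project, mapset) triple, so equally
        # named projects/mapsets of different user groups never collide.
        return f"{user_group}/{project_name}/{mapset_name}"
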
In the same user group, a project/mapset must be locked to grant diff --git a/src/actinia_core/processing/actinia_processing/persistent/location_management.py b/src/actinia_core/processing/actinia_processing/persistent/project_management.py similarity index 95% rename from src/actinia_core/processing/actinia_processing/persistent/location_management.py rename to src/actinia_core/processing/actinia_processing/persistent/project_management.py index e4ef4466d..7ec332b82 100644 --- a/src/actinia_core/processing/actinia_processing/persistent/location_management.py +++ b/src/actinia_core/processing/actinia_processing/persistent/project_management.py @@ -22,7 +22,7 @@ ####### """ -Location management +Project management TODO: Integrate into the ephemeral process chain approach """ @@ -42,7 +42,7 @@ __maintainer__ = "mundialis" -class PersistentLocationCreator(PersistentProcessing): +class PersistentProjectCreator(PersistentProcessing): """Create a new project based on EPSG code""" def __init__(self, *args): @@ -51,7 +51,7 @@ def __init__(self, *args): def _execute(self): new_project = self.project_name - self.project_name = self.config.GRASS_DEFAULT_LOCATION + self.project_name = self.config.GRASS_DEFAULT_PROJECT self._setup() diff --git a/src/actinia_core/processing/actinia_processing/persistent/strds_raster_management.py b/src/actinia_core/processing/actinia_processing/persistent/strds_raster_management.py index 3c82bf7d8..9b70c36e4 100644 --- a/src/actinia_core/processing/actinia_processing/persistent/strds_raster_management.py +++ b/src/actinia_core/processing/actinia_processing/persistent/strds_raster_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -38,15 +38,16 @@ from actinia_core.core.common.exceptions import AsyncProcessError __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika" +__author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class PersistentRasterSTRDSLister(PersistentProcessing): - """List all mapsets in a location""" + """List all mapsets in a project""" def __init__(self, *args): PersistentProcessing.__init__(self, *args) diff --git a/src/actinia_core/processing/actinia_processing/persistent/vector_layer.py b/src/actinia_core/processing/actinia_processing/persistent/vector_layer.py index 986a29f3c..749b11c5d 100644 --- a/src/actinia_core/processing/actinia_processing/persistent/vector_layer.py +++ b/src/actinia_core/processing/actinia_processing/persistent/vector_layer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,9 +35,10 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Carmen Tawalika, Guido Riembauer, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class PersistentVectorDeleter(PersistentProcessing): @@ -45,7 +46,7 @@ def __init__(self, *args): PersistentProcessing.__init__(self, *args) def _execute(self): - """Delete a specific vector layer from a location in the user database + """Delete a specific vector layer from a project in the user database Use the original mapset for processing """ diff --git a/src/actinia_core/processing/common/location_management.py b/src/actinia_core/processing/common/project_management.py similarity index 79% rename from src/actinia_core/processing/common/location_management.py rename to src/actinia_core/processing/common/project_management.py index 76fc32675..689a8052d 100644 --- a/src/actinia_core/processing/common/location_management.py +++ b/src/actinia_core/processing/common/project_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -22,7 +22,7 @@ ####### """ -Location management +Project management TODO: Integrate into the ephemeral process chain approach """ @@ -30,19 +30,19 @@ from actinia_core.processing.common.utils import try_import __license__ = "GPLv3" -__author__ = "Sören Gebbert, Carmen Tawalika" +__author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" -PersistentLocationCreator = try_import( +PersistentProjectCreator = try_import( ( "actinia_core.processing.actinia_processing.persistent" - + ".location_management" + + ".project_management" ), - "PersistentLocationCreator", + "PersistentProjectCreator", ) PersistentGetProjectionRegionInfo = try_import( @@ -54,8 +54,8 @@ ) -def create_location(*args): - processing = PersistentLocationCreator(*args) +def create_project(*args): + processing = PersistentProjectCreator(*args) processing.run() diff --git a/src/actinia_core/rest/mapsets.py b/src/actinia_core/rest/mapsets.py index eeebb6bb7..b51cc8887 100644 --- a/src/actinia_core/rest/mapsets.py +++ b/src/actinia_core/rest/mapsets.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2021 mundialis GmbH & Co. KG +# Copyright (c) 2021-2024 mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -22,10 +22,10 @@ ####### """ -Mapset resources for information across all locations +Mapset resources for information across all projects * List all mapset locks -* List all mapsets in all locations available to a user +* List all mapsets in all projects available to a user """ from flask import jsonify, make_response @@ -54,9 +54,10 @@ __license__ = "GPLv3" -__author__ = "Julia Haas, Guido Riembauer" -__copyright__ = "Copyright 2021 mundialis GmbH & Co. KG" -__maintainer__ = "mundialis" +__author__ = "Julia Haas, Guido Riembauer, Anika Weinmann" +__copyright__ = "Copyright 2021-2024 mundialis GmbH & Co. KG" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class AllMapsetsListingResourceAdmin(ResourceBase): @@ -160,9 +161,9 @@ def get(self): ]["accessible_datasets"] redis_interface.disconnect() mapsets = [] - for location in locs_mapsets: - for mapset in locs_mapsets[location]: - mapsets.append(f"{location}/{mapset}") + for project in locs_mapsets: + for mapset in locs_mapsets[project]: + mapsets.append(f"{project}/{mapset}") try: return make_response( jsonify( diff --git a/src/actinia_core/rest/location_management.py b/src/actinia_core/rest/project_management.py similarity index 98% rename from src/actinia_core/rest/location_management.py rename to src/actinia_core/rest/project_management.py index 0334af414..280deb17a 100644 --- a/src/actinia_core/rest/location_management.py +++ b/src/actinia_core/rest/project_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2022 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -57,7 +57,7 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis" diff --git a/src/actinia_core/rest/resource_management.py b/src/actinia_core/rest/resource_management.py index 6f11979bc..db0996c9e 100644 --- a/src/actinia_core/rest/resource_management.py +++ b/src/actinia_core/rest/resource_management.py @@ -61,6 +61,7 @@ "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class ResourceManagerBase(Resource): @@ -352,8 +353,8 @@ def _create_ResourceDataContainer_for_resumption( is None ): return None, None, None - # TODO project location - project = re.findall(r"locations\/(.*?)\/", post_url)[0] + # TODO project project + project = re.findall(r"projects\/(.*?)\/", post_url)[0] processing_class = global_config.INTERIM_SAVING_ENDPOINTS[endpoint] if processing_class == "AsyncEphemeralResource": # /projects//processing_async diff --git a/src/actinia_core/version.py b/src/actinia_core/version.py index e4f7fe007..f8f64e652 100644 --- a/src/actinia_core/version.py +++ b/src/actinia_core/version.py @@ -5,7 +5,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. 
For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -27,11 +27,12 @@ """ __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" from flask import make_response, jsonify, request from importlib import metadata @@ -61,7 +62,7 @@ def init_versions(): g_version = subprocess.run( [ "grass", - "--tmp-location", + "--tmp-project", "epsg:4326", "--exec", "g.version", diff --git a/tests/test_async_mapset_merging.py b/tests/test_async_mapset_merging.py index 1f5d93c2f..a37d43caf 100644 --- a/tests/test_async_mapset_merging.py +++ b/tests/test_async_mapset_merging.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,12 +35,12 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" # Module change example for r.slope.aspect with g.region adjustment @@ -107,13 +107,13 @@ def check_remove_test_mapsets(self): # Unlock mapset for deletion rv = self.server.delete( URL_PREFIX - + "/locations/%s/mapsets/%s/lock" % ("nc_spm_08", mapset), + + "/projects/%s/mapsets/%s/lock" % ("nc_spm_08", mapset), headers=self.admin_auth_header, ) print(rv.data) rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/mapsets", + URL_PREFIX + "/projects/nc_spm_08/mapsets", headers=self.user_auth_header, ) print(rv.data) @@ -132,7 +132,7 @@ def check_remove_test_mapsets(self): if mapset in mapsets: # Delete the mapset if it already exists rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/%s" % mapset, + URL_PREFIX + "/projects/nc_spm_08/mapsets/%s" % mapset, headers=self.admin_auth_header, ) print(rv.data) @@ -152,7 +152,7 @@ def test_1_merge_no_access_to_target_mapset_error(self): """No access to target mapset error test""" # Try merge source mapsets into target mapset rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/user1/merging_async", + URL_PREFIX + "/projects/nc_spm_08/mapsets/user1/merging_async", headers=self.user_auth_header, data=json_dumps(["Source_A", "Source_B"]), content_type="application/json", @@ -173,7 +173,7 @@ def test_2_merge_missing_target_mapset_error(self): # Try merge source mapsets into target mapset rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/Target/merging_async", + URL_PREFIX + "/projects/nc_spm_08/mapsets/Target/merging_async", headers=self.admin_auth_header, data=json_dumps(["Source_A", "Source_B"]), content_type="application/json", @@ -192,7 +192,7 @@ def test_3_merge_missing_source_mapsets_error(self): # Create target mapset rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/Target", + URL_PREFIX + "/projects/nc_spm_08/mapsets/Target", headers=self.admin_auth_header, ) print(rv.data) @@ -207,7 +207,7 @@ def test_3_merge_missing_source_mapsets_error(self): # Try merge source mapsets into target mapset rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/Target/merging_async", + URL_PREFIX + "/projects/nc_spm_08/mapsets/Target/merging_async", headers=self.admin_auth_header, data=json_dumps(["Source_A", "Source_B"]), content_type="application/json", @@ -226,7 +226,7 @@ def test_4_merge_empty_mapset_list(self): # Create target mapset rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/Target", + URL_PREFIX + "/projects/nc_spm_08/mapsets/Target", headers=self.admin_auth_header, ) print(rv.data) @@ -241,7 +241,7 @@ def test_4_merge_empty_mapset_list(self): # Try merge source mapsets into target mapset rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/Target/merging_async", + URL_PREFIX + "/projects/nc_spm_08/mapsets/Target/merging_async", headers=self.admin_auth_header, data=json_dumps([]), content_type="application/json", @@ -261,7 +261,7 @@ def test_5_merge_two_mapsets(self): # Create the source mapsets rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/Source_A/processing_async", + + "/projects/nc_spm_08/mapsets/Source_A/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_short_1), content_type="application/json", @@ -270,7 +270,7 @@ def test_5_merge_two_mapsets(self): rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/Source_B/processing_async", + + "/projects/nc_spm_08/mapsets/Source_B/processing_async", headers=self.admin_auth_header, 
data=json_dumps(process_chain_short_2), content_type="application/json", @@ -279,7 +279,7 @@ def test_5_merge_two_mapsets(self): # Create target mapset rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/Target", + URL_PREFIX + "/projects/nc_spm_08/mapsets/Target", headers=self.admin_auth_header, ) print(rv.data) @@ -294,7 +294,7 @@ def test_5_merge_two_mapsets(self): # Merge source mapsets into target mapset rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/Target/merging_async", + URL_PREFIX + "/projects/nc_spm_08/mapsets/Target/merging_async", headers=self.admin_auth_header, data=json_dumps(["Source_A", "Source_B"]), content_type="application/json", @@ -304,7 +304,7 @@ def test_5_merge_two_mapsets(self): # Check copied raster rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/Target/raster_layers/my_aspect_1", + + "/projects/nc_spm_08/mapsets/Target/raster_layers/my_aspect_1", headers=self.admin_auth_header, ) print(rv.data) @@ -313,7 +313,7 @@ def test_5_merge_two_mapsets(self): ) rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/Target/raster_layers/my_aspect_2", + + "/projects/nc_spm_08/mapsets/Target/raster_layers/my_aspect_2", headers=self.admin_auth_header, ) print(rv.data) @@ -322,7 +322,7 @@ def test_5_merge_two_mapsets(self): ) rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/Target/raster_layers/my_slope_1", + + "/projects/nc_spm_08/mapsets/Target/raster_layers/my_slope_1", headers=self.admin_auth_header, ) print(rv.data) @@ -331,7 +331,7 @@ def test_5_merge_two_mapsets(self): ) rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/Target/raster_layers/my_slope_2", + + "/projects/nc_spm_08/mapsets/Target/raster_layers/my_slope_2", headers=self.admin_auth_header, ) print(rv.data) diff --git a/tests/test_async_mapset_merging_strds.py b/tests/test_async_mapset_merging_strds.py index c35255aaf..e14800807 100644 --- a/tests/test_async_mapset_merging_strds.py +++ b/tests/test_async_mapset_merging_strds.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2021-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,8 +35,9 @@ __license__ = "GPLv3" __author__ = "Guido Riembauer, Anika Weinmann" -__copyright__ = "Copyright 2021, mundialis GmbH & Co. KG" -__maintainer__ = "mundialis" +__copyright__ = "Copyright 2021-2024, mundialis GmbH & Co. KG" +__maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" mapset_name = "test_strds_merging" @@ -257,21 +258,21 @@ class AsyncMapsetMergingSTRDS(ActiniaResourceTestCaseBase): def tearDown(self): # unlock and delete the test mapsets rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/mapsets", + URL_PREFIX + "/projects/nc_spm_08/mapsets", headers=self.user_auth_header, ) existing_mapsets = json_load(rv.data)["process_results"] if self.user_mapset in existing_mapsets: rvdellock = self.server.delete( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/lock" % self.user_mapset, + + "/projects/nc_spm_08/mapsets/%s/lock" % self.user_mapset, headers=self.admin_auth_header, ) print(rvdellock.data.decode()) rvdel = self.server.delete( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s" % self.user_mapset, + + "/projects/nc_spm_08/mapsets/%s" % self.user_mapset, headers=self.admin_auth_header, ) print(rvdel.data.decode()) @@ -282,7 +283,7 @@ def check_strds_in_mapset(self, strds_names): for strds_name in strds_names: rv[strds_name] = self.server.get( URL_PREFIX - + f"/locations/nc_spm_08/mapsets/{self.user_mapset}/strds", + + f"/projects/nc_spm_08/mapsets/{self.user_mapset}/strds", headers=self.user_auth_header, ) strds = json_load(rv[strds_name].data)["process_results"] @@ -295,7 +296,7 @@ def check_strds_in_mapset(self, strds_names): def check_modis_strds(self, raster_dict, strds_name): # check if correct maps are listed in strds strds rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/{self.user_mapset}/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" f"strds/{strds_name}/raster_layers", headers=self.user_auth_header, ) @@ -317,7 +318,7 @@ def check_modis_strds(self, raster_dict, strds_name): def test_create_strds_in_persistent_user_db(self): rv = self.server.post( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/{self.user_mapset}/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" "processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_create_strds1), @@ -338,7 +339,7 @@ def test_create_strds_in_persistent_user_db(self): def test_create_strds_in_persistent_user_db_and_list_it(self): rv = self.server.post( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/{self.user_mapset}/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" "processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_create_strds1), @@ -353,7 +354,7 @@ def test_create_strds_in_persistent_user_db_and_list_it(self): ) rv2 = self.server.post( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/{self.user_mapset}/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" "processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_list), @@ -373,7 +374,7 @@ def test_create_strds_in_persistent_user_db_and_list_it(self): def test_create_strds_in_persistent_user_db_2(self): rv = self.server.post( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/{self.user_mapset}/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" "processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_create_strds1), @@ -388,7 +389,7 @@ def test_create_strds_in_persistent_user_db_2(self): ) rv = self.server.post( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/{self.user_mapset}/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" "processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_create_strds2), @@ -410,7 +411,7 @@ def test_create_strds_in_persistent_user_db_2(self): def 
test_create_strds_in_persistent_user_db_3(self): rv = self.server.post( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/{self.user_mapset}/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" "processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_create_strds1), @@ -430,7 +431,7 @@ def test_create_strds_in_persistent_user_db_3(self): self.check_modis_strds(self.raster_dict_modis, "modis") rv = self.server.post( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/{self.user_mapset}/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" "processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_create_strds3), diff --git a/tests/test_async_process_postgis_import_export.py b/tests/test_async_process_postgis_import_export.py index 1a37f5cd6..3c928357c 100644 --- a/tests/test_async_process_postgis_import_export.py +++ b/tests/test_async_process_postgis_import_export.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,12 +35,12 @@ from test_resource_base import ActiniaResourceTestCaseBase # , URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" process_chain_postgis = { "list": [ @@ -95,7 +95,7 @@ def gen_output_layer_name(self): # # TODO fix test and comment the test in (postgres DB is needed) # def test_1_async_processing_postgis_validation(self): # rv = self.server.post( - # f"{URL_PREFIX}/locations/nc_spm_08/process_chain_validation_async", + # f"{URL_PREFIX}/projects/nc_spm_08/process_chain_validation_async", # headers=self.admin_auth_header, # data=json_dumps(process_chain_postgis), # content_type="application/json", @@ -115,7 +115,7 @@ def gen_output_layer_name(self): # self.gen_output_layer_name() # # rv = self.server.post( - # URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + # URL_PREFIX + "/projects/nc_spm_08/processing_async_export", # headers=self.admin_auth_header, # data=json_dumps(process_chain_postgis), # content_type="application/json", diff --git a/tests/test_async_process_validation.py b/tests/test_async_process_validation.py index ef872f6cc..6cfbc72c8 100644 --- a/tests/test_async_process_validation.py +++ b/tests/test_async_process_validation.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -42,11 +42,12 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" # Module example for r.out.ascii with g.region adjustment and temporary file # handling @@ -359,7 +360,7 @@ class AsyncProcessValidationTestCase(ActiniaResourceTestCaseBase): def test_async_processing_legacy(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_legacy), content_type="application/json", @@ -375,7 +376,7 @@ def test_async_processing_legacy(self): def test_async_processing_new(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_new), content_type="application/json", @@ -398,7 +399,7 @@ def test_async_processing_new(self): def test_async_processing_new_ndvi(self): rv = self.server.post( URL_PREFIX - + "/locations/latlong_wgs84/process_chain_validation_async", + + "/projects/latlong_wgs84/process_chain_validation_async", headers=self.admin_auth_header, data=json_dumps(process_chain_ndvi), content_type="application/json", @@ -422,7 +423,7 @@ def test_async_processing_new_ndvi(self): def test_async_processing_new_ndvi_export_landsat(self): rv = self.server.post( URL_PREFIX - + "/locations/latlong_wgs84/process_chain_validation_async", + + "/projects/latlong_wgs84/process_chain_validation_async", headers=self.admin_auth_header, data=json_dumps(process_chain_ndvi_landsat), content_type="application/json", @@ -445,7 +446,7 @@ def test_async_processing_new_ndvi_export_landsat(self): def test_async_processing_landsat(self): rv = self.server.post( URL_PREFIX - + "/locations/latlong_wgs84/process_chain_validation_async", + + "/projects/latlong_wgs84/process_chain_validation_async", headers=self.admin_auth_header, data=json_dumps(process_chain_landsat), content_type="application/json", diff --git a/tests/test_async_process_validation_errors.py b/tests/test_async_process_validation_errors.py index 4b7d30189..78330d645 100644 --- a/tests/test_async_process_validation_errors.py +++ b/tests/test_async_process_validation_errors.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -34,12 +34,12 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. 
KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" # param wrong process_chain_error_1 = { @@ -329,7 +329,7 @@ class AsyncProcessValidationTestCase(ActiniaResourceTestCaseBase): def test_async_processing_error_webhook_finished(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_webhook_finished), content_type="application/json", @@ -347,7 +347,7 @@ def test_async_processing_error_webhook_finished(self): def test_async_processing_error_webhook_update(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_webhook_update), content_type="application/json", @@ -365,7 +365,7 @@ def test_async_processing_error_webhook_update(self): def test_async_processing_error_1(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_1), content_type="application/json", @@ -383,7 +383,7 @@ def test_async_processing_error_1(self): def test_async_processing_error_2(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_2), content_type="application/json", @@ -401,7 +401,7 @@ def test_async_processing_error_2(self): def test_async_processing_error_3(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_3), content_type="application/json", @@ -419,7 +419,7 @@ def test_async_processing_error_3(self): def test_async_processing_error_4(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_4), content_type="application/json", @@ -437,7 +437,7 @@ def test_async_processing_error_4(self): def test_async_processing_error_5(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_5), content_type="application/json", @@ -455,7 +455,7 @@ def test_async_processing_error_5(self): def test_async_processing_output_error_1(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_output_error_1), content_type="application/json", @@ -473,7 +473,7 @@ def test_async_processing_output_error_1(self): def test_async_processing_output_error_2(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", 
headers=self.admin_auth_header, data=json_dumps(process_chain_output_error_2), content_type="application/json", @@ -491,7 +491,7 @@ def test_async_processing_output_error_2(self): def test_async_processing_landsat_error_1(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_landsat_error_1), content_type="application/json", @@ -509,7 +509,7 @@ def test_async_processing_landsat_error_1(self): def test_async_processing_landsat_error_2(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_landsat_error_2), content_type="application/json", @@ -527,7 +527,7 @@ def test_async_processing_landsat_error_2(self): def test_async_processing_landsat_error_3(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_landsat_error_3), content_type="application/json", @@ -545,7 +545,7 @@ def test_async_processing_landsat_error_3(self): def test_async_processing_sent_error_1(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_sent_1), content_type="application/json", @@ -563,7 +563,7 @@ def test_async_processing_sent_error_1(self): def test_async_processing_sent_error_2(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/process_chain_validation_sync", + URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_sent_2), content_type="application/json", diff --git a/tests/test_async_processing.py b/tests/test_async_processing.py index c5d1486a4..3334a6cab 100644 --- a/tests/test_async_processing.py +++ b/tests/test_async_processing.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,12 +35,12 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" # Module change example for r.slope.aspect with g.region adjustment process_chain = { @@ -174,7 +174,7 @@ class AsyncProcessTestCase(ActiniaResourceTestCaseBase): def test_async_processing(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain), content_type="application/json", @@ -188,7 +188,7 @@ def test_async_processing(self): def test_async_processing_termination(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain), content_type="application/json", @@ -245,7 +245,7 @@ def test_async_processing_termination(self): def test_async_processing_large_region(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_region), content_type="application/json", @@ -265,7 +265,7 @@ def test_async_processing_error_1(self): """ rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_1), content_type="application/json", @@ -285,7 +285,7 @@ def test_async_processing_error_2(self): """ rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_2), content_type="application/json", @@ -300,7 +300,7 @@ def test_async_processing_error_2(self): def test_async_processing_error_3(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_3), content_type="application/json", @@ -315,7 +315,7 @@ def test_async_processing_error_3(self): def test_async_processing_error_4(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_4), content_type="application/json", @@ -332,7 +332,7 @@ def test_async_processing_error_5(self): """No JSON payload error""" rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.user_auth_header, ) @@ -348,7 +348,7 @@ def test_async_processing_error_5(self): def test_async_processing_error_6(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_5), content_type="application/json", @@ -363,7 +363,7 @@ def test_async_processing_error_6(self): def test_async_processing_error_7(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_6), content_type="application/json", @@ -385,7 +385,7 @@ def test_async_processing_error_8(self): pc[3]["inputs"]["elevation"] = "elevation@NO_Mapset" rv = self.server.post( - URL_PREFIX + 
"/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(pc), content_type="application/json", @@ -408,7 +408,7 @@ def test_async_processing_error_9(self): pc[1]["flags"] = "p" rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(pc), content_type="application/json", diff --git a/tests/test_async_processing_2.py b/tests/test_async_processing_2.py index 059b8da11..a525d70dc 100644 --- a/tests/test_async_processing_2.py +++ b/tests/test_async_processing_2.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -42,12 +42,12 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" # Module example for r.out.ascii with g.region adjustment and temporary file # handling @@ -324,7 +324,7 @@ class AsyncProcess2TestCase(ActiniaResourceTestCaseBase): def test_async_processing_legacy(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_legacy), content_type="application/json", @@ -339,7 +339,7 @@ def test_async_processing_legacy(self): def test_async_processing_rmapcalc(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_rmapcalc), content_type="application/json", @@ -354,7 +354,7 @@ def test_async_processing_rmapcalc(self): def test_async_processing_new(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_new), content_type="application/json", @@ -375,7 +375,7 @@ def test_async_processing_new(self): ) def test_async_processing_new_ndvi(self): rv = self.server.post( - URL_PREFIX + "/locations/latlong_wgs84/processing_async", + URL_PREFIX + "/projects/latlong_wgs84/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_ndvi), content_type="application/json", @@ -396,7 +396,7 @@ def test_async_processing_new_ndvi(self): ) def test_async_processing_new_ndvi_export(self): rv = self.server.post( - URL_PREFIX + "/locations/latlong_wgs84/processing_async_export", + URL_PREFIX + "/projects/latlong_wgs84/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_ndvi), content_type="application/json", @@ -417,7 +417,7 @@ def test_async_processing_new_ndvi_export(self): ) def test_async_processing_new_ndvi_export_landsat(self): rv = self.server.post( - URL_PREFIX + 
"/locations/latlong_wgs84/processing_async_export", + URL_PREFIX + "/projects/latlong_wgs84/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_ndvi_landsat), content_type="application/json", diff --git a/tests/test_async_processing_export.py b/tests/test_async_processing_export.py index d0dffd00e..dd50ffcc9 100644 --- a/tests/test_async_processing_export.py +++ b/tests/test_async_processing_export.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -34,12 +34,12 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" # Module change example for r.slope.aspect with g.region adjustment process_chain_long = { @@ -326,7 +326,7 @@ class AsyncProcessExportTestCaseUser(ActiniaResourceTestCaseBase): def test_async_processing(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_short), content_type="application/json", @@ -356,7 +356,7 @@ def test_long_fail(self): # The process num limit exceeds the credentials settings of the user rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -373,7 +373,7 @@ def test_long_run_fail(self): # The process time limit exceeds the credentials settings of the user rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_short_long_run), content_type="application/json", @@ -390,7 +390,7 @@ def test_large_Region_fail(self): # The cell limit exceeds the credentials settings of the user rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_short_large_region), content_type="application/json", @@ -405,7 +405,7 @@ def test_large_Region_fail(self): def test_termination_1(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_short_long_run), content_type="application/json", @@ -428,7 +428,7 @@ def test_termination_1(self): def test_termination_2(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", 
headers=self.user_auth_header, data=json_dumps(process_chain_short_long_run), content_type="application/json", @@ -451,7 +451,7 @@ def test_termination_2(self): def test_termination_3(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.root_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -474,7 +474,7 @@ def test_termination_3(self): def test_error_1(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_error_1), content_type="application/json", @@ -489,7 +489,7 @@ def test_error_1(self): def test_error_2(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_error_2), content_type="application/json", @@ -504,7 +504,7 @@ def test_error_2(self): def test_error_3(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_error_3), content_type="application/json", @@ -519,7 +519,7 @@ def test_error_3(self): def test_error_4(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_error_4), content_type="application/json", @@ -535,7 +535,7 @@ def test_error_4(self): def test_stac_export(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_short_stac), content_type="application/json", @@ -562,7 +562,7 @@ def test_stac_export(self): class AsyncProcessExportTestCaseAdmin(ActiniaResourceTestCaseBase): def test_async_processing(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -588,7 +588,7 @@ def test_async_processing(self): def test_termination(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -611,7 +611,7 @@ def test_termination(self): def test_error_1(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_error_1), content_type="application/json", @@ -626,7 +626,7 @@ def test_error_1(self): def test_error_2(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_error_2), content_type="application/json", @@ -641,7 +641,7 @@ def test_error_2(self): def test_error_3(self): rv = self.server.post( - URL_PREFIX + 
"/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_error_3), content_type="application/json", @@ -656,7 +656,7 @@ def test_error_3(self): def test_error_4(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_error_4), content_type="application/json", diff --git a/tests/test_async_processing_export_file.py b/tests/test_async_processing_export_file.py index a1d197a45..89d3afa1f 100644 --- a/tests/test_async_processing_export_file.py +++ b/tests/test_async_processing_export_file.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,12 +35,12 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" # Module example for r.out.ascii output file export file_export = { @@ -92,7 +92,7 @@ class AsyncProcessFileExportTestCase(ActiniaResourceTestCaseBase): def test_async_processing_file_export(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(file_export), content_type="application/json", @@ -123,7 +123,7 @@ def test_async_processing_file_export(self): def test_termination(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(file_export), content_type="application/json", @@ -159,7 +159,7 @@ class AsyncProcessExportTestCaseAdminS3(ActiniaResourceTestCaseBase): ) def test_async_processing_export(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export_s3", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export_s3", headers=self.admin_auth_header, data=json_dumps(file_export), content_type="application/json", @@ -189,7 +189,7 @@ def test_async_processing_export(self): ) def test_termination(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export_s3", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export_s3", headers=self.admin_auth_header, data=json_dumps(file_export), content_type="application/json", @@ -225,7 +225,7 @@ class AsyncProcessExportTestCaseAdminGCS(ActiniaResourceTestCaseBase): ) def test_async_processing_export(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export_gcs", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export_gcs", headers=self.admin_auth_header, data=json_dumps(file_export), 
content_type="application/json", @@ -255,7 +255,7 @@ def test_async_processing_export(self): ) def test_termination(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export_gcs", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export_gcs", headers=self.admin_auth_header, data=json_dumps(file_export), content_type="application/json", diff --git a/tests/test_async_processing_export_to_storage.py b/tests/test_async_processing_export_to_storage.py index 43130391d..63f8df3f3 100644 --- a/tests/test_async_processing_export_to_storage.py +++ b/tests/test_async_processing_export_to_storage.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,12 +35,12 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" # Module change example for r.slope.aspect with g.region adjustment process_chain_long = { @@ -87,7 +87,7 @@ class AsyncProcessExport2TestCaseAdmin(ActiniaResourceTestCaseBase): def test_async_processing_export(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -128,7 +128,7 @@ class AsyncProcessExportTestCaseAdminS3(ActiniaResourceTestCaseBase): ) def test_async_processing_export(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export_s3", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export_s3", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -158,7 +158,7 @@ def test_async_processing_export(self): ) def test_termination(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export_s3", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export_s3", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -194,7 +194,7 @@ class AsyncProcessExportTestCaseAdminGCS(ActiniaResourceTestCaseBase): ) def test_async_processing_export(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export_gcs", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export_gcs", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -224,7 +224,7 @@ def test_async_processing_export(self): ) def test_termination(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export_gcs", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export_gcs", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", diff --git 
a/tests/test_async_processing_export_vector.py b/tests/test_async_processing_export_vector.py index f09cf7436..030cb16ac 100644 --- a/tests/test_async_processing_export_vector.py +++ b/tests/test_async_processing_export_vector.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -41,12 +41,12 @@ ) __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" vector_layer_export = { "list": [ @@ -178,7 +178,7 @@ class AsyncProcessTestCase(ActiniaResourceTestCaseBase): def test_vector_export(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(vector_layer_export), content_type="application/json", @@ -210,7 +210,7 @@ def test_vector_export(self): def test_vector_buffer(self): rv = self.server.post( - URL_PREFIX + "/locations/latlong_wgs84/processing_async_export", + URL_PREFIX + "/projects/latlong_wgs84/processing_async_export", headers=self.admin_auth_header, data=json_dumps(vector_layer_buffer), content_type="application/json", @@ -245,7 +245,7 @@ def test_vector_buffer(self): def test_vector_clean(self): rv = self.server.post( - URL_PREFIX + "/locations/latlong_wgs84/processing_async_export", + URL_PREFIX + "/projects/latlong_wgs84/processing_async_export", headers=self.admin_auth_header, data=json_dumps(vector_layer_clean), content_type="application/json", diff --git a/tests/test_async_processing_import_export.py b/tests/test_async_processing_import_export.py index b02f9851a..c605bbffd 100644 --- a/tests/test_async_processing_import_export.py +++ b/tests/test_async_processing_import_export.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -49,11 +49,11 @@ no_stac_plugin = True __license__ = "GPLv3" -__author__ = "Sören Gebbert, Julia Haas" +__author__ = "Sören Gebbert, Julia Haas, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. 
KG" __email__ = "info@mundialis.de" process_chain_raster_import_export = { @@ -652,7 +652,7 @@ class AsyncProcessTestCase(ActiniaResourceTestCaseBase): ) def test_raster_import_export_sentinel_ndvi(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps( process_chain_sentinel_import_export_sentinel_ndvi @@ -675,7 +675,7 @@ def test_raster_import_export_sentinel_ndvi(self): ) def test_raster_import_export(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_sentinel_import_export), content_type="application/json", @@ -690,7 +690,7 @@ def test_raster_import_export(self): def test_raster_import(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_raster_import_info), content_type="application/json", @@ -705,7 +705,7 @@ def test_raster_import(self): def test_raster_import_nofile(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_raster_import_error_no_file), content_type="application/json", @@ -717,7 +717,7 @@ def test_raster_import_nofile(self): def test_import_export(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_raster_import_export), content_type="application/json", @@ -732,7 +732,7 @@ def test_import_export(self): def test_vector_import(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_vector_import_info), content_type="application/json", @@ -753,7 +753,7 @@ def test_vector_import(self): ) def test_sentinel_import_info(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_sentinel_import_info), content_type="application/json", @@ -774,7 +774,7 @@ def test_sentinel_import_info(self): ) def test_sentinel_import_univar(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_sentinel_import_univar), content_type="application/json", @@ -795,7 +795,7 @@ def test_sentinel_import_univar(self): ) def test_sentinel_import_stats(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_sentinel_import_stats), content_type="application/json", @@ -816,7 +816,7 @@ def test_sentinel_import_stats(self): ) def test_sentinel_import_error(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + 
"/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_sentinel_import_error), content_type="application/json", @@ -833,7 +833,7 @@ def test_stac_import(self): Test of STAC collection import with http response 200 """ - endpoint = URL_PREFIX + "/locations/nc_spm_08/processing_async_export" + endpoint = URL_PREFIX + "/projects/nc_spm_08/processing_async_export" rv = self.server.post( endpoint, headers=self.admin_auth_header, @@ -854,7 +854,7 @@ def test_stac_source_error_import(self): Test of STAC collection import with http response 400, raising error on wrongly structured, undefined, or missing source ID. """ - endpoint = URL_PREFIX + "/locations/nc_spm_08/processing_async_export" + endpoint = URL_PREFIX + "/projects/nc_spm_08/processing_async_export" rv = self.server.post( endpoint, headers=self.admin_auth_header, @@ -874,7 +874,7 @@ def test_stac_source_filter_error_import(self): or wrong Spatial coordinates in bbox. """ - endpoint = URL_PREFIX + "/locations/nc_spm_08/processing_async_export" + endpoint = URL_PREFIX + "/projects/nc_spm_08/processing_async_export" rv = self.server.post( endpoint, headers=self.admin_auth_header, @@ -892,7 +892,7 @@ def test_raster_import_resample_resolution(self): resolution method, with http response 200 """ rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_raster_import_resample_resolution), content_type="application/json", @@ -911,7 +911,7 @@ def test_raster_import_resample_resolution_info(self): resampling and resolution info """ rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps( process_chain_raster_import_resample_resolution_info @@ -953,7 +953,7 @@ def test_raster_import_resample_resolution_error_resamp(self): in options """ rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps( process_chain_raster_import_resample_resolution_error_resamp @@ -972,7 +972,7 @@ def test_raster_import_resample_resolution_error_resol(self): in options """ rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps( process_chain_raster_import_resample_resolution_error_resol @@ -991,7 +991,7 @@ def test_raster_import_resample_resolution_error_val_missing(self): resolution set to value """ rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps( pc_raster_import_resample_resolution_error_val_missing @@ -1010,7 +1010,7 @@ def test_raster_import_resample_resolution_error_val_not_float(self): convertible to float """ rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps( pc_raster_import_resample_resolution_error_val_not_float @@ -1029,7 +1029,7 @@ def test_raster_import_resample_resolution_error_resol_not_set(self): value set """ rv = self.server.post( - URL_PREFIX + 
"/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps( pc_raster_import_resample_resolution_error_resol_not_set @@ -1048,7 +1048,7 @@ def test_raster_import_resample_resolution_error_resol_not_val(self): "value" when value set """ rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=json_dumps( pc_raster_import_resample_resolution_error_resol_not_val diff --git a/tests/test_async_processing_mapset.py b/tests/test_async_processing_mapset.py index 75086a0a6..c5b3dda8f 100644 --- a/tests/test_async_processing_mapset.py +++ b/tests/test_async_processing_mapset.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,12 +35,12 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" # Module change example for r.slope.aspect with g.region adjustment process_chain_long = { @@ -106,7 +106,7 @@ class AsyncProcessMapsetTestCaseAdmin(ActiniaResourceTestCaseBase): def check_remove_test_mapset(self): rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/mapsets", + URL_PREFIX + "/projects/nc_spm_08/mapsets", headers=self.user_auth_header, ) print(rv.data) @@ -124,7 +124,7 @@ def check_remove_test_mapset(self): if "test_mapset" in mapsets: # Delete the mapset if it already exists rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -148,7 +148,7 @@ def test_1_new_mapset(self): rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/test_mapset/processing_async", + + "/projects/nc_spm_08/mapsets/test_mapset/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -156,7 +156,7 @@ def test_1_new_mapset(self): self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header) rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/mapsets", + URL_PREFIX + "/projects/nc_spm_08/mapsets", headers=self.admin_auth_header, ) print(rv.data) @@ -175,7 +175,7 @@ def test_1_new_mapset(self): rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/test_mapset/raster_layers", + + "/projects/nc_spm_08/mapsets/test_mapset/raster_layers", headers=self.admin_auth_header, ) print(rv.data) @@ -195,7 +195,7 @@ def test_1_new_mapset(self): # Remove the mapset rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -217,7 
+217,7 @@ def test_2_existing_mapset(self): # Create new mapset rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -234,7 +234,7 @@ def test_2_existing_mapset(self): # Atemporary mapset will be created and merged in the existing rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/test_mapset/processing_async", + + "/projects/nc_spm_08/mapsets/test_mapset/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -243,7 +243,7 @@ def test_2_existing_mapset(self): rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/test_mapset/raster_layers", + + "/projects/nc_spm_08/mapsets/test_mapset/raster_layers", headers=self.user_auth_header, ) print(rv.data) @@ -263,7 +263,7 @@ def test_2_existing_mapset(self): # Remove the mapset rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -286,7 +286,7 @@ def test_3_existing_mapset_lock(self): # Create new mapset rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -302,7 +302,7 @@ def test_3_existing_mapset_lock(self): # Run the processing inside the new mapset rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/test_mapset/processing_async", + + "/projects/nc_spm_08/mapsets/test_mapset/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -330,7 +330,7 @@ def test_3_existing_mapset_lock(self): # Second runner rv_lock_1 = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/test_mapset/processing_async", + + "/projects/nc_spm_08/mapsets/test_mapset/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_short), content_type="application/json", @@ -350,7 +350,7 @@ def test_3_existing_mapset_lock(self): # Third runner rv_lock_2 = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/test_mapset/processing_async", + + "/projects/nc_spm_08/mapsets/test_mapset/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -434,7 +434,7 @@ def test_3_existing_mapset_lock(self): # Remove the mapset rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -450,7 +450,7 @@ def test_3_existing_mapset_lock(self): def test_4_create_global_mapset(self): rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/PERMANENT/processing_async", + + "/projects/nc_spm_08/mapsets/PERMANENT/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", diff --git a/tests/test_async_processing_stdin_parameter_parser.py b/tests/test_async_processing_stdin_parameter_parser.py index 633841181..ba44da25c 100644 --- a/tests/test_async_processing_stdin_parameter_parser.py +++ b/tests/test_async_processing_stdin_parameter_parser.py @@ -36,6 +36,7 @@ __author__ = "Anika Weinmann" __copyright__ = "Copyright 2024, mundialis GmbH & Co. KG" __maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" PC_RUNIVAR = { "list": [ @@ -105,7 +106,7 @@ class AsyncProcessStdinParameterParserTestCase(ActiniaResourceTestCaseBase): def test_glist_parsing(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(PC_GLIST), content_type="application/json", @@ -130,7 +131,7 @@ def test_glist_parsing(self): def test_runivar_parsing(self): min, max = 0, 21 rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(PC_RUNIVAR), content_type="application/json", diff --git a/tests/test_async_processing_stdout_parser.py b/tests/test_async_processing_stdout_parser.py index f3c7a4768..dc3a5458a 100644 --- a/tests/test_async_processing_stdout_parser.py +++ b/tests/test_async_processing_stdout_parser.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -33,12 +33,12 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" process_chain = { "version": 1, @@ -129,7 +129,7 @@ class AsyncProcessStdoutParserTestCase(ActiniaResourceTestCaseBase): def test_output_parsing(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain), content_type="application/json", @@ -156,7 +156,7 @@ def test_output_parsing(self): def test_output_parsing_r_what(self): rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async", + URL_PREFIX + "/projects/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(r_what), content_type="application/json", diff --git a/tests/test_async_raster_export.py b/tests/test_async_raster_export.py index 8ce19dcbc..fc2717fe3 100644 --- a/tests/test_async_raster_export.py +++ b/tests/test_async_raster_export.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -33,18 +33,18 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. 
KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class RasterAsyncExport(ActiniaResourceTestCaseBase): def test_export(self): rv = self.server.post( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/geotiff_async", headers=self.user_auth_header, ) @@ -72,7 +72,7 @@ def test_export(self): def test_export_region(self): rv = self.server.post( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/geotiff_async_orig", headers=self.user_auth_header, ) @@ -100,7 +100,7 @@ def test_export_region(self): def test_export_error(self): rv = self.server.post( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" "raster_layers/elevationion/geotiff_async", headers=self.user_auth_header, ) diff --git a/tests/test_job_resumption.py b/tests/test_job_resumption.py index c3d623168..3b765f0f2 100644 --- a/tests/test_job_resumption.py +++ b/tests/test_job_resumption.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2021-20224 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -49,7 +49,7 @@ __license__ = "GPLv3" __author__ = "Anika Weinmann" -__copyright__ = "Copyright 2021-2022, mundialis GmbH & Co. KG" +__copyright__ = "Copyright 2021-2024, mundialis GmbH & Co. KG" __maintainer__ = "mundialis GmbH & Co. 
KG" __email__ = "info@mundialis.de" @@ -289,7 +289,7 @@ class JobResumptionProcessingTestCase(ActiniaResourceTestCaseBase): cfg_file = os.environ.get("DEFAULT_CONFIG_PATH", "/etc/default/actinia") tmp_cfg_file = "%s_tmp" % cfg_file save_interim_results_value = None - endpoint = "/locations/nc_spm_08/processing_async" + endpoint = "/projects/nc_spm_08/processing_async" resource_user_id = None resource_resource_id = None sleep_time = 15 @@ -916,7 +916,7 @@ def test_resource_endpoints(self): class JobResumptionProcessingExportTestCase(JobResumptionProcessingTestCase): - endpoint = "/locations/nc_spm_08/processing_async_export" + endpoint = "/projects/nc_spm_08/processing_async_export" resource_user_id = None resource_resource_id = None @@ -1075,9 +1075,9 @@ def test_job_2_times_resumption_exporter(self): class JobResumptionPersistentProcessingTestCase( JobResumptionProcessingTestCase ): - location = "nc_spm_08" + project = "nc_spm_08" mapset = "test_mapset" - endpoint = "/locations/%s/mapsets/%s/processing_async" % (location, mapset) + endpoint = "/projects/%s/mapsets/%s/processing_async" % (project, mapset) resource_user_id = None resource_resource_id = None mapset_created = True @@ -1086,14 +1086,14 @@ def tearDown(self): if self.mapset_created is True: rv = self.server.delete( URL_PREFIX - + "/locations/%s/mapsets/%s/lock" - % (self.location, self.mapset), + + "/projects/%s/mapsets/%s/lock" + % (self.project, self.mapset), headers=self.admin_auth_header, ) self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header) rv2 = self.server.delete( URL_PREFIX - + "/locations/%s/mapsets/%s" % (self.location, self.mapset), + + "/projects/%s/mapsets/%s" % (self.project, self.mapset), headers=self.admin_auth_header, ) self.waitAsyncStatusAssertHTTP(rv2, headers=self.admin_auth_header) @@ -1103,7 +1103,7 @@ def tearDown(self): def test_saved_interim_results(self): """Test if the interim results are removed""" - self.create_new_mapset(self.mapset, self.location) + self.create_new_mapset(self.mapset, self.project) tpl = Template(json_dumps(process_chain_1)) rv = self.server.post( URL_PREFIX + self.endpoint, @@ -1149,7 +1149,7 @@ def test_resource_endpoints(self): class JobResumptionErrorTestCase(ActiniaResourceTestCaseBase): - endpoint = "/locations/nc_spm_08/processing_async" + endpoint = "/projects/nc_spm_08/processing_async" def test_job_resumption_config_error(self): """Test if the job resumption fails if save_interim_results is set to diff --git a/tests/test_mapset_management.py b/tests/test_mapset_management.py index d62ac70d1..dbfdb27f4 100644 --- a/tests/test_mapset_management.py +++ b/tests/test_mapset_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -33,18 +33,18 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. 
KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class MapsetTestCase(ActiniaResourceTestCaseBase): def test_list_mapsets(self): rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/mapsets", + URL_PREFIX + "/projects/nc_spm_08/mapsets", headers=self.user_auth_header, ) print(rv.data) @@ -64,7 +64,7 @@ def test_list_mapsets(self): def test_mapsets_region_1(self): rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/mapsets/PERMANENT/info", + URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT/info", headers=self.admin_auth_header, ) print(rv.data) @@ -86,7 +86,7 @@ def test_mapsets_region_1(self): def test_mapsets_region_2(self): rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/mapsets/user1/info", + URL_PREFIX + "/projects/nc_spm_08/mapsets/user1/info", headers=self.admin_auth_header, ) print(rv.data) @@ -111,7 +111,7 @@ def test_mapset_creation_and_deletion(self): # Mapset already exists rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -126,7 +126,7 @@ def test_mapset_creation_and_deletion(self): # Delete mapset rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -141,7 +141,7 @@ def test_mapset_creation_and_deletion(self): # Delete should fail, since mapset does not exists rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -157,7 +157,7 @@ def test_mapset_creation_and_deletion(self): def test_mapset_creation_and_deletion_unprivileged(self): # Create new mapsets as unprivileged user rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", headers=self.guest_auth_header, ) print(rv.data) @@ -169,7 +169,7 @@ def test_mapset_creation_and_deletion_unprivileged(self): # Delete mapset as unprivileged user rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", headers=self.guest_auth_header, ) print(rv.data) @@ -182,7 +182,7 @@ def test_mapset_creation_and_deletion_unprivileged(self): def test_mapset_deletion_permanent_error(self): # Delete PERMANENT rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/PERMANENT", + URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT", headers=self.admin_auth_header, ) print(rv.data) @@ -195,7 +195,7 @@ def test_mapset_deletion_permanent_error(self): def test_mapset_deletion_global_db_error(self): # Delete PERMANENT rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/user1", + URL_PREFIX + "/projects/nc_spm_08/mapsets/user1", headers=self.admin_auth_header, ) print(rv.data) @@ -208,21 +208,21 @@ def test_mapset_deletion_global_db_error(self): def test_mapset_creation_and_locking(self): # Unlock mapset for deletion rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset_2/lock", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) # Delete any existing mapsets rv = self.server.delete( - URL_PREFIX + 
"/locations/nc_spm_08/mapsets/test_mapset_2", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2", headers=self.admin_auth_header, ) print(rv.data) # Create new mapsets rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset_2", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2", headers=self.admin_auth_header, ) print(rv.data) @@ -237,7 +237,7 @@ def test_mapset_creation_and_locking(self): # Lock mapset rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset_2/lock", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) @@ -252,7 +252,7 @@ def test_mapset_creation_and_locking(self): # get mapset lock(False) rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset_2/lock", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) @@ -270,7 +270,7 @@ def test_mapset_creation_and_locking(self): # Unlock mapset rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset_2/lock", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) @@ -285,7 +285,7 @@ def test_mapset_creation_and_locking(self): # get mapset lock (False) rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset_2/lock", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) @@ -303,7 +303,7 @@ def test_mapset_creation_and_locking(self): # Delete mapset rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset_2", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2", headers=self.admin_auth_header, ) print(rv.data) @@ -318,7 +318,7 @@ def test_mapset_creation_and_locking(self): # get mapset lock (False) rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset_2/lock", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) @@ -339,7 +339,7 @@ def test_mapset_creation_and_locking(self): # error is logged. Skip until fixed. TODO reactivate # https://github.com/actinia-org/actinia-core/issues/487 # rv = self.server.post( - # URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset_2/lock", + # URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", # headers=self.admin_auth_header, # ) # print(rv.data) @@ -356,7 +356,7 @@ def test_mapset_creation_and_locking(self): # Unlock mapset rv = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/test_mapset_2/lock", + URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) diff --git a/tests/test_mapsets.py b/tests/test_mapsets.py index 53b61a37d..97b19010a 100644 --- a/tests/test_mapsets.py +++ b/tests/test_mapsets.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2021 mundialis GmbH & Co. KG +# Copyright (c) 2021-2024 mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -34,9 +34,10 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Julia Haas, Guido Riembauer" -__copyright__ = "Copyright 2021 mundialis GmbH & Co. 
KG" -__maintainer__ = "mundialis" +__author__ = "Julia Haas, Guido Riembauer, Anika Weinmann" +__copyright__ = "Copyright 2021-2024 mundialis GmbH & Co. KG" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class MapsetsTestCase(ActiniaResourceTestCaseBase): @@ -47,9 +48,9 @@ class MapsetsTestCase(ActiniaResourceTestCaseBase): "latlong_wgs84": ["PERMANENT"], } ref_mapsets = [] - for location in accessible_datasets: - for mapset in accessible_datasets[location]: - ref_mapsets.append(f"{location}/{mapset}") + for project in accessible_datasets: + for mapset in accessible_datasets[project]: + ref_mapsets.append(f"{project}/{mapset}") @classmethod def setUpClass(cls): @@ -69,7 +70,7 @@ def setUpClass(cls): def tearDown(self): # unlock and delete the test mapsets rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/mapsets", + URL_PREFIX + "/projects/nc_spm_08/mapsets", headers=self.user_auth_header, ) existing_mapsets = json_load(rv.data)["process_results"] @@ -77,13 +78,13 @@ def tearDown(self): if mapset in existing_mapsets: rvdellock = self.server.delete( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/lock" % mapset, + + "/projects/nc_spm_08/mapsets/%s/lock" % mapset, headers=self.admin_auth_header, ) print(rvdellock.data.decode()) rvdel = self.server.delete( - URL_PREFIX + "/locations/nc_spm_08/mapsets/%s" % mapset, + URL_PREFIX + "/projects/nc_spm_08/mapsets/%s" % mapset, headers=self.admin_auth_header, ) print(rvdel.data.decode()) @@ -93,7 +94,7 @@ def test_two_locked_mapsets(self): for mapset in self.test_mapsets: self.create_new_mapset(mapset) self.server.post( - URL_PREFIX + "/locations/nc_spm_08/mapsets/%s/lock" % mapset, + URL_PREFIX + "/projects/nc_spm_08/mapsets/%s/lock" % mapset, headers=self.root_auth_header, ) rv = self.server.get( diff --git a/tests/test_noauth.py b/tests/test_noauth.py index 5d3f04294..59c71fba2 100644 --- a/tests/test_noauth.py +++ b/tests/test_noauth.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2023 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2023-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -22,7 +22,7 @@ ####### """ -Tests: Location test case +Tests: Projct test case """ import os from flask.json import dumps as json_dumps @@ -41,7 +41,7 @@ __license__ = "GPLv3" __author__ = "Anika Weinmann" -__copyright__ = "Copyright 2023, mundialis GmbH & Co. KG" +__copyright__ = "Copyright 2023-2024, mundialis GmbH & Co. KG" __maintainer__ = "mundialis GmbH & Co. 
KG" __email__ = "info@mundialis.de" @@ -186,9 +186,9 @@ def test_01_version(self): self.assertIn("version", data) self.assertIn("grass_version", data) - def test_02_list_locations(self): - """Test list location endpoint""" - rv = self.server.get(f"{URL_PREFIX}/locations") + def test_02_list_projects(self): + """Test list project endpoint""" + rv = self.server.get(f"{URL_PREFIX}/projects") self.assertEqual( rv.status_code, 200, @@ -198,14 +198,14 @@ def test_02_list_locations(self): rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype ) self.assertIn( - "locations", json_loads(rv.data), "No locations in response" + "projects", json_loads(rv.data), "No projects in response" ) - locations = json_loads(rv.data)["locations"] - self.assertIn("nc_spm_08", locations, "Wrong location listed") + projects = json_loads(rv.data)["projects"] + self.assertIn("nc_spm_08", projects, "Wrong project listed") def test_03_processing_ephemeral(self): """Test job resumption with processing_async endpoint and stdout""" - endpoint = "/locations/nc_spm_08/processing_async" + endpoint = "/projects/nc_spm_08/processing_async" rv = self.server.post( f"{URL_PREFIX}{endpoint}", data=json_dumps(PC), @@ -222,7 +222,7 @@ def test_04_processing_persistent(self): """Test job resumption with persistent processing_async endpoint and stdout """ - endpoint = "/locations/nc_spm_08/mapsets/test/processing_async" + endpoint = "/projects/nc_spm_08/mapsets/test/processing_async" rv = self.server.post( f"{URL_PREFIX}{endpoint}", data=json_dumps(PC), @@ -235,7 +235,7 @@ def test_04_processing_persistent(self): ) self.compare_stdout(resp) # check processing mapset - rv2 = self.server.get(f"{URL_PREFIX}/locations/nc_spm_08/mapsets") + rv2 = self.server.get(f"{URL_PREFIX}/projects/nc_spm_08/mapsets") self.assertEqual( rv2.status_code, 200, @@ -247,7 +247,7 @@ def test_04_processing_persistent(self): ) # check created raster rv3 = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/test/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/test/raster_layers" ) self.assertEqual( rv3.status_code, @@ -261,7 +261,7 @@ def test_04_processing_persistent(self): # delete test mapset self.admin_auth_header = None self.delete_mapset("test", "nc_spm_08") - rv4 = self.server.get(f"{URL_PREFIX}/locations/nc_spm_08/mapsets") + rv4 = self.server.get(f"{URL_PREFIX}/projects/nc_spm_08/mapsets") self.assertEqual( rv4.status_code, 200, diff --git a/tests/test_location_management.py b/tests/test_project_management.py similarity index 72% rename from tests/test_location_management.py rename to tests/test_project_management.py index 29aad9e1b..bca056523 100644 --- a/tests/test_location_management.py +++ b/tests/test_project_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -22,7 +22,7 @@ ####### """ -Tests: Location test case +Tests: Project test case """ from flask.json import loads as json_loads, dumps as json_dumps import unittest @@ -35,16 +35,16 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2022, Sören Gebbert and mundialis GmbH & Co. 
KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "Sören Gebbert" __email__ = "soerengebbert@googlemail.com" -class LocationTestCase(ActiniaResourceTestCaseBase): - def test_list_locations(self): +class ProjectTestCase(ActiniaResourceTestCaseBase): + def test_list_projects(self): rv = self.server.get( - URL_PREFIX + "/locations", headers=self.user_auth_header + URL_PREFIX + "/projects", headers=self.user_auth_header ) print(rv.data) self.assertEqual( @@ -56,14 +56,14 @@ def test_list_locations(self): rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype ) - if "nc_spm_08" in json_loads(rv.data)["locations"]: - location = "nc_spm_08" + if "nc_spm_08" in json_loads(rv.data)["projects"]: + project = "nc_spm_08" - self.assertEqual(location, "nc_spm_08", "Wrong location listed") + self.assertEqual(project, "nc_spm_08", "Wrong project listed") - def test_location_info(self): + def test_project_info(self): rv = self.server.get( - URL_PREFIX + "/locations/nc_spm_08/info", + URL_PREFIX + "/projects/nc_spm_08/info", headers=self.admin_auth_header, ) print(rv.data) @@ -84,11 +84,11 @@ def test_location_info(self): self.assertTrue("cols" in region_settings) self.assertTrue("rows" in region_settings) - def test_location_global_db_error(self): - # ERROR: Try to create a location as admin that exists in the global + def test_project_global_db_error(self): + # ERROR: Try to create a project as admin that exists in the global # database rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08", + URL_PREFIX + "/projects/nc_spm_08", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.admin_auth_header, @@ -103,16 +103,16 @@ def test_location_global_db_error(self): rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype ) - def test_location_creation_and_deletion(self): - # Delete a potentially existing location + def test_project_creation_and_deletion(self): + # Delete a potentially existing project rv = self.server.delete( - URL_PREFIX + "/locations/test_location", + URL_PREFIX + "/projects/test_project", headers=self.admin_auth_header, ) - # Create new location as admin + # Create new project as admin rv = self.server.post( - URL_PREFIX + "/locations/test_location", + URL_PREFIX + "/projects/test_project", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.admin_auth_header, @@ -127,9 +127,9 @@ def test_location_creation_and_deletion(self): rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype ) - # ERROR: Try to create a location as admin that already exists + # ERROR: Try to create a project as admin that already exists rv = self.server.post( - URL_PREFIX + "/locations/test_location", + URL_PREFIX + "/projects/test_project", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.admin_auth_header, @@ -144,9 +144,9 @@ def test_location_creation_and_deletion(self): rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype ) - # Delete location + # Delete project rv = self.server.delete( - URL_PREFIX + "/locations/test_location", + URL_PREFIX + "/projects/test_project", headers=self.admin_auth_header, ) print(rv.data) @@ -159,9 +159,9 @@ def test_location_creation_and_deletion(self): rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype ) - # ERROR: Delete should fail, since location does not exists + # ERROR: Delete should fail, since project does not exists rv = self.server.delete( - URL_PREFIX + 
"/locations/test_location", + URL_PREFIX + "/projects/test_project", headers=self.admin_auth_header, ) print(rv.data) @@ -174,16 +174,16 @@ def test_location_creation_and_deletion(self): rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype ) - def test_location_creation_and_deletion_as_user(self): - # Delete a potentially existing location + def test_project_creation_and_deletion_as_user(self): + # Delete a potentially existing project rv = self.server.delete( - URL_PREFIX + "/locations/test_location", + URL_PREFIX + "/projects/test_project", headers=self.user_auth_header, ) - # Create new location as user + # Create new project as user rv = self.server.post( - URL_PREFIX + "/locations/test_location", + URL_PREFIX + "/projects/test_project", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.user_auth_header, @@ -191,18 +191,18 @@ def test_location_creation_and_deletion_as_user(self): self.assertEqual( rv.status_code, 200, - "Location creation by user: HTML status code is wrong %i" + "Project creation by user: HTML status code is wrong %i" % rv.status_code, ) self.assertEqual( rv.mimetype, "application/json", - "Location creation by user: Wrong mimetype %s" % rv.mimetype, + "Project creation by user: Wrong mimetype %s" % rv.mimetype, ) - # ERROR: Try to create a location as user that already exists + # ERROR: Try to create a project as user that already exists rv = self.server.post( - URL_PREFIX + "/locations/test_location", + URL_PREFIX + "/projects/test_project", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.user_auth_header, @@ -210,53 +210,53 @@ def test_location_creation_and_deletion_as_user(self): self.assertEqual( rv.status_code, 400, - "Location recreation by user: HTML status code is wrong %i" + "Project recreation by user: HTML status code is wrong %i" % rv.status_code, ) self.assertEqual( rv.mimetype, "application/json", - "Location recreation by user: Wrong mimetype %s" % rv.mimetype, + "Project recreation by user: Wrong mimetype %s" % rv.mimetype, ) - # Delete location + # Delete project rv = self.server.delete( - URL_PREFIX + "/locations/test_location", + URL_PREFIX + "/projects/test_project", headers=self.user_auth_header, ) self.assertEqual( rv.status_code, 200, - "Location deletion by user: HTML status code is wrong %i" + "Project deletion by user: HTML status code is wrong %i" % rv.status_code, ) self.assertEqual( rv.mimetype, "application/json", - "Location deletion by user: Wrong mimetype %s" % rv.mimetype, + "Project deletion by user: Wrong mimetype %s" % rv.mimetype, ) - # ERROR: Delete should fail, since location does not exists + # ERROR: Delete should fail, since project does not exists rv = self.server.delete( - URL_PREFIX + "/locations/test_location", + URL_PREFIX + "/projects/test_project", headers=self.user_auth_header, ) self.assertEqual( rv.status_code, 400, - "Location redeletion by user: HTML status code is wrong %i" + "Project redeletion by user: HTML status code is wrong %i" % rv.status_code, ) self.assertEqual( rv.mimetype, "application/json", - "Location redeletion by user: Wrong mimetype %s" % rv.mimetype, + "Project redeletion by user: Wrong mimetype %s" % rv.mimetype, ) - def test_location_creation_and_deletion_as_guest(self): - # ERROR: Try to create a location as guest + def test_project_creation_and_deletion_as_guest(self): + # ERROR: Try to create a project as guest rv = self.server.post( - URL_PREFIX + "/locations/test_location_user", + URL_PREFIX + 
"/projects/test_project_user", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.guest_auth_header, @@ -273,7 +273,7 @@ def test_location_creation_and_deletion_as_guest(self): # ERROR: Delete should fail since the guest user is not authorized rv = self.server.delete( - URL_PREFIX + "/locations/test_location_user", + URL_PREFIX + "/projects/test_project_user", headers=self.guest_auth_header, ) print(rv.data) diff --git a/tests/test_raster_colors.py b/tests/test_raster_colors.py index 0b53be820..9d5e29860 100644 --- a/tests/test_raster_colors.py +++ b/tests/test_raster_colors.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -36,18 +36,18 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Soeren Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class RasterLayerTestCase(ActiniaResourceTestCaseBase): def test_raster_layer_get_colors(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/colors", headers=self.user_auth_header, ) @@ -95,7 +95,7 @@ def test_raster_layer_set_colors(self): } rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/processing_async" % new_mapset, + + "/projects/nc_spm_08/mapsets/%s/processing_async" % new_mapset, headers=self.user_auth_header, data=json_dumps(postbody), content_type="application/json", @@ -126,7 +126,7 @@ def test_raster_layer_set_colors(self): # Set the color table rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" % new_mapset, headers=self.user_auth_header, data=json_dumps(rules), @@ -148,7 +148,7 @@ def test_raster_layer_set_colors(self): # Set the color table rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" % new_mapset, headers=self.user_auth_header, data=json_dumps(rules), @@ -170,7 +170,7 @@ def test_raster_layer_set_colors(self): # Set the color table rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" % new_mapset, headers=self.user_auth_header, data=json_dumps(rules), @@ -190,7 +190,7 @@ def test_raster_layer_set_colors(self): # Delete rv = self.server.delete( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer" % new_mapset, headers=self.user_auth_header, ) @@ -235,7 +235,7 @@ def test_1_raster_layer_set_colors_errors(self): } rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/processing_async" % new_mapset, + + 
"/projects/nc_spm_08/mapsets/%s/processing_async" % new_mapset, headers=self.user_auth_header, data=json_dumps(postbody), content_type="application/json", @@ -264,7 +264,7 @@ def test_1_raster_layer_set_colors_errors(self): rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" % new_mapset, headers=self.user_auth_header, data=json_dumps(rules), @@ -288,7 +288,7 @@ def test_1_raster_layer_set_colors_errors(self): rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" % new_mapset, headers=self.user_auth_header, data=json_dumps(rules), @@ -310,7 +310,7 @@ def test_1_raster_layer_set_colors_errors(self): rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" % new_mapset, headers=self.user_auth_header, data=json_dumps(rules), @@ -332,7 +332,7 @@ def test_1_raster_layer_set_colors_errors(self): rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" % new_mapset, headers=self.user_auth_header, data=json_dumps(rules), @@ -354,7 +354,7 @@ def test_1_raster_layer_set_colors_errors(self): rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" % new_mapset, headers=self.user_auth_header, data=json_dumps(rules), @@ -376,7 +376,7 @@ def test_1_raster_layer_set_colors_errors(self): rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" % new_mapset, headers=self.user_auth_header, data=json_dumps(rules), @@ -401,7 +401,7 @@ def test_1_raster_layer_set_colors_errors(self): rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" % new_mapset, headers=self.user_auth_header, data=json_dumps(rules), @@ -421,7 +421,7 @@ def test_1_raster_layer_set_colors_errors(self): # Delete rv = self.server.delete( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/test_layer" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer" % new_mapset, headers=self.user_auth_header, ) @@ -438,7 +438,7 @@ def test_1_raster_layer_set_colors_errors(self): def test_raster_layer_colors_error_1(self): # Raster does not exist rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevat/colors", headers=self.user_auth_header, ) diff --git a/tests/test_raster_layer.py b/tests/test_raster_layer.py index d43b41df7..d744ebe66 100644 --- a/tests/test_raster_layer.py +++ b/tests/test_raster_layer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,12 +35,12 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class RasterLayerTestCase(ActiniaResourceTestCaseBase): @@ -49,7 +49,7 @@ class RasterLayerTestCase(ActiniaResourceTestCaseBase): def test_raster_layer_info(self): rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/PERMANENT/raster_layers/elevation", + + "/projects/nc_spm_08/mapsets/PERMANENT/raster_layers/elevation", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -74,7 +74,7 @@ def test_raster_layer_info_error_1(self): # Raster does not exist rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/PERMANENT/raster_layers/elevat", + + "/projects/nc_spm_08/mapsets/PERMANENT/raster_layers/elevat", headers=self.user_auth_header, ) pprint(json_load(rv.data)) diff --git a/tests/test_raster_layers.py b/tests/test_raster_layers.py index 2af3fe259..0e9a55e61 100644 --- a/tests/test_raster_layers.py +++ b/tests/test_raster_layers.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -33,12 +33,12 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" class ListRasterLayersTestCase(ActiniaResourceTestCaseBase): @@ -46,7 +46,7 @@ def create_raster_layer(self, mapset_name, raster_name): # Remove potentially existing raster layer rv = self.server.delete( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/%s" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/%s" % (mapset_name, raster_name), headers=self.user_auth_header, ) @@ -81,7 +81,7 @@ def create_raster_layer(self, mapset_name, raster_name): } rv = self.server.post( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/processing_async" % mapset_name, + + "/projects/nc_spm_08/mapsets/%s/processing_async" % mapset_name, headers=self.user_auth_header, data=json_dumps(postbody), content_type="application/json", @@ -105,7 +105,7 @@ def create_raster_layer(self, mapset_name, raster_name): def test_list_raster_layers(self): rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/PERMANENT/raster_layers", + + "/projects/nc_spm_08/mapsets/PERMANENT/raster_layers", headers=self.user_auth_header, ) print(rv.data.decode()) @@ -126,7 +126,7 @@ def test_list_raster_layers(self): def test_list_raster_layers_pattern(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" "raster_layers?pattern=lsat*", headers=self.user_auth_header, ) @@ -148,7 +148,7 @@ def test_list_raster_layers_pattern(self): def test_list_raster_layers_empty_list(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" "raster_layers?pattern=NONE", headers=self.user_auth_header, ) @@ -177,7 +177,7 @@ def test_remove_raster_layers_pattern(self): # # Delete raster layers # rv = self.server.delete( - # f"{URL_PREFIX}/locations/nc_spm_08/mapsets/user1/raster_layers?" + # f"{URL_PREFIX}/projects/nc_spm_08/mapsets/user1/raster_layers?" 
# "pattern=test_delete_layer_*", # headers=self.user_auth_header # ) @@ -194,7 +194,7 @@ def test_remove_raster_layers_pattern(self): # List raster layer rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/{new_mapset}/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{new_mapset}/" "raster_layers?pattern=test_delete_layer_*", headers=self.user_auth_header, ) @@ -216,7 +216,7 @@ def test_remove_raster_layers_pattern(self): for map_name in map_list: rv = self.server.delete( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/%s" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/%s" % (new_mapset, map_name), headers=self.user_auth_header, ) @@ -240,7 +240,7 @@ def test_rename_raster_layers(self): # Rename raster layer rv = self.server.put( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers" % new_mapset, + + "/projects/nc_spm_08/mapsets/%s/raster_layers" % new_mapset, headers=self.user_auth_header, data=json_dumps(rename_map_list), content_type="application/json", @@ -258,7 +258,7 @@ def test_rename_raster_layers(self): # Rename raster layer rv = self.server.put( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers" % new_mapset, + + "/projects/nc_spm_08/mapsets/%s/raster_layers" % new_mapset, headers=self.user_auth_header, data=json_dumps(rename_map_list), content_type="application/json", @@ -277,7 +277,7 @@ def test_rename_raster_layers(self): for map_name in new_map_list: rv = self.server.delete( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/raster_layers/%s" + + "/projects/nc_spm_08/mapsets/%s/raster_layers/%s" % (new_mapset, map_name), headers=self.user_auth_header, ) diff --git a/tests/test_raster_legend.py b/tests/test_raster_legend.py index 351bea98e..b1007559e 100644 --- a/tests/test_raster_legend.py +++ b/tests/test_raster_legend.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -34,18 +34,18 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Soeren Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" class RasterLegendTestCase(ActiniaResourceTestCaseBase): def test_raster_legend_no_args(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" "raster_layers/elevation/legend", headers=self.user_auth_header, ) @@ -61,7 +61,7 @@ def test_raster_legend_no_args(self): def test_raster_legend_args_1(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" "raster_layers/elevation/legend?at=0,100,0,20", headers=self.user_auth_header, ) @@ -77,7 +77,7 @@ def test_raster_legend_args_1(self): def test_raster_legend_args_2(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" "raster_layers/elevation/legend?range=100,120", headers=self.user_auth_header, ) @@ -93,7 +93,7 @@ def test_raster_legend_args_2(self): def test_raster_legend_args_3(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/legend?&use=100,110,120", headers=self.user_auth_header, ) @@ -109,7 +109,7 @@ def test_raster_legend_args_3(self): def test_raster_legend_args_4(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/legend?&fontsize=100", headers=self.user_auth_header, ) @@ -125,7 +125,7 @@ def test_raster_legend_args_4(self): def test_raster_legend_args_5(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/legend?width=100&height=100", headers=self.user_auth_header, ) @@ -141,7 +141,7 @@ def test_raster_legend_args_5(self): def test_raster_legend_args_6(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/legend?width=100&height=100&range=100,120&" "use=105,110,115&at=0,100,0,30", headers=self.user_auth_header, @@ -158,7 +158,7 @@ def test_raster_legend_args_6(self): def test_raster_legend_args_7(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/legend?labelnum=4", headers=self.user_auth_header, ) @@ -175,7 +175,7 @@ def test_raster_legend_args_7(self): def test_raster_legend_args_error_1(self): # Wrong "at" parameter rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/legend?at=-0,-0", headers=self.user_auth_header, ) @@ -194,7 +194,7 @@ def test_raster_legend_args_error_1(self): def test_raster_legend_args_error_2(self): # Wrong witdth rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/legend?width=-20&at=20,40,20,40", headers=self.user_auth_header, ) @@ -213,7 +213,7 @@ def test_raster_legend_args_error_2(self): def test_raster_legend_args_error_3(self): # Wrong range and use rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + 
f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/legend?range=100,120&use=90,130,115", headers=self.user_auth_header, ) @@ -232,7 +232,7 @@ def test_raster_legend_args_error_3(self): def test_raster_legend_args_error_4(self): # Wrong labelnum rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/legend?labelnum=-4", headers=self.user_auth_header, ) diff --git a/tests/test_raster_renderer.py b/tests/test_raster_renderer.py index acf0f65e7..69e8be5b7 100644 --- a/tests/test_raster_renderer.py +++ b/tests/test_raster_renderer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,18 +35,18 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Soeren Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class RasterLayerRendererTestCase(ActiniaResourceTestCaseBase): def test_raster_layer_image_no_args(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/render", headers=self.user_auth_header, ) @@ -62,7 +62,7 @@ def test_raster_layer_image_no_args(self): def test_raster_layer_image_args_1(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/render?n=228500&s=215000&w=630000&e=645000", headers=self.user_auth_header, ) @@ -78,7 +78,7 @@ def test_raster_layer_image_args_1(self): def test_raster_layer_image_args_2(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/render?n=228500&s=215000&w=630000&e=645000&" "width=100&height=100", headers=self.user_auth_header, @@ -95,7 +95,7 @@ def test_raster_layer_image_args_2(self): def test_raster_layer_image_args_3(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/render?width=100&height=100", headers=self.user_auth_header, ) @@ -112,7 +112,7 @@ def test_raster_layer_image_args_3(self): def test_raster_layer_image_args_error_1(self): # North is smaller then south rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevation/render?n=-228500&s=215000", headers=self.user_auth_header, ) @@ -130,7 +130,7 @@ def test_raster_layer_image_args_error_2(self): # Negative size rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/PERMANENT/raster_layers/elevation/" + + 
"/projects/nc_spm_08/mapsets/PERMANENT/raster_layers/elevation/" "render?&width=-100&height=-100", headers=self.user_auth_header, ) @@ -147,7 +147,7 @@ def test_raster_layer_image_args_error_2(self): def test_raster_layer_image_args_error_3(self): # Raster does not exist rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/raster_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" "/elevat/render?", headers=self.user_auth_header, ) @@ -163,7 +163,7 @@ def test_raster_layer_image_args_error_3(self): def test_raster_layer_image_rgb_1(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/landsat/render_rgb?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" "red=lsat5_1987_10&blue=lsat5_1987_20&green=lsat5_1987_30", headers=self.user_auth_header, ) @@ -179,7 +179,7 @@ def test_raster_layer_image_rgb_1(self): def test_raster_layer_image_rgb_2(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/landsat/render_rgb?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" "n=228513&s=214975.5&w=629992.5&e=645012&width=100&height=100" "&red=lsat5_1987_30&blue=lsat5_1987_20&green=lsat5_1987_10", headers=self.user_auth_header, @@ -196,7 +196,7 @@ def test_raster_layer_image_rgb_2(self): def test_raster_layer_image_rgb_3(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/landsat/render_rgb?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" "width=100&height=100" "&red=lsat5_1987_30&blue=lsat5_1987_20&green=lsat5_1987_10", headers=self.user_auth_header, @@ -214,7 +214,7 @@ def test_raster_layer_image_rgb_3(self): def test_raster_layer_image_rgb_error_green(self): # No green raster layer rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/landsat/render_rgb?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" "n=228513&s=214975.5&w=629992.5&e=645012" "&red=lsat5_1987_30&blue=lsat5_1987_20", headers=self.user_auth_header, @@ -233,7 +233,7 @@ def test_raster_layer_image_rgb_error_green(self): def test_raster_layer_image_rgb_error_blue(self): # No blue raster layer rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/landsat/render_rgb?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" "n=228513&s=214975.5&w=629992.5&e=645012" "&red=lsat5_1987_30&green=lsat5_1987_20", headers=self.user_auth_header, @@ -252,7 +252,7 @@ def test_raster_layer_image_rgb_error_blue(self): def test_raster_layer_image_rgb_error_red(self): # No red raster layer rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/landsat/render_rgb?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" "n=228513&s=214975.5&w=629992.5&e=645012" "&blue=lsat5_1987_30&green=lsat5_1987_20", headers=self.user_auth_header, @@ -270,7 +270,7 @@ def test_raster_layer_image_rgb_error_red(self): def test_raster_layer_image_rgb_error_wrong_raster(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/landsat/render_rgb?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" "n=228513&s=214975.5&w=629992.5&e=645012" "&red=lsat5_1987_30_1&blue=lsat5_1987_20&green=lsat5_1987_10", headers=self.user_auth_header, @@ -288,7 +288,7 @@ def test_raster_layer_image_rgb_error_wrong_raster(self): def test_raster_layer_image_rgb_error_mapset_in_name_1(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/landsat/render_rgb?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" 
"red=lsat5_1987_10@landsat&blue=lsat5_1987_20&green=lsat5_1987_30", headers=self.user_auth_header, ) @@ -305,7 +305,7 @@ def test_raster_layer_image_rgb_error_mapset_in_name_1(self): def test_raster_layer_image_rgb_error_mapset_in_name_2(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/landsat/render_rgb?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" "red=lsat5_1987_10&blue=lsat5_1987_20@landsat&green=lsat5_1987_30", headers=self.user_auth_header, ) @@ -322,7 +322,7 @@ def test_raster_layer_image_rgb_error_mapset_in_name_2(self): def test_raster_layer_image_rgb_error_mapset_in_name_3(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/landsat/render_rgb?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" "red=lsat5_1987_10&blue=lsat5_1987_20&green=lsat5_1987_30@landsat", headers=self.user_auth_header, ) @@ -339,7 +339,7 @@ def test_raster_layer_image_rgb_error_mapset_in_name_3(self): def test_raster_layer_image_shade_1(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/render_shade?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/render_shade?" "shade=aspect&color=elevation", headers=self.user_auth_header, ) @@ -355,7 +355,7 @@ def test_raster_layer_image_shade_1(self): def test_raster_layer_image_shade_2(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/render_shade?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/render_shade?" "width=100&height=100" "&shade=aspect&color=elevation", headers=self.user_auth_header, @@ -372,7 +372,7 @@ def test_raster_layer_image_shade_2(self): def test_raster_layer_image_error_mapset_in_name_1(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/render_shade?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/render_shade?" "&shade=aspect@PERMANENT&color=elevation", headers=self.user_auth_header, ) @@ -389,7 +389,7 @@ def test_raster_layer_image_error_mapset_in_name_1(self): def test_raster_layer_image_error_mapset_in_name_2(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/render_shade?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/render_shade?" "&shade=aspect&color=elevation@PERMANENT", headers=self.user_auth_header, ) @@ -406,7 +406,7 @@ def test_raster_layer_image_error_mapset_in_name_2(self): def test_raster_layer_image_error_missing_color_1(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/render_shade?" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/render_shade?" "&shade=aspect", headers=self.user_auth_header, ) diff --git a/tests/test_user_management.py b/tests/test_user_management.py index ed47b1ed6..b6b3f792a 100644 --- a/tests/test_user_management.py +++ b/tests/test_user_management.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -34,12 +34,12 @@ from test_resource_base import ActiniaResourceTestCaseBase __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class UserTestCase(ActiniaResourceTestCaseBase): @@ -215,8 +215,8 @@ def test_create_update_user(self): user = ActiniaUser(user_id=self.user_id) user.read_from_db() - user.remove_location("utm32n") - user.remove_mapsets_from_location( + user.remove_project("utm32n") + user.remove_mapsets_from_project( "nc_spm_08", [ "user1", diff --git a/tests/test_vector_layer.py b/tests/test_vector_layer.py index 3bc6ceb0a..5740d84a2 100644 --- a/tests/test_vector_layer.py +++ b/tests/test_vector_layer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -38,9 +38,10 @@ __license__ = "GPLv3" __author__ = "Sören Gebbert, Anika Weinmann, Guido Riembauer" __copyright__ = ( - "Copyright 2016-2021, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) __maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" class RasterLayerTestCase(ActiniaResourceTestCaseBase): @@ -57,7 +58,7 @@ def test_creation_1(self): ) # rv = self.server.post( # URL_PREFIX - # + "/locations/nc_spm_08/mapsets/%s/vector_layers/test_layer" + # + "/projects/nc_spm_08/mapsets/%s/vector_layers/test_layer" # % new_mapset, # headers=self.user_auth_header, # data=json_dumps( @@ -93,7 +94,7 @@ def test_creation_1(self): # Check rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/vector_layers/test_layer" + + "/projects/nc_spm_08/mapsets/%s/vector_layers/test_layer" % new_mapset, headers=self.user_auth_header, ) @@ -118,7 +119,7 @@ def test_creation_1(self): # Delete rv = self.server.delete( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/vector_layers/test_layer" + + "/projects/nc_spm_08/mapsets/%s/vector_layers/test_layer" % new_mapset, headers=self.user_auth_header, ) @@ -135,7 +136,7 @@ def test_creation_1(self): # Delete fail rv = self.server.delete( URL_PREFIX - + "/locations/nc_spm_08/mapsets/%s/vector_layers/test_layer" + + "/projects/nc_spm_08/mapsets/%s/vector_layers/test_layer" % new_mapset, headers=self.user_auth_header, ) @@ -152,7 +153,7 @@ def test_creation_1(self): def test_layer_info(self): rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/PERMANENT/vector_layers/" + + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers/" "boundary_county", headers=self.user_auth_header, ) @@ -178,7 +179,7 @@ def test_layer_info_error_1(self): # Raster does not exist rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/PERMANENT/vector_layers/" + + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers/" "boundary_county_nope", headers=self.user_auth_header, ) diff --git a/tests/test_vector_layers.py b/tests/test_vector_layers.py index 76d100456..eee7fd7ae 100644 --- a/tests/test_vector_layers.py +++ b/tests/test_vector_layers.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -33,19 +33,19 @@ from test_resource_base import ActiniaResourceTestCaseBase, URL_PREFIX __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Sören Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" class VectorLayersTestCase(ActiniaResourceTestCaseBase): def test_list_vector_layers(self): rv = self.server.get( URL_PREFIX - + "/locations/nc_spm_08/mapsets/PERMANENT/vector_layers", + + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers", headers=self.user_auth_header, ) print(rv.data) @@ -66,7 +66,7 @@ def test_list_vector_layers(self): def test_list_vector_layers_pattern(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" "vector_layers?pattern=elev_*", headers=self.user_auth_header, ) @@ -90,7 +90,7 @@ def test_list_vector_layers_pattern(self): def test_list_vector_layers_empty_list(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" "vector_layers?pattern=NONE", headers=self.user_auth_header, ) diff --git a/tests/test_vector_renderer.py b/tests/test_vector_renderer.py index 69704af28..1ec4a6fb8 100644 --- a/tests/test_vector_renderer.py +++ b/tests/test_vector_renderer.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -35,18 +35,17 @@ __license__ = "GPLv3" -__author__ = "Sören Gebbert" +__author__ = "Sören Gebbert, Anika Weinmann" __copyright__ = ( - "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" + "Copyright 2016-2024, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "Soeren Gebbert" -__email__ = "soerengebbert@googlemail.com" +__maintainer__ = "mundialis GmbH & Co. 
KG" class VectorLayerRendererTestCase(ActiniaResourceTestCaseBase): def test_vectorlayer_image_no_args(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/vector_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" "/boundary_county/render", headers=self.user_auth_header, ) @@ -62,7 +61,7 @@ def test_vectorlayer_image_no_args(self): def test_vectorlayer_image_args_1(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/vector_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" "/boundary_county/render?n=228500&s=215000&w=630000&e=645000", headers=self.user_auth_header, ) @@ -78,7 +77,7 @@ def test_vectorlayer_image_args_1(self): def test_vectorlayer_image_args_2(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/vector_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" "/boundary_county/render?n=228500&s=215000&w=630000&e=645000&" "width=100&height=100", headers=self.user_auth_header, @@ -95,7 +94,7 @@ def test_vectorlayer_image_args_2(self): def test_vectorlayer_image_args_3(self): rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/vector_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" "/boundary_county/render?width=100&height=100", headers=self.user_auth_header, ) @@ -112,7 +111,7 @@ def test_vectorlayer_image_args_3(self): def test_vectorlayer_image_args_error_1(self): # North is smaller then south rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/vector_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" "/boundary_county/render?n=-228500&s=215000", headers=self.user_auth_header, ) @@ -129,7 +128,7 @@ def test_vectorlayer_image_args_error_1(self): def test_vectorlayer_image_args_error_2(self): # Negative size rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/vector_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" "/boundary_county/render?&width=-100&height=-100", headers=self.user_auth_header, ) @@ -146,7 +145,7 @@ def test_vectorlayer_image_args_error_2(self): def test_vectorlayer_image_args_error_3(self): # Raster does not exist rv = self.server.get( - f"{URL_PREFIX}/locations/nc_spm_08/mapsets/PERMANENT/vector_layers" + f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" "/boundary_county_nomap/render?", headers=self.user_auth_header, ) diff --git a/tests/test_webhook.py b/tests/test_webhook.py index 58140bc50..b233998a5 100644 --- a/tests/test_webhook.py +++ b/tests/test_webhook.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2016-2018 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2016-2024 Sören Gebbert and mundialis GmbH & Co. KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -22,7 +22,7 @@ ####### """ -Tests: Async process test case +Tests: Webhook """ import unittest @@ -40,8 +40,9 @@ __license__ = "GPLv3" __author__ = "Anika Weinmann" -__copyright__ = "Copyright 2022, mundialis GmbH & Co. KG" +__copyright__ = "Copyright 2024, mundialis GmbH & Co. KG" __maintainer__ = "mundialis GmbH & Co. 
KG" +__email__ = "info@mundialis.de" port = "5006" @@ -144,7 +145,7 @@ def poll_job(self, resp_data): # time.sleep(3) # tm = Template(json_dumps(pc)) # rv = self.server.post( - # URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + # URL_PREFIX + "/projects/nc_spm_08/processing_async_export", # headers=self.admin_auth_header, # data=tm.render(sleep=1), # content_type="application/json", @@ -157,7 +158,7 @@ def test_finished_webhook_retries(self): """ tm = Template(json_dumps(pc)) rv = self.server.post( - URL_PREFIX + "/locations/nc_spm_08/processing_async_export", + URL_PREFIX + "/projects/nc_spm_08/processing_async_export", headers=self.admin_auth_header, data=tm.render(sleep=30), content_type="application/json", From 628ac8361a6e4ad5d13aad6789461f0b5985e41c Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 17 Oct 2024 15:02:50 +0200 Subject: [PATCH 07/24] unify maintainer --- src/actinia_core/cli/actinia_server.py | 3 ++- src/actinia_core/cli/rq_custom_worker.py | 3 ++- src/actinia_core/core/common/aws_sentinel_interface.py | 3 ++- src/actinia_core/core/common/exceptions.py | 3 ++- .../core/common/google_satellite_bigquery_interface.py | 3 ++- src/actinia_core/core/common/process_chain.py | 3 ++- src/actinia_core/core/common/redis_interface.py | 3 ++- src/actinia_core/core/grass_init.py | 3 ++- src/actinia_core/core/mapset_merge_utils.py | 3 ++- src/actinia_core/core/utils.py | 3 ++- .../actinia_processing/ephemeral/base/renderer_base.py | 3 ++- .../ephemeral/download_cache_management.py | 5 ++++- .../ephemeral/ephemeral_custom_processing.py | 3 ++- .../ephemeral/ephemeral_processing_with_export.py | 2 +- .../actinia_processing/ephemeral/process_validation.py | 3 ++- .../processing/actinia_processing/ephemeral/raster_colors.py | 3 ++- .../processing/actinia_processing/ephemeral/raster_layer.py | 3 ++- .../processing/actinia_processing/ephemeral/raster_legend.py | 3 ++- .../ephemeral/resource_storage_management.py | 3 ++- .../processing/actinia_processing/ephemeral/vector_layer.py | 3 ++- .../ephemeral_renderer_base/raster_renderer.py | 3 ++- .../ephemeral_renderer_base/strds_renderer.py | 3 ++- .../ephemeral_renderer_base/vector_renderer.py | 3 ++- .../ephemeral_with_export/raster_export.py | 3 ++- .../persistent/download_cache_management.py | 3 ++- .../actinia_processing/persistent/map_layer_management.py | 3 ++- .../actinia_processing/persistent/mapset_management.py | 3 ++- .../persistent/persistent_mapset_merger.py | 3 ++- .../actinia_processing/persistent/project_management.py | 3 ++- .../actinia_processing/persistent/raster_colors.py | 3 ++- .../processing/actinia_processing/persistent/raster_layer.py | 3 ++- .../persistent/resource_storage_management.py | 3 ++- .../actinia_processing/persistent/strds_management.py | 3 ++- .../processing/common/download_cache_management.py | 3 ++- .../processing/common/ephemeral_custom_processing.py | 3 ++- src/actinia_core/processing/common/ephemeral_processing.py | 3 ++- .../processing/common/ephemeral_processing_with_export.py | 2 +- src/actinia_core/processing/common/map_layer_management.py | 3 ++- src/actinia_core/processing/common/mapset_management.py | 3 ++- .../processing/common/persistent_mapset_merger.py | 3 ++- src/actinia_core/processing/common/persistent_processing.py | 3 ++- src/actinia_core/processing/common/process_validation.py | 3 ++- src/actinia_core/processing/common/project_management.py | 3 ++- src/actinia_core/processing/common/raster_colors.py | 3 ++- src/actinia_core/processing/common/raster_export.py | 3 ++- 
src/actinia_core/processing/common/raster_layer.py | 3 ++- src/actinia_core/processing/common/raster_legend.py | 3 ++- src/actinia_core/processing/common/raster_renderer.py | 3 ++- .../processing/common/resource_storage_management.py | 3 ++- src/actinia_core/processing/common/strds_management.py | 3 ++- .../processing/common/strds_raster_management.py | 3 ++- src/actinia_core/processing/common/strds_renderer.py | 3 ++- src/actinia_core/processing/common/utils.py | 3 ++- src/actinia_core/processing/common/vector_layer.py | 3 ++- src/actinia_core/processing/common/vector_renderer.py | 3 ++- src/actinia_core/rest/base/map_layer_base.py | 3 ++- src/actinia_core/rest/base/renderer_base.py | 3 ++- src/actinia_core/rest/base/resource_base.py | 3 ++- src/actinia_core/rest/base/user_auth.py | 3 ++- src/actinia_core/rest/download_cache_management.py | 3 ++- src/actinia_core/rest/ephemeral_custom_processing.py | 3 ++- src/actinia_core/rest/ephemeral_processing.py | 3 ++- src/actinia_core/rest/ephemeral_processing_with_export.py | 2 +- src/actinia_core/rest/map_layer_management.py | 3 ++- src/actinia_core/rest/mapset_management.py | 3 ++- src/actinia_core/rest/persistent_mapset_merger.py | 3 ++- src/actinia_core/rest/persistent_processing.py | 3 ++- src/actinia_core/rest/process_chain_monitoring.py | 3 ++- src/actinia_core/rest/process_validation.py | 3 ++- src/actinia_core/rest/project_management.py | 3 ++- src/actinia_core/rest/raster_colors.py | 3 ++- src/actinia_core/rest/raster_export.py | 3 ++- src/actinia_core/rest/raster_layer.py | 3 ++- src/actinia_core/rest/raster_legend.py | 3 ++- src/actinia_core/rest/raster_renderer.py | 3 ++- src/actinia_core/rest/resource_storage_management.py | 3 ++- src/actinia_core/rest/strds_management.py | 3 ++- src/actinia_core/rest/strds_raster_management.py | 3 ++- src/actinia_core/rest/strds_renderer.py | 3 ++- src/actinia_core/rest/user_api_key.py | 3 ++- src/actinia_core/rest/vector_layer.py | 3 ++- src/actinia_core/rest/vector_renderer.py | 3 ++- src/actinia_core/testsuite.py | 3 ++- tests/test_resource_storage.py | 3 ++- tests/test_version_health.py | 3 ++- tests/unittests/test_keycloak_user.py | 3 ++- tests/unittests/test_version.py | 3 ++- 87 files changed, 173 insertions(+), 87 deletions(-) diff --git a/src/actinia_core/cli/actinia_server.py b/src/actinia_core/cli/actinia_server.py index 2bde42e9d..15753f613 100755 --- a/src/actinia_core/cli/actinia_server.py +++ b/src/actinia_core/cli/actinia_server.py @@ -41,7 +41,8 @@ __copyright__ = ( "Copyright 2016-2023, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" def main(): diff --git a/src/actinia_core/cli/rq_custom_worker.py b/src/actinia_core/cli/rq_custom_worker.py index 7ae07396c..504cd6156 100755 --- a/src/actinia_core/cli/rq_custom_worker.py +++ b/src/actinia_core/cli/rq_custom_worker.py @@ -62,7 +62,8 @@ __copyright__ = ( "Copyright 2016-2023, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" def main(): diff --git a/src/actinia_core/core/common/aws_sentinel_interface.py b/src/actinia_core/core/common/aws_sentinel_interface.py index 08344d694..dbf9497c8 100644 --- a/src/actinia_core/core/common/aws_sentinel_interface.py +++ b/src/actinia_core/core/common/aws_sentinel_interface.py @@ -33,7 +33,8 @@ __copyright__ = ( "Copyright 2016-2021, Sören Gebbert and mundialis GmbH & Co. 
KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" def get_sentinel_date(product_id): diff --git a/src/actinia_core/core/common/exceptions.py b/src/actinia_core/core/common/exceptions.py index 9b5113e7a..ecbbfcc33 100644 --- a/src/actinia_core/core/common/exceptions.py +++ b/src/actinia_core/core/common/exceptions.py @@ -31,7 +31,8 @@ __copyright__ = ( "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" class AsyncProcessError(Exception): diff --git a/src/actinia_core/core/common/google_satellite_bigquery_interface.py b/src/actinia_core/core/common/google_satellite_bigquery_interface.py index 4af81416c..0d86bda1a 100644 --- a/src/actinia_core/core/common/google_satellite_bigquery_interface.py +++ b/src/actinia_core/core/common/google_satellite_bigquery_interface.py @@ -37,7 +37,8 @@ __copyright__ = ( "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG" ) -__maintainer__ = "mundialis" +__maintainer__ = "mundialis GmbH & Co. KG" +__email__ = "info@mundialis.de" GML_BODY = """ Date: Thu, 17 Oct 2024 15:20:35 +0200 Subject: [PATCH 08/24] fix GRASS Initialisation --- job_resumption.md | 8 +- src/actinia_core/core/grass_init.py | 3 +- src/actinia_core/rest/project_management.py | 92 ++++++++++----------- 3 files changed, 52 insertions(+), 51 deletions(-) diff --git a/job_resumption.md b/job_resumption.md index b456e7069..4c901c452 100644 --- a/job_resumption.md +++ b/job_resumption.md @@ -114,9 +114,9 @@ URL=http://127.0.0.1:8088/api/v3 The job resumption works for the following endpoints: -- ephemeral processing: `ENDPOINT=locations/nc_spm_08/processing_async` -- ephemeral processing with export: `ENDPOINT=locations/nc_spm_08/processing_async_export` -- persistent processing: `ENDPOINT=locations/nc_spm_08/mapsets/test44/processing_async` +- ephemeral processing: `ENDPOINT=projects/nc_spm_08/processing_async` +- ephemeral processing with export: `ENDPOINT=projects/nc_spm_08/processing_async_export` +- persistent processing: `ENDPOINT=projects/nc_spm_08/mapsets/test44/processing_async` `pc_error.json`: process chain with error @@ -554,7 +554,7 @@ actiniadelete $AUTH $URL/resource_storage ```bash # processing (which ends with error) JSON=pc_template_error.json -actiniapost $AUTH $JSON $URL/locations/nc_spm_08/processing_export +actiniapost $AUTH $JSON $URL/projects/nc_spm_08/processing_export # job resumption (with complete process chain) JSONPUT=pc_template_forput.json diff --git a/src/actinia_core/core/grass_init.py b/src/actinia_core/core/grass_init.py index 181116479..813ea82fb 100644 --- a/src/actinia_core/core/grass_init.py +++ b/src/actinia_core/core/grass_init.py @@ -224,7 +224,8 @@ def print_gisrc(self): def __write(self): try: gisrc = open(self.__gisrc_ile, "w") - gisrc.write("project_name: %s\n" % self.project_name) + # TODO with GRASS GIS 9 + gisrc.write("LOCATION_NAME: %s\n" % self.project_name) gisrc.write("MAPSET: %s\n" % self.mapset) gisrc.write("DIGITIZER: none\n") gisrc.write("GISDBASE: %s\n" % self.gisdbase) diff --git a/src/actinia_core/rest/project_management.py b/src/actinia_core/rest/project_management.py index 51ab764d1..cfc2eb8f7 100644 --- a/src/actinia_core/rest/project_management.py +++ b/src/actinia_core/rest/project_management.py @@ -49,9 +49,9 @@ from actinia_core.rest.base.resource_base import ResourceBase from actinia_core.core.common.redis_interface import enqueue_job 
from actinia_core.core.utils import ensure_valid_path -from actinia_core.processing.common.location_management import ( +from actinia_core.processing.common.project_management import ( read_current_region, - create_location, + create_project, ) __license__ = "GPLv3" @@ -65,14 +65,14 @@ class ListProjectsResource(ResourceBase): """This resource represents GRASS GIS database directory - that contains locations. + that contains projects. """ def __init__(self): ResourceBase.__init__(self) """ - Return a list of all available locations that are located in the GRASS + Return a list of all available projects that are located in the GRASS database """ layer_type = None @@ -80,8 +80,8 @@ def __init__(self): @endpoint_decorator() @swagger.doc(check_endpoint("get", location_management.get_doc)) def get(self): - """Get a list of all available locations""" - locations = [] + """Get a list of all available projects""" + projects = [] if os.path.isdir(self.grass_data_base): dirs = os.listdir(self.grass_data_base) @@ -96,7 +96,7 @@ def get(self): mapset_path, os.R_OK & os.X_OK ): # Check access rights to the global database - # Super admin can see all locations + # Super admin can see all projects if ( self.has_superadmin_role or dir @@ -104,8 +104,8 @@ def get(self): "accessible_datasets" ] ): - locations.append(dir) - # List all locations in the user database + projects.append(dir) + # List all projects in the user database user_database = os.path.join( self.grass_user_data_base, self.user_group ) @@ -121,12 +121,12 @@ def get(self): if os.path.isdir(mapset_path) and os.access( mapset_path, os.R_OK & os.X_OK ): - locations.append(dir) - if locations: + projects.append(dir) + if projects: return make_response( jsonify( LocationListResponseModel( - status="success", locations=locations + status="success", projects=projects ) ), 200, @@ -144,22 +144,22 @@ def get(self): class ProjectManagementResourceUser(ResourceBase): - """This class returns information about a specific location""" + """This class returns information about a specific project""" def __init__(self): ResourceBase.__init__(self) @endpoint_decorator() @swagger.doc(check_endpoint("get", location_management.get_user_doc)) - def get(self, location_name): + def get(self, project_name): """ - Get the location projection and current computational region of the + Get the project projection and current computational region of the PERMANENT mapset """ rdc = self.preprocess( has_json=False, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name="PERMANENT", ) if rdc: @@ -177,7 +177,7 @@ def get(self, location_name): class ProjectManagementResourceAdminUser(ResourceBase): - """This class manages the creation, deletion and modification of locations + """This class manages the creation, deletion and modification of projects This is only allowed for administrators and users """ @@ -194,27 +194,27 @@ def __init__(self): @endpoint_decorator() @swagger.doc(check_endpoint("delete", location_management.delete_user_doc)) - def delete(self, location_name): + def delete(self, project_name): """ - Delete an existing location and everything inside from the user + Delete an existing project and everything inside from the user database. 
""" - # Delete only locations from the user database - location = ensure_valid_path( - [self.grass_user_data_base, self.user_group, location_name] + # Delete only projects from the user database + project = ensure_valid_path( + [self.grass_user_data_base, self.user_group, project_name] ) - permanent_mapset = ensure_valid_path([location, "PERMANENT"]) + permanent_mapset = ensure_valid_path([project, "PERMANENT"]) wind_file = ensure_valid_path([permanent_mapset, "WIND"]) - # Check the location path, only "valid" locations can be deleted - if os.path.isdir(location): + # Check the project path, only "valid" projects can be deleted + if os.path.isdir(project): if os.path.isdir(permanent_mapset) and os.path.isfile(wind_file): try: - shutil.rmtree(location) + shutil.rmtree(project) return make_response( jsonify( SimpleResponseModel( status="success", - message="location %s deleted" % location_name, + message="Project %s deleted" % project_name, ) ), 200, @@ -224,8 +224,8 @@ def delete(self, location_name): jsonify( SimpleResponseModel( status="error", - message="Unable to delete location " - f"{location_name} Exception {e}", + message="Unable to delete project " + f"{project_name} Exception {e}", ) ), 500, @@ -235,7 +235,7 @@ def delete(self, location_name): jsonify( SimpleResponseModel( status="error", - message="location %s does not exists" % location_name, + message="Project %s does not exists" % project_name, ) ), 400, @@ -243,38 +243,38 @@ def delete(self, location_name): @endpoint_decorator() @swagger.doc(check_endpoint("post", location_management.post_user_doc)) - def post(self, location_name): - """Create a new location based on EPSG code in the user database.""" - # Create only new locations if they did not exist in the global + def post(self, project_name): + """Create a new project based on EPSG code in the user database.""" + # Create only new projects if they did not exist in the global # database - location = ensure_valid_path([self.grass_data_base, location_name]) + project = ensure_valid_path([self.grass_data_base, project_name]) - # Check the location path - if os.path.isdir(location): + # Check the project path + if os.path.isdir(project): return self.get_error_response( - message="Unable to create location. " - "Location <%s> exists in global database." % location_name + message="Unable to create project. " + "Location <%s> exists in global database." % project_name ) # Check also for the user database - location = ensure_valid_path( - [self.grass_user_data_base, self.user_group, location_name] + project = ensure_valid_path( + [self.grass_user_data_base, self.user_group, project_name] ) - # Check the location path - if os.path.isdir(location): + # Check the project path + if os.path.isdir(project): return self.get_error_response( - message="Unable to create location. " - "Location <%s> exists in user database." % location_name + message="Unable to create project. " + "Location <%s> exists in user database." 
% project_name ) rdc = self.preprocess( has_json=True, has_xml=False, - location_name=location_name, + project_name=project_name, mapset_name="PERMANENT", ) if rdc: - enqueue_job(self.job_timeout, create_location, rdc) + enqueue_job(self.job_timeout, create_project, rdc) http_code, response_model = self.wait_until_finish() else: http_code, response_model = pickle.loads(self.response_data) From 49b4418082527caee7a2ca942b7144dc65b40620 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 17 Oct 2024 15:34:18 +0200 Subject: [PATCH 09/24] black --- .../ephemeral/persistent_processing.py | 3 +-- .../actinia_processing/ephemeral_processing.py | 11 +++-------- .../persistent/project_management.py | 8 ++------ tests/test_job_resumption.py | 3 +-- tests/test_raster_layers.py | 3 +-- tests/test_strds_raster_management.py | 4 +--- tests/test_strds_raster_renderer.py | 4 +--- tests/test_vector_layer.py | 6 ++---- tests/test_vector_layers.py | 3 +-- 9 files changed, 13 insertions(+), 32 deletions(-) diff --git a/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py b/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py index f913dab43..c7f15f001 100644 --- a/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py +++ b/src/actinia_core/processing/actinia_processing/ephemeral/persistent_processing.py @@ -198,8 +198,7 @@ def _check_mapset(self, mapset): ) else: raise AsyncProcessError( - "Unable to access global project <%s>" - % self.project_name + "Unable to access global project <%s>" % self.project_name ) # Always check if the target mapset already exists and set the flag diff --git a/src/actinia_core/processing/actinia_processing/ephemeral_processing.py b/src/actinia_core/processing/actinia_processing/ephemeral_processing.py index cc8deb625..2d0789525 100644 --- a/src/actinia_core/processing/actinia_processing/ephemeral_processing.py +++ b/src/actinia_core/processing/actinia_processing/ephemeral_processing.py @@ -909,9 +909,7 @@ def _create_temp_database(self, mapsets=None): # temporary project for mapset_path, mapset in mapsets_to_link: if ( - os.path.isdir( - os.path.join(self.temp_project_path, mapset) - ) + os.path.isdir(os.path.join(self.temp_project_path, mapset)) is False ): os.symlink( @@ -1027,13 +1025,10 @@ def _list_all_available_mapsets( else: if global_db is True: msg = ( - "Unable to access global project <%s>" - % self.project_name + "Unable to access global project <%s>" % self.project_name ) else: - msg = ( - "Unable to access user project <%s>" % self.project_name - ) + msg = "Unable to access user project <%s>" % self.project_name raise AsyncProcessError(msg) return mapsets, mapsets_to_link diff --git a/src/actinia_core/processing/actinia_processing/persistent/project_management.py b/src/actinia_core/processing/actinia_processing/persistent/project_management.py index 1483e59c3..57b742661 100644 --- a/src/actinia_core/processing/actinia_processing/persistent/project_management.py +++ b/src/actinia_core/processing/actinia_processing/persistent/project_management.py @@ -79,9 +79,7 @@ def _execute(self): self._execute_process_list(process_list) - if os.path.isdir( - os.path.join(self.temp_grass_data_base, new_project) - ): + if os.path.isdir(os.path.join(self.temp_grass_data_base, new_project)): shutil.move( os.path.join(self.temp_grass_data_base, new_project), self.grass_user_data_base, @@ -91,6 +89,4 @@ def _execute(self): "Unable to create project <%s>" % new_project ) - self.finish_message = ( - 
"Project <%s> successfully created" % new_project - ) + self.finish_message = "Project <%s> successfully created" % new_project diff --git a/tests/test_job_resumption.py b/tests/test_job_resumption.py index 3b765f0f2..6a0c669e6 100644 --- a/tests/test_job_resumption.py +++ b/tests/test_job_resumption.py @@ -1086,8 +1086,7 @@ def tearDown(self): if self.mapset_created is True: rv = self.server.delete( URL_PREFIX - + "/projects/%s/mapsets/%s/lock" - % (self.project, self.mapset), + + "/projects/%s/mapsets/%s/lock" % (self.project, self.mapset), headers=self.admin_auth_header, ) self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header) diff --git a/tests/test_raster_layers.py b/tests/test_raster_layers.py index 0e9a55e61..c60139359 100644 --- a/tests/test_raster_layers.py +++ b/tests/test_raster_layers.py @@ -104,8 +104,7 @@ def create_raster_layer(self, mapset_name, raster_name): def test_list_raster_layers(self): rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/PERMANENT/raster_layers", + URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT/raster_layers", headers=self.user_auth_header, ) print(rv.data.decode()) diff --git a/tests/test_strds_raster_management.py b/tests/test_strds_raster_management.py index 347c12697..cfd92c32b 100644 --- a/tests/test_strds_raster_management.py +++ b/tests/test_strds_raster_management.py @@ -52,9 +52,7 @@ class STRDSTestCase(ActiniaResourceTestCaseBase): - def create_raster_layer( - self, project_name, mapset_name, raster_name, val - ): + def create_raster_layer(self, project_name, mapset_name, raster_name, val): # Remove potentially existing raster layer rv = self.server.delete( URL_PREFIX diff --git a/tests/test_strds_raster_renderer.py b/tests/test_strds_raster_renderer.py index 28443ffb8..8036837e6 100644 --- a/tests/test_strds_raster_renderer.py +++ b/tests/test_strds_raster_renderer.py @@ -51,9 +51,7 @@ class STRDSRenderTestCase(ActiniaResourceTestCaseBase): - def create_raster_layer( - self, project_name, mapset_name, raster_name, val - ): + def create_raster_layer(self, project_name, mapset_name, raster_name, val): # Remove potentially existing raster layer rv = self.server.delete( URL_PREFIX diff --git a/tests/test_vector_layer.py b/tests/test_vector_layer.py index 5740d84a2..d6e661ac2 100644 --- a/tests/test_vector_layer.py +++ b/tests/test_vector_layer.py @@ -152,8 +152,7 @@ def test_creation_1(self): def test_layer_info(self): rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers/" + URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers/" "boundary_county", headers=self.user_auth_header, ) @@ -178,8 +177,7 @@ def test_layer_info(self): def test_layer_info_error_1(self): # Raster does not exist rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers/" + URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers/" "boundary_county_nope", headers=self.user_auth_header, ) diff --git a/tests/test_vector_layers.py b/tests/test_vector_layers.py index eee7fd7ae..0856e7bb8 100644 --- a/tests/test_vector_layers.py +++ b/tests/test_vector_layers.py @@ -44,8 +44,7 @@ class VectorLayersTestCase(ActiniaResourceTestCaseBase): def test_list_vector_layers(self): rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers", + URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers", headers=self.user_auth_header, ) print(rv.data) From 2ef9463262f381f4101d2713bc4e71524a9319ee Mon Sep 17 00:00:00 2001 
From: anikaweinmann Date: Fri, 18 Oct 2024 15:26:38 +0200 Subject: [PATCH 10/24] fixes for tests and G83 --- docker/actinia-core-tests/Dockerfile | 3 ++- src/actinia_core/testsuite.py | 31 ++++++++++++++++++---------- src/actinia_core/version.py | 13 ++++++++++++ 3 files changed, 35 insertions(+), 12 deletions(-) diff --git a/docker/actinia-core-tests/Dockerfile b/docker/actinia-core-tests/Dockerfile index 0c487a98c..678eaa2e4 100644 --- a/docker/actinia-core-tests/Dockerfile +++ b/docker/actinia-core-tests/Dockerfile @@ -21,7 +21,8 @@ RUN wget --quiet https://grass.osgeo.org/sampledata/north_carolina/nc_spm_mapset RUN chown -R 1001:1001 nc_spm_08/modis_lst && chmod -R g+w nc_spm_08/modis_lst # install GRASS addons required for tests -RUN grass --tmp-project EPSG:4326 --exec g.extension -s extension=r.colors.out_sld +# TODO use --tmp-project if actinia-core uses GRASS 8.4 +RUN grass --tmp-location EPSG:4326 --exec g.extension -s extension=r.colors.out_sld # install things only for tests # DL3013 # Pin versions in pip diff --git a/src/actinia_core/testsuite.py b/src/actinia_core/testsuite.py index 975f19396..81b141684 100644 --- a/src/actinia_core/testsuite.py +++ b/src/actinia_core/testsuite.py @@ -35,7 +35,7 @@ from actinia_api import URL_PREFIX from .health_check import health_check -from .version import version +from .version import version, init_versions from actinia_core.core.common.app import flask_app from actinia_core.core.common import redis_interface from actinia_core.core.common.config import global_config @@ -137,6 +137,15 @@ class ActiniaTestCaseBase(unittest.TestCase): root = None auth_header = {} users_list = [] + project_url_part = "projects" + + # set project_url_part to "locations" if GRASS GIS version < 8.4 + init_versions() + from .version import G_VERSION + grass_version_s = G_VERSION["version"] + grass_version = [int(item) for item in grass_version_s.split(".")[:2]] + if grass_version < [8, 4]: + project_url_part = "locations" if "ACTINIA_SERVER_TEST" in os.environ: server_test = bool(os.environ["ACTINIA_SERVER_TEST"]) @@ -342,7 +351,7 @@ def waitAsyncStatusAssertHTTP( def assertRasterInfo(self, project, mapset, raster, ref_info, header): url = ( - f"{URL_PREFIX}/projects/{project}/mapsets/{mapset}/" + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/{mapset}/" f"raster_layers/{raster}" ) rv = self.server.get(url, headers=header) @@ -363,7 +372,7 @@ def assertRasterInfo(self, project, mapset, raster, ref_info, header): def assertVectorInfo(self, project, mapset, vector, ref_info, header): url = ( - f"{URL_PREFIX}/projects/{project}/mapsets/{mapset}/" + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/{mapset}/" f"vector_layers/{vector}" ) rv = self.server.get(url, headers=header) @@ -386,30 +395,30 @@ def create_new_mapset(self, mapset_name, project_name="nc_spm_08"): self.delete_mapset(mapset_name, project_name) # Create new mapset self.server.post( - URL_PREFIX - + "/projects/%s/mapsets/%s" % (project_name, mapset_name), + f"{URL_PREFIX}/{self.project_url_part}/{project_name}/" + f"mapsets/{mapset_name}", headers=self.admin_auth_header, ) def delete_mapset(self, mapset_name, project_name="nc_spm_08"): # Unlock mapset for deletion self.server.delete( - URL_PREFIX - + "/projects/%s/mapsets/%s/lock" % (project_name, mapset_name), + f"{URL_PREFIX}/{self.project_url_part}/{project_name}/mapsets/" + f"{mapset_name}/lock", headers=self.admin_auth_header, ) # Delete existing mapset self.server.delete( - URL_PREFIX - + "/projects/%s/mapsets/%s" % 
(project_name, mapset_name), + f"{URL_PREFIX}/{self.project_url_part}/{project_name}/" + f"mapsets/{mapset_name}", headers=self.admin_auth_header, ) def create_vector_layer(self, project, mapset, vector, region, parameter): # Remove potentially existing vector layer url = ( - f"{URL_PREFIX}/projects/{project}/mapsets/{mapset}/" + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/{mapset}/" f"vector_layers/{vector}" ) rv = self.server.delete(url, headers=self.user_auth_header) @@ -441,7 +450,7 @@ def create_vector_layer(self, project, mapset, vector, region, parameter): "version": "1", } url = ( - f"{URL_PREFIX}/projects/{project}/mapsets/{mapset}/" + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/{mapset}/" f"processing_async" ) rv = self.server.post( diff --git a/src/actinia_core/version.py b/src/actinia_core/version.py index f8f64e652..84de93c67 100644 --- a/src/actinia_core/version.py +++ b/src/actinia_core/version.py @@ -70,6 +70,19 @@ def init_versions(): ], capture_output=True, ).stdout + if not g_version: + # for GRASS GIS version < 8.4 + g_version = subprocess.run( + [ + "grass", + "--tmp-location", + "epsg:4326", + "--exec", + "g.version", + "-rge", + ], + capture_output=True, + ).stdout log.debug("Detecting GRASS GIS version") for i in g_version.decode("utf-8").strip("\n").split("\n"): try: From eff93b767cfd12b6fb0ec683f486ae5ebbec08e9 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 14 Nov 2024 13:39:08 +0100 Subject: [PATCH 11/24] fixes --- .../ephemeral_processing.py | 3 +- .../persistent/project_management.py | 16 +- src/actinia_core/rest/project_management.py | 14 +- src/actinia_core/testsuite.py | 32 ++- tests/test_async_mapset_merging.py | 253 +++++++++--------- ...est_async_process_postgis_import_export.py | 6 +- tests/test_async_processing_export_file.py | 24 +- ...test_async_processing_export_to_storage.py | 21 +- tests/test_async_processing_export_vector.py | 9 +- tests/test_async_processing_import_export.py | 69 +++-- tests/test_async_processing_mapset.py | 10 +- ...async_processing_stdin_parameter_parser.py | 4 +- tests/test_async_processing_stdout_parser.py | 4 +- tests/test_async_raster_export.py | 12 +- tests/test_mapset_management.py | 61 +++-- tests/test_noauth.py | 25 +- tests/test_project_management.py | 33 +-- tests/test_raster_colors.py | 76 +++--- tests/test_raster_import_pixellimit.py | 8 +- tests/test_raster_layer.py | 8 +- tests/test_raster_layers.py | 38 ++- tests/test_raster_legend.py | 50 ++-- tests/test_raster_renderer.py | 99 +++---- tests/test_raster_upload.py | 8 +- tests/test_strds_management.py | 56 ++-- tests/test_strds_raster_management.py | 57 ++-- tests/test_strds_raster_renderer.py | 35 ++- tests/test_vector_layer.py | 57 +--- tests/test_vector_layers.py | 11 +- tests/test_vector_renderer.py | 34 +-- tests/test_vector_upload.py | 16 +- 31 files changed, 605 insertions(+), 544 deletions(-) diff --git a/src/actinia_core/processing/actinia_processing/ephemeral_processing.py b/src/actinia_core/processing/actinia_processing/ephemeral_processing.py index 2d0789525..705d3f8c2 100644 --- a/src/actinia_core/processing/actinia_processing/ephemeral_processing.py +++ b/src/actinia_core/processing/actinia_processing/ephemeral_processing.py @@ -1167,9 +1167,10 @@ def _create_temporary_mapset( # Set the vector database connection to vector map specific databases self.ginit.run_module( "db.connect", + # TODO GRASS GIS 9.0 [ "driver=sqlite", - "database=$GISDBASE/$project_name/$MAPSET/vector/$MAP/" + 
"database=$GISDBASE/$LOCATION_NAME/$MAPSET/vector/$MAP/" "sqlite.db", ], ) diff --git a/src/actinia_core/processing/actinia_processing/persistent/project_management.py b/src/actinia_core/processing/actinia_processing/persistent/project_management.py index 57b742661..d454cc0a3 100644 --- a/src/actinia_core/processing/actinia_processing/persistent/project_management.py +++ b/src/actinia_core/processing/actinia_processing/persistent/project_management.py @@ -33,6 +33,7 @@ PersistentProcessing, ) from actinia_core.core.common.exceptions import AsyncProcessError +from actinia_core.version import G_VERSION __license__ = "GPLv3" __author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" @@ -60,13 +61,20 @@ def _execute(self): self._create_temp_database() - # TODO replace old PC style + grass_version_s = G_VERSION["version"] + grass_version = [int(item) for item in grass_version_s.split(".")[:2]] + project_param = "location" if grass_version < [8, 4] else "project" pc = { - "1": { + "version": 1, + "list": [{ + "id": "1", "module": "g.proj", - "inputs": {"epsg": epsg_code, "project": new_project}, + "inputs": [ + {"param": "epsg", "value": epsg_code}, + {"param": project_param, "value": new_project}, + ], "flags": "t", - } + }] } process_list = self._validate_process_chain( diff --git a/src/actinia_core/rest/project_management.py b/src/actinia_core/rest/project_management.py index cfc2eb8f7..ec7db5154 100644 --- a/src/actinia_core/rest/project_management.py +++ b/src/actinia_core/rest/project_management.py @@ -53,6 +53,7 @@ read_current_region, create_project, ) +from actinia_core.version import G_VERSION __license__ = "GPLv3" __author__ = "Sören Gebbert, Carmen Tawalika, Anika Weinmann" @@ -123,12 +124,15 @@ def get(self): ): projects.append(dir) if projects: + param = {"status": "success"} + grass_version_s = G_VERSION["version"] + grass_version = [int(item) for item in grass_version_s.split(".")[:2]] + if grass_version >= [8, 4]: + param["projects"] = projects + else: + param["locations"] = projects return make_response( - jsonify( - LocationListResponseModel( - status="success", projects=projects - ) - ), + jsonify(LocationListResponseModel(**param)), 200, ) else: diff --git a/src/actinia_core/testsuite.py b/src/actinia_core/testsuite.py index 81b141684..c74261c9b 100644 --- a/src/actinia_core/testsuite.py +++ b/src/actinia_core/testsuite.py @@ -392,7 +392,17 @@ def assertVectorInfo(self, project, mapset, vector, ref_info, header): ) def create_new_mapset(self, mapset_name, project_name="nc_spm_08"): - self.delete_mapset(mapset_name, project_name) + get_url = f"{URL_PREFIX}/{self.project_url_part}/{project_name}/mapsets" + rv_get = self.server.get(get_url, headers=self.user_auth_header) + self.assertEqual( + rv_get.status_code, + 200, + "HTML status code is wrong %i" % rv_get.status_code, + ) + resp = json_loads(rv_get.data.decode()) + if mapset_name in resp["process_results"]: + self.delete_mapset(mapset_name, project_name) + # Create new mapset self.server.post( f"{URL_PREFIX}/{self.project_url_part}/{project_name}/" @@ -417,11 +427,20 @@ def delete_mapset(self, mapset_name, project_name="nc_spm_08"): def create_vector_layer(self, project, mapset, vector, region, parameter): # Remove potentially existing vector layer - url = ( + vl_url = ( f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/{mapset}/" - f"vector_layers/{vector}" + f"vector_layers" ) - rv = self.server.delete(url, headers=self.user_auth_header) + url = f"{vl_url}/{vector}" + rv_get = self.server.get(vl_url, 
headers=self.user_auth_header) + self.assertEqual( + rv_get.status_code, + 200, + "HTML status code is wrong %i" % rv_get.status_code, + ) + resp = json_loads(rv_get.data.decode()) + if vector in resp["process_results"]: + rv = self.server.delete(url, headers=self.user_auth_header) parameter["column"] = "z" region["res"] = 100000 @@ -451,7 +470,7 @@ def create_vector_layer(self, project, mapset, vector, region, parameter): } url = ( f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/{mapset}/" - f"processing_async" + "processing_async" ) rv = self.server.post( url, @@ -461,7 +480,8 @@ def create_vector_layer(self, project, mapset, vector, region, parameter): ) self.waitAsyncStatusAssertHTTP( rv, - headers=self.admin_auth_header, + headers=self.user_auth_header, + # headers=self.admin_auth_header, http_status=200, status="finished", ) diff --git a/tests/test_async_mapset_merging.py b/tests/test_async_mapset_merging.py index a37d43caf..7a7daaba5 100644 --- a/tests/test_async_mapset_merging.py +++ b/tests/test_async_mapset_merging.py @@ -106,14 +106,14 @@ def check_remove_test_mapsets(self): for mapset in test_mapsets: # Unlock mapset for deletion rv = self.server.delete( - URL_PREFIX - + "/projects/%s/mapsets/%s/lock" % ("nc_spm_08", mapset), + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{mapset}/lock", headers=self.admin_auth_header, ) print(rv.data) rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets", headers=self.user_auth_header, ) print(rv.data) @@ -132,7 +132,8 @@ def check_remove_test_mapsets(self): if mapset in mapsets: # Delete the mapset if it already exists rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/%s" % mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + f"mapsets/{mapset}", headers=self.admin_auth_header, ) print(rv.data) @@ -148,111 +149,115 @@ def check_remove_test_mapsets(self): "Wrong mimetype %s" % rv.mimetype, ) - def test_1_merge_no_access_to_target_mapset_error(self): - """No access to target mapset error test""" - # Try merge source mapsets into target mapset - rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/user1/merging_async", - headers=self.user_auth_header, - data=json_dumps(["Source_A", "Source_B"]), - content_type="application/json", - ) - - print(rv.data) - self.waitAsyncStatusAssertHTTP( - rv, - headers=self.user_auth_header, - http_status=400, - status="error", - message_check="AsyncProcessError", - ) - - def test_2_merge_missing_target_mapset_error(self): - """Missing target mapset test""" - self.check_remove_test_mapsets() - - # Try merge source mapsets into target mapset - rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/Target/merging_async", - headers=self.admin_auth_header, - data=json_dumps(["Source_A", "Source_B"]), - content_type="application/json", - ) - self.waitAsyncStatusAssertHTTP( - rv, - headers=self.admin_auth_header, - http_status=400, - status="error", - message_check="AsyncProcessError", - ) - - def test_3_merge_missing_source_mapsets_error(self): - """Test error for missing source mapsets""" - self.check_remove_test_mapsets() - - # Create target mapset - rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/Target", - headers=self.admin_auth_header, - ) - print(rv.data) - self.assertEqual( - rv.status_code, - 200, - "HTML status code is wrong %i" % rv.status_code, - ) - self.assertEqual( - rv.mimetype, "application/json", "Wrong mimetype %s" % 
rv.mimetype - ) - - # Try merge source mapsets into target mapset - rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/Target/merging_async", - headers=self.admin_auth_header, - data=json_dumps(["Source_A", "Source_B"]), - content_type="application/json", - ) - self.waitAsyncStatusAssertHTTP( - rv, - headers=self.admin_auth_header, - http_status=400, - status="error", - message_check="AsyncProcessError", - ) - - def test_4_merge_empty_mapset_list(self): - """Test error for missing source mapsets""" - self.check_remove_test_mapsets() - - # Create target mapset - rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/Target", - headers=self.admin_auth_header, - ) - print(rv.data) - self.assertEqual( - rv.status_code, - 200, - "HTML status code is wrong %i" % rv.status_code, - ) - self.assertEqual( - rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype - ) - - # Try merge source mapsets into target mapset - rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/Target/merging_async", - headers=self.admin_auth_header, - data=json_dumps([]), - content_type="application/json", - ) - self.waitAsyncStatusAssertHTTP( - rv, - headers=self.admin_auth_header, - http_status=400, - status="error", - message_check="AsyncProcessError", - ) + # def test_1_merge_no_access_to_target_mapset_error(self): + # """No access to target mapset error test""" + # # Try merge source mapsets into target mapset + # rv = self.server.post( + # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/user1/" + # "merging_async", + # headers=self.user_auth_header, + # data=json_dumps(["Source_A", "Source_B"]), + # content_type="application/json", + # ) + + # print(rv.data) + # self.waitAsyncStatusAssertHTTP( + # rv, + # headers=self.user_auth_header, + # http_status=400, + # status="error", + # message_check="AsyncProcessError", + # ) + + # def test_2_merge_missing_target_mapset_error(self): + # """Missing target mapset test""" + # self.check_remove_test_mapsets() + + # # Try merge source mapsets into target mapset + # rv = self.server.post( + # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" + # "merging_async", + # headers=self.admin_auth_header, + # data=json_dumps(["Source_A", "Source_B"]), + # content_type="application/json", + # ) + # self.waitAsyncStatusAssertHTTP( + # rv, + # headers=self.admin_auth_header, + # http_status=400, + # status="error", + # message_check="AsyncProcessError", + # ) + + # def test_3_merge_missing_source_mapsets_error(self): + # """Test error for missing source mapsets""" + # self.check_remove_test_mapsets() + + # # Create target mapset + # rv = self.server.post( + # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target", + # headers=self.admin_auth_header, + # ) + # print(rv.data) + # self.assertEqual( + # rv.status_code, + # 200, + # "HTML status code is wrong %i" % rv.status_code, + # ) + # self.assertEqual( + # rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + # ) + + # # Try merge source mapsets into target mapset + # rv = self.server.post( + # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" + # "merging_async", + # headers=self.admin_auth_header, + # data=json_dumps(["Source_A", "Source_B"]), + # content_type="application/json", + # ) + # self.waitAsyncStatusAssertHTTP( + # rv, + # headers=self.admin_auth_header, + # http_status=400, + # status="error", + # message_check="AsyncProcessError", + # ) + + # def test_4_merge_empty_mapset_list(self): + # """Test error for 
missing source mapsets""" + # self.check_remove_test_mapsets() + + # # Create target mapset + # rv = self.server.post( + # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target", + # headers=self.admin_auth_header, + # ) + # print(rv.data) + # self.assertEqual( + # rv.status_code, + # 200, + # "HTML status code is wrong %i" % rv.status_code, + # ) + # self.assertEqual( + # rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + # ) + + # # Try merge source mapsets into target mapset + # rv = self.server.post( + # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" + # "merging_async", + # headers=self.admin_auth_header, + # data=json_dumps([]), + # content_type="application/json", + # ) + # self.waitAsyncStatusAssertHTTP( + # rv, + # headers=self.admin_auth_header, + # http_status=400, + # status="error", + # message_check="AsyncProcessError", + # ) def test_5_merge_two_mapsets(self): """Test the merging of two mapsets into a target mapset""" @@ -260,8 +265,8 @@ def test_5_merge_two_mapsets(self): # Create the source mapsets rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/Source_A/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Source_A/" + "processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_short_1), content_type="application/json", @@ -269,8 +274,8 @@ def test_5_merge_two_mapsets(self): self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header) rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/Source_B/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Source_B/" + "processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_short_2), content_type="application/json", @@ -279,7 +284,7 @@ def test_5_merge_two_mapsets(self): # Create target mapset rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/Target", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target", headers=self.admin_auth_header, ) print(rv.data) @@ -294,7 +299,8 @@ def test_5_merge_two_mapsets(self): # Merge source mapsets into target mapset rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/Target/merging_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" + "merging_async", headers=self.admin_auth_header, data=json_dumps(["Source_A", "Source_B"]), content_type="application/json", @@ -303,8 +309,8 @@ def test_5_merge_two_mapsets(self): # Check copied raster rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/Target/raster_layers/my_aspect_1", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" + "raster_layers/my_aspect_1", headers=self.admin_auth_header, ) print(rv.data) @@ -312,8 +318,8 @@ def test_5_merge_two_mapsets(self): "my_aspect_1", json_load(rv.data)["process_results"]["map"] ) rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/Target/raster_layers/my_aspect_2", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" + "raster_layers/my_aspect_2", headers=self.admin_auth_header, ) print(rv.data) @@ -321,8 +327,8 @@ def test_5_merge_two_mapsets(self): "my_aspect_2", json_load(rv.data)["process_results"]["map"] ) rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/Target/raster_layers/my_slope_1", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" + "raster_layers/my_slope_1", headers=self.admin_auth_header, ) print(rv.data) @@ -330,15 +336,14 @@ def 
test_5_merge_two_mapsets(self): "my_slope_1", json_load(rv.data)["process_results"]["map"] ) rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/Target/raster_layers/my_slope_2", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" + "raster_layers/my_slope_2", headers=self.admin_auth_header, ) print(rv.data) self.assertTrue( "my_slope_2", json_load(rv.data)["process_results"]["map"] ) - time.sleep(1) diff --git a/tests/test_async_process_postgis_import_export.py b/tests/test_async_process_postgis_import_export.py index 3c928357c..021ebc4ca 100644 --- a/tests/test_async_process_postgis_import_export.py +++ b/tests/test_async_process_postgis_import_export.py @@ -95,7 +95,8 @@ def gen_output_layer_name(self): # # TODO fix test and comment the test in (postgres DB is needed) # def test_1_async_processing_postgis_validation(self): # rv = self.server.post( - # f"{URL_PREFIX}/projects/nc_spm_08/process_chain_validation_async", + # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + # "process_chain_validation_async", # headers=self.admin_auth_header, # data=json_dumps(process_chain_postgis), # content_type="application/json", @@ -115,7 +116,8 @@ def gen_output_layer_name(self): # self.gen_output_layer_name() # # rv = self.server.post( - # URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + # "processing_async_export", # headers=self.admin_auth_header, # data=json_dumps(process_chain_postgis), # content_type="application/json", diff --git a/tests/test_async_processing_export_file.py b/tests/test_async_processing_export_file.py index 89d3afa1f..5a1847703 100644 --- a/tests/test_async_processing_export_file.py +++ b/tests/test_async_processing_export_file.py @@ -92,7 +92,8 @@ class AsyncProcessFileExportTestCase(ActiniaResourceTestCaseBase): def test_async_processing_file_export(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(file_export), content_type="application/json", @@ -123,7 +124,8 @@ def test_async_processing_file_export(self): def test_termination(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(file_export), content_type="application/json", @@ -159,7 +161,8 @@ class AsyncProcessExportTestCaseAdminS3(ActiniaResourceTestCaseBase): ) def test_async_processing_export(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export_s3", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export_s3", headers=self.admin_auth_header, data=json_dumps(file_export), content_type="application/json", @@ -189,7 +192,8 @@ def test_async_processing_export(self): ) def test_termination(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export_s3", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export_s3", headers=self.admin_auth_header, data=json_dumps(file_export), content_type="application/json", @@ -197,8 +201,7 @@ def test_termination(self): resp = json_loads(rv.data) # Send the termination request self.server.delete( - URL_PREFIX - + "/resources/%s/%s" % (resp["user_id"], resp["resource_id"]), + f"URL_PREFIX/resources/{resp['user_id']}/{resp['resource_id']}", 
headers=self.admin_auth_header, ) @@ -225,7 +228,8 @@ class AsyncProcessExportTestCaseAdminGCS(ActiniaResourceTestCaseBase): ) def test_async_processing_export(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export_gcs", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export_gcs", headers=self.admin_auth_header, data=json_dumps(file_export), content_type="application/json", @@ -255,7 +259,8 @@ def test_async_processing_export(self): ) def test_termination(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export_gcs", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export_gcs", headers=self.admin_auth_header, data=json_dumps(file_export), content_type="application/json", @@ -263,8 +268,7 @@ def test_termination(self): resp = json_loads(rv.data) # Send the termination request self.server.delete( - URL_PREFIX - + "/resources/%s/%s" % (resp["user_id"], resp["resource_id"]), + f"{URL_PREFIX}/resources/{resp['user_id']}/{resp['resource_id']}", headers=self.admin_auth_header, ) diff --git a/tests/test_async_processing_export_to_storage.py b/tests/test_async_processing_export_to_storage.py index 63f8df3f3..29a83edbe 100644 --- a/tests/test_async_processing_export_to_storage.py +++ b/tests/test_async_processing_export_to_storage.py @@ -87,7 +87,8 @@ class AsyncProcessExport2TestCaseAdmin(ActiniaResourceTestCaseBase): def test_async_processing_export(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -128,7 +129,8 @@ class AsyncProcessExportTestCaseAdminS3(ActiniaResourceTestCaseBase): ) def test_async_processing_export(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export_s3", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export_s3", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -158,7 +160,8 @@ def test_async_processing_export(self): ) def test_termination(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export_s3", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export_s3", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -166,8 +169,7 @@ def test_termination(self): resp = json_loads(rv.data) # Send the termination request self.server.delete( - URL_PREFIX - + "/resources/%s/%s" % (resp["user_id"], resp["resource_id"]), + f"{URL_PREFIX}/resources/{resp['user_id']}/{resp['resource_id']}", headers=self.admin_auth_header, ) @@ -194,7 +196,8 @@ class AsyncProcessExportTestCaseAdminGCS(ActiniaResourceTestCaseBase): ) def test_async_processing_export(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export_gcs", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export_gcs", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -224,7 +227,8 @@ def test_async_processing_export(self): ) def test_termination(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export_gcs", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export_gcs", headers=self.admin_auth_header, 
data=json_dumps(process_chain_long), content_type="application/json", @@ -232,8 +236,7 @@ def test_termination(self): resp = json_loads(rv.data) # Send the termination request self.server.delete( - URL_PREFIX - + "/resources/%s/%s" % (resp["user_id"], resp["resource_id"]), + f"{URL_PREFIX}/resources/{resp['user_id']}/{resp['resource_id']}", headers=self.admin_auth_header, ) diff --git a/tests/test_async_processing_export_vector.py b/tests/test_async_processing_export_vector.py index 030cb16ac..c852ffdd5 100644 --- a/tests/test_async_processing_export_vector.py +++ b/tests/test_async_processing_export_vector.py @@ -178,7 +178,8 @@ class AsyncProcessTestCase(ActiniaResourceTestCaseBase): def test_vector_export(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(vector_layer_export), content_type="application/json", @@ -210,7 +211,8 @@ def test_vector_export(self): def test_vector_buffer(self): rv = self.server.post( - URL_PREFIX + "/projects/latlong_wgs84/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/latlong_wgs84/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(vector_layer_buffer), content_type="application/json", @@ -245,7 +247,8 @@ def test_vector_buffer(self): def test_vector_clean(self): rv = self.server.post( - URL_PREFIX + "/projects/latlong_wgs84/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/latlong_wgs84/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(vector_layer_clean), content_type="application/json", diff --git a/tests/test_async_processing_import_export.py b/tests/test_async_processing_import_export.py index c605bbffd..559d00ced 100644 --- a/tests/test_async_processing_import_export.py +++ b/tests/test_async_processing_import_export.py @@ -652,7 +652,8 @@ class AsyncProcessTestCase(ActiniaResourceTestCaseBase): ) def test_raster_import_export_sentinel_ndvi(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps( process_chain_sentinel_import_export_sentinel_ndvi @@ -675,7 +676,8 @@ def test_raster_import_export_sentinel_ndvi(self): ) def test_raster_import_export(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_sentinel_import_export), content_type="application/json", @@ -690,7 +692,8 @@ def test_raster_import_export(self): def test_raster_import(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_raster_import_info), content_type="application/json", @@ -705,7 +708,8 @@ def test_raster_import(self): def test_raster_import_nofile(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_raster_import_error_no_file), content_type="application/json", @@ -717,7 +721,8 @@ def test_raster_import_nofile(self): def 
test_import_export(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_raster_import_export), content_type="application/json", @@ -732,7 +737,8 @@ def test_import_export(self): def test_vector_import(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_vector_import_info), content_type="application/json", @@ -753,7 +759,8 @@ def test_vector_import(self): ) def test_sentinel_import_info(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_sentinel_import_info), content_type="application/json", @@ -774,7 +781,8 @@ def test_sentinel_import_info(self): ) def test_sentinel_import_univar(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_sentinel_import_univar), content_type="application/json", @@ -795,7 +803,8 @@ def test_sentinel_import_univar(self): ) def test_sentinel_import_stats(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_sentinel_import_stats), content_type="application/json", @@ -816,7 +825,8 @@ def test_sentinel_import_stats(self): ) def test_sentinel_import_error(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_sentinel_import_error), content_type="application/json", @@ -833,7 +843,10 @@ def test_stac_import(self): Test of STAC collection import with http response 200 """ - endpoint = URL_PREFIX + "/projects/nc_spm_08/processing_async_export" + endpoint = ( + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export" + ) rv = self.server.post( endpoint, headers=self.admin_auth_header, @@ -854,7 +867,10 @@ def test_stac_source_error_import(self): Test of STAC collection import with http response 400, raising error on wrongly structured, undefined, or missing source ID. """ - endpoint = URL_PREFIX + "/projects/nc_spm_08/processing_async_export" + endpoint = ( + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export" + ) rv = self.server.post( endpoint, headers=self.admin_auth_header, @@ -874,7 +890,10 @@ def test_stac_source_filter_error_import(self): or wrong Spatial coordinates in bbox. 
""" - endpoint = URL_PREFIX + "/projects/nc_spm_08/processing_async_export" + endpoint = ( + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export" + ) rv = self.server.post( endpoint, headers=self.admin_auth_header, @@ -892,7 +911,8 @@ def test_raster_import_resample_resolution(self): resolution method, with http response 200 """ rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_raster_import_resample_resolution), content_type="application/json", @@ -911,7 +931,8 @@ def test_raster_import_resample_resolution_info(self): resampling and resolution info """ rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps( process_chain_raster_import_resample_resolution_info @@ -953,7 +974,8 @@ def test_raster_import_resample_resolution_error_resamp(self): in options """ rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps( process_chain_raster_import_resample_resolution_error_resamp @@ -972,7 +994,8 @@ def test_raster_import_resample_resolution_error_resol(self): in options """ rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps( process_chain_raster_import_resample_resolution_error_resol @@ -991,7 +1014,8 @@ def test_raster_import_resample_resolution_error_val_missing(self): resolution set to value """ rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps( pc_raster_import_resample_resolution_error_val_missing @@ -1010,7 +1034,8 @@ def test_raster_import_resample_resolution_error_val_not_float(self): convertible to float """ rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps( pc_raster_import_resample_resolution_error_val_not_float @@ -1029,7 +1054,8 @@ def test_raster_import_resample_resolution_error_resol_not_set(self): value set """ rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps( pc_raster_import_resample_resolution_error_resol_not_set @@ -1048,7 +1074,8 @@ def test_raster_import_resample_resolution_error_resol_not_val(self): "value" when value set """ rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps( pc_raster_import_resample_resolution_error_resol_not_val diff --git a/tests/test_async_processing_mapset.py b/tests/test_async_processing_mapset.py index c5b3dda8f..c549ea1a3 100644 --- a/tests/test_async_processing_mapset.py +++ b/tests/test_async_processing_mapset.py @@ -106,7 
+106,7 @@ class AsyncProcessMapsetTestCaseAdmin(ActiniaResourceTestCaseBase): def check_remove_test_mapset(self): rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets", headers=self.user_auth_header, ) print(rv.data) @@ -124,7 +124,8 @@ def check_remove_test_mapset(self): if "test_mapset" in mapsets: # Delete the mapset if it already exists rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -147,8 +148,7 @@ def test_1_new_mapset(self): self.check_remove_test_mapset() rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/test_mapset/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/test_mapset/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -156,7 +156,7 @@ def test_1_new_mapset(self): self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header) rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets", headers=self.admin_auth_header, ) print(rv.data) diff --git a/tests/test_async_processing_stdin_parameter_parser.py b/tests/test_async_processing_stdin_parameter_parser.py index ba44da25c..41f621a14 100644 --- a/tests/test_async_processing_stdin_parameter_parser.py +++ b/tests/test_async_processing_stdin_parameter_parser.py @@ -106,7 +106,7 @@ class AsyncProcessStdinParameterParserTestCase(ActiniaResourceTestCaseBase): def test_glist_parsing(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(PC_GLIST), content_type="application/json", @@ -131,7 +131,7 @@ def test_glist_parsing(self): def test_runivar_parsing(self): min, max = 0, 21 rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(PC_RUNIVAR), content_type="application/json", diff --git a/tests/test_async_processing_stdout_parser.py b/tests/test_async_processing_stdout_parser.py index dc3a5458a..bba96530f 100644 --- a/tests/test_async_processing_stdout_parser.py +++ b/tests/test_async_processing_stdout_parser.py @@ -129,7 +129,7 @@ class AsyncProcessStdoutParserTestCase(ActiniaResourceTestCaseBase): def test_output_parsing(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain), content_type="application/json", @@ -156,7 +156,7 @@ def test_output_parsing(self): def test_output_parsing_r_what(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(r_what), content_type="application/json", diff --git a/tests/test_async_raster_export.py b/tests/test_async_raster_export.py index fc2717fe3..5c515c732 100644 --- a/tests/test_async_raster_export.py +++ b/tests/test_async_raster_export.py @@ -44,8 +44,8 @@ class RasterAsyncExport(ActiniaResourceTestCaseBase): def test_export(self): rv = self.server.post( - 
f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/geotiff_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/geotiff_async", headers=self.user_auth_header, ) resp = self.waitAsyncStatusAssertHTTP( @@ -72,8 +72,8 @@ def test_export(self): def test_export_region(self): rv = self.server.post( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/geotiff_async_orig", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/geotiff_async_orig", headers=self.user_auth_header, ) resp = self.waitAsyncStatusAssertHTTP( @@ -100,8 +100,8 @@ def test_export_region(self): def test_export_error(self): rv = self.server.post( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" - "raster_layers/elevationion/geotiff_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevationion/geotiff_async", headers=self.user_auth_header, ) self.waitAsyncStatusAssertHTTP( diff --git a/tests/test_mapset_management.py b/tests/test_mapset_management.py index dbfdb27f4..cc93ea75c 100644 --- a/tests/test_mapset_management.py +++ b/tests/test_mapset_management.py @@ -44,7 +44,7 @@ class MapsetTestCase(ActiniaResourceTestCaseBase): def test_list_mapsets(self): rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets", headers=self.user_auth_header, ) print(rv.data) @@ -64,7 +64,8 @@ def test_list_mapsets(self): def test_mapsets_region_1(self): rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT/info", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/info", headers=self.admin_auth_header, ) print(rv.data) @@ -86,7 +87,8 @@ def test_mapsets_region_1(self): def test_mapsets_region_2(self): rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets/user1/info", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/user1/" + "info", headers=self.admin_auth_header, ) print(rv.data) @@ -111,7 +113,8 @@ def test_mapset_creation_and_deletion(self): # Mapset already exists rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -126,7 +129,8 @@ def test_mapset_creation_and_deletion(self): # Delete mapset rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -141,7 +145,8 @@ def test_mapset_creation_and_deletion(self): # Delete should fail, since mapset does not exists rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -157,7 +162,8 @@ def test_mapset_creation_and_deletion(self): def test_mapset_creation_and_deletion_unprivileged(self): # Create new mapsets as unprivileged user rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset", headers=self.guest_auth_header, ) print(rv.data) @@ -169,7 +175,8 @@ def test_mapset_creation_and_deletion_unprivileged(self): # Delete mapset as unprivileged user rv = self.server.delete( - URL_PREFIX + 
"/projects/nc_spm_08/mapsets/test_mapset", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset", headers=self.guest_auth_header, ) print(rv.data) @@ -182,7 +189,8 @@ def test_mapset_creation_and_deletion_unprivileged(self): def test_mapset_deletion_permanent_error(self): # Delete PERMANENT rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT", headers=self.admin_auth_header, ) print(rv.data) @@ -195,7 +203,7 @@ def test_mapset_deletion_permanent_error(self): def test_mapset_deletion_global_db_error(self): # Delete PERMANENT rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/user1", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/user1", headers=self.admin_auth_header, ) print(rv.data) @@ -208,21 +216,24 @@ def test_mapset_deletion_global_db_error(self): def test_mapset_creation_and_locking(self): # Unlock mapset for deletion rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) # Delete any existing mapsets rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset_2", headers=self.admin_auth_header, ) print(rv.data) # Create new mapsets rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset_2", headers=self.admin_auth_header, ) print(rv.data) @@ -237,7 +248,8 @@ def test_mapset_creation_and_locking(self): # Lock mapset rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) @@ -252,7 +264,8 @@ def test_mapset_creation_and_locking(self): # get mapset lock(False) rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) @@ -270,7 +283,8 @@ def test_mapset_creation_and_locking(self): # Unlock mapset rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) @@ -285,7 +299,8 @@ def test_mapset_creation_and_locking(self): # get mapset lock (False) rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) @@ -303,7 +318,8 @@ def test_mapset_creation_and_locking(self): # Delete mapset rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset_2", headers=self.admin_auth_header, ) print(rv.data) @@ -318,7 +334,8 @@ def test_mapset_creation_and_locking(self): # get mapset lock (False) rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) @@ -339,7 +356,8 @@ def 
test_mapset_creation_and_locking(self): # error is logged. Skip until fixed. TODO reactivate # https://github.com/actinia-org/actinia-core/issues/487 # rv = self.server.post( - # URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", + # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + # "test_mapset_2/lock", # headers=self.admin_auth_header, # ) # print(rv.data) @@ -356,7 +374,8 @@ def test_mapset_creation_and_locking(self): # Unlock mapset rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset_2/lock", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset_2/lock", headers=self.admin_auth_header, ) print(rv.data) diff --git a/tests/test_noauth.py b/tests/test_noauth.py index 59c71fba2..1143f08a0 100644 --- a/tests/test_noauth.py +++ b/tests/test_noauth.py @@ -188,7 +188,7 @@ def test_01_version(self): def test_02_list_projects(self): """Test list project endpoint""" - rv = self.server.get(f"{URL_PREFIX}/projects") + rv = self.server.get(f"{URL_PREFIX}/{self.project_url_part}") self.assertEqual( rv.status_code, 200, @@ -198,14 +198,16 @@ def test_02_list_projects(self): rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype ) self.assertIn( - "projects", json_loads(rv.data), "No projects in response" + self.project_url_part, + json_loads(rv.data), + f"No {self.project_url_part} in response", ) - projects = json_loads(rv.data)["projects"] + projects = json_loads(rv.data)[self.project_url_part] self.assertIn("nc_spm_08", projects, "Wrong project listed") def test_03_processing_ephemeral(self): """Test job resumption with processing_async endpoint and stdout""" - endpoint = "/projects/nc_spm_08/processing_async" + endpoint = f"{self.project_url_part}/nc_spm_08/processing_async" rv = self.server.post( f"{URL_PREFIX}{endpoint}", data=json_dumps(PC), @@ -222,7 +224,9 @@ def test_04_processing_persistent(self): """Test job resumption with persistent processing_async endpoint and stdout """ - endpoint = "/projects/nc_spm_08/mapsets/test/processing_async" + endpoint = ( + f"{self.project_url_part}/nc_spm_08/mapsets/test/processing_async" + ) rv = self.server.post( f"{URL_PREFIX}{endpoint}", data=json_dumps(PC), @@ -235,7 +239,9 @@ def test_04_processing_persistent(self): ) self.compare_stdout(resp) # check processing mapset - rv2 = self.server.get(f"{URL_PREFIX}/projects/nc_spm_08/mapsets") + rv2 = self.server.get( + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets" + ) self.assertEqual( rv2.status_code, 200, @@ -247,7 +253,8 @@ def test_04_processing_persistent(self): ) # check created raster rv3 = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/test/raster_layers" + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/test/" + "raster_layers" ) self.assertEqual( rv3.status_code, @@ -261,7 +268,9 @@ def test_04_processing_persistent(self): # delete test mapset self.admin_auth_header = None self.delete_mapset("test", "nc_spm_08") - rv4 = self.server.get(f"{URL_PREFIX}/projects/nc_spm_08/mapsets") + rv4 = self.server.get( + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets" + ) self.assertEqual( rv4.status_code, 200, diff --git a/tests/test_project_management.py b/tests/test_project_management.py index bca056523..0ae212671 100644 --- a/tests/test_project_management.py +++ b/tests/test_project_management.py @@ -44,7 +44,8 @@ class ProjectTestCase(ActiniaResourceTestCaseBase): def test_list_projects(self): rv = self.server.get( - URL_PREFIX + "/projects", headers=self.user_auth_header + 
f"{URL_PREFIX}/{self.project_url_part}", + headers=self.user_auth_header, ) print(rv.data) self.assertEqual( @@ -56,14 +57,14 @@ def test_list_projects(self): rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype ) - if "nc_spm_08" in json_loads(rv.data)["projects"]: + if "nc_spm_08" in json_loads(rv.data)[self.project_url_part]: project = "nc_spm_08" self.assertEqual(project, "nc_spm_08", "Wrong project listed") def test_project_info(self): rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/info", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/info", headers=self.admin_auth_header, ) print(rv.data) @@ -88,7 +89,7 @@ def test_project_global_db_error(self): # ERROR: Try to create a project as admin that exists in the global # database rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.admin_auth_header, @@ -106,13 +107,13 @@ def test_project_global_db_error(self): def test_project_creation_and_deletion(self): # Delete a potentially existing project rv = self.server.delete( - URL_PREFIX + "/projects/test_project", + f"{URL_PREFIX}/{self.project_url_part}/test_project", headers=self.admin_auth_header, ) # Create new project as admin rv = self.server.post( - URL_PREFIX + "/projects/test_project", + f"{URL_PREFIX}/{self.project_url_part}/test_project", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.admin_auth_header, @@ -129,7 +130,7 @@ def test_project_creation_and_deletion(self): # ERROR: Try to create a project as admin that already exists rv = self.server.post( - URL_PREFIX + "/projects/test_project", + f"{URL_PREFIX}/{self.project_url_part}/test_project", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.admin_auth_header, @@ -146,7 +147,7 @@ def test_project_creation_and_deletion(self): # Delete project rv = self.server.delete( - URL_PREFIX + "/projects/test_project", + f"{URL_PREFIX}/{self.project_url_part}/test_project", headers=self.admin_auth_header, ) print(rv.data) @@ -161,7 +162,7 @@ def test_project_creation_and_deletion(self): # ERROR: Delete should fail, since project does not exists rv = self.server.delete( - URL_PREFIX + "/projects/test_project", + f"{URL_PREFIX}/{self.project_url_part}/test_project", headers=self.admin_auth_header, ) print(rv.data) @@ -177,13 +178,13 @@ def test_project_creation_and_deletion(self): def test_project_creation_and_deletion_as_user(self): # Delete a potentially existing project rv = self.server.delete( - URL_PREFIX + "/projects/test_project", + f"{URL_PREFIX}/{self.project_url_part}/test_project", headers=self.user_auth_header, ) # Create new project as user rv = self.server.post( - URL_PREFIX + "/projects/test_project", + f"{URL_PREFIX}/{self.project_url_part}/test_project", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.user_auth_header, @@ -202,7 +203,7 @@ def test_project_creation_and_deletion_as_user(self): # ERROR: Try to create a project as user that already exists rv = self.server.post( - URL_PREFIX + "/projects/test_project", + f"{URL_PREFIX}/{self.project_url_part}/test_project", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.user_auth_header, @@ -221,7 +222,7 @@ def test_project_creation_and_deletion_as_user(self): # Delete project rv = self.server.delete( - URL_PREFIX + "/projects/test_project", + 
f"{URL_PREFIX}/{self.project_url_part}/test_project", headers=self.user_auth_header, ) self.assertEqual( @@ -238,7 +239,7 @@ def test_project_creation_and_deletion_as_user(self): # ERROR: Delete should fail, since project does not exists rv = self.server.delete( - URL_PREFIX + "/projects/test_project", + f"{URL_PREFIX}/{self.project_url_part}/test_project", headers=self.user_auth_header, ) self.assertEqual( @@ -256,7 +257,7 @@ def test_project_creation_and_deletion_as_user(self): def test_project_creation_and_deletion_as_guest(self): # ERROR: Try to create a project as guest rv = self.server.post( - URL_PREFIX + "/projects/test_project_user", + f"{URL_PREFIX}/{self.project_url_part}/test_project_user", data=json_dumps({"epsg": "4326"}), content_type="application/json", headers=self.guest_auth_header, @@ -273,7 +274,7 @@ def test_project_creation_and_deletion_as_guest(self): # ERROR: Delete should fail since the guest user is not authorized rv = self.server.delete( - URL_PREFIX + "/projects/test_project_user", + f"{URL_PREFIX}/{self.project_url_part}/test_project_user", headers=self.guest_auth_header, ) print(rv.data) diff --git a/tests/test_raster_colors.py b/tests/test_raster_colors.py index 9d5e29860..699932d95 100644 --- a/tests/test_raster_colors.py +++ b/tests/test_raster_colors.py @@ -47,8 +47,8 @@ class RasterLayerTestCase(ActiniaResourceTestCaseBase): def test_raster_layer_get_colors(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/colors", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/colors", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -94,8 +94,8 @@ def test_raster_layer_set_colors(self): "version": "1", } rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/processing_async" % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/processing_async", headers=self.user_auth_header, data=json_dumps(postbody), content_type="application/json", @@ -125,9 +125,8 @@ def test_raster_layer_set_colors(self): # Set the color table rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/test_layer/colors", headers=self.user_auth_header, data=json_dumps(rules), content_type="application/json", @@ -147,9 +146,8 @@ def test_raster_layer_set_colors(self): # Set the color table rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/test_layer/colors", headers=self.user_auth_header, data=json_dumps(rules), content_type="application/json", @@ -169,9 +167,8 @@ def test_raster_layer_set_colors(self): # Set the color table rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/test_layer/colors", headers=self.user_auth_header, data=json_dumps(rules), content_type="application/json", @@ -189,9 +186,8 @@ def test_raster_layer_set_colors(self): # Delete rv = self.server.delete( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + 
f"{new_mapset}/raster_layers/test_layer", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -234,8 +230,8 @@ def test_1_raster_layer_set_colors_errors(self): "version": "1", } rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/processing_async" % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/processing_async", headers=self.user_auth_header, data=json_dumps(postbody), content_type="application/json", @@ -263,9 +259,8 @@ def test_1_raster_layer_set_colors_errors(self): } rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/test_layer/colors", headers=self.user_auth_header, data=json_dumps(rules), content_type="application/json", @@ -287,9 +282,8 @@ def test_1_raster_layer_set_colors_errors(self): rules = {"rules": "blub"} rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/test_layer/colors", headers=self.user_auth_header, data=json_dumps(rules), content_type="application/json", @@ -309,9 +303,8 @@ def test_1_raster_layer_set_colors_errors(self): rules = {"color": "elevation", "raster": "elevation@PERMANENT"} rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/test_layer/colors", headers=self.user_auth_header, data=json_dumps(rules), content_type="application/json", @@ -331,9 +324,8 @@ def test_1_raster_layer_set_colors_errors(self): rules = {"nonsense": "bla"} rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/test_layer/colors", headers=self.user_auth_header, data=json_dumps(rules), content_type="application/json", @@ -353,9 +345,8 @@ def test_1_raster_layer_set_colors_errors(self): rules = [1, 2, 3] rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/test_layer/colors", headers=self.user_auth_header, data=json_dumps(rules), content_type="application/json", @@ -375,9 +366,8 @@ def test_1_raster_layer_set_colors_errors(self): rules = {"raster": "elevation_nope@PERMANENT"} rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/test_layer/colors", headers=self.user_auth_header, data=json_dumps(rules), content_type="application/json", @@ -400,9 +390,8 @@ def test_1_raster_layer_set_colors_errors(self): rules = {"raster": "elevation"} rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer/colors" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/test_layer/colors", headers=self.user_auth_header, data=json_dumps(rules), content_type="application/json", @@ -420,9 +409,8 @@ def test_1_raster_layer_set_colors_errors(self): # Delete rv = self.server.delete( - URL_PREFIX - + 
"/projects/nc_spm_08/mapsets/%s/raster_layers/test_layer" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/test_layer", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -438,8 +426,8 @@ def test_1_raster_layer_set_colors_errors(self): def test_raster_layer_colors_error_1(self): # Raster does not exist rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevat/colors", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevat/colors", headers=self.user_auth_header, ) pprint(json_load(rv.data)) diff --git a/tests/test_raster_import_pixellimit.py b/tests/test_raster_import_pixellimit.py index 678b881d6..b5e2caae7 100644 --- a/tests/test_raster_import_pixellimit.py +++ b/tests/test_raster_import_pixellimit.py @@ -49,7 +49,7 @@ class ImportRasterLayerPixellimitTestCase(ActiniaResourceTestCaseBase): project = "nc_spm_08" tmp_mapset = "mapset_rasterimport_pixellimit" - endpoint = f"/projects/{project}/mapsets/{tmp_mapset}/processing_async" + endpoint = f"{project}/mapsets/{tmp_mapset}/processing_async" rimport_inp = "elevation" # import resolution with which the process should fail: rimport_res_fail = 0.1 @@ -91,7 +91,7 @@ def test_pixellimit_allowed(self): ], } rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=json_dumps(process_chain), content_type="application/json", @@ -139,7 +139,7 @@ def test_pixellimit_not_allowed(self): ], } rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=json_dumps(process_chain), content_type="application/json", @@ -183,7 +183,7 @@ def test_pixellimit_importer(self): ], } rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=json_dumps(process_chain), content_type="application/json", diff --git a/tests/test_raster_layer.py b/tests/test_raster_layer.py index d744ebe66..a6487464d 100644 --- a/tests/test_raster_layer.py +++ b/tests/test_raster_layer.py @@ -48,8 +48,8 @@ class RasterLayerTestCase(ActiniaResourceTestCaseBase): def test_raster_layer_info(self): rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/PERMANENT/raster_layers/elevation", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -73,8 +73,8 @@ def test_raster_layer_info(self): def test_raster_layer_info_error_1(self): # Raster does not exist rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/PERMANENT/raster_layers/elevat", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevat", headers=self.user_auth_header, ) pprint(json_load(rv.data)) diff --git a/tests/test_raster_layers.py b/tests/test_raster_layers.py index c60139359..75afa3ea4 100644 --- a/tests/test_raster_layers.py +++ b/tests/test_raster_layers.py @@ -45,9 +45,8 @@ class ListRasterLayersTestCase(ActiniaResourceTestCaseBase): def create_raster_layer(self, mapset_name, raster_name): # Remove potentially existing raster layer rv = self.server.delete( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/%s" - % (mapset_name, raster_name), + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + 
f"{mapset_name}/raster_layers/{raster_name}", headers=self.user_auth_header, ) # print(rv.data) @@ -80,8 +79,8 @@ def create_raster_layer(self, mapset_name, raster_name): "version": "1", } rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/processing_async" % mapset_name, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{mapset_name}/processing_async", headers=self.user_auth_header, data=json_dumps(postbody), content_type="application/json", @@ -104,7 +103,8 @@ def create_raster_layer(self, mapset_name, raster_name): def test_list_raster_layers(self): rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT/raster_layers", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers", headers=self.user_auth_header, ) print(rv.data.decode()) @@ -125,7 +125,7 @@ def test_list_raster_layers(self): def test_list_raster_layers_pattern(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/PERMANENT/" "raster_layers?pattern=lsat*", headers=self.user_auth_header, ) @@ -147,7 +147,7 @@ def test_list_raster_layers_pattern(self): def test_list_raster_layers_empty_list(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/PERMANENT/" "raster_layers?pattern=NONE", headers=self.user_auth_header, ) @@ -176,7 +176,7 @@ def test_remove_raster_layers_pattern(self): # # Delete raster layers # rv = self.server.delete( - # f"{URL_PREFIX}/projects/nc_spm_08/mapsets/user1/raster_layers?" + # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/user1/raster_layers?" # "pattern=test_delete_layer_*", # headers=self.user_auth_header # ) @@ -193,7 +193,7 @@ def test_remove_raster_layers_pattern(self): # List raster layer rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{new_mapset}/" + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/{new_mapset}/" "raster_layers?pattern=test_delete_layer_*", headers=self.user_auth_header, ) @@ -214,9 +214,8 @@ def test_remove_raster_layers_pattern(self): # Delete raster layers for map_name in map_list: rv = self.server.delete( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/%s" - % (new_mapset, map_name), + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers/{map_name}", headers=self.user_auth_header, ) print(rv.data.decode()) @@ -238,8 +237,8 @@ def test_rename_raster_layers(self): # Rename raster layer rv = self.server.put( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers" % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers", headers=self.user_auth_header, data=json_dumps(rename_map_list), content_type="application/json", @@ -256,8 +255,8 @@ def test_rename_raster_layers(self): # Rename raster layer rv = self.server.put( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers" % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers", headers=self.user_auth_header, data=json_dumps(rename_map_list), content_type="application/json", @@ -275,9 +274,8 @@ def test_rename_raster_layers(self): # Delete raster layers for map_name in new_map_list: rv = self.server.delete( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/raster_layers/%s" - % (new_mapset, map_name), + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + 
f"{new_mapset}/raster_layers/{map_name}", headers=self.user_auth_header, ) print(rv.data.decode()) diff --git a/tests/test_raster_legend.py b/tests/test_raster_legend.py index b1007559e..376e03884 100644 --- a/tests/test_raster_legend.py +++ b/tests/test_raster_legend.py @@ -45,8 +45,8 @@ class RasterLegendTestCase(ActiniaResourceTestCaseBase): def test_raster_legend_no_args(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" - "raster_layers/elevation/legend", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/legend", headers=self.user_auth_header, ) @@ -61,8 +61,8 @@ def test_raster_legend_no_args(self): def test_raster_legend_args_1(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" - "raster_layers/elevation/legend?at=0,100,0,20", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/legend?at=0,100,0,20", headers=self.user_auth_header, ) @@ -77,8 +77,8 @@ def test_raster_legend_args_1(self): def test_raster_legend_args_2(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" - "raster_layers/elevation/legend?range=100,120", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/legend?range=100,120", headers=self.user_auth_header, ) @@ -93,8 +93,8 @@ def test_raster_legend_args_2(self): def test_raster_legend_args_3(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/legend?&use=100,110,120", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/legend?&use=100,110,120", headers=self.user_auth_header, ) @@ -109,8 +109,8 @@ def test_raster_legend_args_3(self): def test_raster_legend_args_4(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/legend?&fontsize=100", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/legend?&fontsize=100", headers=self.user_auth_header, ) @@ -125,8 +125,8 @@ def test_raster_legend_args_4(self): def test_raster_legend_args_5(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/legend?width=100&height=100", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/legend?width=100&height=100", headers=self.user_auth_header, ) @@ -141,9 +141,9 @@ def test_raster_legend_args_5(self): def test_raster_legend_args_6(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/legend?width=100&height=100&range=100,120&" - "use=105,110,115&at=0,100,0,30", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/legend?width=100&height=100&" + "range=100,120&use=105,110,115&at=0,100,0,30", headers=self.user_auth_header, ) @@ -158,8 +158,8 @@ def test_raster_legend_args_6(self): def test_raster_legend_args_7(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/legend?labelnum=4", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/legend?labelnum=4", headers=self.user_auth_header, ) @@ -175,8 +175,8 @@ def test_raster_legend_args_7(self): def test_raster_legend_args_error_1(self): # Wrong "at" parameter rv = self.server.get( - 
f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/legend?at=-0,-0", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/legend?at=-0,-0", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -194,8 +194,8 @@ def test_raster_legend_args_error_1(self): def test_raster_legend_args_error_2(self): # Wrong witdth rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/legend?width=-20&at=20,40,20,40", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/PERMANENT" + "/raster_layers/elevation/legend?width=-20&at=20,40,20,40", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -213,8 +213,8 @@ def test_raster_legend_args_error_2(self): def test_raster_legend_args_error_3(self): # Wrong range and use rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/legend?range=100,120&use=90,130,115", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/PERMANENT" + "/raster_layers/elevation/legend?range=100,120&use=90,130,115", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -232,8 +232,8 @@ def test_raster_legend_args_error_3(self): def test_raster_legend_args_error_4(self): # Wrong labelnum rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/legend?labelnum=-4", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/PERMANENT" + "/raster_layers/elevation/legend?labelnum=-4", headers=self.user_auth_header, ) pprint(json_load(rv.data)) diff --git a/tests/test_raster_renderer.py b/tests/test_raster_renderer.py index 69e8be5b7..a614a3192 100644 --- a/tests/test_raster_renderer.py +++ b/tests/test_raster_renderer.py @@ -46,8 +46,8 @@ class RasterLayerRendererTestCase(ActiniaResourceTestCaseBase): def test_raster_layer_image_no_args(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/render", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/render", headers=self.user_auth_header, ) @@ -62,8 +62,9 @@ def test_raster_layer_image_no_args(self): def test_raster_layer_image_args_1(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/render?n=228500&s=215000&w=630000&e=645000", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/render?" 
+ "n=228500&s=215000&w=630000&e=645000", headers=self.user_auth_header, ) @@ -78,9 +79,9 @@ def test_raster_layer_image_args_1(self): def test_raster_layer_image_args_2(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/render?n=228500&s=215000&w=630000&e=645000&" - "width=100&height=100", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/render?n=228500&s=215000&" + "w=630000&e=645000&width=100&height=100", headers=self.user_auth_header, ) @@ -95,8 +96,8 @@ def test_raster_layer_image_args_2(self): def test_raster_layer_image_args_3(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/render?width=100&height=100", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/render?width=100&height=100", headers=self.user_auth_header, ) @@ -112,8 +113,8 @@ def test_raster_layer_image_args_3(self): def test_raster_layer_image_args_error_1(self): # North is smaller then south rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevation/render?n=-228500&s=215000", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/render?n=-228500&s=215000", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -129,9 +130,8 @@ def test_raster_layer_image_args_error_1(self): def test_raster_layer_image_args_error_2(self): # Negative size rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/PERMANENT/raster_layers/elevation/" - "render?&width=-100&height=-100", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevation/render?&width=-100&height=-100", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -147,8 +147,8 @@ def test_raster_layer_image_args_error_2(self): def test_raster_layer_image_args_error_3(self): # Raster does not exist rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/raster_layers" - "/elevat/render?", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers/elevat/render?", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -163,8 +163,9 @@ def test_raster_layer_image_args_error_3(self): def test_raster_layer_image_rgb_1(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" - "red=lsat5_1987_10&blue=lsat5_1987_20&green=lsat5_1987_30", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/landsat/" + "render_rgb?red=lsat5_1987_10&blue=lsat5_1987_20&" + "green=lsat5_1987_30", headers=self.user_auth_header, ) @@ -179,9 +180,10 @@ def test_raster_layer_image_rgb_1(self): def test_raster_layer_image_rgb_2(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" - "n=228513&s=214975.5&w=629992.5&e=645012&width=100&height=100" - "&red=lsat5_1987_30&blue=lsat5_1987_20&green=lsat5_1987_10", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/landsat/" + "render_rgb?n=228513&s=214975.5&w=629992.5&e=645012&width=100&" + "height=100&red=lsat5_1987_30&blue=lsat5_1987_20&" + "green=lsat5_1987_10", headers=self.user_auth_header, ) @@ -196,8 +198,8 @@ def test_raster_layer_image_rgb_2(self): def test_raster_layer_image_rgb_3(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" 
- "width=100&height=100" + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/landsat/" + "render_rgb?width=100&height=100" "&red=lsat5_1987_30&blue=lsat5_1987_20&green=lsat5_1987_10", headers=self.user_auth_header, ) @@ -214,8 +216,8 @@ def test_raster_layer_image_rgb_3(self): def test_raster_layer_image_rgb_error_green(self): # No green raster layer rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" - "n=228513&s=214975.5&w=629992.5&e=645012" + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/landsat/" + "render_rgb?n=228513&s=214975.5&w=629992.5&e=645012" "&red=lsat5_1987_30&blue=lsat5_1987_20", headers=self.user_auth_header, ) @@ -233,8 +235,8 @@ def test_raster_layer_image_rgb_error_green(self): def test_raster_layer_image_rgb_error_blue(self): # No blue raster layer rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" - "n=228513&s=214975.5&w=629992.5&e=645012" + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/landsat/" + "render_rgb?n=228513&s=214975.5&w=629992.5&e=645012" "&red=lsat5_1987_30&green=lsat5_1987_20", headers=self.user_auth_header, ) @@ -252,8 +254,8 @@ def test_raster_layer_image_rgb_error_blue(self): def test_raster_layer_image_rgb_error_red(self): # No red raster layer rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" - "n=228513&s=214975.5&w=629992.5&e=645012" + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/landsat/" + "render_rgb?n=228513&s=214975.5&w=629992.5&e=645012" "&blue=lsat5_1987_30&green=lsat5_1987_20", headers=self.user_auth_header, ) @@ -270,8 +272,8 @@ def test_raster_layer_image_rgb_error_red(self): def test_raster_layer_image_rgb_error_wrong_raster(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" - "n=228513&s=214975.5&w=629992.5&e=645012" + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/landsat/" + "render_rgb?n=228513&s=214975.5&w=629992.5&e=645012" "&red=lsat5_1987_30_1&blue=lsat5_1987_20&green=lsat5_1987_10", headers=self.user_auth_header, ) @@ -288,8 +290,9 @@ def test_raster_layer_image_rgb_error_wrong_raster(self): def test_raster_layer_image_rgb_error_mapset_in_name_1(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" - "red=lsat5_1987_10@landsat&blue=lsat5_1987_20&green=lsat5_1987_30", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/landsat/" + "render_rgb?red=lsat5_1987_10@landsat&blue=lsat5_1987_20&" + "green=lsat5_1987_30", headers=self.user_auth_header, ) @@ -305,8 +308,9 @@ def test_raster_layer_image_rgb_error_mapset_in_name_1(self): def test_raster_layer_image_rgb_error_mapset_in_name_2(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" - "red=lsat5_1987_10&blue=lsat5_1987_20@landsat&green=lsat5_1987_30", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/landsat/" + "render_rgb?red=lsat5_1987_10&blue=lsat5_1987_20@landsat&" + "green=lsat5_1987_30", headers=self.user_auth_header, ) @@ -322,8 +326,9 @@ def test_raster_layer_image_rgb_error_mapset_in_name_2(self): def test_raster_layer_image_rgb_error_mapset_in_name_3(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/landsat/render_rgb?" 
- "red=lsat5_1987_10&blue=lsat5_1987_20&green=lsat5_1987_30@landsat", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/landsat/" + "render_rgb?red=lsat5_1987_10&blue=lsat5_1987_20&" + "green=lsat5_1987_30@landsat", headers=self.user_auth_header, ) @@ -339,8 +344,8 @@ def test_raster_layer_image_rgb_error_mapset_in_name_3(self): def test_raster_layer_image_shade_1(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/render_shade?" - "shade=aspect&color=elevation", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/render_shade?shade=aspect&color=elevation", headers=self.user_auth_header, ) @@ -355,8 +360,8 @@ def test_raster_layer_image_shade_1(self): def test_raster_layer_image_shade_2(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/render_shade?" - "width=100&height=100" + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/render_shade?width=100&height=100" "&shade=aspect&color=elevation", headers=self.user_auth_header, ) @@ -372,8 +377,8 @@ def test_raster_layer_image_shade_2(self): def test_raster_layer_image_error_mapset_in_name_1(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/render_shade?" - "&shade=aspect@PERMANENT&color=elevation", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/render_shade?&shade=aspect@PERMANENT&color=elevation", headers=self.user_auth_header, ) @@ -389,8 +394,8 @@ def test_raster_layer_image_error_mapset_in_name_1(self): def test_raster_layer_image_error_mapset_in_name_2(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/render_shade?" - "&shade=aspect&color=elevation@PERMANENT", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/render_shade?&shade=aspect&color=elevation@PERMANENT", headers=self.user_auth_header, ) @@ -406,8 +411,8 @@ def test_raster_layer_image_error_mapset_in_name_2(self): def test_raster_layer_image_error_missing_color_1(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/render_shade?" 
- "&shade=aspect", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/render_shade?&shade=aspect", headers=self.user_auth_header, ) diff --git a/tests/test_raster_upload.py b/tests/test_raster_upload.py index 5d3808549..bf0c33e7b 100644 --- a/tests/test_raster_upload.py +++ b/tests/test_raster_upload.py @@ -101,8 +101,8 @@ def test_upload_raster_userdb(self): Test successful GeoTIFF upload and check against reference raster info """ url = ( - f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.tmp_mapset}" - f"/raster_layers/{self.raster}" + f"{URL_PREFIX}/{self.project_url_part}/{self.project}/mapsets/" + f"{self.tmp_mapset}/raster_layers/{self.raster}" ) multipart_form_data = {"file": open(self.local_raster, "rb")} rv = self.server.post( @@ -130,8 +130,8 @@ def test_upload_raster_userdb(self): def test_upload_raster_globaldb_error(self): """Test Error if raster is uploaded to global DB""" url = ( - f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.mapset}/" - f"raster_layers/{self.raster}" + f"{URL_PREFIX}/{self.project_url_part}/{self.project}/mapsets/" + f"{self.mapset}/raster_layers/{self.raster}" ) multipart_form_data = {"file": open(self.local_raster, "rb")} rv = self.server.post( diff --git a/tests/test_strds_management.py b/tests/test_strds_management.py index f009cbca0..03e81b650 100644 --- a/tests/test_strds_management.py +++ b/tests/test_strds_management.py @@ -48,16 +48,16 @@ project = "nc_spm_08" strds_mapset = "modis_lst" -strds_url = URL_PREFIX + "/projects/%(project)s/mapsets/%(mapset)s/strds" % { - "project": project, - "mapset": strds_mapset, -} +strds_endpoint = f"{project}/mapsets/{strds_mapset}/strds" strds_data = "LST_Day_monthly" class STRDSTestCase(ActiniaResourceTestCaseBase): def test_list_strds(self): - rv = self.server.get(strds_url, headers=self.user_auth_header) + rv = self.server.get( + f"{URL_PREFIX}/{self.project_url_part}/{strds_endpoint}", + headers=self.user_auth_header, + ) print(rv.data) self.assertEqual( rv.status_code, @@ -73,7 +73,8 @@ def test_list_strds(self): def test_list_strds_where_1(self): rv = self.server.get( - strds_url + "?where=start_time == '2015-01-01 00:00:00'", + f"{URL_PREFIX}/{self.project_url_part}/{strds_endpoint}?" + "where=start_time == '2015-01-01 00:00:00'", headers=self.user_auth_header, ) print(rv.data) @@ -91,7 +92,8 @@ def test_list_strds_where_1(self): def test_list_strds_where_2(self): rv = self.server.get( - strds_url + "?where=start_time > '2016-01-01'", + f"{URL_PREFIX}/{self.project_url_part}/{strds_endpoint}?" 
+ "where=start_time > '2016-01-01'", headers=self.user_auth_header, ) print(rv.data) @@ -110,7 +112,9 @@ def test_list_strds_where_2(self): def test_strds_info(self): rv = self.server.get( - strds_url + "/%s" % strds_data, headers=self.user_auth_header + f"{URL_PREFIX}/{self.project_url_part}/{strds_endpoint}" + f"/{strds_data}", + headers=self.user_auth_header, ) print(rv.data) self.assertEqual( @@ -132,9 +136,8 @@ def test_strds_create_remove(self): # Create success rv = self.server.post( - URL_PREFIX - + "/projects/%s/mapsets/%s/strds/test_strds" - % (project, new_mapset), + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds", headers=self.admin_auth_header, data=json_dumps( { @@ -157,9 +160,8 @@ def test_strds_create_remove(self): # Create failure since the strds already exists rv = self.server.post( - URL_PREFIX - + "/projects/%s/mapsets/%s/strds/test_strds" - % (project, new_mapset), + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds", headers=self.admin_auth_header, data=json_dumps( { @@ -181,9 +183,8 @@ def test_strds_create_remove(self): ) # Read/check information of the new strds rv = self.server.get( - URL_PREFIX - + "/projects/%s/mapsets/%s/strds/test_strds" - % (project, new_mapset), + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds", headers=self.user_auth_header, ) print(rv.data) @@ -201,9 +202,8 @@ def test_strds_create_remove(self): self.assertEqual(start_time, "'None'") # Delete the strds rv = self.server.delete( - URL_PREFIX - + "/projects/%s/mapsets/%s/strds/test_strds" - % (project, new_mapset), + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds", headers=self.admin_auth_header, ) print(rv.data) @@ -217,9 +217,8 @@ def test_strds_create_remove(self): ) # Try to delete the strds again to produce an error rv = self.server.delete( - URL_PREFIX - + "/projects/%s/mapsets/%s/strds/test_strds" - % (project, new_mapset), + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds", headers=self.admin_auth_header, ) print(rv.data) @@ -233,9 +232,8 @@ def test_strds_create_remove(self): ) rv = self.server.get( - URL_PREFIX - + "/projects/%s/mapsets/%s/strds/test_strds" - % (project, new_mapset), + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds", headers=self.user_auth_header, ) print(rv.data) @@ -251,7 +249,8 @@ def test_strds_create_remove(self): def test_strds_info_error_1(self): # Raster does not exist rv = self.server.get( - strds_url + "/precipitation_1950_2013_yearly_mm_nope", + f"{URL_PREFIX}/{self.project_url_part}/{strds_endpoint}/" + "precipitation_1950_2013_yearly_mm_nope", headers=self.user_auth_header, ) print(rv.data) @@ -267,7 +266,8 @@ def test_strds_info_error_1(self): def test_list_strds_where_error_1(self): # Wrong where statement rv = self.server.get( - strds_url + "?where=start_timing > '2000-01-01'", + f"{URL_PREFIX}/{self.project_url_part}/{strds_endpoint}?" 
+ "where=start_timing > '2000-01-01'", headers=self.user_auth_header, ) print(rv.data) diff --git a/tests/test_strds_raster_management.py b/tests/test_strds_raster_management.py index cfd92c32b..6392dc0ba 100644 --- a/tests/test_strds_raster_management.py +++ b/tests/test_strds_raster_management.py @@ -43,10 +43,7 @@ project = "nc_spm_08" strds_mapset = "modis_lst" -strds_url = URL_PREFIX + "/projects/%(project)s/mapsets/%(mapset)s/strds" % { - "project": project, - "mapset": strds_mapset, -} +strds_endpoint = f"{project}/mapsets/{strds_mapset}/strds" strds_data = "LST_Day_monthly" new_mapset = "raster_test_mapset" @@ -55,9 +52,8 @@ class STRDSTestCase(ActiniaResourceTestCaseBase): def create_raster_layer(self, project_name, mapset_name, raster_name, val): # Remove potentially existing raster layer rv = self.server.delete( - URL_PREFIX - + "/projects/%s/mapsets/%s/raster_layers/%s" - % (project_name, mapset_name, raster_name), + f"{URL_PREFIX}/{self.project_url_part}/{project_name}/mapsets/" + f"{mapset_name}/raster_layers/{raster_name}", headers=self.admin_auth_header, ) # print(rv.data) @@ -90,9 +86,8 @@ def create_raster_layer(self, project_name, mapset_name, raster_name, val): "version": "1", } rv = self.server.post( - URL_PREFIX - + "/projects/%s/mapsets/%s/processing_async" - % (project_name, mapset_name), + f"{URL_PREFIX}/{self.project_url_part}/{project_name}/mapsets/" + f"{mapset_name}/processing_async", headers=self.admin_auth_header, data=json_dumps(postbody), content_type="application/json", @@ -116,8 +111,8 @@ def create_raster_layer(self, project_name, mapset_name, raster_name, val): def test_strds_creation_error(self): # This must fail, global mapsets are not allowed to modify rv = self.server.post( - f"{URL_PREFIX}/projects/{project}/mapsets/{strds_mapset}/strds/" - "test_strds_register", + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{strds_mapset}/strds/test_strds_register", headers=self.admin_auth_header, data=json_dumps( { @@ -143,8 +138,8 @@ def test_strds_create_register_unregister_1(self): # Create success rv = self.server.post( - f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" - "test_strds_register", + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds_register", headers=self.admin_auth_header, data=json_dumps( { @@ -189,8 +184,8 @@ def test_strds_create_register_unregister_1(self): ] rv = self.server.put( - f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" - "test_strds_register/raster_layers", + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds_register/raster_layers", data=json_dumps(raster_layers), content_type="application/json", headers=self.admin_auth_header, @@ -207,8 +202,8 @@ def test_strds_create_register_unregister_1(self): # Check strds rv = self.server.get( - f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" - "test_strds_register", + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds_register", headers=self.admin_auth_header, ) pprint(json_loads(rv.data)) @@ -232,8 +227,8 @@ def test_strds_create_register_unregister_1(self): raster_layers = ["test_layer_1", "test_layer_2", "test_layer_3"] rv = self.server.delete( - f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" - "test_strds_register/raster_layers", + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds_register/raster_layers", data=json_dumps(raster_layers), 
content_type="application/json", headers=self.user_auth_header, @@ -250,8 +245,8 @@ def test_strds_create_register_unregister_1(self): # Check strds rv = self.server.get( - f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" - "test_strds_register", + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds_register", headers=self.user_auth_header, ) pprint(json_loads(rv.data)) @@ -273,8 +268,8 @@ def test_strds_create_register_unregister_1(self): # Delete the strds rv = self.server.delete( - f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" - "test_strds_register", + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds_register", headers=self.user_auth_header, ) pprint(json_loads(rv.data)) @@ -289,7 +284,8 @@ def test_strds_create_register_unregister_1(self): def test_strds_raster_layer_1(self): rv = self.server.get( - strds_url + "/%s/raster_layers" % strds_data, + f"{URL_PREFIX}/{self.project_url_part}/{strds_endpoint}/" + f"{strds_data}/raster_layers", headers=self.user_auth_header, ) print(rv.data) @@ -307,9 +303,8 @@ def test_strds_raster_layer_1(self): def test_strds_raster_layer_2(self): rv = self.server.get( - strds_url - + "/%s/raster_layers?where=start_time >= '2016-01-01'" - % strds_data, + f"{URL_PREFIX}/{self.project_url_part}/{strds_endpoint}/" + f"{strds_data}/raster_layers?where=start_time >= '2016-01-01'", headers=self.user_auth_header, ) print(rv.data) @@ -328,7 +323,8 @@ def test_strds_raster_layer_2(self): def test_strds_info_error_1(self): # Raster does not exist rv = self.server.get( - strds_url + "/precipitation_1950_2013_yearly_mm_nope", + f"{URL_PREFIX}/{self.project_url_part}/{strds_endpoint}/" + "precipitation_1950_2013_yearly_mm_nope", headers=self.user_auth_header, ) print(rv.data) @@ -344,7 +340,8 @@ def test_strds_info_error_1(self): def test_list_strds_where_error_1(self): # Wrong where statement rv = self.server.get( - strds_url + "/%s/raster_layers?where=start_timing < '2015-01-01'", + f"{URL_PREFIX}/{self.project_url_part}/{strds_endpoint}/" + f"{strds_data}/raster_layers?where=start_timing < '2015-01-01'", headers=self.user_auth_header, ) print(rv.data) diff --git a/tests/test_strds_raster_renderer.py b/tests/test_strds_raster_renderer.py index 8036837e6..71fd10ee9 100644 --- a/tests/test_strds_raster_renderer.py +++ b/tests/test_strds_raster_renderer.py @@ -43,10 +43,9 @@ project = "nc_spm_08" strds_mapset = "modis_lst" -strds_url = URL_PREFIX + "/projects/%(project)s/mapsets/%(mapset)s/strds" % { - "project": project, - "mapset": strds_mapset, -} +# strds_url = ( +# f"{URL_PREFIX}/PROJECT_URL_PART/{project}/mapsets/{strds_mapset}/strds" +# ) strds_data = "LST_Day_monthly" @@ -54,9 +53,8 @@ class STRDSRenderTestCase(ActiniaResourceTestCaseBase): def create_raster_layer(self, project_name, mapset_name, raster_name, val): # Remove potentially existing raster layer rv = self.server.delete( - URL_PREFIX - + "/projects/%s/mapsets/%s/raster_layers/%s" - % (project_name, mapset_name, raster_name), + f"{URL_PREFIX}/{self.project_url_part}/{project_name}/mapsets/" + f"{mapset_name}/raster_layers/{raster_name}", headers=self.admin_auth_header, ) # print(rv.data) @@ -89,9 +87,8 @@ def create_raster_layer(self, project_name, mapset_name, raster_name, val): "version": "1", } rv = self.server.post( - URL_PREFIX - + "/projects/%s/mapsets/%s/processing_async" - % (project_name, mapset_name), + f"{URL_PREFIX}/{self.project_url_part}/{project_name}/mapsets/" + 
f"{mapset_name}/processing_async", headers=self.admin_auth_header, data=json_dumps(postbody), content_type="application/json", @@ -118,8 +115,8 @@ def test_strds_render_1(self): # Create success rv = self.server.post( - f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" - "test_strds_register", + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds_register", headers=self.admin_auth_header, data=json_dumps( { @@ -164,8 +161,8 @@ def test_strds_render_1(self): ] rv = self.server.put( - f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" - "test_strds_register/raster_layers", + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds_register/raster_layers", data=json_dumps(raster_layers), content_type="application/json", headers=self.admin_auth_header, @@ -182,8 +179,9 @@ def test_strds_render_1(self): # Check strds rv = self.server.get( - f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" - "test_strds_register/render?width=100&height=100", + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds_register/render?width=100&" + "height=100", headers=self.admin_auth_header, ) @@ -198,8 +196,9 @@ def test_strds_render_1(self): # Check strds rv = self.server.get( - f"{URL_PREFIX}/projects/{project}/mapsets/{new_mapset}/strds/" - "test_strds_register/render?width=100&height=100&" + f"{URL_PREFIX}/{self.project_url_part}/{project}/mapsets/" + f"{new_mapset}/strds/test_strds_register/render?" + "width=100&height=100&" "start_time=2000-01-01 00:00:00&end_time=2000-01-02 00:00:00", headers=self.admin_auth_header, ) diff --git a/tests/test_vector_layer.py b/tests/test_vector_layer.py index d6e661ac2..47c893aef 100644 --- a/tests/test_vector_layer.py +++ b/tests/test_vector_layer.py @@ -56,46 +56,11 @@ def test_creation_1(self): self.create_vector_layer( "nc_spm_08", new_mapset, "test_layer", region, parameter ) - # rv = self.server.post( - # URL_PREFIX - # + "/projects/nc_spm_08/mapsets/%s/vector_layers/test_layer" - # % new_mapset, - # headers=self.user_auth_header, - # data=json_dumps( - # { - # "region": { - # "n": 228500, - # "s": 215000, - # "e": 645000, - # "w": 630000, - # }, - # "parameter": { - # "npoints": 1, - # "zmin": 1, - # "zmax": 1, - # "seed": 1, - # }, - # } - # ), - # content_type="application/json", - # ) - # print(rv.data) - # self.assertEqual( - # rv.status_code, - # 200, - # "HTML status code is wrong %i" % rv.status_code, - # ) - # self.assertEqual( - # rv.mimetype, - # "application/json", - # "Wrong mimetype %s" % rv.mimetype - # ) # Check rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/vector_layers/test_layer" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/vector_layers/test_layer", headers=self.user_auth_header, ) print(rv.data) @@ -118,9 +83,8 @@ def test_creation_1(self): # Delete rv = self.server.delete( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/vector_layers/test_layer" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/vector_layers/test_layer", headers=self.user_auth_header, ) print(rv.data) @@ -135,9 +99,8 @@ def test_creation_1(self): # Delete fail rv = self.server.delete( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/vector_layers/test_layer" - % new_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/vector_layers/test_layer", headers=self.user_auth_header, ) 
print(rv.data) @@ -152,8 +115,8 @@ def test_creation_1(self): def test_layer_info(self): rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers/" - "boundary_county", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers/boundary_county", headers=self.user_auth_header, ) print(rv.data) @@ -177,8 +140,8 @@ def test_layer_info(self): def test_layer_info_error_1(self): # Raster does not exist rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers/" - "boundary_county_nope", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers/boundary_county_nope", headers=self.user_auth_header, ) print(rv.data) diff --git a/tests/test_vector_layers.py b/tests/test_vector_layers.py index 0856e7bb8..8238b8ab0 100644 --- a/tests/test_vector_layers.py +++ b/tests/test_vector_layers.py @@ -44,7 +44,8 @@ class VectorLayersTestCase(ActiniaResourceTestCaseBase): def test_list_vector_layers(self): rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets/PERMANENT/vector_layers", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers", headers=self.user_auth_header, ) print(rv.data) @@ -65,8 +66,8 @@ def test_list_vector_layers(self): def test_list_vector_layers_pattern(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" - "vector_layers?pattern=elev_*", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers?pattern=elev_*", headers=self.user_auth_header, ) print(rv.data) @@ -89,8 +90,8 @@ def test_list_vector_layers_pattern(self): def test_list_vector_layers_empty_list(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/" - "vector_layers?pattern=NONE", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers?pattern=NONE", headers=self.user_auth_header, ) print(rv.data) diff --git a/tests/test_vector_renderer.py b/tests/test_vector_renderer.py index 1ec4a6fb8..d011550ec 100644 --- a/tests/test_vector_renderer.py +++ b/tests/test_vector_renderer.py @@ -45,8 +45,8 @@ class VectorLayerRendererTestCase(ActiniaResourceTestCaseBase): def test_vectorlayer_image_no_args(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" - "/boundary_county/render", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers/boundary_county/render", headers=self.user_auth_header, ) @@ -61,8 +61,9 @@ def test_vectorlayer_image_no_args(self): def test_vectorlayer_image_args_1(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" - "/boundary_county/render?n=228500&s=215000&w=630000&e=645000", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers/boundary_county/render?" + "n=228500&s=215000&w=630000&e=645000", headers=self.user_auth_header, ) @@ -77,9 +78,9 @@ def test_vectorlayer_image_args_1(self): def test_vectorlayer_image_args_2(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" - "/boundary_county/render?n=228500&s=215000&w=630000&e=645000&" - "width=100&height=100", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers/boundary_county/render?" 
+ "n=228500&s=215000&w=630000&e=645000&width=100&height=100", headers=self.user_auth_header, ) @@ -94,8 +95,9 @@ def test_vectorlayer_image_args_2(self): def test_vectorlayer_image_args_3(self): rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" - "/boundary_county/render?width=100&height=100", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers/boundary_county/render?" + "width=100&height=100", headers=self.user_auth_header, ) @@ -111,8 +113,9 @@ def test_vectorlayer_image_args_3(self): def test_vectorlayer_image_args_error_1(self): # North is smaller then south rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" - "/boundary_county/render?n=-228500&s=215000", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers/boundary_county/render?" + "n=-228500&s=215000", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -128,8 +131,9 @@ def test_vectorlayer_image_args_error_1(self): def test_vectorlayer_image_args_error_2(self): # Negative size rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" - "/boundary_county/render?&width=-100&height=-100", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers/boundary_county/render?" + "&width=-100&height=-100", headers=self.user_auth_header, ) pprint(json_load(rv.data)) @@ -145,8 +149,8 @@ def test_vectorlayer_image_args_error_2(self): def test_vectorlayer_image_args_error_3(self): # Raster does not exist rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/PERMANENT/vector_layers" - "/boundary_county_nomap/render?", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/vector_layers/boundary_county_nomap/render?", headers=self.user_auth_header, ) pprint(json_load(rv.data)) diff --git a/tests/test_vector_upload.py b/tests/test_vector_upload.py index c07c6a531..f45c7c2e0 100644 --- a/tests/test_vector_upload.py +++ b/tests/test_vector_upload.py @@ -138,8 +138,8 @@ def test_upload_vector_geojson_userdb(self): Test successful GeoJSON upload and check against reference vector info """ url = ( - f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.tmp_mapset}" - f"/vector_layers/{self.vector}" + f"{URL_PREFIX}/{self.project_url_part}/{self.project}/" + f"mapsets/{self.tmp_mapset}/vector_layers/{self.vector}" ) multipart_form_data = {"file": open(self.local_geojson, "rb")} rv = self.server.post( @@ -168,8 +168,8 @@ def test_upload_vector_gpkg_userdb(self): Test successful GPKG upload and check against reference vector info """ url = ( - f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.tmp_mapset}" - f"/vector_layers/{self.vector}" + f"{URL_PREFIX}/{self.project_url_part}/{self.project}/" + f"mapsets/{self.tmp_mapset}/vector_layers/{self.vector}" ) multipart_form_data = {"file": open(self.gpkg_file, "rb")} rv = self.server.post( @@ -200,8 +200,8 @@ def test_upload_vector_zipped_shp_userdb(self): vector info """ url = ( - f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.tmp_mapset}" - f"/vector_layers/{self.vector}" + f"{URL_PREFIX}/{self.project_url_part}/{self.project}/" + f"mapsets/{self.tmp_mapset}/vector_layers/{self.vector}" ) multipart_form_data = {"file": open(self.zipped_shp_file, "rb")} rv = self.server.post( @@ -234,8 +234,8 @@ def test_upload_vector_zipped_shp_userdb(self): def test_upload_vector_globaldb_error(self): """Test Error if vector (GPKG) is uploaded to global DB""" url = 
( - f"{URL_PREFIX}/projects/{self.project}/mapsets/{self.mapset}/" - f"vector_layers/{self.vector}" + f"{URL_PREFIX}/{self.project_url_part}/{self.project}/" + f"mapsets/{self.mapset}/vector_layers/{self.vector}" ) multipart_form_data = {"file": open(self.gpkg_file, "rb")} rv = self.server.post( From dc672e1a4375abab7024e07a0e408237e47da0c3 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Fri, 15 Nov 2024 08:24:14 +0100 Subject: [PATCH 12/24] add rest of changes --- docker/actinia-core-alpine/Dockerfile | 2 +- .../ephemeral/download_cache_management.py | 2 - src/actinia_core/rest/mapsets.py | 6 +- src/actinia_core/rest/resource_management.py | 15 ++++- tests/test_async_mapset_merging_strds.py | 46 +++++++------- tests/test_async_process_validation.py | 18 +++--- tests/test_async_processing.py | 28 ++++----- tests/test_async_processing_2.py | 15 +++-- tests/test_async_processing_export.py | 63 ++++++++++++------- tests/test_mapsets.py | 32 +++++----- tests/test_noauth.py | 2 +- tests/test_strds_raster_renderer.py | 3 - tests/test_webhook.py | 6 +- 13 files changed, 134 insertions(+), 104 deletions(-) diff --git a/docker/actinia-core-alpine/Dockerfile b/docker/actinia-core-alpine/Dockerfile index cf281ecb2..9929c696b 100644 --- a/docker/actinia-core-alpine/Dockerfile +++ b/docker/actinia-core-alpine/Dockerfile @@ -1,5 +1,5 @@ FROM mundialis/actinia:alpine-dependencies-2023-12-06 as build-base -FROM osgeo/grass-gis:releasebranch_8_3-alpine as grass +FROM osgeo/grass-gis:releasebranch_8_4-alpine as grass FROM build-base as requirements diff --git a/src/actinia_core/processing/actinia_processing/ephemeral/download_cache_management.py b/src/actinia_core/processing/actinia_processing/ephemeral/download_cache_management.py index e04bd39e1..99217f624 100644 --- a/src/actinia_core/processing/actinia_processing/ephemeral/download_cache_management.py +++ b/src/actinia_core/processing/actinia_processing/ephemeral/download_cache_management.py @@ -44,8 +44,6 @@ ) __maintainer__ = "mundialis GmbH & Co. KG" __email__ = "info@mundialis.de" -__maintainer__ = "mundialis GmbH & Co. 
KG" -__email__ = "info@mundialis.de" class DownloadCacheSize(EphemeralProcessing): diff --git a/src/actinia_core/rest/mapsets.py b/src/actinia_core/rest/mapsets.py index b51cc8887..cf75ef579 100644 --- a/src/actinia_core/rest/mapsets.py +++ b/src/actinia_core/rest/mapsets.py @@ -156,13 +156,13 @@ def get(self): ) else: user = self.user.get_id() - locs_mapsets = redis_interface.get_credentials(user)[ + projects_mapsets = redis_interface.get_credentials(user)[ "permissions" ]["accessible_datasets"] redis_interface.disconnect() mapsets = [] - for project in locs_mapsets: - for mapset in locs_mapsets[project]: + for project in projects_mapsets: + for mapset in projects_mapsets[project]: mapsets.append(f"{project}/{mapset}") try: return make_response( diff --git a/src/actinia_core/rest/resource_management.py b/src/actinia_core/rest/resource_management.py index db0996c9e..e5f6d9133 100644 --- a/src/actinia_core/rest/resource_management.py +++ b/src/actinia_core/rest/resource_management.py @@ -54,6 +54,7 @@ ProcessingResponseListModel, ) from actinia_core.core.interim_results import InterimResult +from actinia_core.version import G_VERSION __license__ = "GPLv3" __author__ = "Sören Gebbert, Anika Weinmann" @@ -353,8 +354,18 @@ def _create_ResourceDataContainer_for_resumption( is None ): return None, None, None - # TODO project project - project = re.findall(r"projects\/(.*?)\/", post_url)[0] + # check grass version for project / location + grass_version_s = G_VERSION["version"] + grass_version = [int(item) for item in grass_version_s.split(".")[:2]] + if grass_version < [8, 4]: + project = re.findall(r"locations\/(.*?)\/", post_url)[0] + elif grass_version < [9, 0]: + project = None + project = re.findall(r"projects\/(.*?)\/", post_url)[0] + if not project: + project = re.findall(r"locations\/(.*?)\/", post_url)[0] + else: + project = re.findall(r"projects\/(.*?)\/", post_url)[0] processing_class = global_config.INTERIM_SAVING_ENDPOINTS[endpoint] if processing_class == "AsyncEphemeralResource": # /projects//processing_async diff --git a/tests/test_async_mapset_merging_strds.py b/tests/test_async_mapset_merging_strds.py index e14800807..f43adde55 100644 --- a/tests/test_async_mapset_merging_strds.py +++ b/tests/test_async_mapset_merging_strds.py @@ -258,21 +258,21 @@ class AsyncMapsetMergingSTRDS(ActiniaResourceTestCaseBase): def tearDown(self): # unlock and delete the test mapsets rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets", headers=self.user_auth_header, ) existing_mapsets = json_load(rv.data)["process_results"] if self.user_mapset in existing_mapsets: rvdellock = self.server.delete( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/lock" % self.user_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{self.user_mapset}/lock", headers=self.admin_auth_header, ) print(rvdellock.data.decode()) rvdel = self.server.delete( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s" % self.user_mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + f"mapsets/{self.user_mapset}", headers=self.admin_auth_header, ) print(rvdel.data.decode()) @@ -282,8 +282,8 @@ def check_strds_in_mapset(self, strds_names): rv = dict() for strds_name in strds_names: rv[strds_name] = self.server.get( - URL_PREFIX - + f"/projects/nc_spm_08/mapsets/{self.user_mapset}/strds", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + f"mapsets/{self.user_mapset}/strds", headers=self.user_auth_header, ) strds = 
json_load(rv[strds_name].data)["process_results"] @@ -296,8 +296,8 @@ def check_strds_in_mapset(self, strds_names): def check_modis_strds(self, raster_dict, strds_name): # check if correct maps are listed in strds strds rv = self.server.get( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" - f"strds/{strds_name}/raster_layers", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{self.user_mapset}/strds/{strds_name}/raster_layers", headers=self.user_auth_header, ) strds_rasters = json_load(rv.data)["process_results"] @@ -318,8 +318,8 @@ def check_modis_strds(self, raster_dict, strds_name): def test_create_strds_in_persistent_user_db(self): rv = self.server.post( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" - "processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{self.user_mapset}/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_create_strds1), content_type="application/json", @@ -339,8 +339,8 @@ def test_create_strds_in_persistent_user_db(self): def test_create_strds_in_persistent_user_db_and_list_it(self): rv = self.server.post( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" - "processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{self.user_mapset}/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_create_strds1), content_type="application/json", @@ -354,8 +354,8 @@ def test_create_strds_in_persistent_user_db_and_list_it(self): ) rv2 = self.server.post( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" - "processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{self.user_mapset}/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_list), content_type="application/json", @@ -374,8 +374,8 @@ def test_create_strds_in_persistent_user_db_and_list_it(self): def test_create_strds_in_persistent_user_db_2(self): rv = self.server.post( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" - "processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{self.user_mapset}/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_create_strds1), content_type="application/json", @@ -389,8 +389,8 @@ def test_create_strds_in_persistent_user_db_2(self): ) rv = self.server.post( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" - "processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{self.user_mapset}/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_create_strds2), content_type="application/json", @@ -411,8 +411,8 @@ def test_create_strds_in_persistent_user_db_2(self): def test_create_strds_in_persistent_user_db_3(self): rv = self.server.post( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" - "processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{self.user_mapset}/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_create_strds1), content_type="application/json", @@ -431,8 +431,8 @@ def test_create_strds_in_persistent_user_db_3(self): self.check_modis_strds(self.raster_dict_modis, "modis") rv = self.server.post( - f"{URL_PREFIX}/projects/nc_spm_08/mapsets/{self.user_mapset}/" - "processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{self.user_mapset}/processing_async", headers=self.admin_auth_header, 
data=json_dumps(process_chain_create_strds3), content_type="application/json", diff --git a/tests/test_async_process_validation.py b/tests/test_async_process_validation.py index 6cfbc72c8..82a19ac08 100644 --- a/tests/test_async_process_validation.py +++ b/tests/test_async_process_validation.py @@ -360,7 +360,8 @@ class AsyncProcessValidationTestCase(ActiniaResourceTestCaseBase): def test_async_processing_legacy(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_legacy), content_type="application/json", @@ -376,7 +377,8 @@ def test_async_processing_legacy(self): def test_async_processing_new(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_new), content_type="application/json", @@ -398,8 +400,8 @@ def test_async_processing_new(self): ) def test_async_processing_new_ndvi(self): rv = self.server.post( - URL_PREFIX - + "/projects/latlong_wgs84/process_chain_validation_async", + f"{URL_PREFIX}/{self.project_url_part}/latlong_wgs84/" + "process_chain_validation_async", headers=self.admin_auth_header, data=json_dumps(process_chain_ndvi), content_type="application/json", @@ -422,8 +424,8 @@ def test_async_processing_new_ndvi(self): ) def test_async_processing_new_ndvi_export_landsat(self): rv = self.server.post( - URL_PREFIX - + "/projects/latlong_wgs84/process_chain_validation_async", + f"{URL_PREFIX}/{self.project_url_part}/latlong_wgs84/" + "process_chain_validation_async", headers=self.admin_auth_header, data=json_dumps(process_chain_ndvi_landsat), content_type="application/json", @@ -445,8 +447,8 @@ def test_async_processing_new_ndvi_export_landsat(self): ) def test_async_processing_landsat(self): rv = self.server.post( - URL_PREFIX - + "/projects/latlong_wgs84/process_chain_validation_async", + f"{URL_PREFIX}/{self.project_url_part}/latlong_wgs84/" + "process_chain_validation_async", headers=self.admin_auth_header, data=json_dumps(process_chain_landsat), content_type="application/json", diff --git a/tests/test_async_processing.py b/tests/test_async_processing.py index 3334a6cab..55e1234b5 100644 --- a/tests/test_async_processing.py +++ b/tests/test_async_processing.py @@ -174,7 +174,7 @@ class AsyncProcessTestCase(ActiniaResourceTestCaseBase): def test_async_processing(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain), content_type="application/json", @@ -188,7 +188,7 @@ def test_async_processing(self): def test_async_processing_termination(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain), content_type="application/json", @@ -210,7 +210,7 @@ def test_async_processing_termination(self): while True: rv = self.server.get( - URL_PREFIX + "/resources/%s/%s" % (rv_user_id, rv_resource_id), + f"{URL_PREFIX}/resources/{rv_user_id}/{rv_resource_id}", headers=self.admin_auth_header, ) print("Get", rv.data.decode()) @@ -226,7 +226,7 @@ def test_async_processing_termination(self): # 
Send the termination request, again and again :) rv = self.server.delete( - URL_PREFIX + "/resources/%s/%s" % (rv_user_id, rv_resource_id), + f"{URL_PREFIX}/resources/{rv_user_id}/{rv_resource_id}", headers=self.admin_auth_header, ) print("Delete", rv.data.decode()) @@ -245,7 +245,7 @@ def test_async_processing_termination(self): def test_async_processing_large_region(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_region), content_type="application/json", @@ -265,7 +265,7 @@ def test_async_processing_error_1(self): """ rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_1), content_type="application/json", @@ -285,7 +285,7 @@ def test_async_processing_error_2(self): """ rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_2), content_type="application/json", @@ -300,7 +300,7 @@ def test_async_processing_error_2(self): def test_async_processing_error_3(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_3), content_type="application/json", @@ -315,7 +315,7 @@ def test_async_processing_error_3(self): def test_async_processing_error_4(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_4), content_type="application/json", @@ -332,7 +332,7 @@ def test_async_processing_error_5(self): """No JSON payload error""" rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.user_auth_header, ) @@ -348,7 +348,7 @@ def test_async_processing_error_5(self): def test_async_processing_error_6(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_5), content_type="application/json", @@ -363,7 +363,7 @@ def test_async_processing_error_6(self): def test_async_processing_error_7(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.user_auth_header, data=json_dumps(process_chain_error_6), content_type="application/json", @@ -385,7 +385,7 @@ def test_async_processing_error_8(self): pc[3]["inputs"]["elevation"] = "elevation@NO_Mapset" rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(pc), content_type="application/json", @@ -408,7 +408,7 @@ def test_async_processing_error_9(self): pc[1]["flags"] = "p" rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", 
headers=self.user_auth_header, data=json_dumps(pc), content_type="application/json", diff --git a/tests/test_async_processing_2.py b/tests/test_async_processing_2.py index a525d70dc..cf454ca75 100644 --- a/tests/test_async_processing_2.py +++ b/tests/test_async_processing_2.py @@ -324,7 +324,7 @@ class AsyncProcess2TestCase(ActiniaResourceTestCaseBase): def test_async_processing_legacy(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_legacy), content_type="application/json", @@ -339,7 +339,7 @@ def test_async_processing_legacy(self): def test_async_processing_rmapcalc(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_rmapcalc), content_type="application/json", @@ -354,7 +354,7 @@ def test_async_processing_rmapcalc(self): def test_async_processing_new(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_new), content_type="application/json", @@ -375,7 +375,8 @@ def test_async_processing_new(self): ) def test_async_processing_new_ndvi(self): rv = self.server.post( - URL_PREFIX + "/projects/latlong_wgs84/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/latlong_wgs84/" + "processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_ndvi), content_type="application/json", @@ -396,7 +397,8 @@ def test_async_processing_new_ndvi(self): ) def test_async_processing_new_ndvi_export(self): rv = self.server.post( - URL_PREFIX + "/projects/latlong_wgs84/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/latlong_wgs84/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_ndvi), content_type="application/json", @@ -417,7 +419,8 @@ def test_async_processing_new_ndvi_export(self): ) def test_async_processing_new_ndvi_export_landsat(self): rv = self.server.post( - URL_PREFIX + "/projects/latlong_wgs84/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/latlong_wgs84/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_ndvi_landsat), content_type="application/json", diff --git a/tests/test_async_processing_export.py b/tests/test_async_processing_export.py index dd50ffcc9..440c56a45 100644 --- a/tests/test_async_processing_export.py +++ b/tests/test_async_processing_export.py @@ -326,7 +326,8 @@ class AsyncProcessExportTestCaseUser(ActiniaResourceTestCaseBase): def test_async_processing(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_short), content_type="application/json", @@ -356,7 +357,8 @@ def test_long_fail(self): # The process num limit exceeds the credentials settings of the user rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -373,7 +375,8 @@ def test_long_run_fail(self): 
# The process time limit exceeds the credentials settings of the user rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_short_long_run), content_type="application/json", @@ -390,7 +393,8 @@ def test_large_Region_fail(self): # The cell limit exceeds the credentials settings of the user rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_short_large_region), content_type="application/json", @@ -405,7 +409,8 @@ def test_large_Region_fail(self): def test_termination_1(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_short_long_run), content_type="application/json", @@ -413,8 +418,7 @@ def test_termination_1(self): resp = json_loads(rv.data) # Send the termination request self.server.delete( - URL_PREFIX - + "/resources/%s/%s" % (resp["user_id"], resp["resource_id"]), + f"{URL_PREFIX}/resources/{resp['user_id']}/{resp['resource_id']}", headers=self.admin_auth_header, ) @@ -428,7 +432,8 @@ def test_termination_1(self): def test_termination_2(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_short_long_run), content_type="application/json", @@ -436,8 +441,7 @@ def test_termination_2(self): resp = json_loads(rv.data) # Send the termination request self.server.delete( - URL_PREFIX - + "/resources/%s/%s" % (resp["user_id"], resp["resource_id"]), + f"{URL_PREFIX}/resources/{resp['user_id']}/{resp['resource_id']}", headers=self.user_auth_header, ) @@ -451,7 +455,8 @@ def test_termination_2(self): def test_termination_3(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.root_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -459,8 +464,7 @@ def test_termination_3(self): resp = json_loads(rv.data) # Send the termination request self.server.delete( - URL_PREFIX - + "/resources/%s/%s" % (resp["user_id"], resp["resource_id"]), + f"{URL_PREFIX}/resources/{resp['user_id']}/{resp['resource_id']}", headers=self.root_auth_header, ) @@ -474,7 +478,8 @@ def test_termination_3(self): def test_error_1(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_error_1), content_type="application/json", @@ -489,7 +494,8 @@ def test_error_1(self): def test_error_2(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_error_2), content_type="application/json", @@ -504,7 +510,8 @@ def test_error_2(self): def test_error_3(self): rv = self.server.post( - URL_PREFIX + 
"/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_error_3), content_type="application/json", @@ -519,7 +526,8 @@ def test_error_3(self): def test_error_4(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_error_4), content_type="application/json", @@ -535,7 +543,8 @@ def test_error_4(self): def test_stac_export(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.user_auth_header, data=json_dumps(process_chain_short_stac), content_type="application/json", @@ -562,7 +571,8 @@ def test_stac_export(self): class AsyncProcessExportTestCaseAdmin(ActiniaResourceTestCaseBase): def test_async_processing(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -588,7 +598,8 @@ def test_async_processing(self): def test_termination(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -611,7 +622,8 @@ def test_termination(self): def test_error_1(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_error_1), content_type="application/json", @@ -626,7 +638,8 @@ def test_error_1(self): def test_error_2(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_error_2), content_type="application/json", @@ -641,7 +654,8 @@ def test_error_2(self): def test_error_3(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_error_3), content_type="application/json", @@ -656,7 +670,8 @@ def test_error_3(self): def test_error_4(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=json_dumps(process_chain_error_4), content_type="application/json", diff --git a/tests/test_mapsets.py b/tests/test_mapsets.py index 97b19010a..fb96560e5 100644 --- a/tests/test_mapsets.py +++ b/tests/test_mapsets.py @@ -70,21 +70,22 @@ def setUpClass(cls): def tearDown(self): # unlock and delete the test mapsets rv = self.server.get( - URL_PREFIX + "/projects/nc_spm_08/mapsets", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets", headers=self.user_auth_header, ) existing_mapsets = json_load(rv.data)["process_results"] for mapset in self.test_mapsets: if 
mapset in existing_mapsets: rvdellock = self.server.delete( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/%s/lock" % mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{mapset}/lock", headers=self.admin_auth_header, ) print(rvdellock.data.decode()) rvdel = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/%s" % mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets" + f"/{mapset}", headers=self.admin_auth_header, ) print(rvdel.data.decode()) @@ -94,11 +95,12 @@ def test_two_locked_mapsets(self): for mapset in self.test_mapsets: self.create_new_mapset(mapset) self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/%s/lock" % mapset, + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{mapset}/lock", headers=self.root_auth_header, ) rv = self.server.get( - URL_PREFIX + "/mapsets?status=locked", + f"{URL_PREFIX}/mapsets?status=locked", headers=self.root_auth_header, ) self.assertEqual( @@ -116,7 +118,7 @@ def test_two_locked_mapsets(self): self.assertIn( ref_mapset, mapset_list, - "%s is not in the list of locked mapsets" % ref_mapset, + f"{ref_mapset} is not in the list of locked mapsets", ) message = rvdata["message"] @@ -133,22 +135,22 @@ def test_two_locked_mapsets(self): def test_user_error(self): # Test correct behaviour if user role is not admin rv = self.server.get( - URL_PREFIX + "/mapsets?status=locked", + f"{URL_PREFIX}/mapsets?status=locked", headers=self.user_auth_header, ) self.assertEqual( - rv.status_code, 401, "Status code is not 401: %s" % rv.status_code + rv.status_code, 401, f"Status code is not 401: {rv.status_code}" ) def test_user_own_mapsets(self): """Test if user can list available mapsets""" rv = self.server.get( - URL_PREFIX + "/mapsets", headers=self.test_user_auth_header + f"{URL_PREFIX}/mapsets", headers=self.test_user_auth_header ) self.assertEqual( rv.status_code, 200, - "HTML status code is wrong %i" % rv.status_code, + f"HTML status code is wrong {rv.status_code}", ) rvdata = json_load(rv.data) mapsets = rvdata["available_mapsets"] @@ -161,13 +163,13 @@ def test_user_own_mapsets(self): def test_superadmin_user_mapsets(self): """Test if superadmin can list available mapsets from test_user""" rv = self.server.get( - URL_PREFIX + f"/mapsets?user={self.test_user}", + f"{URL_PREFIX}/mapsets?user={self.test_user}", headers=self.root_auth_header, ) self.assertEqual( rv.status_code, 200, - "HTML status code is wrong %i" % rv.status_code, + f"HTML status code is wrong {rv.status_code}", ) rvdata = json_load(rv.data) mapsets = rvdata["available_mapsets"] @@ -180,11 +182,11 @@ def test_superadmin_user_mapsets(self): def test_user_user_mapsets(self): # Test if test_user can list available mapsets from user rv = self.server.get( - URL_PREFIX + f"/mapsets?user={self.test_user}", + f"{URL_PREFIX}/mapsets?user={self.test_user}", headers=self.user_auth_header, ) self.assertEqual( - rv.status_code, 401, "Status code is not 401: %s" % rv.status_code + rv.status_code, 401, f"Status code is not 401: {rv.status_code}" ) diff --git a/tests/test_noauth.py b/tests/test_noauth.py index 1143f08a0..a978cde8a 100644 --- a/tests/test_noauth.py +++ b/tests/test_noauth.py @@ -22,7 +22,7 @@ ####### """ -Tests: Projct test case +Tests: No authentication test case """ import os from flask.json import dumps as json_dumps diff --git a/tests/test_strds_raster_renderer.py b/tests/test_strds_raster_renderer.py index 71fd10ee9..da9edfc21 100644 --- a/tests/test_strds_raster_renderer.py +++ b/tests/test_strds_raster_renderer.py @@ 
-43,9 +43,6 @@ project = "nc_spm_08" strds_mapset = "modis_lst" -# strds_url = ( -# f"{URL_PREFIX}/PROJECT_URL_PART/{project}/mapsets/{strds_mapset}/strds" -# ) strds_data = "LST_Day_monthly" diff --git a/tests/test_webhook.py b/tests/test_webhook.py index b233998a5..d0c8b781b 100644 --- a/tests/test_webhook.py +++ b/tests/test_webhook.py @@ -145,7 +145,8 @@ def poll_job(self, resp_data): # time.sleep(3) # tm = Template(json_dumps(pc)) # rv = self.server.post( - # URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + # "processing_async_export", # headers=self.admin_auth_header, # data=tm.render(sleep=1), # content_type="application/json", @@ -158,7 +159,8 @@ def test_finished_webhook_retries(self): """ tm = Template(json_dumps(pc)) rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/processing_async_export", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "processing_async_export", headers=self.admin_auth_header, data=tm.render(sleep=30), content_type="application/json", From bd3a7a03bb9846eb05768af44241325427d205f3 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Fri, 15 Nov 2024 08:25:59 +0100 Subject: [PATCH 13/24] CT review --- src/actinia_core/testsuite.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/actinia_core/testsuite.py b/src/actinia_core/testsuite.py index c74261c9b..c4f4683a9 100644 --- a/src/actinia_core/testsuite.py +++ b/src/actinia_core/testsuite.py @@ -481,7 +481,6 @@ def create_vector_layer(self, project, mapset, vector, region, parameter): self.waitAsyncStatusAssertHTTP( rv, headers=self.user_auth_header, - # headers=self.admin_auth_header, http_status=200, status="finished", ) From 7498542dd7b7f63aafffc49645d037f74249967d Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Fri, 15 Nov 2024 08:38:34 +0100 Subject: [PATCH 14/24] CT review --- tests/test_async_mapset_merging.py | 218 ++++++++++++++--------------- 1 file changed, 109 insertions(+), 109 deletions(-) diff --git a/tests/test_async_mapset_merging.py b/tests/test_async_mapset_merging.py index 7a7daaba5..db5bda0f6 100644 --- a/tests/test_async_mapset_merging.py +++ b/tests/test_async_mapset_merging.py @@ -149,115 +149,115 @@ def check_remove_test_mapsets(self): "Wrong mimetype %s" % rv.mimetype, ) - # def test_1_merge_no_access_to_target_mapset_error(self): - # """No access to target mapset error test""" - # # Try merge source mapsets into target mapset - # rv = self.server.post( - # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/user1/" - # "merging_async", - # headers=self.user_auth_header, - # data=json_dumps(["Source_A", "Source_B"]), - # content_type="application/json", - # ) - - # print(rv.data) - # self.waitAsyncStatusAssertHTTP( - # rv, - # headers=self.user_auth_header, - # http_status=400, - # status="error", - # message_check="AsyncProcessError", - # ) - - # def test_2_merge_missing_target_mapset_error(self): - # """Missing target mapset test""" - # self.check_remove_test_mapsets() - - # # Try merge source mapsets into target mapset - # rv = self.server.post( - # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" - # "merging_async", - # headers=self.admin_auth_header, - # data=json_dumps(["Source_A", "Source_B"]), - # content_type="application/json", - # ) - # self.waitAsyncStatusAssertHTTP( - # rv, - # headers=self.admin_auth_header, - # http_status=400, - # status="error", - # message_check="AsyncProcessError", - # ) - - # def test_3_merge_missing_source_mapsets_error(self): - # """Test 
error for missing source mapsets""" - # self.check_remove_test_mapsets() - - # # Create target mapset - # rv = self.server.post( - # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target", - # headers=self.admin_auth_header, - # ) - # print(rv.data) - # self.assertEqual( - # rv.status_code, - # 200, - # "HTML status code is wrong %i" % rv.status_code, - # ) - # self.assertEqual( - # rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype - # ) - - # # Try merge source mapsets into target mapset - # rv = self.server.post( - # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" - # "merging_async", - # headers=self.admin_auth_header, - # data=json_dumps(["Source_A", "Source_B"]), - # content_type="application/json", - # ) - # self.waitAsyncStatusAssertHTTP( - # rv, - # headers=self.admin_auth_header, - # http_status=400, - # status="error", - # message_check="AsyncProcessError", - # ) - - # def test_4_merge_empty_mapset_list(self): - # """Test error for missing source mapsets""" - # self.check_remove_test_mapsets() - - # # Create target mapset - # rv = self.server.post( - # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target", - # headers=self.admin_auth_header, - # ) - # print(rv.data) - # self.assertEqual( - # rv.status_code, - # 200, - # "HTML status code is wrong %i" % rv.status_code, - # ) - # self.assertEqual( - # rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype - # ) - - # # Try merge source mapsets into target mapset - # rv = self.server.post( - # f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" - # "merging_async", - # headers=self.admin_auth_header, - # data=json_dumps([]), - # content_type="application/json", - # ) - # self.waitAsyncStatusAssertHTTP( - # rv, - # headers=self.admin_auth_header, - # http_status=400, - # status="error", - # message_check="AsyncProcessError", - # ) + def test_1_merge_no_access_to_target_mapset_error(self): + """No access to target mapset error test""" + # Try merge source mapsets into target mapset + rv = self.server.post( + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/user1/" + "merging_async", + headers=self.user_auth_header, + data=json_dumps(["Source_A", "Source_B"]), + content_type="application/json", + ) + + print(rv.data) + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.user_auth_header, + http_status=400, + status="error", + message_check="AsyncProcessError", + ) + + def test_2_merge_missing_target_mapset_error(self): + """Missing target mapset test""" + self.check_remove_test_mapsets() + + # Try merge source mapsets into target mapset + rv = self.server.post( + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" + "merging_async", + headers=self.admin_auth_header, + data=json_dumps(["Source_A", "Source_B"]), + content_type="application/json", + ) + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=400, + status="error", + message_check="AsyncProcessError", + ) + + def test_3_merge_missing_source_mapsets_error(self): + """Test error for missing source mapsets""" + self.check_remove_test_mapsets() + + # Create target mapset + rv = self.server.post( + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target", + headers=self.admin_auth_header, + ) + print(rv.data) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) + + # Try merge source mapsets into 
target mapset + rv = self.server.post( + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" + "merging_async", + headers=self.admin_auth_header, + data=json_dumps(["Source_A", "Source_B"]), + content_type="application/json", + ) + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=400, + status="error", + message_check="AsyncProcessError", + ) + + def test_4_merge_empty_mapset_list(self): + """Test error for missing source mapsets""" + self.check_remove_test_mapsets() + + # Create target mapset + rv = self.server.post( + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target", + headers=self.admin_auth_header, + ) + print(rv.data) + self.assertEqual( + rv.status_code, + 200, + "HTML status code is wrong %i" % rv.status_code, + ) + self.assertEqual( + rv.mimetype, "application/json", "Wrong mimetype %s" % rv.mimetype + ) + + # Try merge source mapsets into target mapset + rv = self.server.post( + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/Target/" + "merging_async", + headers=self.admin_auth_header, + data=json_dumps([]), + content_type="application/json", + ) + self.waitAsyncStatusAssertHTTP( + rv, + headers=self.admin_auth_header, + http_status=400, + status="error", + message_check="AsyncProcessError", + ) def test_5_merge_two_mapsets(self): """Test the merging of two mapsets into a target mapset""" From 8c1d5cb0c8f46b86ad067ddf0901542f3db95c5c Mon Sep 17 00:00:00 2001 From: Anika Weinmann <37300249+anikaweinmann@users.noreply.github.com> Date: Fri, 15 Nov 2024 08:57:22 +0100 Subject: [PATCH 15/24] Update tests/test_job_resumption.py Co-authored-by: Carmen Tawalika --- tests/test_job_resumption.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_job_resumption.py b/tests/test_job_resumption.py index 6a0c669e6..43f18b43e 100644 --- a/tests/test_job_resumption.py +++ b/tests/test_job_resumption.py @@ -4,7 +4,7 @@ # performance processing of geographical data that uses GRASS GIS for # computational tasks. For details, see https://actinia.mundialis.de/ # -# Copyright (c) 2021-20224 Sören Gebbert and mundialis GmbH & Co. KG +# Copyright (c) 2021-2024 Sören Gebbert and mundialis GmbH & Co. 
KG # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by From 4cc99f5d7267bb7cf5fef210285cce496c7bab5d Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Fri, 15 Nov 2024 10:24:06 +0100 Subject: [PATCH 16/24] fix endpoints --- src/actinia_core/core/common/api_logger.py | 4 +- src/actinia_core/endpoints.py | 168 +++++++++++++------- src/actinia_core/rest/base/resource_base.py | 4 +- 3 files changed, 114 insertions(+), 62 deletions(-) diff --git a/src/actinia_core/core/common/api_logger.py b/src/actinia_core/core/common/api_logger.py index f24b33b28..a061cf609 100644 --- a/src/actinia_core/core/common/api_logger.py +++ b/src/actinia_core/core/common/api_logger.py @@ -110,7 +110,9 @@ def add_entry(self, user_id, http_request): """ api_info = { - "endpoint": http_request.endpoint, + # For deprecated location endpoints remove "_locations" from + # endpoint class name + "endpoint": http_request.endpoint.replace("_locations", ""), "method": http_request.method, "path": http_request.path, "request_url": http_request.url, diff --git a/src/actinia_core/endpoints.py b/src/actinia_core/endpoints.py index fdad67c92..80f87e815 100644 --- a/src/actinia_core/endpoints.py +++ b/src/actinia_core/endpoints.py @@ -133,6 +133,17 @@ __email__ = "info@mundialis.de" +def get_endpoint_class_name(endpoint_class, projects_url_part="projects"): + endpoint_class_name = endpoint_class.__name__.lower() + if "project" in endpoint_class_name: + name = endpoint_class_name.replace("project", projects_url_part[:-1]) + elif projects_url_part != "projects": + name = f"{endpoint_class_name}_{projects_url_part}" + else: + name = endpoint_class_name + return name + + def create_project_endpoints(projects_url_part="projects"): """Function to add resources with "project" inside the endpoint url.
@@ -141,50 +152,65 @@ def create_project_endpoints(projects_url_part="projects"): URL; to add deprecated location endpoints set it to "locations" """ + endpoint_str = "" + if projects_url_part != "projects": + endpoint_str = projects_url_part.title() + # Project management flask_api.add_resource( ListProjectsResource, f"/{projects_url_part}", - endpoint=f"/{projects_url_part}", + endpoint=get_endpoint_class_name( + ListProjectsResource, projects_url_part + ), ) flask_api.add_resource( ProjectManagementResourceUser, f"/{projects_url_part}//info", - endpoint=f"/{projects_url_part}//info", + endpoint=get_endpoint_class_name( + ProjectManagementResourceUser, projects_url_part + ), ) flask_api.add_resource( ProjectManagementResourceAdminUser, f"/{projects_url_part}/", - endpoint=f"/{projects_url_part}/", + endpoint=get_endpoint_class_name( + ProjectManagementResourceAdminUser, projects_url_part + ), ) # Mapset management flask_api.add_resource( ListMapsetsResource, f"/{projects_url_part}//mapsets", - endpoint=f"/{projects_url_part}//mapsets", + endpoint=get_endpoint_class_name( + ListMapsetsResource, projects_url_part + ), ) flask_api.add_resource( MapsetManagementResourceUser, f"/{projects_url_part}//mapsets/" "/info", - endpoint=f"/{projects_url_part}//mapsets/" - "/info", + endpoint=get_endpoint_class_name( + MapsetManagementResourceUser, projects_url_part + ), ) flask_api.add_resource( MapsetManagementResourceAdmin, f"/{projects_url_part}//mapsets/" "", - endpoint=f"/{projects_url_part}//mapsets/" - "", + endpoint=get_endpoint_class_name( + MapsetManagementResourceAdmin, projects_url_part + ), ) flask_api.add_resource( MapsetLockManagementResource, f"/{projects_url_part}//mapsets/" "/lock", - endpoint=f"/{projects_url_part}//mapsets/" - "/lock", + endpoint=get_endpoint_class_name( + MapsetLockManagementResource, projects_url_part + ), ) # Raster management @@ -192,101 +218,115 @@ def create_project_endpoints(projects_url_part="projects"): RasterLayersResource, f"/{projects_url_part}//mapsets/" "/raster_layers", - endpoint=f"/{projects_url_part}//mapsets/" - "/raster_layers", + endpoint=get_endpoint_class_name( + RasterLayersResource, projects_url_part + ), ) flask_api.add_resource( RasterLayerResource, f"/{projects_url_part}//mapsets/" "/raster_layers/", - endpoint=f"/{projects_url_part}//mapsets/" - "/raster_layers/", + endpoint=get_endpoint_class_name( + RasterLayerResource, projects_url_part + ), ) flask_api.add_resource( SyncEphemeralRasterLegendResource, f"/{projects_url_part}//mapsets/" "/raster_layers//legend", - endpoint=f"/{projects_url_part}//mapsets/" - "/raster_layers//legend", + endpoint=get_endpoint_class_name( + SyncEphemeralRasterLegendResource, projects_url_part + ), ) flask_api.add_resource( SyncPersistentRasterColorsResource, f"/{projects_url_part}//mapsets/" "/raster_layers//colors", - endpoint=f"/{projects_url_part}//mapsets/" - "/raster_layers//colors", + endpoint=get_endpoint_class_name( + SyncPersistentRasterColorsResource, projects_url_part + ), ) flask_api.add_resource( SyncEphemeralRasterRendererResource, f"/{projects_url_part}//mapsets/" "/raster_layers//render", - endpoint=f"/{projects_url_part}//mapsets/" - "/raster_layers//render", + endpoint=get_endpoint_class_name( + SyncEphemeralRasterRendererResource, projects_url_part + ), ) flask_api.add_resource( SyncEphemeralRasterRGBRendererResource, f"/{projects_url_part}//mapsets/" "/render_rgb", - endpoint=f"/{projects_url_part}//mapsets/" - "/render_rgb", + endpoint=get_endpoint_class_name( + 
SyncEphemeralRasterRGBRendererResource, projects_url_part + ), ) flask_api.add_resource( SyncEphemeralRasterShapeRendererResource, f"/{projects_url_part}//mapsets/" "/render_shade", - endpoint=f"/{projects_url_part}//mapsets/" - "/render_shade", + endpoint=get_endpoint_class_name( + SyncEphemeralRasterShapeRendererResource, projects_url_part + ), ) # STRDS management flask_api.add_resource( SyncSTRDSListerResource, f"/{projects_url_part}//mapsets/" "/strds", - endpoint=f"/{projects_url_part}//mapsets/" - "/strds", + endpoint=get_endpoint_class_name( + SyncSTRDSListerResource, projects_url_part + ), ) flask_api.add_resource( STRDSManagementResource, f"/{projects_url_part}//mapsets/" "/strds/", - endpoint=f"/{projects_url_part}//mapsets/" - "/strds/", + endpoint=get_endpoint_class_name( + STRDSManagementResource, projects_url_part + ), ) flask_api.add_resource( STRDSRasterManagement, f"/{projects_url_part}//mapsets/" "/strds//raster_layers", - endpoint=f"/{projects_url_part}//mapsets/" - "/strds//raster_layers", + endpoint=get_endpoint_class_name( + STRDSRasterManagement, projects_url_part + ), ) # Vector management flask_api.add_resource( VectorLayersResource, f"/{projects_url_part}//mapsets/" "/vector_layers", - endpoint=f"/{projects_url_part}//mapsets/" - "/vector_layers", + endpoint=get_endpoint_class_name( + VectorLayersResource, projects_url_part + ), ) flask_api.add_resource( VectorLayerResource, f"/{projects_url_part}//mapsets/" "/vector_layers/", - endpoint=f"/{projects_url_part}//mapsets/" - "/vector_layers/", + endpoint=get_endpoint_class_name( + VectorLayerResource, projects_url_part + ), ) flask_api.add_resource( SyncEphemeralVectorRendererResource, f"/{projects_url_part}//mapsets/" "/vector_layers//render", - endpoint=f"/{projects_url_part}//mapsets/" - "/vector_layers//render", + endpoint=get_endpoint_class_name( + SyncEphemeralVectorRendererResource, projects_url_part + ), ) flask_api.add_resource( SyncEphemeralSTRDSRendererResource, f"/{projects_url_part}//mapsets/" "/strds//render", - endpoint=f"/{projects_url_part}//mapsets/" - "/strds//render", + endpoint=get_endpoint_class_name( + SyncEphemeralSTRDSRendererResource, projects_url_part + ), ) # Validation @@ -294,76 +334,84 @@ def create_project_endpoints(projects_url_part="projects"): AsyncProcessValidationResource, f"/{projects_url_part}//" "process_chain_validation_async", - endpoint=f"/{projects_url_part}//" - "process_chain_validation_async", + endpoint=get_endpoint_class_name( + AsyncProcessValidationResource, projects_url_part + ), ) flask_api.add_resource( SyncProcessValidationResource, f"/{projects_url_part}//" "process_chain_validation_sync", - endpoint=f"/{projects_url_part}//" - "process_chain_validation_sync", + endpoint=get_endpoint_class_name( + SyncProcessValidationResource, projects_url_part + ), ) # Async processing flask_api.add_resource( AsyncEphemeralResource, f"/{projects_url_part}//processing_async", - endpoint=f"/{projects_url_part}//" - "processing_async", + endpoint=get_endpoint_class_name( + AsyncEphemeralResource, projects_url_part + ), ) flask_api.add_resource( AsyncEphemeralExportResource, f"/{projects_url_part}//processing_async_export", - endpoint=f"/{projects_url_part}//" - "processing_async_export", + endpoint=get_endpoint_class_name( + AsyncEphemeralExportResource, projects_url_part + ), ) flask_api.add_resource( AsyncEphemeralExportS3Resource, f"/{projects_url_part}//" "processing_async_export_s3", - endpoint=f"/{projects_url_part}//" - "processing_async_export_s3", + 
endpoint=get_endpoint_class_name( + AsyncEphemeralExportS3Resource, projects_url_part + ), ) flask_api.add_resource( AsyncEphemeralExportGCSResource, f"/{projects_url_part}//" "processing_async_export_gcs", - endpoint=f"/{projects_url_part}//" - "processing_async_export_gcs", + endpoint=get_endpoint_class_name( + AsyncEphemeralExportGCSResource, projects_url_part + ), ) flask_api.add_resource( AsyncPersistentResource, f"/{projects_url_part}//mapsets/" "/processing_async", - endpoint=f"/{projects_url_part}//mapsets/" - "/processing_async", + endpoint=get_endpoint_class_name( + AsyncPersistentResource, projects_url_part + ), ) flask_api.add_resource( AsyncPersistentMapsetMergerResource, f"/{projects_url_part}//mapsets/" "/merging_async", - endpoint=f"/{projects_url_part}//mapsets/" - "/merging_async", + endpoint=get_endpoint_class_name( + AsyncPersistentMapsetMergerResource, projects_url_part + ), ) flask_api.add_resource( AsyncEphemeralRasterLayerExporterResource, f"/{projects_url_part}//mapsets/" "/raster_layers//" "geotiff_async", - endpoint=f"/{projects_url_part}//mapsets/" - "/raster_layers//" - "geotiff_async", + endpoint=get_endpoint_class_name( + AsyncEphemeralRasterLayerExporterResource, projects_url_part + ), ) flask_api.add_resource( AsyncEphemeralRasterLayerRegionExporterResource, f"/{projects_url_part}//mapsets/" "/raster_layers//" "geotiff_async_orig", - endpoint=f"/{projects_url_part}//mapsets/" - "/raster_layers//" - "geotiff_async_orig", + endpoint=get_endpoint_class_name( + AsyncEphemeralRasterLayerRegionExporterResource, projects_url_part + ), ) diff --git a/src/actinia_core/rest/base/resource_base.py b/src/actinia_core/rest/base/resource_base.py index 40aa15894..7d02dc481 100644 --- a/src/actinia_core/rest/base/resource_base.py +++ b/src/actinia_core/rest/base/resource_base.py @@ -183,7 +183,9 @@ def __init__(self, resource_id=None, iteration=None, post_url=None): # Put API information in the response for later accounting kwargs = { - "endpoint": request.endpoint, + # For deprecated location endpoints remove "_locations" from + # endpoint class name + "endpoint": request.endpoint.replace("_locations", ""), "method": request.method, "path": request.path, "request_url": self.request_url, From ed2565fc0c07b14ea374efff699c7b245b0dd762 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Fri, 15 Nov 2024 10:42:17 +0100 Subject: [PATCH 17/24] fix job resumption tests --- tests/test_job_resumption.py | 49 ++++++++++++++++++------------------ 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/tests/test_job_resumption.py b/tests/test_job_resumption.py index 43f18b43e..8a3465205 100644 --- a/tests/test_job_resumption.py +++ b/tests/test_job_resumption.py @@ -288,8 +288,8 @@ class JobResumptionProcessingTestCase(ActiniaResourceTestCaseBase): cfg_file = os.environ.get("DEFAULT_CONFIG_PATH", "/etc/default/actinia") tmp_cfg_file = "%s_tmp" % cfg_file + endpoint = "nc_spm_08/processing_async" save_interim_results_value = None - endpoint = "/projects/nc_spm_08/processing_async" resource_user_id = None resource_resource_id = None sleep_time = 15 @@ -333,7 +333,7 @@ def test_notsaved_interim_results_by_success(self): """ tpl = Template(json_dumps(process_chain_1)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation@PERMANENT", map2="baum"), content_type="application/json", @@ -364,18 +364,19 @@ def test_saved_interim_results(self): """Test if the interim
results are not saved correctly""" step = 4 tpl = Template(json_dumps(process_chain_1)) - rv = self.server.post( - URL_PREFIX + self.endpoint, + # self.server.get(f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets",headers=self.admin_auth_header,content_type="application/json") + rv1 = self.server.post( + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation@PERMANENT", map2="baum555"), content_type="application/json", ) self.waitAsyncStatusAssertHTTP( - rv, headers=self.admin_auth_header, http_status=400, status="error" + rv1, headers=self.admin_auth_header, http_status=400, status="error" ) # check if interim results are saved - resp_data = json_loads(rv.data) + resp_data = json_loads(rv1.data) rv_user_id = resp_data["user_id"] rv_resource_id = resp_data["resource_id"] interim_dir = os.path.join( @@ -407,7 +408,7 @@ def test_job_resumption(self): """Test job resumption with processing_async endpoint""" tpl = Template(json_dumps(process_chain_1)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation2@PERMANENT", map2="baum"), content_type="application/json", @@ -450,7 +451,7 @@ def test_job_2_times_resumption(self): """Test job 2 times resumption with processing_async endpoint""" tpl = Template(json_dumps(process_chain_1)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation2@PERMANENT", map2="baum"), content_type="application/json", @@ -512,7 +513,7 @@ def test_job_resumption_error_by_running(self): """ tpl = Template(json_dumps(process_chain_2_error)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(seconds=self.sleep_time), content_type="application/json", @@ -559,7 +560,7 @@ def test_job_resumption_importer(self): """Test job resumption with processing_async endpoint and importer""" tpl = Template(json_dumps(process_chain_3_importer)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation2@PERMANENT", map2="baum"), content_type="application/json", @@ -604,7 +605,7 @@ def test_job_2_times_resumption_importer(self): """ tpl = Template(json_dumps(process_chain_3_importer)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation2@PERMANENT", map2="baum"), content_type="application/json", @@ -729,7 +730,7 @@ def test_job_resumption_stdout(self): """Test job resumption with processing_async endpoint and stdout""" tpl = Template(json_dumps(process_chain_5_stdout)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation2@PERMANENT", map2="baum"), content_type="application/json", @@ -775,7 +776,7 @@ def test_job_2_times_resumption_stdout(self): """ tpl = Template(json_dumps(process_chain_5_stdout)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation2@PERMANENT", map2="baum"), 
content_type="application/json", @@ -916,7 +917,7 @@ def test_resource_endpoints(self): class JobResumptionProcessingExportTestCase(JobResumptionProcessingTestCase): - endpoint = "/projects/nc_spm_08/processing_async_export" + endpoint = "nc_spm_08/processing_async_export" resource_user_id = None resource_resource_id = None @@ -926,7 +927,7 @@ def test_job_resumption_exporter(self): """ tpl = Template(json_dumps(process_chain_4_exporter)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation2@PERMANENT", map2="baum"), content_type="application/json", @@ -993,7 +994,7 @@ def test_job_2_times_resumption_exporter(self): """ tpl = Template(json_dumps(process_chain_4_exporter)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation2@PERMANENT", map2="baum"), content_type="application/json", @@ -1077,7 +1078,7 @@ class JobResumptionPersistentProcessingTestCase( ): project = "nc_spm_08" mapset = "test_mapset" - endpoint = "/projects/%s/mapsets/%s/processing_async" % (project, mapset) + endpoint = f"{project}/mapsets/{mapset}/processing_async" resource_user_id = None resource_resource_id = None mapset_created = True @@ -1085,14 +1086,14 @@ class JobResumptionPersistentProcessingTestCase( def tearDown(self): if self.mapset_created is True: rv = self.server.delete( - URL_PREFIX - + "/projects/%s/mapsets/%s/lock" % (self.project, self.mapset), + f"{URL_PREFIX}/{self.project_url_part}/{self.project}/mapsets/" + f"{self.mapset}/lock", headers=self.admin_auth_header, ) self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header) rv2 = self.server.delete( - URL_PREFIX - + "/projects/%s/mapsets/%s" % (self.project, self.mapset), + f"{URL_PREFIX}/{self.project_url_part}/{self.project}/mapsets/" + f"{self.mapset}", headers=self.admin_auth_header, ) self.waitAsyncStatusAssertHTTP(rv2, headers=self.admin_auth_header) @@ -1105,7 +1106,7 @@ def test_saved_interim_results(self): self.create_new_mapset(self.mapset, self.project) tpl = Template(json_dumps(process_chain_1)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation@PERMANENT", map2="baum"), content_type="application/json", @@ -1148,7 +1149,7 @@ def test_resource_endpoints(self): class JobResumptionErrorTestCase(ActiniaResourceTestCaseBase): - endpoint = "/projects/nc_spm_08/processing_async" + endpoint = "nc_spm_08/processing_async" def test_job_resumption_config_error(self): """Test if the job resumption fails if save_interim_results is set to @@ -1156,7 +1157,7 @@ def test_job_resumption_config_error(self): """ tpl = Template(json_dumps(process_chain_1)) rv = self.server.post( - URL_PREFIX + self.endpoint, + f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation2@PERMANENT", map2="baum"), content_type="application/json", From a06ddf1d5dd1d3c86767856092369b7fb66dab3f Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Fri, 15 Nov 2024 10:53:18 +0100 Subject: [PATCH 18/24] linting --- src/actinia_core/endpoints.py | 5 ----- .../persistent/project_management.py | 20 ++++++++++--------- src/actinia_core/rest/project_management.py | 4 +++- src/actinia_core/testsuite.py | 5 ++++- tests/test_async_processing_mapset.py | 3 ++- 
tests/test_job_resumption.py | 6 +++--- tests/test_raster_layers.py | 12 +++++------ 7 files changed, 29 insertions(+), 26 deletions(-) diff --git a/src/actinia_core/endpoints.py b/src/actinia_core/endpoints.py index 80f87e815..9259b6ea2 100644 --- a/src/actinia_core/endpoints.py +++ b/src/actinia_core/endpoints.py @@ -152,11 +152,6 @@ def create_project_endpoints(projects_url_part="projects"): URL; to add deprecated location endpoints set it to "locations" """ - endpoint_str = "" - if projects_url_part != "projects": - endpoint_str = projects_url_part.title() - - # Project management flask_api.add_resource( ListProjectsResource, diff --git a/src/actinia_core/processing/actinia_processing/persistent/project_management.py b/src/actinia_core/processing/actinia_processing/persistent/project_management.py index d454cc0a3..ef67da3b9 100644 --- a/src/actinia_core/processing/actinia_processing/persistent/project_management.py +++ b/src/actinia_core/processing/actinia_processing/persistent/project_management.py @@ -66,15 +66,17 @@ def _execute(self): project_param = "location" if grass_version < [8, 4] else "project" pc = { "version": 1, - "list": [{ - "id": "1", - "module": "g.proj", - "inputs": [ - {"param": "epsg", "value": epsg_code}, - {"param": project_param, "value": new_project}, - ], - "flags": "t", - }] + "list": [ + { + "id": "1", + "module": "g.proj", + "inputs": [ + {"param": "epsg", "value": epsg_code}, + {"param": project_param, "value": new_project}, + ], + "flags": "t", + } + ], } process_list = self._validate_process_chain( diff --git a/src/actinia_core/rest/project_management.py b/src/actinia_core/rest/project_management.py index ec7db5154..67baf9b08 100644 --- a/src/actinia_core/rest/project_management.py +++ b/src/actinia_core/rest/project_management.py @@ -126,7 +126,9 @@ def get(self): if projects: param = {"status": "success"} grass_version_s = G_VERSION["version"] - grass_version = [int(item) for item in grass_version_s.split(".")[:2]] + grass_version = [ + int(item) for item in grass_version_s.split(".")[:2] + ] if grass_version >= [8, 4]: param["projects"] = projects else: diff --git a/src/actinia_core/testsuite.py b/src/actinia_core/testsuite.py index c4f4683a9..6dbefb7e6 100644 --- a/src/actinia_core/testsuite.py +++ b/src/actinia_core/testsuite.py @@ -142,6 +142,7 @@ class ActiniaTestCaseBase(unittest.TestCase): # set project_url_part to "locations" if GRASS GIS version < 8.4 init_versions() from .version import G_VERSION + grass_version_s = G_VERSION["version"] grass_version = [int(item) for item in grass_version_s.split(".")[:2]] if grass_version < [8, 4]: @@ -392,7 +393,9 @@ def assertVectorInfo(self, project, mapset, vector, ref_info, header): ) def create_new_mapset(self, mapset_name, project_name="nc_spm_08"): - get_url = f"{URL_PREFIX}/{self.project_url_part}/{project_name}/mapsets" + get_url = ( + f"{URL_PREFIX}/{self.project_url_part}/{project_name}/mapsets" + ) rv_get = self.server.get(get_url, headers=self.user_auth_header) self.assertEqual( rv_get.status_code, diff --git a/tests/test_async_processing_mapset.py b/tests/test_async_processing_mapset.py index c549ea1a3..9b5f2447f 100644 --- a/tests/test_async_processing_mapset.py +++ b/tests/test_async_processing_mapset.py @@ -148,7 +148,8 @@ def test_1_new_mapset(self): self.check_remove_test_mapset() rv = self.server.post( - f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/test_mapset/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset/processing_async", 
headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", diff --git a/tests/test_job_resumption.py b/tests/test_job_resumption.py index 8a3465205..dcadb559d 100644 --- a/tests/test_job_resumption.py +++ b/tests/test_job_resumption.py @@ -365,18 +365,18 @@ def test_saved_interim_results(self): step = 4 tpl = Template(json_dumps(process_chain_1)) # self.server.get(f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets",headers=self.admin_auth_header,content_type="application/json") - rv1 = self.server.post( + rv = self.server.post( f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, data=tpl.render(map1="elevation@PERMANENT", map2="baum555"), content_type="application/json", ) self.waitAsyncStatusAssertHTTP( - rv1, headers=self.admin_auth_header, http_status=400, status="error" + rv, headers=self.admin_auth_header, http_status=400, status="error" ) # check if interim results are saved - resp_data = json_loads(rv1.data) + resp_data = json_loads(rv.data) rv_user_id = resp_data["user_id"] rv_resource_id = resp_data["resource_id"] interim_dir = os.path.join( diff --git a/tests/test_raster_layers.py b/tests/test_raster_layers.py index 75afa3ea4..1a74e65fe 100644 --- a/tests/test_raster_layers.py +++ b/tests/test_raster_layers.py @@ -125,8 +125,8 @@ def test_list_raster_layers(self): def test_list_raster_layers_pattern(self): rv = self.server.get( - f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/PERMANENT/" - "raster_layers?pattern=lsat*", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers?pattern=lsat*", headers=self.user_auth_header, ) print(rv.data.decode()) @@ -147,8 +147,8 @@ def test_list_raster_layers_pattern(self): def test_list_raster_layers_empty_list(self): rv = self.server.get( - f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/PERMANENT/" - "raster_layers?pattern=NONE", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/raster_layers?pattern=NONE", headers=self.user_auth_header, ) print(rv.data.decode()) @@ -193,8 +193,8 @@ def test_remove_raster_layers_pattern(self): # List raster layer rv = self.server.get( - f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/{new_mapset}/" - "raster_layers?pattern=test_delete_layer_*", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + f"{new_mapset}/raster_layers?pattern=test_delete_layer_*", headers=self.user_auth_header, ) print(rv.data.decode()) From 3ec62c3caec37c1c9ffcce2ea97c313fd9306905 Mon Sep 17 00:00:00 2001 From: Anika Weinmann <37300249+anikaweinmann@users.noreply.github.com> Date: Wed, 20 Nov 2024 12:47:17 +0100 Subject: [PATCH 19/24] Update src/actinia_core/core/common/api_logger.py Co-authored-by: Carmen Tawalika --- src/actinia_core/core/common/api_logger.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/actinia_core/core/common/api_logger.py b/src/actinia_core/core/common/api_logger.py index a061cf609..9c788d8d0 100644 --- a/src/actinia_core/core/common/api_logger.py +++ b/src/actinia_core/core/common/api_logger.py @@ -110,7 +110,7 @@ def add_entry(self, user_id, http_request): """ api_info = { - # For depricated location endpoints remove "_locations" from + # For deprecated location endpoints remove "_locations" from # endpoint class name "endpoint": http_request.endpoint.replace("_locations", ""), "method": http_request.method, From f3ce464f51f661f98072e8de545d14279467802d Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 
21 Nov 2024 11:56:48 +0100 Subject: [PATCH 20/24] Tests for G84 --- docker/actinia-core-alpine/Dockerfile | 2 +- docker/actinia-core-tests/Dockerfile | 2 +- pyproject.toml | 2 +- tests/test_job_resumption.py | 1 - tests/test_noauth.py | 4 ++-- 5 files changed, 5 insertions(+), 6 deletions(-) diff --git a/docker/actinia-core-alpine/Dockerfile b/docker/actinia-core-alpine/Dockerfile index 9929c696b..209306bee 100644 --- a/docker/actinia-core-alpine/Dockerfile +++ b/docker/actinia-core-alpine/Dockerfile @@ -1,4 +1,4 @@ -FROM mundialis/actinia:alpine-dependencies-2023-12-06 as build-base +FROM mundialis/actinia:alpine-dependencies-2024-08-14 as build-base FROM osgeo/grass-gis:releasebranch_8_4-alpine as grass FROM build-base as requirements diff --git a/docker/actinia-core-tests/Dockerfile b/docker/actinia-core-tests/Dockerfile index 678eaa2e4..2cc8b07a1 100644 --- a/docker/actinia-core-tests/Dockerfile +++ b/docker/actinia-core-tests/Dockerfile @@ -1,4 +1,4 @@ -FROM mundialis/actinia-core:4.14.1 AS actinia_test +FROM mundialis/actinia-core:grass8.4 AS actinia_test LABEL authors="Carmen Tawalika,Anika Weinmann" LABEL maintainer="tawalika@mundialis.de,weinmann@mundialis.de" diff --git a/pyproject.toml b/pyproject.toml index 33b221f04..496826589 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ keywords = [ "osgeo", ] dependencies = [ - "actinia-api==3.4.1", + "actinia-api==3.5.0", "Flask>=3.0.0", "boto3>=1.6.6", "colorlog>=4.2.1", diff --git a/tests/test_job_resumption.py b/tests/test_job_resumption.py index dcadb559d..2c1baca80 100644 --- a/tests/test_job_resumption.py +++ b/tests/test_job_resumption.py @@ -364,7 +364,6 @@ def test_saved_interim_results(self): """Test if the interim results are not saved correctly""" step = 4 tpl = Template(json_dumps(process_chain_1)) - # self.server.get(f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets",headers=self.admin_auth_header,content_type="application/json") rv = self.server.post( f"{URL_PREFIX}/{self.project_url_part}/{self.endpoint}", headers=self.admin_auth_header, diff --git a/tests/test_noauth.py b/tests/test_noauth.py index a978cde8a..b03e4d9ae 100644 --- a/tests/test_noauth.py +++ b/tests/test_noauth.py @@ -209,7 +209,7 @@ def test_03_processing_ephemeral(self): """Test job resumption with processing_async endpoint and stdout""" endpoint = f"{self.project_url_part}/nc_spm_08/processing_async" rv = self.server.post( - f"{URL_PREFIX}{endpoint}", + f"{URL_PREFIX}/{endpoint}", data=json_dumps(PC), content_type="application/json", ) @@ -228,7 +228,7 @@ def test_04_processing_persistent(self): f"{self.project_url_part}/nc_spm_08/mapsets/test/processing_async" ) rv = self.server.post( - f"{URL_PREFIX}{endpoint}", + f"{URL_PREFIX}/{endpoint}", data=json_dumps(PC), content_type="application/json", ) From 1515f51eca311b06c9bcc34bac74f9bec5185a9a Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 21 Nov 2024 12:08:46 +0100 Subject: [PATCH 21/24] Test pipelines for G8.3 and G8.4 --- .github/workflows/tests.yml | 52 ++++++++++++++++++++++++++++++++++--- 1 file changed, 48 insertions(+), 4 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5110391d0..2c0c677e1 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -9,13 +9,35 @@ on: jobs: + # Unittests for GRASS 8.3 + alpine-unittests-G8.3: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + - name: Set up Docker Buildx + uses: 
docker/setup-buildx-action@v3 + - name: Replace run only unittest command + run: | + sed -i "1s+.*+FROM mundialis/actinia-core:4.14.1 AS actinia_test+" docker/actinia-core-tests/Dockerfile + sed -i "s+# RUN make test+RUN make unittest+g" docker/actinia-core-tests/Dockerfile + - name: Unittests of actinia + id: docker_build + uses: docker/build-push-action@v6 + with: + push: false + tags: actinia-core-tests:alpine + context: . + file: docker/actinia-core-tests/Dockerfile + no-cache: true + # pull: true + + # Unittests for GRASS 8.4 alpine-unittests: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 - # with: - # path: "." - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: Replace run only unittest command @@ -32,13 +54,35 @@ jobs: no-cache: true # pull: true + # Integration tests for GRASS 8.3 + alpine-integration-tests-G8.3: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Replace run integration test command + run: | + sed -i "1s+.*+FROM mundialis/actinia-core:4.14.1 AS actinia_test+" docker/actinia-core-tests/Dockerfile + sed -i "s+# RUN make test+RUN make integrationtest+g" docker/actinia-core-tests/Dockerfile + - name: Integration tests of actinia + id: docker_build + uses: docker/build-push-action@v6 + with: + push: false + tags: actinia-core-tests:alpine + context: . + file: docker/actinia-core-tests/Dockerfile + no-cache: true + # pull: true + + # Integration tests for GRASS 8.4 alpine-integration-tests: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4 - # with: - # path: "." 
- name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: Replace run integration test command From 587a56e8083273970204c717ae837b77562c47fc Mon Sep 17 00:00:00 2001 From: Anika Weinmann <37300249+anikaweinmann@users.noreply.github.com> Date: Thu, 21 Nov 2024 12:10:19 +0100 Subject: [PATCH 22/24] Update src/actinia_core/rest/base/resource_base.py Co-authored-by: Carmen Tawalika --- src/actinia_core/rest/base/resource_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/actinia_core/rest/base/resource_base.py b/src/actinia_core/rest/base/resource_base.py index 7d02dc481..aba74c296 100644 --- a/src/actinia_core/rest/base/resource_base.py +++ b/src/actinia_core/rest/base/resource_base.py @@ -183,7 +183,7 @@ def __init__(self, resource_id=None, iteration=None, post_url=None): # Put API information in the response for later accounting kwargs = { - # For depricated location endpoints remove "_locations" from + # For deprecated location endpoints remove "_locations" from # endpoint class name "endpoint": request.endpoint.replace("_locations", ""), "method": request.method, From a6c24d849835154bb92611ca5d1eeaa2d22c572f Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 21 Nov 2024 12:18:37 +0100 Subject: [PATCH 23/24] fix test workflow --- .github/workflows/tests.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 2c0c677e1..b06400de7 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -10,7 +10,7 @@ on: jobs: # Unittests for GRASS 8.3 - alpine-unittests-G8.3: + alpine-unittests-G83: runs-on: ubuntu-latest steps: - name: Checkout @@ -55,7 +55,7 @@ jobs: # pull: true # Integration tests for GRASS 8.3 - alpine-integration-tests-G8.3: + alpine-integration-tests-G83: runs-on: ubuntu-latest steps: - name: Checkout From 52736cc9178a2f291cc06e800f3290551818e154 Mon Sep 17 00:00:00 2001 From: anikaweinmann Date: Thu, 21 Nov 2024 13:36:54 +0100 Subject: [PATCH 24/24] fix tests for G83 --- tests/test_async_process_validation_errors.py | 42 +++++++++++------ tests/test_async_processing_mapset.py | 45 ++++++++++--------- 2 files changed, 53 insertions(+), 34 deletions(-) diff --git a/tests/test_async_process_validation_errors.py b/tests/test_async_process_validation_errors.py index 78330d645..064ad6e98 100644 --- a/tests/test_async_process_validation_errors.py +++ b/tests/test_async_process_validation_errors.py @@ -329,7 +329,8 @@ class AsyncProcessValidationTestCase(ActiniaResourceTestCaseBase): def test_async_processing_error_webhook_finished(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_webhook_finished), content_type="application/json", @@ -347,7 +348,8 @@ def test_async_processing_error_webhook_finished(self): def test_async_processing_error_webhook_update(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_webhook_update), content_type="application/json", @@ -365,7 +367,8 @@ def test_async_processing_error_webhook_update(self): def test_async_processing_error_1(self): rv = self.server.post( - URL_PREFIX + 
"/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_1), content_type="application/json", @@ -383,7 +386,8 @@ def test_async_processing_error_1(self): def test_async_processing_error_2(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_2), content_type="application/json", @@ -401,7 +405,8 @@ def test_async_processing_error_2(self): def test_async_processing_error_3(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_3), content_type="application/json", @@ -419,7 +424,8 @@ def test_async_processing_error_3(self): def test_async_processing_error_4(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_4), content_type="application/json", @@ -437,7 +443,8 @@ def test_async_processing_error_4(self): def test_async_processing_error_5(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_error_5), content_type="application/json", @@ -455,7 +462,8 @@ def test_async_processing_error_5(self): def test_async_processing_output_error_1(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_output_error_1), content_type="application/json", @@ -473,7 +481,8 @@ def test_async_processing_output_error_1(self): def test_async_processing_output_error_2(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_output_error_2), content_type="application/json", @@ -491,7 +500,8 @@ def test_async_processing_output_error_2(self): def test_async_processing_landsat_error_1(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_landsat_error_1), content_type="application/json", @@ -509,7 +519,8 @@ def test_async_processing_landsat_error_1(self): def test_async_processing_landsat_error_2(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_landsat_error_2), content_type="application/json", @@ -527,7 +538,8 @@ def test_async_processing_landsat_error_2(self): def test_async_processing_landsat_error_3(self): rv = 
self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_landsat_error_3), content_type="application/json", @@ -545,7 +557,8 @@ def test_async_processing_landsat_error_3(self): def test_async_processing_sent_error_1(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_sent_1), content_type="application/json", @@ -563,7 +576,8 @@ def test_async_processing_sent_error_1(self): def test_async_processing_sent_error_2(self): rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/process_chain_validation_sync", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/" + "process_chain_validation_sync", headers=self.admin_auth_header, data=json_dumps(process_chain_sent_2), content_type="application/json", diff --git a/tests/test_async_processing_mapset.py b/tests/test_async_processing_mapset.py index 9b5f2447f..eb50b4143 100644 --- a/tests/test_async_processing_mapset.py +++ b/tests/test_async_processing_mapset.py @@ -175,8 +175,8 @@ def test_1_new_mapset(self): self.assertTrue("test_mapset" in mapsets) rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/test_mapset/raster_layers", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset/raster_layers", headers=self.admin_auth_header, ) print(rv.data) @@ -196,7 +196,8 @@ def test_1_new_mapset(self): # Remove the mapset rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -218,7 +219,8 @@ def test_2_existing_mapset(self): # Create new mapset rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -234,8 +236,8 @@ def test_2_existing_mapset(self): # Run the processing using an existing mapset # Atemporary mapset will be created and merged in the existing rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/test_mapset/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -243,8 +245,8 @@ def test_2_existing_mapset(self): self.waitAsyncStatusAssertHTTP(rv, headers=self.admin_auth_header) rv = self.server.get( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/test_mapset/raster_layers", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset/raster_layers", headers=self.user_auth_header, ) print(rv.data) @@ -264,7 +266,8 @@ def test_2_existing_mapset(self): # Remove the mapset rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -287,7 +290,8 @@ def test_3_existing_mapset_lock(self): # Create new mapset rv = self.server.post( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset", headers=self.admin_auth_header, ) 
print(rv.data) @@ -302,8 +306,8 @@ def test_3_existing_mapset_lock(self): # Run the processing inside the new mapset rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/test_mapset/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -330,8 +334,8 @@ def test_3_existing_mapset_lock(self): # Run the processing inside the new mapset # Second runner rv_lock_1 = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/test_mapset/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_short), content_type="application/json", @@ -350,8 +354,8 @@ def test_3_existing_mapset_lock(self): ) # Third runner rv_lock_2 = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/test_mapset/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json", @@ -417,7 +421,7 @@ def test_3_existing_mapset_lock(self): # Check the first runner while True: rv = self.server.get( - URL_PREFIX + "/resources/%s/%s" % (rv_user_id, rv_resource_id), + f"{URL_PREFIX}/resources/{rv_user_id}/{rv_resource_id}", headers=self.admin_auth_header, ) print(rv.data) @@ -435,7 +439,8 @@ def test_3_existing_mapset_lock(self): # Remove the mapset rv = self.server.delete( - URL_PREFIX + "/projects/nc_spm_08/mapsets/test_mapset", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "test_mapset", headers=self.admin_auth_header, ) print(rv.data) @@ -450,8 +455,8 @@ def test_3_existing_mapset_lock(self): def test_4_create_global_mapset(self): rv = self.server.post( - URL_PREFIX - + "/projects/nc_spm_08/mapsets/PERMANENT/processing_async", + f"{URL_PREFIX}/{self.project_url_part}/nc_spm_08/mapsets/" + "PERMANENT/processing_async", headers=self.admin_auth_header, data=json_dumps(process_chain_long), content_type="application/json",