diff --git a/main.py b/main.py index 62a466f..87d1406 100644 --- a/main.py +++ b/main.py @@ -45,6 +45,8 @@ def main(source_base_path, target_base_path, city_folder_name, pre_check_option) processing_config_df = _build_source_dataframes(abs_source_base_path, city_folder_name) return_code_configs = _validate_config_inputs(processing_config_df, abs_source_base_path, abs_target_path, city_folder_name, pre_check_option) + + if pre_check_option == 'no_pre_check': enabled_processing_tasks_df = processing_config_df[(processing_config_df['enabled'])] delayed_results, solweig_delayed_results = _build_processing_graphs(enabled_processing_tasks_df, abs_source_base_path, abs_target_path, city_folder_name) @@ -78,7 +80,7 @@ def _report_results(enabled_processing_tasks_df, results_df, solweig_results_df, combined_results = results_df if solweig_results_df.empty else\ pd.concat([results_df, solweig_results_df], ignore_index=True) - combined_results.sort_values(['task_index', 'step_index', 'met_file_name'], inplace=True) + combined_results.sort_values(['task_index', 'step_index', 'met_filename'], inplace=True) merged = pd.merge(enabled_processing_tasks_df, combined_results, left_index=True, right_on='task_index', how='outer') @@ -87,7 +89,7 @@ def _report_results(enabled_processing_tasks_df, results_df, solweig_results_df, reporting_df = merged.loc[:, ['run_status', 'task_index', 'city_folder_name', 'tile_folder_name', 'method', 'step_index', - 'step_method', 'met_file_name', + 'step_method', 'met_filename', 'return_code', 'start_time', 'run_duration']] report_folder = os.path.join(get_application_path(), '.reports') @@ -124,7 +126,7 @@ def _validate_config_inputs(processing_config_df, source_base_path, target_path, return 0 def _build_source_dataframes(source_base_path, city_folder_name): - config_processing_file_path = str(os.path.join(source_base_path, city_folder_name, CityData.file_name_umep_city_processing_config)) + config_processing_file_path = 
str(os.path.join(source_base_path, city_folder_name, CityData.filename_umep_city_processing_config)) processing_config_df = pd.read_csv(config_processing_file_path) return processing_config_df @@ -167,7 +169,7 @@ def _parse_and_report_row_results(dc): results.append(obj) # extract content from the return package and determine if there was a failure - results_df = pd.DataFrame(columns=['task_index', 'step_index', 'step_method', 'met_file_name', 'return_code', 'start_time', 'run_duration']) + results_df = pd.DataFrame(columns=['task_index', 'step_index', 'step_method', 'met_filename', 'return_code', 'start_time', 'run_duration']) all_passed = True failed_task_ids = [] failed_task_details = [] @@ -179,12 +181,12 @@ def _parse_and_report_row_results(dc): task_index = return_package['task_index'] step_index = return_package['step_index'] step_method = return_package['step_method'] - met_file_name = return_package['met_file_name'] + met_filename = return_package['met_filename'] return_code = return_package['return_code'] start_time = return_package['start_time'] run_duration = return_package['run_duration'] - new_row = [task_index, step_index, step_method, met_file_name, return_code, start_time, run_duration] + new_row = [task_index, step_index, step_method, met_filename, return_code, start_time, run_duration] results_df.loc[len(results_df.index)] = new_row if return_code != 0: @@ -249,33 +251,24 @@ def _build_solweig_only_steps(task_index, step_index, folder_name_city_data, fol city_data = CityData(folder_name_city_data, folder_name_tile_data, source_base_path, target_base_path) delayed_result = [] - config_meteorological_parameters_path = str(os.path.join(city_data.source_city_path, city_data.file_name_met_parameters_config)) - met_time_series_config_df = pd.read_csv(config_meteorological_parameters_path) - - return_code = 0 - for index, config_row in met_time_series_config_df.iterrows(): - enabled = bool(config_row.enabled) - if enabled: - met_file_name = 
config_row.met_file_name - utc_offset = config_row.utc_offset - - proc_array = _construct_proc_array(task_index, step_index, 'solweig_only', folder_name_city_data, folder_name_tile_data, - source_base_path, target_base_path, met_file_name, utc_offset) - solweig = dask.delayed(subprocess.run)(proc_array, capture_output=True, text=True) - delayed_result.append(solweig) - - if return_code != 0: - break + for met_file in city_data.met_files: + met_filename = met_file.get('filename') + utc_offset = met_file.get('utc_offset') + + proc_array = _construct_proc_array(task_index, step_index, 'solweig_only', folder_name_city_data, folder_name_tile_data, + source_base_path, target_base_path, met_filename, utc_offset) + solweig = dask.delayed(subprocess.run)(proc_array, capture_output=True, text=True) + delayed_result.append(solweig) return delayed_result def _construct_proc_array(task_index, step_index, step_method, folder_name_city_data, folder_name_tile_data, source_base_path, target_base_path, - met_file_name=None, utc_offset=None): + met_filename=None, utc_offset=None): proc_array = ['python', SCRIPT_PATH, f'--task_index={task_index}', f'--step_index={step_index}', f'--step_method={step_method}', f'--folder_name_city_data={folder_name_city_data}', f'--folder_name_tile_data={folder_name_tile_data}', f'--source_data_path={source_base_path}', f'--target_path={target_base_path}', - f'--met_file_name={met_file_name}', f'--utc_offset={utc_offset}'] + f'--met_filename={met_filename}', f'--utc_offset={utc_offset}'] return proc_array diff --git a/sample_cities/ZAF_Capetown_small_tile/.config_meteorological_parameters.csv b/sample_cities/ZAF_Capetown_small_tile/.config_meteorological_parameters.csv deleted file mode 100644 index d51bd32..0000000 --- a/sample_cities/ZAF_Capetown_small_tile/.config_meteorological_parameters.csv +++ /dev/null @@ -1,3 +0,0 @@ -enabled,met_file_name,utc_offset -TRUE,met_20jan2022.txt,2 -TRUE,met_22jan2022.txt,2 diff --git 
a/sample_cities/ZAF_Capetown_small_tile/.config_method_parameters.yml b/sample_cities/ZAF_Capetown_small_tile/.config_method_parameters.yml index 230afd4..439db48 100644 --- a/sample_cities/ZAF_Capetown_small_tile/.config_method_parameters.yml +++ b/sample_cities/ZAF_Capetown_small_tile/.config_method_parameters.yml @@ -15,8 +15,14 @@ emis_ground: 0.95 output_tmrt: True output_sh: True +# Names of meteorological files used for SOLWEIG processing +- MetFiles: + - filename: met_20jan2022.txt + utc_offset: 2 + - filename: met_22jan2022.txt + utc_offset: 2 # Name of source files used for processing -- FileNames: +- Tifffilenames: dem_tif_filename: dem.tif dsm_ground_build_tif_filename: dsm_ground_build.tif veg_canopy_tif_filename: tree_canopy.tif diff --git a/sample_cities/ZZZ_template_city/.config_meteorological_parameters.csv b/sample_cities/ZZZ_template_city/.config_meteorological_parameters.csv deleted file mode 100644 index 10dcf99..0000000 --- a/sample_cities/ZZZ_template_city/.config_meteorological_parameters.csv +++ /dev/null @@ -1,3 +0,0 @@ -enabled,met_file_name,utc_offset -TRUE,met_daymonthyear.txt,2 - diff --git a/sample_cities/ZZZ_template_city/.config_method_parameters.yml b/sample_cities/ZZZ_template_city/.config_method_parameters.yml index d51fbb3..4037647 100644 --- a/sample_cities/ZZZ_template_city/.config_method_parameters.yml +++ b/sample_cities/ZZZ_template_city/.config_method_parameters.yml @@ -15,9 +15,15 @@ emis_ground: 0.95 output_tmrt: True output_sh: True +# Names of meteorological files used for SOLWEIG processing +- MetFiles: + - filename: .txt + utc_offset: + - filename: .txt + utc_offset: # Name of source files used for processing -- FileNames: - dem_tif_filename: .tif - dsm_ground_build_tif_filename: .tif - veg_canopy_tif_filename: .tif - landcover_tif_filename: .tif +- Tifffilenames: + dem_tif_filename: .tif + dsm_ground_build_tif_filename: .tif + veg_canopy_tif_filename: .tif + landcover_tif_filename: .tif diff --git 
a/src/source_quality_verifier.py b/src/source_quality_verifier.py index 9eed1f9..3b745ab 100644 --- a/src/source_quality_verifier.py +++ b/src/source_quality_verifier.py @@ -22,7 +22,7 @@ def verify_fundamental_paths(source_base_path, target_path, city_folder_name): if invalids: return invalids - config_processing_file_path = str(os.path.join(city_path, CityData.file_name_umep_city_processing_config)) + config_processing_file_path = str(os.path.join(city_path, CityData.filename_umep_city_processing_config)) if _verify_path(config_processing_file_path) is False: msg = f'Processing registry file does not exist as: {config_processing_file_path}' invalids.append(msg) @@ -40,7 +40,7 @@ def verify_processing_config(processing_config_df, source_base_path, target_base enabled = str(config_row.enabled) valid_enabled = ['true', 'false'] if enabled.lower() not in valid_enabled: - invalids.append(f"Invalid enabled value ({str(enabled)}) on row {index}. Valid values: {valid_enabled}") + invalids.append(f"Invalid enabled value ({str(enabled)}) on row {index} in .config_umep_city_processing.csv. 
Valid values: {valid_enabled}") for index, config_row in processing_config_df.iterrows(): enabled = str(config_row.enabled) @@ -50,7 +50,7 @@ def verify_processing_config(processing_config_df, source_base_path, target_base source_tile_path = city_data.source_tile_data_path if not os.path.isdir(source_tile_path): invalids.append( - f"tile folder ({str(folder_name_tile_data)}) on row {index} not found under '{source_base_path}'.") + f"tile folder ({str(folder_name_tile_data)}) on row {index} of .config_umep_city_processing.csv not found under '{source_base_path}'.") for index, config_row in processing_config_df.iterrows(): enabled = str(config_row.enabled) @@ -58,7 +58,7 @@ def verify_processing_config(processing_config_df, source_base_path, target_base method = config_row.method valid_methods = CityData.plugin_methods if method not in valid_methods: - invalids.append(f"Invalid 'method' ({method}) on row {index}. Valid values: {valid_methods}") + invalids.append(f"Invalid 'method' ({method}) on row {index} in .config_umep_city_processing.csv. Valid values: {valid_methods}") # check file dependencies for index, config_row in processing_config_df.iterrows(): @@ -70,37 +70,47 @@ def verify_processing_config(processing_config_df, source_base_path, target_base prior_dsm = city_data.source_dsm_path if _verify_path(prior_dsm) is False: - msg = f'Required source file: {prior_dsm} not found for row {index}.' + msg = f'Required source file: {prior_dsm} not found for row {index} in .config_umep_city_processing.csv.' invalids.append(msg) if method in ['skyview_factor', 'solweig_full', 'solweig_only']: prior_veg_canopy = city_data.source_veg_canopy_path if _verify_path(prior_veg_canopy) is False: - msg = f'Required source file: {prior_veg_canopy} not found for method: {method} on row {index}.' + msg = f'Required source file: {prior_veg_canopy} not found for method: {method} on row {index} in .config_umep_city_processing.csv.' 
invalids.append(msg) if method in ['solweig_only', 'solweig_full']: prior_land_cover = city_data.source_land_cover_path prior_dem = city_data.source_dem_path if _verify_path(prior_land_cover) is False: - msg = f'Required source file: {prior_land_cover} not found for method: {method} on row {index}.' + msg = f'Required source file: {prior_land_cover} not found for method: {method} on row {index} in .config_umep_city_processing.csv.' invalids.append(msg) if _verify_path(prior_dem) is False: - msg = f'Required source file: {prior_dem} not found for method: {method} on row {index}.' + msg = f'Required source file: {prior_dem} not found for method: {method} on row {index} in .config_umep_city_processing.csv.' invalids.append(msg) + for met_file_row in city_data.met_files: + met_file = met_file_row.get('filename') + met_filepath = os.path.join(city_data.source_met_files_path, met_file) + if _verify_path(met_filepath) is False: + msg = f'Required meteorological file: {met_filepath} not found for method: {method} in .config_method_parameters.yml.' + invalids.append(msg) + utc_offset = met_file_row.get('utc_offset') + if not -24 <= utc_offset <= 24: + msg = f'UTC range for: {met_file} not in range for 24-hour offsets as specified in .config_method_parameters.yml.' + invalids.append(msg) if method in ['solweig_only']: prior_svfszip = city_data.target_svfszip_path prior_wallheight = city_data.target_wallheight_path prior_wallaspect = city_data.target_wallaspect_path if _verify_path(prior_svfszip) is False: - msg = f'Required source file: {prior_svfszip} currently not found for method: {method} on row {index}.' + msg = f'Required source file: {prior_svfszip} currently not found for method: {method} on row {index} in .config_umep_city_processing.csv.' invalids.append(msg) if _verify_path(prior_wallheight) is False: - msg = f'Required source file: {prior_wallheight} currently not found for method: {method} on row {index}.' 
+ msg = f'Required source file: {prior_wallheight} currently not found for method: {method} on row {index} in .config_umep_city_processing.csv.' invalids.append(msg) if _verify_path(prior_wallaspect) is False: - msg = f'Required source file: {prior_wallaspect} currently not found for method: {method} on row {index}.' + msg = f'Required source file: {prior_wallaspect} currently not found for method: {method} on row {index} in .config_umep_city_processing.csv.' invalids.append(msg) return invalids diff --git a/test/test_processing_runs.py b/test/test_processing_runs.py index 042d624..69992cd 100644 --- a/test/test_processing_runs.py +++ b/test/test_processing_runs.py @@ -49,7 +49,7 @@ def test_main_check_enabled_only_failure(): def _verify_expected_output_folders(source_base_path, target_base_path, source_city_folder_name): enabled_target_folder = [] - config_processing_file_path = str(os.path.join(source_base_path, source_city_folder_name, CityData.file_name_umep_city_processing_config)) + config_processing_file_path = str(os.path.join(source_base_path, source_city_folder_name, CityData.filename_umep_city_processing_config)) processing_config_df = pd.read_csv(config_processing_file_path) for index, config_row in processing_config_df.iterrows(): enabled = bool(config_row.enabled) diff --git a/test/test_umep_for_processing_algorithms.py b/test/test_umep_for_processing_algorithms.py index 04bf4c4..5ad718e 100644 --- a/test/test_umep_for_processing_algorithms.py +++ b/test/test_umep_for_processing_algorithms.py @@ -61,7 +61,7 @@ # def test_solweig_generator(startup_teardown): # task_index = 'test_solweig' # step = 0 -# met_file_name = 'met_20jan2022.txt' +# met_filename = 'met_20jan2022.txt' # utc_offset = 2 # # temp_dir = startup_teardown @@ -69,11 +69,11 @@ # # UMEP_PLUGIN.generate_wall_height_aspect(task_index, city_data) # UMEP_PLUGIN.generate_skyview_factor_files(task_index, city_data) -# return_code = UMEP_PLUGIN.generate_solweig(task_index, step, city_data, 
met_file_name, utc_offset) +# return_code = UMEP_PLUGIN.generate_solweig(task_index, step, city_data, met_filename, utc_offset) # -# # out_directory = os.path.join(city_data.target_tcm_results_path, Path(met_file_name).stem) +# # out_directory = os.path.join(city_data.target_tcm_results_path, Path(met_filename).stem) # -# target_met_folder = os.path.join(city_data.target_tcm_results_path, Path(met_file_name).stem, city_data.folder_name_tile_data) +# target_met_folder = os.path.join(city_data.target_tcm_results_path, Path(met_filename).stem, city_data.folder_name_tile_data) # tmrt_average_file_path = os.path.join(target_met_folder, 'Tmrt_average.tif') # tmrt_average_file_exists = os.path.isfile(tmrt_average_file_path) # diff --git a/workers/city_data.py b/workers/city_data.py index 6075003..2b02284 100644 --- a/workers/city_data.py +++ b/workers/city_data.py @@ -4,18 +4,18 @@ class CityData: - file_name_method_parameters_config = '.config_method_parameters.yml' - file_name_met_parameters_config = '.config_meteorological_parameters.csv' - file_name_umep_city_processing_config = '.config_umep_city_processing.csv' + filename_method_parameters_config = '.config_method_parameters.yml' + filename_met_parameters_config = '.config_meteorological_parameters.csv' + filename_umep_city_processing_config = '.config_umep_city_processing.csv' folder_name_primary_source_data = 'primary_source_data' folder_name_met_files = 'met_files' folder_name_preprocessed_data = 'preprocessed_data' folder_name_tcm_results = 'tcm_results' - file_name_wall_height = 'wallheight.tif' - file_name_wall_aspect = 'wallaspect.tif' - file_name_svfs_zip = 'svfs.zip' + filename_wall_height = 'wallheight.tif' + filename_wall_aspect = 'wallaspect.tif' + filename_svfs_zip = 'svfs.zip' plugin_methods = ['all', 'wall_height_aspect', 'skyview_factor', 'solweig_only', 'solweig_full'] @@ -29,7 +29,7 @@ def __new__(cls, folder_name_city_data, folder_name_tile_data, source_base_path, obj.source_city_path = 
str(os.path.join(source_base_path, folder_name_city_data)) obj.source_city_data_path = str(os.path.join(obj.source_city_path, 'source_data')) - city_configs = os.path.join(obj.source_city_path, cls.file_name_method_parameters_config) + city_configs = os.path.join(obj.source_city_path, cls.filename_method_parameters_config) with open(city_configs, 'r') as stream: try: values = list(yaml.safe_load_all(stream))[0] @@ -49,15 +49,17 @@ def __new__(cls, folder_name_city_data, folder_name_tile_data, source_base_path, obj.output_tmrt = to_bool(method_attributes['solweig']['output_tmrt']) obj.output_sh = to_bool(method_attributes['solweig']['output_sh']) - file_names = values[1] - obj.dem_file = file_names['dem_tif_filename'] - obj.dsm_file = file_names['dsm_ground_build_tif_filename'] - obj.veg_canopy_file = file_names['veg_canopy_tif_filename'] - obj.landcover_file = file_names['landcover_tif_filename'] + obj.met_files = values[1].get('MetFiles') + + filenames = values[2] + obj.dem_file = filenames['dem_tif_filename'] + obj.dsm_file = filenames['dsm_ground_build_tif_filename'] + obj.veg_canopy_file = filenames['veg_canopy_tif_filename'] + obj.landcover_file = filenames['landcover_tif_filename'] except yaml.YAMLError as e_msg: raise Exception( - f'The {cls.file_name_method_parameters_config} file not found or improperly defined in {city_configs}. ({e_msg})') + f'The {cls.filename_method_parameters_config} file not found or improperly defined in {city_configs}. 
({e_msg})') obj.target_path_city_data = str(os.path.join(obj.target_base_path, folder_name_city_data, 'results_data')) @@ -73,8 +75,8 @@ def __new__(cls, folder_name_city_data, folder_name_tile_data, source_base_path, obj.source_veg_canopy_path = os.path.join(obj.source_tile_data_path, obj.veg_canopy_file) obj.source_land_cover_path = os.path.join(obj.source_tile_data_path, obj.landcover_file) - obj.target_wallheight_path = os.path.join(obj.target_preprocessed_data_path, obj.file_name_wall_height) - obj.target_wallaspect_path = os.path.join(obj.target_preprocessed_data_path, obj.file_name_wall_aspect) - obj.target_svfszip_path = os.path.join(obj.target_preprocessed_data_path, obj.file_name_svfs_zip) + obj.target_wallheight_path = os.path.join(obj.target_preprocessed_data_path, obj.filename_wall_height) + obj.target_wallaspect_path = os.path.join(obj.target_preprocessed_data_path, obj.filename_wall_aspect) + obj.target_svfszip_path = os.path.join(obj.target_preprocessed_data_path, obj.filename_svfs_zip) return obj \ No newline at end of file diff --git a/workers/open_urban.py b/workers/open_urban.py index 2d29681..40efe1f 100644 --- a/workers/open_urban.py +++ b/workers/open_urban.py @@ -17,6 +17,7 @@ def get_data(self, bbox): ulu = ee.ImageCollection(dataset .filterBounds(ee.Geometry.BBox(*bbox)) .select(self.band) + .max() .reduce(ee.Reducer.firstNonNull()) .rename('lulc') ) diff --git a/workers/source_cif_data_downloader.py b/workers/source_cif_data_downloader.py index 64c4505..d405dfa 100644 --- a/workers/source_cif_data_downloader.py +++ b/workers/source_cif_data_downloader.py @@ -9,13 +9,13 @@ from workers.open_urban import OpenUrban, reclass_map -TREE_CANOPY_FILE_NAME = 'tree_canopy' -BUILDING_FOOTPRINT_FILE_NAME = 'building_footprints' -DSM_FILE_NAME = 'alos_dsm' -DEM_FILE_NAME = 'nasa_dem' -RESAMPLED_DEM_FILE_NAME = 'nasa_dem_1m' -BUILDING_HEIGHT_FILE_NAME = 'building_height' -LAND_COVER_FILE_NAME = 'land_cover' +TREE_CANOPY_FILENAME = 'tree_canopy' 
+BUILDING_FOOTPRINT_FILENAME = 'building_footprints' +DSM_FILENAME = 'alos_dsm' +DEM_FILENAME = 'nasa_dem' +RESAMPLED_DEM_FILENAME = 'nasa_dem_1m' +BUILDING_HEIGHT_FILENAME = 'building_height' +LAND_COVER_FILENAME = 'land_cover' def get_cif_data(target_path, folder_name_city_data, folder_name_tile_data, aoi_boundary): tile_data_path = os.path.join(target_path, folder_name_city_data, 'source_data', 'primary_source_data', folder_name_tile_data) @@ -57,7 +57,7 @@ def get_lulc(tile_data_path, aoi_bounds): print(f'There were no occurrences of the value {remove_value} found in data.') # Save data to file - save_raster_file(aoi_LULC_to_solweig, tile_data_path, LAND_COVER_FILE_NAME) + save_raster_file(aoi_LULC_to_solweig, tile_data_path, LAND_COVER_FILENAME) def count_occurrences(data, value): @@ -71,7 +71,7 @@ def get_canopy_height(tile_data_path, aoi_bounds): aoi_TreeCanopyHeight = TreeCanopyHeight().get_data(aoi_bounds) aoi_TreeCanopyHeight_float32 = aoi_TreeCanopyHeight.astype('float32') - save_raster_file(aoi_TreeCanopyHeight_float32, tile_data_path, TREE_CANOPY_FILE_NAME) + save_raster_file(aoi_TreeCanopyHeight_float32, tile_data_path, TREE_CANOPY_FILENAME) def get_dsm(tile_data_path, aoi_bounds): @@ -79,7 +79,7 @@ def get_dsm(tile_data_path, aoi_bounds): aoi_AlosDSM = AlosDSM().get_data(aoi_bounds) - save_raster_file(aoi_AlosDSM, tile_data_path, DSM_FILE_NAME) + save_raster_file(aoi_AlosDSM, tile_data_path, DSM_FILENAME) # resample to finer resolution of 1 meter # dsm_1m = resample_raster(aoi_AlosDSM, 1) @@ -91,12 +91,12 @@ def get_dem(tile_data_path, aoi_bounds): aoi_NasaDEM = NasaDEM().get_data(aoi_bounds) - save_raster_file(aoi_NasaDEM, tile_data_path, DEM_FILE_NAME) + save_raster_file(aoi_NasaDEM, tile_data_path, DEM_FILENAME) # resample to finer resolution of 1 meter dem_1m = resample_raster(aoi_NasaDEM, 1) - save_raster_file(dem_1m, tile_data_path, RESAMPLED_DEM_FILE_NAME) + save_raster_file(dem_1m, tile_data_path, RESAMPLED_DEM_FILENAME) def 
get_building_footprints(tile_data_path, aoi_bounds): @@ -104,14 +104,14 @@ def get_building_footprints(tile_data_path, aoi_bounds): aoi_OvertureBuildings = OvertureBuildings().get_data(aoi_bounds) - save_vector_file(aoi_OvertureBuildings, tile_data_path, BUILDING_FOOTPRINT_FILE_NAME) + save_vector_file(aoi_OvertureBuildings, tile_data_path, BUILDING_FOOTPRINT_FILENAME) def get_building_height(tile_data_path): - aoi_OvertureBuildings = read_vector_file(tile_data_path, BUILDING_FOOTPRINT_FILE_NAME) - aoi_AlosDSM = read_tiff_file(tile_data_path, DSM_FILE_NAME) - aoi_NasaDEM = read_tiff_file(tile_data_path, DEM_FILE_NAME) - aoi_NasaDEM_1m = read_tiff_file(tile_data_path, RESAMPLED_DEM_FILE_NAME) + aoi_OvertureBuildings = read_vector_file(tile_data_path, BUILDING_FOOTPRINT_FILENAME) + aoi_AlosDSM = read_tiff_file(tile_data_path, DSM_FILENAME) + aoi_NasaDEM = read_tiff_file(tile_data_path, DEM_FILENAME) + aoi_NasaDEM_1m = read_tiff_file(tile_data_path, RESAMPLED_DEM_FILENAME) # (aoi_name, aoi_gdf, aoi_OvertureBuildings, aoi_AlosDSM, aoi_NasaDEM, dem_1m): from exactextract import exact_extract @@ -128,13 +128,13 @@ def get_building_height(tile_data_path): aoi_OvertureBuildings['AlosDSM_max'] - aoi_OvertureBuildings['NasaDEM_max']) # Write to file - save_vector_file(aoi_OvertureBuildings, tile_data_path, BUILDING_HEIGHT_FILE_NAME) + save_vector_file(aoi_OvertureBuildings, tile_data_path, BUILDING_HEIGHT_FILENAME) # rasterize the building footprints aoi_OvertureBuildings_raster = rasterize_polygon(aoi_OvertureBuildings, values=["height_max"], snap_to_raster=aoi_NasaDEM_1m) # Save data to file - save_raster_file(aoi_OvertureBuildings_raster, tile_data_path, BUILDING_HEIGHT_FILE_NAME) + save_raster_file(aoi_OvertureBuildings_raster, tile_data_path, BUILDING_HEIGHT_FILENAME) def get_era5(): return @@ -163,27 +163,27 @@ def rasterize_polygon(gdf, values=["Value"], snap_to_raster=None): return feature_1m -def save_raster_file(raster_data_array, tile_data_path, 
tiff_data_file_name): +def save_raster_file(raster_data_array, tile_data_path, tiff_data_filename): create_folder(tile_data_path) - file_path = os.path.join(tile_data_path, f'{tiff_data_file_name}.tif') + file_path = os.path.join(tile_data_path, f'{tiff_data_filename}.tif') remove_file(file_path) raster_data_array.rio.to_raster(raster_path=file_path, driver="COG") -def save_vector_file(vector_geodataframe, tile_data_path, tiff_data_file_name): +def save_vector_file(vector_geodataframe, tile_data_path, tiff_data_filename): create_folder(tile_data_path) - file_path = os.path.join(tile_data_path, f'{tiff_data_file_name}.geojson') + file_path = os.path.join(tile_data_path, f'{tiff_data_filename}.geojson') remove_file(file_path) vector_geodataframe.to_file(file_path, driver='GeoJSON') -def read_tiff_file(tile_data_path, file_name): - file_path = os.path.join(tile_data_path, f'{file_name}.tif') +def read_tiff_file(tile_data_path, filename): + file_path = os.path.join(tile_data_path, f'{filename}.tif') raster_data = rioxarray.open_rasterio(file_path) return raster_data -def read_vector_file(tile_data_path, file_name): - file_path = os.path.join(tile_data_path, f'{file_name}.geojson') +def read_vector_file(tile_data_path, filename): + file_path = os.path.join(tile_data_path, f'{filename}.geojson') vector_data = gpd.read_file(file_path) return vector_data diff --git a/workers/umep_plugin_processor.py b/workers/umep_plugin_processor.py index 782d239..08b5144 100644 --- a/workers/umep_plugin_processor.py +++ b/workers/umep_plugin_processor.py @@ -26,7 +26,7 @@ MAX_RETRY_COUNT = 3 RETRY_PAUSE_TIME_SEC = 10 -def run_plugin(task_index, step_method, folder_name_city_data, folder_name_tile_data, source_base_path, target_path, met_file_name=None, utc_offset=None): +def run_plugin(task_index, step_method, folder_name_city_data, folder_name_tile_data, source_base_path, target_path, met_filename=None, utc_offset=None): start_time = datetime.now() city_data = 
CityData(folder_name_city_data, folder_name_tile_data, source_base_path, target_path) @@ -54,8 +54,8 @@ def run_plugin(task_index, step_method, folder_name_city_data, folder_name_tile_ retry_count = 0 with (tempfile.TemporaryDirectory() as tmpdirname): # Get the UMEP processing parameters and prepare for the method - input_params, umep_method_title, keepers = _prepare_method_execution(step_method, city_data, tmpdirname, met_file_name, utc_offset) + input_params, umep_method_title, keepers = _prepare_method_execution(step_method, city_data, tmpdirname, met_filename, utc_offset) while retry_count < MAX_RETRY_COUNT and return_code != 0: try: # Run the UMEP plugin!! @@ -95,14 +96,14 @@ def run_plugin(task_index, step_method, folder_name_city_data, folder_name_tile_ return return_code, start_time, run_duration -def _prepare_method_execution(method, city_data, tmpdirname, met_file_name=None, utc_offset=None): +def _prepare_method_execution(method, city_data, tmpdirname, met_filename=None, utc_offset=None): keepers = {} if method == 'wall_height_aspect': create_folder(city_data.target_preprocessed_data_path) - temp_target_wallheight_path = os.path.join(tmpdirname, city_data.file_name_wall_height) - temp_target_wallaspect_path = os.path.join(tmpdirname, city_data.file_name_wall_aspect) + temp_target_wallheight_path = os.path.join(tmpdirname, city_data.filename_wall_height) + temp_target_wallaspect_path = os.path.join(tmpdirname, city_data.filename_wall_aspect) input_params = { 'INPUT': city_data.source_dsm_path, 'INPUT_LIMIT': city_data.wall_lower_limit_height, @@ -131,10 +132,10 @@ def _prepare_method_execution(method, city_data, tmpdirname, met_file_name=None, umep_method_title = 'umep:Urban Geometry: Sky View Factor' keepers[temp_svfs_file_with_extension] = city_data.target_svfszip_path else: - source_met_file_path = os.path.join(city_data.source_met_files_path, met_file_name) - temp_met_folder = os.path.join(tmpdirname, Path(met_file_name).stem, 
city_data.folder_name_tile_data) + source_met_file_path = os.path.join(city_data.source_met_files_path, met_filename) + temp_met_folder = os.path.join(tmpdirname, Path(met_filename).stem, city_data.folder_name_tile_data) create_folder(temp_met_folder) - target_met_folder = os.path.join(city_data.target_tcm_results_path, Path(met_file_name).stem, city_data.folder_name_tile_data) + target_met_folder = os.path.join(city_data.target_tcm_results_path, Path(met_filename).stem, city_data.folder_name_tile_data) input_params = { "INPUT_DSM": city_data.source_dsm_path, "INPUT_SVF": city_data.target_svfszip_path, @@ -238,16 +239,16 @@ def _compute_time_diff_mins(start_time): parser.add_argument('--source_data_path', metavar='path', required=True, help='folder with source data') parser.add_argument('--target_path', metavar='path', required=True, help='folder that is to be populated') - parser.add_argument('--met_file_name', metavar='str', required=False, help='name of the meteorological file') + parser.add_argument('--met_filename', metavar='str', required=False, help='name of the meteorological file') parser.add_argument('--utc_offset', metavar='int', required=False, help='local hour offset from utc') args = parser.parse_args() return_code, start_time, run_duration = run_plugin(args.task_index, args.step_method, args.folder_name_city_data, args.folder_name_tile_data, - args.source_data_path, args.target_path, args.met_file_name, args.utc_offset) + args.source_data_path, args.target_path, args.met_filename, args.utc_offset) - met_file_name_str = args.met_file_name if args.met_file_name != 'None' else 'N/A' + met_filename_str = args.met_filename if args.met_filename != 'None' else 'N/A' start_time_str = start_time.strftime('%Y_%m_%d_%H:%M:%S') return_stdout = (f'{{"Return_package": {{"task_index": {args.task_index}, "step_index": {args.step_index}, \ - "step_method": "{args.step_method}", "met_file_name": "{met_file_name_str}", "return_code": {return_code}, \ + "step_method": 
"{args.step_method}", "met_filename": "{met_filename_str}", "return_code": {return_code}, \ "start_time": "{start_time_str}", "run_duration": {run_duration}}}}}') print(return_stdout) \ No newline at end of file