Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Removed separate meteorological config file #6

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 18 additions & 25 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ def main(source_base_path, target_base_path, city_folder_name, pre_check_option)
processing_config_df = _build_source_dataframes(abs_source_base_path, city_folder_name)
return_code_configs = _validate_config_inputs(processing_config_df, abs_source_base_path, abs_target_path, city_folder_name, pre_check_option)



if pre_check_option == 'no_pre_check':
enabled_processing_tasks_df = processing_config_df[(processing_config_df['enabled'])]
delayed_results, solweig_delayed_results = _build_processing_graphs(enabled_processing_tasks_df, abs_source_base_path, abs_target_path, city_folder_name)
Expand Down Expand Up @@ -78,7 +80,7 @@ def _report_results(enabled_processing_tasks_df, results_df, solweig_results_df,
combined_results = results_df if solweig_results_df.empty else\
pd.concat([results_df, solweig_results_df], ignore_index=True)

combined_results.sort_values(['task_index', 'step_index', 'met_file_name'], inplace=True)
combined_results.sort_values(['task_index', 'step_index', 'met_filename'], inplace=True)

merged = pd.merge(enabled_processing_tasks_df, combined_results, left_index=True, right_on='task_index',
how='outer')
Expand All @@ -87,7 +89,7 @@ def _report_results(enabled_processing_tasks_df, results_df, solweig_results_df,

reporting_df = merged.loc[:,
['run_status', 'task_index', 'city_folder_name', 'tile_folder_name', 'method', 'step_index',
'step_method', 'met_file_name',
'step_method', 'met_filename',
'return_code', 'start_time', 'run_duration']]

report_folder = os.path.join(get_application_path(), '.reports')
Expand Down Expand Up @@ -124,7 +126,7 @@ def _validate_config_inputs(processing_config_df, source_base_path, target_path,
return 0

def _build_source_dataframes(source_base_path, city_folder_name):
config_processing_file_path = str(os.path.join(source_base_path, city_folder_name, CityData.file_name_umep_city_processing_config))
config_processing_file_path = str(os.path.join(source_base_path, city_folder_name, CityData.filename_umep_city_processing_config))
processing_config_df = pd.read_csv(config_processing_file_path)

return processing_config_df
Expand Down Expand Up @@ -167,7 +169,7 @@ def _parse_and_report_row_results(dc):
results.append(obj)

# extract content from the return package and determine if there was a failure
results_df = pd.DataFrame(columns=['task_index', 'step_index', 'step_method', 'met_file_name', 'return_code', 'start_time', 'run_duration'])
results_df = pd.DataFrame(columns=['task_index', 'step_index', 'step_method', 'met_filename', 'return_code', 'start_time', 'run_duration'])
all_passed = True
failed_task_ids = []
failed_task_details = []
Expand All @@ -179,12 +181,12 @@ def _parse_and_report_row_results(dc):
task_index = return_package['task_index']
step_index = return_package['step_index']
step_method = return_package['step_method']
met_file_name = return_package['met_file_name']
met_filename = return_package['met_filename']
return_code = return_package['return_code']
start_time = return_package['start_time']
run_duration = return_package['run_duration']

new_row = [task_index, step_index, step_method, met_file_name, return_code, start_time, run_duration]
new_row = [task_index, step_index, step_method, met_filename, return_code, start_time, run_duration]
results_df.loc[len(results_df.index)] = new_row

if return_code != 0:
Expand Down Expand Up @@ -249,33 +251,24 @@ def _build_solweig_only_steps(task_index, step_index, folder_name_city_data, fol
city_data = CityData(folder_name_city_data, folder_name_tile_data, source_base_path, target_base_path)

delayed_result = []
config_meteorological_parameters_path = str(os.path.join(city_data.source_city_path, city_data.file_name_met_parameters_config))
met_time_series_config_df = pd.read_csv(config_meteorological_parameters_path)

return_code = 0
for index, config_row in met_time_series_config_df.iterrows():
enabled = bool(config_row.enabled)
if enabled:
met_file_name = config_row.met_file_name
utc_offset = config_row.utc_offset

proc_array = _construct_proc_array(task_index, step_index, 'solweig_only', folder_name_city_data, folder_name_tile_data,
source_base_path, target_base_path, met_file_name, utc_offset)
solweig = dask.delayed(subprocess.run)(proc_array, capture_output=True, text=True)
delayed_result.append(solweig)

if return_code != 0:
break
for met_file in city_data.met_files:
met_filename = met_file.get('filename')
utc_offset = met_file.get('utc_offset')

proc_array = _construct_proc_array(task_index, step_index, 'solweig_only', folder_name_city_data, folder_name_tile_data,
source_base_path, target_base_path, met_filename, utc_offset)
solweig = dask.delayed(subprocess.run)(proc_array, capture_output=True, text=True)
delayed_result.append(solweig)
return delayed_result


def _construct_proc_array(task_index, step_index, step_method, folder_name_city_data, folder_name_tile_data, source_base_path, target_base_path,
met_file_name=None, utc_offset=None):
met_filename=None, utc_offset=None):
proc_array = ['python', SCRIPT_PATH, f'--task_index={task_index}', f'--step_index={step_index}', f'--step_method={step_method}',
f'--folder_name_city_data={folder_name_city_data}',
f'--folder_name_tile_data={folder_name_tile_data}',
f'--source_data_path={source_base_path}', f'--target_path={target_base_path}',
f'--met_file_name={met_file_name}', f'--utc_offset={utc_offset}']
f'--met_filename={met_filename}', f'--utc_offset={utc_offset}']
return proc_array


Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,14 @@
emis_ground: 0.95
output_tmrt: True
output_sh: True
# Names of meteorological files used for SOLWEIG processing
- MetFiles:
- filename: met_20jan2022.txt
utc_offset: 2
- filename: met_22jan2022.txt
utc_offset: 2
# Names of source files used for processing
- FileNames:
- Tifffilenames:
dem_tif_filename: dem.tif
dsm_ground_build_tif_filename: dsm_ground_build.tif
veg_canopy_tif_filename: tree_canopy.tif
Expand Down

This file was deleted.

16 changes: 11 additions & 5 deletions sample_cities/ZZZ_template_city/.config_method_parameters.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,15 @@
emis_ground: 0.95
output_tmrt: True
output_sh: True
# Names of meteorological files used for SOLWEIG processing
- MetFiles:
- filename: <met_filename_1>.txt
utc_offset: <n>
- filename: <met_filename_2>.txt
utc_offset: <n>
# Names of source files used for processing
- FileNames:
dem_tif_filename: <DEM_file_name>.tif
dsm_ground_build_tif_filename: <DSM_file_name>.tif
veg_canopy_tif_filename: <tree_canopy_file_name>.tif
landcover_tif_filename: <landcover_file_name>.tif
- Tifffilenames:
dem_tif_filename: <DEM_filename>.tif
dsm_ground_build_tif_filename: <DSM_filename>.tif
veg_canopy_tif_filename: <tree_canopy_filename>.tif
landcover_tif_filename: <landcover_filename>.tif
32 changes: 21 additions & 11 deletions src/source_quality_verifier.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def verify_fundamental_paths(source_base_path, target_path, city_folder_name):
if invalids:
return invalids

config_processing_file_path = str(os.path.join(city_path, CityData.file_name_umep_city_processing_config))
config_processing_file_path = str(os.path.join(city_path, CityData.filename_umep_city_processing_config))
if _verify_path(config_processing_file_path) is False:
msg = f'Processing registry file does not exist as: {config_processing_file_path}'
invalids.append(msg)
Expand All @@ -40,7 +40,7 @@ def verify_processing_config(processing_config_df, source_base_path, target_base
enabled = str(config_row.enabled)
valid_enabled = ['true', 'false']
if enabled.lower() not in valid_enabled:
invalids.append(f"Invalid enabled value ({str(enabled)}) on row {index}. Valid values: {valid_enabled}")
invalids.append(f"Invalid enabled value ({str(enabled)}) on row {index} in .config_umep_city_processing.csv. Valid values: {valid_enabled}")

for index, config_row in processing_config_df.iterrows():
enabled = str(config_row.enabled)
Expand All @@ -50,15 +50,15 @@ def verify_processing_config(processing_config_df, source_base_path, target_base
source_tile_path = city_data.source_tile_data_path
if not os.path.isdir(source_tile_path):
invalids.append(
f"tile folder ({str(folder_name_tile_data)}) on row {index} not found under '{source_base_path}'.")
f"tile folder ({str(folder_name_tile_data)}) on row {index} of .config_umep_city_processing.csv not found under '{source_base_path}'.")

for index, config_row in processing_config_df.iterrows():
enabled = str(config_row.enabled)
if bool(enabled) or pre_check_option == 'check_all':
method = config_row.method
valid_methods = CityData.plugin_methods
if method not in valid_methods:
invalids.append(f"Invalid 'method' ({method}) on row {index}. Valid values: {valid_methods}")
invalids.append(f"Invalid 'method' ({method}) on row {index} in .config_umep_city_processing.csv. Valid values: {valid_methods}")

# check file dependencies
for index, config_row in processing_config_df.iterrows():
Expand All @@ -70,37 +70,47 @@ def verify_processing_config(processing_config_df, source_base_path, target_base

prior_dsm = city_data.source_dsm_path
if _verify_path(prior_dsm) is False:
msg = f'Required source file: {prior_dsm} not found for row {index}.'
msg = f'Required source file: {prior_dsm} not found for row {index} in .config_umep_city_processing.csv.'
invalids.append(msg)

if method in ['skyview_factor', 'solweig_full', 'solweig_only']:
prior_veg_canopy = city_data.source_veg_canopy_path
if _verify_path(prior_veg_canopy) is False:
msg = f'Required source file: {prior_veg_canopy} not found for method: {method} on row {index}.'
msg = f'Required source file: {prior_veg_canopy} not found for method: {method} on row {index} in .config_umep_city_processing.csv.'
invalids.append(msg)

if method in ['solweig_only', 'solweig_full']:
prior_land_cover = city_data.source_land_cover_path
prior_dem = city_data.source_dem_path
if _verify_path(prior_land_cover) is False:
msg = f'Required source file: {prior_land_cover} not found for method: {method} on row {index}.'
msg = f'Required source file: {prior_land_cover} not found for method: {method} on row {index} in .config_umep_city_processing.csv.'
invalids.append(msg)
if _verify_path(prior_dem) is False:
msg = f'Required source file: {prior_dem} not found for method: {method} on row {index}.'
msg = f'Required source file: {prior_dem} not found for method: {method} on row {index} in .config_umep_city_processing.csv.'
invalids.append(msg)
for met_file_row in city_data.met_files:
met_file = met_file_row.get('filename')
met_filepath = os.path.join(city_data.source_met_files_path, met_file)
if _verify_path(met_filepath) is False:
msg = f'Required meteorological file: {met_filepath} not found for method: {method} in .config_method_parameters.yml.'
invalids.append(msg)
utc_offset = met_file_row.get('utc_offset')
if not -24 <= utc_offset <= 24:
msg = f'UTC range for: {met_file} not in range for 24-hour offsets as specified in .config_method_parameters.yml.'
invalids.append(msg)

if method in ['solweig_only']:
prior_svfszip = city_data.target_svfszip_path
prior_wallheight = city_data.target_wallheight_path
prior_wallaspect = city_data.target_wallaspect_path
if _verify_path(prior_svfszip) is False:
msg = f'Required source file: {prior_svfszip} currently not found for method: {method} on row {index}.'
msg = f'Required source file: {prior_svfszip} currently not found for method: {method} on row {index} in .config_umep_city_processing.csv.'
invalids.append(msg)
if _verify_path(prior_wallheight) is False:
msg = f'Required source file: {prior_wallheight} currently not found for method: {method} on row {index}.'
msg = f'Required source file: {prior_wallheight} currently not found for method: {method} on row {index} in .config_umep_city_processing.csv.'
invalids.append(msg)
if _verify_path(prior_wallaspect) is False:
msg = f'Required source file: {prior_wallaspect} currently not found for method: {method} on row {index}.'
msg = f'Required source file: {prior_wallaspect} currently not found for method: {method} on row {index} in .config_umep_city_processing.csv.'
invalids.append(msg)

return invalids
Expand Down
2 changes: 1 addition & 1 deletion test/test_processing_runs.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def test_main_check_enabled_only_failure():

def _verify_expected_output_folders(source_base_path, target_base_path, source_city_folder_name):
enabled_target_folder = []
config_processing_file_path = str(os.path.join(source_base_path, source_city_folder_name, CityData.file_name_umep_city_processing_config))
config_processing_file_path = str(os.path.join(source_base_path, source_city_folder_name, CityData.filename_umep_city_processing_config))
processing_config_df = pd.read_csv(config_processing_file_path)
for index, config_row in processing_config_df.iterrows():
enabled = bool(config_row.enabled)
Expand Down
8 changes: 4 additions & 4 deletions test/test_umep_for_processing_algorithms.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,19 +61,19 @@
# def test_solweig_generator(startup_teardown):
# task_index = 'test_solweig'
# step = 0
# met_file_name = 'met_20jan2022.txt'
# met_filename = 'met_20jan2022.txt'
# utc_offset = 2
#
# temp_dir = startup_teardown
# city_data = instantiate_city_data(folder_name_city_data, 'tile1', SOURCE_PATH, temp_dir)
#
# UMEP_PLUGIN.generate_wall_height_aspect(task_index, city_data)
# UMEP_PLUGIN.generate_skyview_factor_files(task_index, city_data)
# return_code = UMEP_PLUGIN.generate_solweig(task_index, step, city_data, met_file_name, utc_offset)
# return_code = UMEP_PLUGIN.generate_solweig(task_index, step, city_data, met_filename, utc_offset)
#
# # out_directory = os.path.join(city_data.target_tcm_results_path, Path(met_file_name).stem)
# # out_directory = os.path.join(city_data.target_tcm_results_path, Path(met_filename).stem)
#
# target_met_folder = os.path.join(city_data.target_tcm_results_path, Path(met_file_name).stem, city_data.folder_name_tile_data)
# target_met_folder = os.path.join(city_data.target_tcm_results_path, Path(met_filename).stem, city_data.folder_name_tile_data)
# tmrt_average_file_path = os.path.join(target_met_folder, 'Tmrt_average.tif')
# tmrt_average_file_exists = os.path.isfile(tmrt_average_file_path)
#
Expand Down
34 changes: 18 additions & 16 deletions workers/city_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,18 +4,18 @@


class CityData:
file_name_method_parameters_config = '.config_method_parameters.yml'
file_name_met_parameters_config = '.config_meteorological_parameters.csv'
file_name_umep_city_processing_config = '.config_umep_city_processing.csv'
filename_method_parameters_config = '.config_method_parameters.yml'
filename_met_parameters_config = '.config_meteorological_parameters.csv'
filename_umep_city_processing_config = '.config_umep_city_processing.csv'

folder_name_primary_source_data = 'primary_source_data'
folder_name_met_files = 'met_files'
folder_name_preprocessed_data = 'preprocessed_data'
folder_name_tcm_results = 'tcm_results'

file_name_wall_height = 'wallheight.tif'
file_name_wall_aspect = 'wallaspect.tif'
file_name_svfs_zip = 'svfs.zip'
filename_wall_height = 'wallheight.tif'
filename_wall_aspect = 'wallaspect.tif'
filename_svfs_zip = 'svfs.zip'

plugin_methods = ['all', 'wall_height_aspect', 'skyview_factor', 'solweig_only', 'solweig_full']

Expand All @@ -29,7 +29,7 @@ def __new__(cls, folder_name_city_data, folder_name_tile_data, source_base_path,

obj.source_city_path = str(os.path.join(source_base_path, folder_name_city_data))
obj.source_city_data_path = str(os.path.join(obj.source_city_path, 'source_data'))
city_configs = os.path.join(obj.source_city_path, cls.file_name_method_parameters_config)
city_configs = os.path.join(obj.source_city_path, cls.filename_method_parameters_config)
with open(city_configs, 'r') as stream:
try:
values = list(yaml.safe_load_all(stream))[0]
Expand All @@ -49,15 +49,17 @@ def __new__(cls, folder_name_city_data, folder_name_tile_data, source_base_path,
obj.output_tmrt = to_bool(method_attributes['solweig']['output_tmrt'])
obj.output_sh = to_bool(method_attributes['solweig']['output_sh'])

file_names = values[1]
obj.dem_file = file_names['dem_tif_filename']
obj.dsm_file = file_names['dsm_ground_build_tif_filename']
obj.veg_canopy_file = file_names['veg_canopy_tif_filename']
obj.landcover_file = file_names['landcover_tif_filename']
obj.met_files = values[1].get('MetFiles')

filenames = values[2]
obj.dem_file = filenames['dem_tif_filename']
obj.dsm_file = filenames['dsm_ground_build_tif_filename']
obj.veg_canopy_file = filenames['veg_canopy_tif_filename']
obj.landcover_file = filenames['landcover_tif_filename']

except yaml.YAMLError as e_msg:
raise Exception(
f'The {cls.file_name_method_parameters_config} file not found or improperly defined in {city_configs}. ({e_msg})')
f'The {cls.filename_method_parameters_config} file not found or improperly defined in {city_configs}. ({e_msg})')

obj.target_path_city_data = str(os.path.join(obj.target_base_path, folder_name_city_data, 'results_data'))

Expand All @@ -73,8 +75,8 @@ def __new__(cls, folder_name_city_data, folder_name_tile_data, source_base_path,
obj.source_veg_canopy_path = os.path.join(obj.source_tile_data_path, obj.veg_canopy_file)
obj.source_land_cover_path = os.path.join(obj.source_tile_data_path, obj.landcover_file)

obj.target_wallheight_path = os.path.join(obj.target_preprocessed_data_path, obj.file_name_wall_height)
obj.target_wallaspect_path = os.path.join(obj.target_preprocessed_data_path, obj.file_name_wall_aspect)
obj.target_svfszip_path = os.path.join(obj.target_preprocessed_data_path, obj.file_name_svfs_zip)
obj.target_wallheight_path = os.path.join(obj.target_preprocessed_data_path, obj.filename_wall_height)
obj.target_wallaspect_path = os.path.join(obj.target_preprocessed_data_path, obj.filename_wall_aspect)
obj.target_svfszip_path = os.path.join(obj.target_preprocessed_data_path, obj.filename_svfs_zip)

return obj
Loading