Skip to content

Commit

Permalink
Merge pull request #605 from ungarj/better_input_names
Browse files Browse the repository at this point in the history
use better input names when printing process info
  • Loading branch information
ungarj authored Dec 5, 2023
2 parents 527075e + 07d9f2d commit fc28048
Show file tree
Hide file tree
Showing 2 changed files with 36 additions and 15 deletions.
47 changes: 32 additions & 15 deletions mapchete/config/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,7 +262,7 @@ def __repr__(self): # pragma: no cover
def input_at_zoom(self, key=None, zoom=None):
if zoom is None: # pragma: no cover
raise ValueError("zoom not provided")
return self.input[get_hash(self._params_at_zoom[zoom]["input"][key])]
return self.input[get_input_key(self._params_at_zoom[zoom]["input"][key])]

def preprocessing_tasks_per_input(self):
"""Get all preprocessing tasks defined by the input drivers."""
Expand Down Expand Up @@ -450,18 +450,21 @@ def input(self):
explicitly provided in process configuration.
"""
# get input items only of initialized zoom levels
raw_inputs = OrderedDict(
[
# convert input definition to hash
(get_hash(v), v)
for zoom in self.init_zoom_levels
if "input" in self._params_at_zoom[zoom]
# to preserve file groups, "flatten" the input tree and use
# the tree paths as keys
for key, v in _flatten_tree(self._params_at_zoom[zoom]["input"])
if v is not None
]
)
try:
raw_inputs = OrderedDict(
[
# convert input definition to hash
(get_input_key(v), v)
for zoom in self.init_zoom_levels
if "input" in self._params_at_zoom[zoom]
# to preserve file groups, "flatten" the input tree and use
# the tree paths as keys
for key, v in _flatten_tree(self._params_at_zoom[zoom]["input"])
if v is not None
]
)
except TypeError as exc:
raise MapcheteConfigError(exc)
if self._init_inputs:
return initialize_inputs(
raw_inputs,
Expand Down Expand Up @@ -575,7 +578,7 @@ def params_at_zoom(self, zoom):
if v is None:
flat_inputs[k] = None
else:
flat_inputs[k] = self.input[get_hash(v)]
flat_inputs[k] = self.input[get_input_key(v)]
out["input"] = _unflatten_tree(flat_inputs)
else:
out["input"] = OrderedDict()
Expand Down Expand Up @@ -620,7 +623,7 @@ def _area_at_zoom(self, zoom):
if "input" in self._params_at_zoom[zoom]:
input_union = unary_union(
[
self.input[get_hash(v)].bbox(self.process_pyramid.crs)
self.input[get_input_key(v)].bbox(self.process_pyramid.crs)
for k, v in _flatten_tree(self._params_at_zoom[zoom]["input"])
if v is not None
]
Expand Down Expand Up @@ -762,6 +765,20 @@ def at_zoom(self, zoom):
return self.params_at_zoom(zoom)


def get_input_key(
    input_definition: Union[MPathLike, dict], hash_length: int = 6
) -> str:
    """Return a human-readable, unique key for an input definition.

    The key combines a recognizable prefix (the input path, or the driver
    format for dict-style definitions) with a short hash of the full
    definition so that distinct inputs never collide.

    Parameters
    ----------
    input_definition : MPathLike or dict
        Either something convertible to an MPath (e.g. a path string) or an
        abstract input configuration dict containing a "format" key.
    hash_length : int
        Number of hash characters appended to the prefix. (default: 6)

    Returns
    -------
    str
        "<path>-<hash>" for path-like inputs, "<format>-<hash>" for dicts.

    Raises
    ------
    ValueError
        If the definition is neither path-like nor a dict with a "format" key.
    """
    try:
        # path-like inputs: use the path itself as the readable prefix
        path = MPath.from_inp(input_definition)
        return f"{path}-{get_hash(input_definition, length=hash_length)}"
    except ValueError:
        # not convertible to a path — fall through to the dict case
        pass
    # guard the key access: a dict without "format" must hit the informative
    # ValueError below instead of leaking a bare KeyError to the caller
    if isinstance(input_definition, dict) and "format" in input_definition:
        return f"{input_definition['format']}-{get_hash(input_definition, length=hash_length)}"
    raise ValueError(f"cannot generate input_key from {input_definition}")


def get_hash(some_object: Any, length: int = 16) -> str:
"""Return hash of some_object."""
if isinstance(some_object, MPath):
Expand Down
4 changes: 4 additions & 0 deletions test/test_executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,6 +208,10 @@ def test_profile_wrapper(request, path_fixture):
assert result.profiling["memory"].total_allocated > 0


@pytest.mark.skip(
reason="this test is flaky and the feature is also tested in "
"test_processing_profilers.py::test_requests_return_result"
)
@pytest.mark.integration
@pytest.mark.parametrize(
"path_fixture",
Expand Down

0 comments on commit fc28048

Please sign in to comment.