Merge pull request #397 from lsst/tickets/DM-40150
DM-40150: Remove some post v26 deprecations
timj authored Jan 5, 2024
2 parents 3bc6aad + d55dd3f commit 572f9e4
Showing 13 changed files with 17 additions and 167 deletions.
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -7,7 +7,7 @@ repos:
- id: trailing-whitespace
- id: check-toml
- repo: https://github.com/psf/black
rev: 23.11.0
rev: 23.12.1
hooks:
- id: black
# It is recommended to specify the latest version of Python
@@ -16,13 +16,13 @@ repos:
# https://pre-commit.com/#top_level-default_language_version
language_version: python3.11
- repo: https://github.com/pycqa/isort
rev: 5.13.1
rev: 5.13.2
hooks:
- id: isort
name: isort (python)
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.7
rev: v0.1.11
hooks:
- id: ruff
- repo: https://github.com/numpy/numpydoc
5 changes: 5 additions & 0 deletions doc/changes/DM-40150.removal.rst
@@ -0,0 +1,5 @@
* Removed ``topLevelOnly`` parameter from ``TaskMetadata.names()``.
* Removed the ``saveMetadata`` configuration from ``PipelineTask``.
* Removed ``lsst.pipe.base.cmdLineTask.profile`` (use ``lsst.utils.timer.profile`` instead).
* Removed ``ButlerQuantumContext`` class. Use ``QuantumContext`` instead.
* Removed ``recontitutedDimensions`` parameter from ``QuantumNode.from_simple()``
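
The removals above map onto simple call-site updates in downstream code. A hedged migration sketch (fragments, not a runnable script: task, simple, taskDefMap, and universe stand in for objects the calling code already has; import paths assumed from the package layout in this diff):

```python
# Illustrative before/after fragments for the removals listed above.

# ButlerQuantumContext is gone; import the renamed class instead.
from lsst.pipe.base import QuantumContext  # was: ButlerQuantumContext

# names() no longer accepts topLevelOnly and always returns the full hierarchy.
all_keys = task.metadata.names()  # was: task.metadata.names(topLevelOnly=False)

# The profiling helper now lives in lsst.utils.timer.
from lsst.utils.timer import profile  # was: from lsst.pipe.base.cmdLineTask import profile

# QuantumNode.from_simple() dropped the ignored recontitutedDimensions argument.
from lsst.pipe.base.graph import QuantumNode  # import path assumed

node = QuantumNode.from_simple(simple, taskDefMap, universe)
```
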
2 changes: 0 additions & 2 deletions mypy.ini
@@ -72,8 +72,6 @@ disallow_untyped_defs = False
# worth adding new annotations to them.
[mypy-lsst.pipe.base.argumentParser.*]
disallow_untyped_defs = False
[mypy-lsst.pipe.base.cmdLineTask.*]
disallow_untyped_defs = False
[mypy-lsst.pipe.base.shims.*]
disallow_untyped_defs = False

16 changes: 1 addition & 15 deletions python/lsst/pipe/base/_quantumContext.py
@@ -31,15 +31,14 @@

from __future__ import annotations

__all__ = ("ButlerQuantumContext", "ExecutionResources", "QuantumContext")
__all__ = ("ExecutionResources", "QuantumContext")

import numbers
from collections.abc import Callable, Sequence
from dataclasses import dataclass
from typing import Any

import astropy.units as u
from deprecated.sphinx import deprecated
from lsst.daf.butler import DatasetRef, DimensionUniverse, LimitedButler, Quantum
from lsst.utils.introspection import get_full_type_name
from lsst.utils.logging import PeriodicLogger, getLogger
@@ -421,16 +420,3 @@ def dimensions(self) -> DimensionUniverse:
repository (`~lsst.daf.butler.DimensionUniverse`).
"""
return self.__butler.dimensions


# TODO: remove on DM-40063.
@deprecated(
reason="ButlerQuantumContext has been renamed to QuantumContext and been given extra functionality. "
"Please use the new name. Will be removed after v26.",
version="v26",
category=FutureWarning,
)
class ButlerQuantumContext(QuantumContext):
"""Deprecated version of `QuantumContext`."""

pass
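
With the deprecated alias removed, any runQuantum signatures still annotated with ButlerQuantumContext should now name QuantumContext directly. A minimal sketch of the updated annotation, assuming the usual lsst.pipe.base connection classes (the task itself is hypothetical and omits its config and connections):

```python
from lsst.pipe.base import PipelineTask, QuantumContext
from lsst.pipe.base.connections import InputQuantizedConnection, OutputQuantizedConnection


class ExampleTask(PipelineTask):
    """Hypothetical task; ConfigClass and connections omitted for brevity."""

    def runQuantum(
        self,
        butlerQC: QuantumContext,  # was annotated as ButlerQuantumContext
        inputRefs: InputQuantizedConnection,
        outputRefs: OutputQuantizedConnection,
    ) -> None:
        inputs = butlerQC.get(inputRefs)   # fetch all declared inputs
        outputs = self.run(**inputs)       # delegate the science payload to run()
        butlerQC.put(outputs, outputRefs)  # store the declared outputs
```
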
24 changes: 1 addition & 23 deletions python/lsst/pipe/base/_task_metadata.py
@@ -30,11 +30,9 @@
import itertools
import numbers
import sys
import warnings
from collections.abc import Collection, Iterator, Mapping, Sequence
from typing import Any, Protocol

from lsst.utils.introspection import find_outside_stacklevel
from pydantic import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr

# The types allowed in a Task metadata field are restricted
@@ -260,35 +258,15 @@ def getArray(self, key: str) -> list[Any]:
# Report the correct key.
raise KeyError(f"'{key}' not found") from None

def names(self, topLevelOnly: bool | None = None) -> set[str]:
def names(self) -> set[str]:
"""Return the hierarchical keys from the metadata.
Parameters
----------
topLevelOnly : `bool` or `None`, optional
This parameter is deprecated and will be removed in the future.
If given it can only be `False`. All names in the hierarchy are
always returned.
Returns
-------
names : `collections.abc.Set`
A set of all keys, including those from the hierarchy and the
top-level hierarchy.
"""
if topLevelOnly:
raise RuntimeError(
"The topLevelOnly parameter is no longer supported and can not have a True value."
)

if topLevelOnly is False:
warnings.warn(
"The topLevelOnly parameter is deprecated and is always assumed to be False."
" It will be removed completely after v26.",
category=FutureWarning,
stacklevel=find_outside_stacklevel("lsst.pipe.base"),
)

names = set()
for k, v in self.items():
names.add(k) # Always include the current level
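
For callers, the only visible change is that names() takes no arguments; dotted keys still report every level of the hierarchy. A small sketch (the exact set contents are illustrative):

```python
from lsst.pipe.base import TaskMetadata

meta = TaskMetadata()
meta["outer.inner"] = 42  # dotted keys create a nested hierarchy
meta["scalar"] = "value"

# No topLevelOnly argument any more; the full hierarchy is always returned,
# e.g. {"scalar", "outer", "outer.inner"}.
print(meta.names())
```
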
91 changes: 0 additions & 91 deletions python/lsst/pipe/base/cmdLineTask.py

This file was deleted.
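
The only piece of this deleted module still called out by the changelog is its profile context manager, which moved to lsst.utils. A minimal sketch, assuming lsst.utils.timer.profile keeps the filename-based context-manager interface of the removed helper (do_expensive_work is a placeholder):

```python
from lsst.utils.timer import profile  # replacement for lsst.pipe.base.cmdLineTask.profile


def do_expensive_work() -> None:
    """Placeholder for the code being profiled."""
    sum(i * i for i in range(1_000_000))


# Profile the enclosed block and write the statistics to the named file
# (interface assumed to match the deleted helper).
with profile("example.prof"):
    do_expensive_work()
```
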

6 changes: 0 additions & 6 deletions python/lsst/pipe/base/config.py
@@ -217,12 +217,6 @@ class to allow configuration of the connections class. This dynamically
based on a PipelineTaskConnections class.
"""

saveMetadata = pexConfig.Field[bool](
default=True,
optional=False,
doc="Flag to enable/disable metadata saving for a task, enabled by default.",
deprecated="This field is deprecated and will be removed after v26.",
)
saveLogOutput = pexConfig.Field[bool](
default=True,
optional=False,
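
Because saveMetadata could not actually be set to False (see the matching check removed from _tasks.py below), dropping the field mainly affects configuration overrides that still mention it. A hypothetical override file illustrating the cleanup:

```python
# Hypothetical pipeline config override file.
config.saveLogOutput = True    # still a valid PipelineTaskConfig field
# config.saveMetadata = True   # delete this line: the field no longer exists
```
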
2 changes: 1 addition & 1 deletion python/lsst/pipe/base/graph/graph.py
@@ -1028,7 +1028,7 @@ def _buildSaveObjectImpl(self, returnHeader: bool = False) -> bytearray | tuple[
# a large impact on on disk size, so it is worth doing
simpleNode = node.to_simple(accumulator=dimAccumulator)

dump = lzma.compress(simpleNode.json().encode(), preset=2)
dump = lzma.compress(simpleNode.model_dump_json().encode(), preset=2)
jsonData.append(dump)
nodeMap.append(
(
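
The json() to model_dump_json() rename tracks the Pydantic v2 API, where the v1 method names are deprecated. A self-contained sketch of the same serialize-and-compress pattern, using an illustrative model rather than the real SerializedQuantumNode:

```python
import lzma

from pydantic import BaseModel


class ExampleNode(BaseModel):
    """Stand-in for SerializedQuantumNode."""

    taskLabel: str
    nodeId: int


node = ExampleNode(taskLabel="isr", nodeId=1)

# Pydantic v2 spelling; .json() is the deprecated v1 equivalent.
dump = lzma.compress(node.model_dump_json().encode(), preset=2)
print(len(dump), "compressed bytes")
```
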
10 changes: 0 additions & 10 deletions python/lsst/pipe/base/graph/quantumNode.py
@@ -29,20 +29,17 @@
__all__ = ("QuantumNode", "NodeId", "BuildId")

import uuid
import warnings
from dataclasses import dataclass
from typing import Any, NewType

import pydantic
from lsst.daf.butler import (
DatasetRef,
DimensionRecord,
DimensionRecordsAccumulator,
DimensionUniverse,
Quantum,
SerializedQuantum,
)
from lsst.utils.introspection import find_outside_stacklevel

from ..pipeline import TaskDef

@@ -143,14 +140,7 @@ def from_simple(
simple: SerializedQuantumNode,
taskDefMap: dict[str, TaskDef],
universe: DimensionUniverse,
recontitutedDimensions: dict[int, tuple[str, DimensionRecord]] | None = None,
) -> QuantumNode:
if recontitutedDimensions is not None:
warnings.warn(
"The recontitutedDimensions argument is now ignored and may be removed after v26",
category=FutureWarning,
stacklevel=find_outside_stacklevel("lsst.pipe.base"),
)
return QuantumNode(
quantum=Quantum.from_simple(simple.quantum, universe),
taskDef=taskDefMap[simple.taskLabel],
4 changes: 2 additions & 2 deletions python/lsst/pipe/base/pipeline_graph/_pipeline_graph.py
@@ -1073,7 +1073,7 @@ def _read_stream(

with gzip.open(stream, "rb") as uncompressed_stream:
data = json.load(uncompressed_stream)
serialized_graph = SerializedPipelineGraph.parse_obj(data)
serialized_graph = SerializedPipelineGraph.model_validate(data)
return serialized_graph.deserialize(import_mode)

@classmethod
@@ -1142,7 +1142,7 @@ def _write_stream(self, stream: BinaryIO) -> None:

with gzip.open(stream, mode="wb") as compressed_stream:
compressed_stream.write(
SerializedPipelineGraph.serialize(self).json(exclude_defaults=True).encode("utf-8")
SerializedPipelineGraph.serialize(self).model_dump_json(exclude_defaults=True).encode("utf-8")
)

def _write_uri(self, uri: ResourcePathExpression) -> None:
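
parse_obj() to model_validate() is the deserialization half of the same Pydantic v2 migration. A round trip through a gzipped JSON stream, mirroring _read_stream and _write_stream with an illustrative model:

```python
import gzip
import io
import json

from pydantic import BaseModel


class ExampleGraph(BaseModel):
    """Stand-in for SerializedPipelineGraph."""

    description: str = ""
    tasks: list[str] = []


buffer = io.BytesIO()
with gzip.open(buffer, mode="wb") as compressed_stream:
    compressed_stream.write(
        ExampleGraph(tasks=["isr"]).model_dump_json(exclude_defaults=True).encode("utf-8")
    )

buffer.seek(0)
with gzip.open(buffer, "rb") as uncompressed_stream:
    data = json.load(uncompressed_stream)
restored = ExampleGraph.model_validate(data)  # was: ExampleGraph.parse_obj(data)
assert restored.tasks == ["isr"]
```
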
2 changes: 0 additions & 2 deletions python/lsst/pipe/base/pipeline_graph/_tasks.py
@@ -158,8 +158,6 @@ def configure(
acc.CONFIG_INIT_OUTPUT_TEMPLATE.format(label=label),
acc.CONFIG_INIT_OUTPUT_STORAGE_CLASS,
)
if not config.saveMetadata:
raise ValueError(f"Metadata for task {label} cannot be disabled.")
connection_map[acc.METADATA_OUTPUT_CONNECTION_NAME] = Output(
acc.METADATA_OUTPUT_TEMPLATE.format(label=label),
acc.METADATA_OUTPUT_STORAGE_CLASS,
4 changes: 2 additions & 2 deletions tests/test_task.py
@@ -318,8 +318,8 @@ def testTimeMethod(self):
# TaskMetadata can serialize to JSON but not YAML
# and PropertySet can serialize to YAML and not JSON.
if hasattr(addMultTask.metadata, "json"):
j = addMultTask.metadata.json()
new_meta = pipeBase.TaskMetadata.parse_obj(json.loads(j))
j = addMultTask.metadata.model_dump_json()
new_meta = pipeBase.TaskMetadata.model_validate(json.loads(j))
else:
y = yaml.dump(addMultTask.metadata)
new_meta = yaml.safe_load(y)
12 changes: 2 additions & 10 deletions tests/test_taskmetadata.py
@@ -196,8 +196,8 @@ def testDict(self):
d2 = meta.to_dict()
self.assertEqual(d2, d)

j = meta.json()
meta2 = TaskMetadata.parse_obj(json.loads(j))
j = meta.model_dump_json()
meta2 = TaskMetadata.model_validate(json.loads(j))
self.assertEqual(meta2, meta)

# Round trip.
@@ -241,14 +241,6 @@ def testNumpy(self):
with self.assertRaises(ValueError):
meta["numpy"] = numpy.zeros(5)

def test_deprecated(self):
meta = TaskMetadata()
with self.assertRaises(RuntimeError):
meta.names(topLevelOnly=True)

with self.assertWarns(FutureWarning):
meta.names(topLevelOnly=False)


if __name__ == "__main__":
unittest.main()
