diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index dc6a310f3..2c49c6885 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -7,7 +7,7 @@ repos:
- id: trailing-whitespace
- id: check-toml
- repo: https://github.com/psf/black
- rev: 23.11.0
+ rev: 23.12.1
hooks:
- id: black
# It is recommended to specify the latest version of Python
@@ -16,13 +16,13 @@ repos:
# https://pre-commit.com/#top_level-default_language_version
language_version: python3.11
- repo: https://github.com/pycqa/isort
- rev: 5.13.1
+ rev: 5.13.2
hooks:
- id: isort
name: isort (python)
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
- rev: v0.1.7
+ rev: v0.1.11
hooks:
- id: ruff
- repo: https://github.com/numpy/numpydoc
diff --git a/doc/changes/DM-40150.removal.rst b/doc/changes/DM-40150.removal.rst
new file mode 100644
index 000000000..fa71ba4ba
--- /dev/null
+++ b/doc/changes/DM-40150.removal.rst
@@ -0,0 +1,5 @@
+* Removed ``topLevelOnly`` parameter from ``TaskMetadata.names()``.
+* Removed the ``saveMetadata`` configuration from ``PipelineTask``.
+* Removed ``lsst.pipe.base.cmdLineTask.profile`` (use ``lsst.utils.timer.profile`` instead).
+* Removed ``ButlerQuantumContext`` class. Use ``QuantumContext`` instead.
+* Removed ``recontitutedDimensions`` parameter from ``QuantumNode.from_simple()``.
diff --git a/mypy.ini b/mypy.ini
index 5039958e3..accc3241b 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -72,8 +72,6 @@ disallow_untyped_defs = False
# worth adding new annotations to them.
[mypy-lsst.pipe.base.argumentParser.*]
disallow_untyped_defs = False
-[mypy-lsst.pipe.base.cmdLineTask.*]
-disallow_untyped_defs = False
[mypy-lsst.pipe.base.shims.*]
disallow_untyped_defs = False
diff --git a/python/lsst/pipe/base/_quantumContext.py b/python/lsst/pipe/base/_quantumContext.py
index 4be54a310..ea53b9098 100644
--- a/python/lsst/pipe/base/_quantumContext.py
+++ b/python/lsst/pipe/base/_quantumContext.py
@@ -31,7 +31,7 @@
from __future__ import annotations
-__all__ = ("ButlerQuantumContext", "ExecutionResources", "QuantumContext")
+__all__ = ("ExecutionResources", "QuantumContext")
import numbers
from collections.abc import Callable, Sequence
@@ -39,7 +39,6 @@
from typing import Any
import astropy.units as u
-from deprecated.sphinx import deprecated
from lsst.daf.butler import DatasetRef, DimensionUniverse, LimitedButler, Quantum
from lsst.utils.introspection import get_full_type_name
from lsst.utils.logging import PeriodicLogger, getLogger
@@ -421,16 +420,3 @@ def dimensions(self) -> DimensionUniverse:
repository (`~lsst.daf.butler.DimensionUniverse`).
"""
return self.__butler.dimensions
-
-
-# TODO: remove on DM-40063.
-@deprecated(
- reason="ButlerQuantumContext has been renamed to QuantumContext and been given extra functionality. "
- "Please use the new name. Will be removed after v26.",
- version="v26",
- category=FutureWarning,
-)
-class ButlerQuantumContext(QuantumContext):
- """Deprecated version of `QuantumContext`."""
-
- pass
diff --git a/python/lsst/pipe/base/_task_metadata.py b/python/lsst/pipe/base/_task_metadata.py
index 66702a5ae..25cf816fc 100644
--- a/python/lsst/pipe/base/_task_metadata.py
+++ b/python/lsst/pipe/base/_task_metadata.py
@@ -30,11 +30,9 @@
import itertools
import numbers
import sys
-import warnings
from collections.abc import Collection, Iterator, Mapping, Sequence
from typing import Any, Protocol
-from lsst.utils.introspection import find_outside_stacklevel
from pydantic import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr
# The types allowed in a Task metadata field are restricted
@@ -260,35 +258,15 @@ def getArray(self, key: str) -> list[Any]:
# Report the correct key.
raise KeyError(f"'{key}' not found") from None
- def names(self, topLevelOnly: bool | None = None) -> set[str]:
+ def names(self) -> set[str]:
"""Return the hierarchical keys from the metadata.
- Parameters
- ----------
- topLevelOnly : `bool` or `None`, optional
- This parameter is deprecated and will be removed in the future.
- If given it can only be `False`. All names in the hierarchy are
- always returned.
-
Returns
-------
names : `collections.abc.Set`
A set of all keys, including those from the hierarchy and the
top-level hierarchy.
"""
- if topLevelOnly:
- raise RuntimeError(
- "The topLevelOnly parameter is no longer supported and can not have a True value."
- )
-
- if topLevelOnly is False:
- warnings.warn(
- "The topLevelOnly parameter is deprecated and is always assumed to be False."
- " It will be removed completely after v26.",
- category=FutureWarning,
- stacklevel=find_outside_stacklevel("lsst.pipe.base"),
- )
-
names = set()
for k, v in self.items():
names.add(k) # Always include the current level
diff --git a/python/lsst/pipe/base/cmdLineTask.py b/python/lsst/pipe/base/cmdLineTask.py
deleted file mode 100644
index fa3916045..000000000
--- a/python/lsst/pipe/base/cmdLineTask.py
+++ /dev/null
@@ -1,91 +0,0 @@
-#
-# LSST Data Management System
-# Copyright 2008-2015 AURA/LSST.
-#
-# This product includes software developed by the
-# LSST Project (http://www.lsst.org/).
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the LSST License Statement and
-# the GNU General Public License along with this program. If not,
-# see .
-#
-from __future__ import annotations
-
-__all__: list[str] = []
-
-import contextlib
-import logging
-from collections.abc import Generator
-from typing import TYPE_CHECKING
-
-from deprecated.sphinx import deprecated
-
-if TYPE_CHECKING:
- import cProfile
-
-
-@deprecated(
- reason="Replaced by lsst.utils.timer.profile(). Will be removed after v26.0",
- version="v25.0",
- category=FutureWarning,
-)
-@contextlib.contextmanager
-def profile(filename: str, log: logging.Logger | None = None) -> Generator[cProfile.Profile, None, None]:
- """Context manager for profiling with cProfile.
-
- Parameters
- ----------
- filename : `str`
- Filename to which to write profile (profiling disabled if `None` or
- empty).
- log : `logging.Logger`, optional
- Log object for logging the profile operations.
-
- Yields
- ------
- `cProfile.Profile`
- Profiling object.
-
- Notes
- -----
- If profiling is enabled, the context manager returns the cProfile.Profile
- object (otherwise it returns None), which allows additional control over
- profiling. You can obtain this using the "as" clause, e.g.:
-
- .. code-block:: python
-
- with profile(filename) as prof:
- runYourCodeHere()
-
- The output cumulative profile can be printed with a command-line like:
-
- .. code-block:: bash
-
- python -c 'import pstats; \
- pstats.Stats("").sort_stats("cumtime").print_stats(30)'
- """
- if not filename:
- # Nothing to do
- yield None # type: ignore
- return
- from cProfile import Profile
-
- profile = Profile()
- if log is not None:
- log.info("Enabling cProfile profiling")
- profile.enable()
- yield profile
- profile.disable()
- profile.dump_stats(filename)
- if log is not None:
- log.info("cProfile stats written to %s", filename)
diff --git a/python/lsst/pipe/base/config.py b/python/lsst/pipe/base/config.py
index 26ab8a230..fcab1d905 100644
--- a/python/lsst/pipe/base/config.py
+++ b/python/lsst/pipe/base/config.py
@@ -217,12 +217,6 @@ class to allow configuration of the connections class. This dynamically
based on a PipelineTaskConnections class.
"""
- saveMetadata = pexConfig.Field[bool](
- default=True,
- optional=False,
- doc="Flag to enable/disable metadata saving for a task, enabled by default.",
- deprecated="This field is deprecated and will be removed after v26.",
- )
saveLogOutput = pexConfig.Field[bool](
default=True,
optional=False,
diff --git a/python/lsst/pipe/base/graph/graph.py b/python/lsst/pipe/base/graph/graph.py
index cbb6f446f..cdbe3359f 100644
--- a/python/lsst/pipe/base/graph/graph.py
+++ b/python/lsst/pipe/base/graph/graph.py
@@ -1028,7 +1028,7 @@ def _buildSaveObjectImpl(self, returnHeader: bool = False) -> bytearray | tuple[
# a large impact on on disk size, so it is worth doing
simpleNode = node.to_simple(accumulator=dimAccumulator)
- dump = lzma.compress(simpleNode.json().encode(), preset=2)
+ dump = lzma.compress(simpleNode.model_dump_json().encode(), preset=2)
jsonData.append(dump)
nodeMap.append(
(
diff --git a/python/lsst/pipe/base/graph/quantumNode.py b/python/lsst/pipe/base/graph/quantumNode.py
index a20fc2c86..7c6677c4f 100644
--- a/python/lsst/pipe/base/graph/quantumNode.py
+++ b/python/lsst/pipe/base/graph/quantumNode.py
@@ -29,20 +29,17 @@
__all__ = ("QuantumNode", "NodeId", "BuildId")
import uuid
-import warnings
from dataclasses import dataclass
from typing import Any, NewType
import pydantic
from lsst.daf.butler import (
DatasetRef,
- DimensionRecord,
DimensionRecordsAccumulator,
DimensionUniverse,
Quantum,
SerializedQuantum,
)
-from lsst.utils.introspection import find_outside_stacklevel
from ..pipeline import TaskDef
@@ -143,14 +140,7 @@ def from_simple(
simple: SerializedQuantumNode,
taskDefMap: dict[str, TaskDef],
universe: DimensionUniverse,
- recontitutedDimensions: dict[int, tuple[str, DimensionRecord]] | None = None,
) -> QuantumNode:
- if recontitutedDimensions is not None:
- warnings.warn(
- "The recontitutedDimensions argument is now ignored and may be removed after v26",
- category=FutureWarning,
- stacklevel=find_outside_stacklevel("lsst.pipe.base"),
- )
return QuantumNode(
quantum=Quantum.from_simple(simple.quantum, universe),
taskDef=taskDefMap[simple.taskLabel],
diff --git a/python/lsst/pipe/base/pipeline_graph/_pipeline_graph.py b/python/lsst/pipe/base/pipeline_graph/_pipeline_graph.py
index 16cd0c2c5..c68a9ae03 100644
--- a/python/lsst/pipe/base/pipeline_graph/_pipeline_graph.py
+++ b/python/lsst/pipe/base/pipeline_graph/_pipeline_graph.py
@@ -1073,7 +1073,7 @@ def _read_stream(
with gzip.open(stream, "rb") as uncompressed_stream:
data = json.load(uncompressed_stream)
- serialized_graph = SerializedPipelineGraph.parse_obj(data)
+ serialized_graph = SerializedPipelineGraph.model_validate(data)
return serialized_graph.deserialize(import_mode)
@classmethod
@@ -1142,7 +1142,7 @@ def _write_stream(self, stream: BinaryIO) -> None:
with gzip.open(stream, mode="wb") as compressed_stream:
compressed_stream.write(
- SerializedPipelineGraph.serialize(self).json(exclude_defaults=True).encode("utf-8")
+ SerializedPipelineGraph.serialize(self).model_dump_json(exclude_defaults=True).encode("utf-8")
)
def _write_uri(self, uri: ResourcePathExpression) -> None:
diff --git a/python/lsst/pipe/base/pipeline_graph/_tasks.py b/python/lsst/pipe/base/pipeline_graph/_tasks.py
index c556b66d9..945706fd3 100644
--- a/python/lsst/pipe/base/pipeline_graph/_tasks.py
+++ b/python/lsst/pipe/base/pipeline_graph/_tasks.py
@@ -158,8 +158,6 @@ def configure(
acc.CONFIG_INIT_OUTPUT_TEMPLATE.format(label=label),
acc.CONFIG_INIT_OUTPUT_STORAGE_CLASS,
)
- if not config.saveMetadata:
- raise ValueError(f"Metadata for task {label} cannot be disabled.")
connection_map[acc.METADATA_OUTPUT_CONNECTION_NAME] = Output(
acc.METADATA_OUTPUT_TEMPLATE.format(label=label),
acc.METADATA_OUTPUT_STORAGE_CLASS,
diff --git a/tests/test_task.py b/tests/test_task.py
index 62f8acc98..bf246d2f0 100644
--- a/tests/test_task.py
+++ b/tests/test_task.py
@@ -318,8 +318,8 @@ def testTimeMethod(self):
# TaskMetadata can serialize to JSON but not YAML
# and PropertySet can serialize to YAML and not JSON.
if hasattr(addMultTask.metadata, "json"):
- j = addMultTask.metadata.json()
- new_meta = pipeBase.TaskMetadata.parse_obj(json.loads(j))
+ j = addMultTask.metadata.model_dump_json()
+ new_meta = pipeBase.TaskMetadata.model_validate(json.loads(j))
else:
y = yaml.dump(addMultTask.metadata)
new_meta = yaml.safe_load(y)
diff --git a/tests/test_taskmetadata.py b/tests/test_taskmetadata.py
index 7f4b5479c..2cf918c81 100644
--- a/tests/test_taskmetadata.py
+++ b/tests/test_taskmetadata.py
@@ -196,8 +196,8 @@ def testDict(self):
d2 = meta.to_dict()
self.assertEqual(d2, d)
- j = meta.json()
- meta2 = TaskMetadata.parse_obj(json.loads(j))
+ j = meta.model_dump_json()
+ meta2 = TaskMetadata.model_validate(json.loads(j))
self.assertEqual(meta2, meta)
# Round trip.
@@ -241,14 +241,6 @@ def testNumpy(self):
with self.assertRaises(ValueError):
meta["numpy"] = numpy.zeros(5)
- def test_deprecated(self):
- meta = TaskMetadata()
- with self.assertRaises(RuntimeError):
- meta.names(topLevelOnly=True)
-
- with self.assertWarns(FutureWarning):
- meta.names(topLevelOnly=False)
-
if __name__ == "__main__":
unittest.main()