Coverage for python/lsst/daf/butler/tests/utils.py: 29%
86 statements
« prev ^ index » next coverage.py v6.5.0, created at 2023-03-11 02:06 -0800
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22from __future__ import annotations
24__all__ = ()
26import os
27import shutil
28import tempfile
29from collections.abc import Callable, Iterator, Sequence
30from contextlib import contextmanager
31from typing import TYPE_CHECKING, Any
33import astropy
34from astropy.table import Table as AstropyTable
36from .. import Butler, Config, StorageClassFactory
37from ..registry import CollectionType
38from ..tests import MetricsExample, addDatasetType
if TYPE_CHECKING:
41 from lsst.daf.butler import DatasetType
def makeTestTempDir(default_base: str) -> str:
    """Create a temporary directory for test usage.

    The directory is created inside the path named by the
    ``DAF_BUTLER_TEST_TMP`` environment variable when that variable is set;
    otherwise it is created inside ``default_base``.

    Parameters
    ----------
    default_base : `str`
        Default parent directory.

    Returns
    -------
    dir : `str`
        Name of the new temporary directory.
    """
    if "DAF_BUTLER_TEST_TMP" in os.environ:
        parent = os.environ["DAF_BUTLER_TEST_TMP"]
    else:
        parent = default_base
    return tempfile.mkdtemp(dir=parent)
65def removeTestTempDir(root: str | None) -> None:
66 """Attempt to remove a temporary test directory, but do not raise if
67 unable to.
69 Unlike `tempfile.TemporaryDirectory`, this passes ``ignore_errors=True``
70 to ``shutil.rmtree`` at close, making it safe to use on NFS.
72 Parameters
73 ----------
74 root : `str`, optional
75 Name of the directory to be removed. If `None`, nothing will be done.
76 """
77 if root is not None and os.path.exists(root):
78 shutil.rmtree(root, ignore_errors=True)
@contextmanager
def safeTestTempDir(default_base: str) -> Iterator[str]:
    """Context manager yielding a fresh test temporary directory that is
    removed again (best-effort) when the context exits.

    Parameters
    ----------
    default_base : `str`
        Default parent directory, forwarded to `makeTestTempDir`.

    Returns
    -------
    context : `contextlib.ContextManager`
        A context manager that returns the new directory name on ``__enter__``
        and removes the temporary directory (via `removeTestTempDir`) on
        ``__exit__``.
    """
    path = makeTestTempDir(default_base)
    try:
        yield path
    finally:
        # Best-effort cleanup even if the body raised.
        removeTestTempDir(path)
class ButlerTestHelper:
    """Mixin with helpers for unit tests."""

    # These are supplied by unittest.TestCase when this mixin is combined
    # with it.
    assertEqual: Callable
    assertIsInstance: Callable
    maxDiff: int | None

    def assertAstropyTablesEqual(
        self,
        tables: AstropyTable | Sequence[AstropyTable],
        expectedTables: AstropyTable | Sequence[AstropyTable],
        filterColumns: bool = False,
        unorderedRows: bool = False,
    ) -> None:
        """Verify that a list of astropy tables matches a list of expected
        astropy tables.

        Parameters
        ----------
        tables : `astropy.table.Table` or iterable [`astropy.table.Table`]
            The table or tables that should match the expected tables.
        expectedTables : `astropy.table.Table`
            or iterable [`astropy.table.Table`]
            The tables with expected values to which the tables under test
            will be compared.
        filterColumns : `bool`
            If `True` then only compare columns that exist in
            ``expectedTables``.
        unorderedRows : `bool`, optional
            If `True` (`False` is default), don't require tables to have
            their rows in the same order.
        """
        # Normalize single-table arguments to lists.
        if isinstance(tables, AstropyTable):
            tables = [tables]
        if isinstance(expectedTables, AstropyTable):
            expectedTables = [expectedTables]
        self.assertEqual(len(tables), len(expectedTables))
        for actual, expected in zip(tables, expectedTables):
            # Confirm we are comparing what we think we are comparing.
            self.assertIsInstance(actual, AstropyTable)
            self.assertIsInstance(expected, AstropyTable)
            if filterColumns:
                # Work on a copy so the caller's table is untouched.
                actual = actual.copy()
                actual.keep_columns(expected.colnames)
            if unorderedRows:
                # Sort copies of both tables so row order does not matter.
                actual = actual.copy()
                actual.sort(actual.colnames)
                expected = expected.copy()
                expected.sort(expected.colnames)
            # Per Astropy Slack advice, compare formatted lines rather than
            # the tables themselves; column data types are not compared.
            actualLines = actual.pformat_all()
            expectedLines = expected.pformat_all()
            savedMaxDiff = self.maxDiff
            self.maxDiff = None  # Required to get the full diff on failure.
            try:
                self.assertEqual(actualLines, expectedLines)
            finally:
                self.maxDiff = savedMaxDiff
def readTable(textTable: str) -> AstropyTable:
    """Read an astropy table from formatted text.

    Contains formatting that causes the astropy table to print an empty
    string instead of "--" for missing/unpopulated values in the text table.

    Parameters
    ----------
    textTable : `str`
        The text version of the table to read.

    Returns
    -------
    table : `astropy.table.Table`
        The table as an astropy table.
    """
    readerOptions = {
        "format": "ascii",
        # Skip the header row and the header-row underlines.
        "data_start": 2,
        # Render missing values as empty strings rather than "--".
        "fill_values": [("", 0, "")],
    }
    return AstropyTable.read(textTable, **readerOptions)
class MetricTestRepo:
    """Creates and manage a test repository on disk with datasets that
    may be queried and modified for unit tests.

    Parameters
    ----------
    root : `str`
        The location of the repository, to pass to ``Butler.makeRepo``.
    configFile : `str`
        The path to the config file, to pass to ``Butler.makeRepo``.
    """

    @staticmethod
    def _makeExampleMetrics() -> MetricsExample:
        """Make an object to put into the repository."""
        summary = {"AM1": 5.2, "AM2": 30.6}
        output = {"a": [1, 2, 3], "b": {"blue": 5, "red": "green"}}
        data = [563, 234, 456.7, 752, 8, 9, 27]
        return MetricsExample(summary, output, data)

    def __init__(self, root: str, configFile: str) -> None:
        self.root = root
        Butler.makeRepo(self.root, config=Config(configFile))
        butlerConfigFile = os.path.join(self.root, "butler.yaml")
        self.storageClassFactory = StorageClassFactory()
        self.storageClassFactory.addFromConfig(butlerConfigFile)

        # New datasets will be added to run and tag, but we will only look in
        # tag when looking up datasets.
        runName = "ingest/run"
        tagName = "ingest"
        self.butler = Butler(butlerConfigFile, run=runName, collections=[tagName])
        self.butler.registry.registerCollection(tagName, CollectionType.TAGGED)

        # Create and register a DatasetType.
        self.datasetType = addDatasetType(
            self.butler, "test_metric_comp", {"instrument", "visit"}, "StructuredCompositeReadComp"
        )

        # Add the dimension records that the example dataIds below refer to.
        registry = self.butler.registry
        registry.insertDimensionData("instrument", {"name": "DummyCamComp"})
        registry.insertDimensionData(
            "physical_filter", {"instrument": "DummyCamComp", "name": "d-r", "band": "R"}
        )
        registry.insertDimensionData(
            "visit_system", {"instrument": "DummyCamComp", "id": 1, "name": "default"}
        )
        begin = astropy.time.Time("2020-01-01 08:00:00.123456789", scale="tai")
        end = astropy.time.Time("2020-01-01 08:00:36.66", scale="tai")
        registry.insertDimensionData(
            "visit",
            {
                "instrument": "DummyCamComp",
                "id": 423,
                "name": "fourtwentythree",
                "physical_filter": "d-r",
                "datetimeBegin": begin,
                "datetimeEnd": end,
            },
        )
        # Second visit deliberately has no begin/end times.
        registry.insertDimensionData(
            "visit",
            {
                "instrument": "DummyCamComp",
                "id": 424,
                "name": "fourtwentyfour",
                "physical_filter": "d-r",
            },
        )

        self.addDataset({"instrument": "DummyCamComp", "visit": 423})
        self.addDataset({"instrument": "DummyCamComp", "visit": 424})

    def addDataset(
        self, dataId: dict[str, Any], run: str | None = None, datasetType: DatasetType | None = None
    ) -> None:
        """Create a new example metric and add it to the named run with the
        given dataId.

        Overwrites tags, so this does not try to associate the new dataset
        with existing tags. (If/when tags are needed this can be added to the
        arguments of this function.)

        Parameters
        ----------
        dataId : `dict`
            The dataId for the new metric.
        run : `str`, optional
            The name of the run to create and add a dataset to. If `None`,
            the dataset will be added to the root butler.
        datasetType : ``DatasetType``, optional
            The dataset type of the added dataset. If `None`, will use the
            default dataset type.
        """
        if run:
            # Make sure the target run collection exists before putting.
            self.butler.registry.registerCollection(run, type=CollectionType.RUN)
        self.butler.put(
            self._makeExampleMetrics(),
            datasetType if datasetType is not None else self.datasetType,
            dataId,
            run=run,
        )