Coverage for tests / test_cliCmdQueryDataIds.py: 29%
70 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-18 08:43 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28"""Unit tests for daf_butler CLI query-data-ids command."""
30import os
31import unittest
33from astropy.table import Table as AstropyTable
34from numpy import array
36from lsst.daf.butler import Butler, DatasetType, script
37from lsst.daf.butler.direct_butler import DirectButler
38from lsst.daf.butler.tests.utils import ButlerTestHelper, makeTestTempDir, removeTestTempDir
# Absolute path to the directory containing this test module; used as the
# parent location for per-test temporary repositories.
TESTDIR = os.path.abspath(os.path.dirname(__file__))
class QueryDataIdsTest(unittest.TestCase, ButlerTestHelper):
    """Test the query-data-ids command-line."""

    # Dotted path of the script function that CLI-level tests patch out.
    mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.queryDataIds"

    @staticmethod
    def _queryDataIds(repo, dimensions=(), collections=(), datasets=None, where=""):
        """Call script.queryDataIds, allowing for default values.

        Parameters
        ----------
        repo : `str`
            Path or URI of the butler repository to query.
        dimensions : `tuple` [`str`], optional
            Names of the dimensions to return data IDs for.
        collections : `tuple` [`str`], optional
            Collections to search when a dataset constraint is given.
        datasets : `str` or ``...``, optional
            Dataset type name (or ``...`` for all) used to constrain results.
        where : `str`, optional
            A string expression constraining the data IDs returned.

        Returns
        -------
        table : `astropy.table.Table` or `None`
            Table of results, or `None` if there were none.
        msg : `str` or `None`
            An explanatory message when ``table`` is `None` or incomplete.
        """
        # Pin order_by/limit/offset so tests exercise the unordered,
        # unlimited defaults of the script function.
        return script.queryDataIds(
            repo=repo,
            dimensions=dimensions,
            collections=collections,
            datasets=datasets,
            where=where,
            order_by=None,
            limit=0,
            offset=0,
        )

    def setUp(self):
        # Create a fresh, empty repository for every test.
        self.root = makeTestTempDir(TESTDIR)
        self.repo = Butler.makeRepo(self.root)

    def tearDown(self):
        removeTestTempDir(self.root)

    def loadData(self, *filenames: str) -> Butler:
        """Load registry test data from the ``lsst.daf.butler`` package
        resource ``tests/registry_data/<filename>`` for each given filename,
        which should be a YAML import/export file.

        Returns
        -------
        butler : `lsst.daf.butler.Butler`
            A writeable butler for the test repository, registered for
            cleanup via `unittest.TestCase.enterContext`.
        """
        butler = Butler.from_config(self.repo, writeable=True)
        self.enterContext(butler)
        assert isinstance(butler, DirectButler), "Test expects DirectButler"
        for filename in filenames:
            butler.import_(
                # Interpolate each requested file into the package-resource
                # URI; a literal placeholder here would make every import
                # target the same nonexistent resource.
                filename=f"resource://lsst.daf.butler/tests/registry_data/{filename}",
                without_datastore=True,
            )
        return butler

    def testDimensions(self):
        """Test getting a dimension."""
        self.loadData("base.yaml")
        res, msg = self._queryDataIds(self.root, dimensions=("detector",))
        expected = AstropyTable(
            array((("Cam1", 1), ("Cam1", 2), ("Cam1", 3), ("Cam1", 4))), names=("instrument", "detector")
        )
        self.assertFalse(msg)
        self.assertAstropyTablesEqual(res, expected)

    def testNoDimensions(self):
        """Test asking for no dimensions."""
        res, msg = self._queryDataIds(self.root)
        self.assertIsNone(res, msg)
        self.assertEqual(
            msg, "Result has one logical row but no columns because no dimensions were requested."
        )

    def testNoResultsEasy(self):
        """Test getting no results in a way that's detectable without having
        to execute the full query.
        """
        self.loadData("base.yaml", "spatial.yaml")
        # visit=1 and tract=1 do not overlap spatially, which the registry
        # can detect before running the full query.
        res, msg = self._queryDataIds(
            self.root,
            dimensions=("visit", "tract"),
            where="instrument='Cam1' AND skymap='SkyMap1' AND visit=1 AND tract=1",
        )
        self.assertIsNone(res, msg)
        self.assertEqual(msg, "")

    def testNoResultsHard(self):
        """Test getting no results in a way that can't be detected unless we
        run the whole query.
        """
        self.loadData("base.yaml", "spatial.yaml")
        # This combination only comes up empty after post-query region
        # filtering, so a non-empty explanatory message is expected.
        res, msg = self._queryDataIds(
            self.root,
            dimensions=("visit", "tract"),
            where="instrument='Cam1' AND skymap='SkyMap1' AND visit=1 AND tract=0 AND patch=5",
        )
        self.assertIsNone(res, msg)
        self.assertIn("Post-query region filtering removed all rows", msg)

    def testWhere(self):
        """Test with a WHERE constraint."""
        self.loadData("base.yaml")
        res, msg = self._queryDataIds(
            self.root, dimensions=("detector",), where="instrument='Cam1' AND detector=2"
        )
        expected = AstropyTable(
            array((("Cam1", 2),)),
            names=(
                "instrument",
                "detector",
            ),
        )
        self.assertAstropyTablesEqual(res, expected)
        self.assertIsNone(msg)

    def testDatasetsAndCollections(self):
        """Test constraining via datasets and collections."""
        butler = self.loadData("base.yaml", "datasets.yaml")
        # See that the data IDs returned are constrained by that collection's
        # contents.
        res, msg = self._queryDataIds(
            repo=self.root, dimensions=("detector",), collections=("imported_g",), datasets="bias"
        )
        expected = AstropyTable(
            array((("Cam1", 1), ("Cam1", 2), ("Cam1", 3))),
            names=(
                "instrument",
                "detector",
            ),
        )
        self.assertAstropyTablesEqual(res, expected)
        self.assertIsNone(msg)

        # Check that the dimensions are inferred when not provided.
        with self.assertLogs("lsst.daf.butler.script.queryDataIds", "INFO") as cm:
            res, msg = self._queryDataIds(repo=self.root, collections=("imported_g",), datasets="bias")
        self.assertIn("Determined dimensions", "\n".join(cm.output))
        self.assertAstropyTablesEqual(res, expected)
        self.assertIsNone(msg)

        # Check that we get a reason if no dimensions can be inferred.
        # Registering a dimensionless dataset type means the set of dimensions
        # shared by all dataset types becomes empty.
        new_dataset_type = DatasetType(
            "test_metric_dimensionless",
            (),
            "StructuredDataDict",
            universe=butler.dimensions,
        )
        butler.registry.registerDatasetType(new_dataset_type)
        res, msg = self._queryDataIds(repo=self.root, collections=("imported_g",), datasets=...)
        self.assertIsNone(res)
        self.assertIn("No dimensions in common", msg)

        # Check that we get a reason returned if no dataset is found in
        # collection.
        res, msg = self._queryDataIds(
            repo=self.root,
            dimensions=("detector",),
            collections=("imported_g",),
            datasets="test_metric_dimensionless",
        )
        self.assertIsNone(res)
        self.assertIn("No datasets of type 'test_metric_dimensionless'", msg)
# Allow running this test module directly (e.g. ``python test_cliCmdQueryDataIds.py``).
if __name__ == "__main__":
    unittest.main()