Coverage for tests/test_cliCmdQueryDataIds.py: 28%
76 statements
coverage.py v7.3.1, created at 2023-10-02 08:00 +0000
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
28"""Unit tests for daf_butler CLI query-collections command.
29"""

import os
import unittest

from astropy.table import Table as AstropyTable
from lsst.daf.butler import Butler, DatasetType, script
from lsst.daf.butler.tests.utils import ButlerTestHelper, makeTestTempDir, removeTestTempDir
from lsst.daf.butler.transfers import YamlRepoImportBackend
from numpy import array

TESTDIR = os.path.abspath(os.path.dirname(__file__))


class QueryDataIdsTest(unittest.TestCase, ButlerTestHelper):
    """Test the query-data-ids command-line."""

    mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.queryDataIds"
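    # ``mockFuncName`` is presumably the dotted path that CLI-invocation tests
    # patch when exercising the ``butler query-data-ids`` command wrapper; the
    # script-level tests below do not use it directly.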

    @staticmethod
    def _queryDataIds(repo, dimensions=(), collections=(), datasets=None, where=""):
        """Call script.queryDataIds, allowing for default values."""
        return script.queryDataIds(
            repo=repo,
            dimensions=dimensions,
            collections=collections,
            datasets=datasets,
            where=where,
            order_by=None,
            limit=0,
            offset=0,
        )
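
    # For reference, this helper mirrors the arguments that the
    # ``butler query-data-ids`` subcommand forwards to ``script.queryDataIds``.
    # The first test below would correspond roughly to a command of the
    # (assumed) form:
    #
    #     butler query-data-ids <repo> detector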

    def setUp(self):
        self.root = makeTestTempDir(TESTDIR)
        self.repo = Butler.makeRepo(self.root)

    def tearDown(self):
        removeTestTempDir(self.root)

    def loadData(self, *filenames: str) -> Butler:
        """Load registry test data from ``TESTDIR/data/registry/<filename>``,
        which should be a YAML import/export file.
        """
        butler = Butler(self.repo, writeable=True)
        for filename in filenames:
            with open(os.path.join(TESTDIR, "data", "registry", filename)) as stream:
                # Go behind the back of the import code a bit to deal with
                # the fact that this is just registry content with no actual
                # files for the datastore.
                backend = YamlRepoImportBackend(stream, butler.registry)
                backend.register()
                backend.load(datastore=None)
        return butler

    def testDimensions(self):
        """Test getting a dimension."""
        self.loadData("base.yaml")
        res, msg = self._queryDataIds(self.root, dimensions=("detector",))
        expected = AstropyTable(
            array((("Cam1", 1), ("Cam1", 2), ("Cam1", 3), ("Cam1", 4))), names=("instrument", "detector")
        )
        self.assertFalse(msg)
        self.assertAstropyTablesEqual(res, expected)

    def testNoDimensions(self):
        """Test asking for no dimensions."""
        res, msg = self._queryDataIds(self.root)
        self.assertIsNone(res, msg)
        self.assertEqual(
            msg, "Result has one logical row but no columns because no dimensions were requested."
        )

    def testNoResultsEasy(self):
        """Test getting no results in a way that's detectable without having
        to execute the full query.
        """
        self.loadData("base.yaml", "spatial.yaml")
        res, msg = self._queryDataIds(
            self.root,
            dimensions=("visit", "tract"),
            where="instrument='Cam1' AND skymap='SkyMap1' AND visit=1 AND tract=1",
        )
        self.assertIsNone(res, msg)
        self.assertIn("yields no results when applied to", msg)

    def testNoResultsHard(self):
        """Test getting no results in a way that can't be detected unless we
        run the whole query.
        """
        self.loadData("base.yaml", "spatial.yaml")
        res, msg = self._queryDataIds(
            self.root,
            dimensions=("visit", "tract"),
            where="instrument='Cam1' AND skymap='SkyMap1' AND visit=1 AND tract=0 AND patch=5",
        )
        self.assertIsNone(res, msg)
        self.assertIn("Post-query region filtering removed all rows", msg)

    def testWhere(self):
        """Test with a WHERE constraint."""
        self.loadData("base.yaml")
        res, msg = self._queryDataIds(
            self.root, dimensions=("detector",), where="instrument='Cam1' AND detector=2"
        )
        expected = AstropyTable(
            array((("Cam1", 2),)),
            names=(
                "instrument",
                "detector",
            ),
        )
        self.assertAstropyTablesEqual(res, expected)
        self.assertIsNone(msg)

    def testDatasetsAndCollections(self):
        """Test constraining via datasets and collections."""
        butler = self.loadData("base.yaml", "datasets-uuid.yaml")
        # Check that the data IDs returned are constrained by the contents of
        # the given collection.
        res, msg = self._queryDataIds(
            repo=self.root, dimensions=("detector",), collections=("imported_g",), datasets="bias"
        )
        expected = AstropyTable(
            array((("Cam1", 1), ("Cam1", 2), ("Cam1", 3))),
            names=(
                "instrument",
                "detector",
            ),
        )
        self.assertAstropyTablesEqual(res, expected)
        self.assertIsNone(msg)

        # Check that the dimensions are inferred when not provided.
        with self.assertLogs("lsst.daf.butler.script.queryDataIds", "INFO") as cm:
            res, msg = self._queryDataIds(repo=self.root, collections=("imported_g",), datasets="bias")
        self.assertIn("Determined dimensions", "\n".join(cm.output))
        self.assertAstropyTablesEqual(res, expected)
        self.assertIsNone(msg)

        # Check that we get a reason if no dimensions can be inferred.
        new_dataset_type = DatasetType(
            "test_metric_dimensionless",
            (),
            "StructuredDataDict",
            universe=butler.dimensions,
        )
        butler.registry.registerDatasetType(new_dataset_type)
        res, msg = self._queryDataIds(repo=self.root, collections=("imported_g",), datasets=...)
        self.assertIsNone(res)
        self.assertIn("No dimensions in common", msg)

        # Check that we get a reason returned if no dataset type is found.
        with self.assertWarns(FutureWarning):
            res, msg = self._queryDataIds(
                repo=self.root, dimensions=("detector",), collections=("imported_g",), datasets="raw"
            )
        self.assertIsNone(res)
        self.assertEqual(msg, "Dataset type raw is not registered.")

        # Check that we get a reason returned if no dataset is found in the
        # collection.
        res, msg = self._queryDataIds(
            repo=self.root,
            dimensions=("detector",),
            collections=("imported_g",),
            datasets="test_metric_dimensionless",
        )
        self.assertIsNone(res)
        self.assertIn("No datasets of type test_metric_dimensionless", msg)


if __name__ == "__main__":
    unittest.main()