Coverage for tests/test_cliCmdQueryDataIds.py: 30% (74 statements)
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
28"""Unit tests for daf_butler CLI query-collections command.
29"""
import os
import unittest

from astropy.table import Table as AstropyTable
from lsst.daf.butler import Butler, DatasetType, script
from lsst.daf.butler.direct_butler import DirectButler
from lsst.daf.butler.tests.utils import ButlerTestHelper, makeTestTempDir, removeTestTempDir
from lsst.daf.butler.transfers import YamlRepoImportBackend
from numpy import array

TESTDIR = os.path.abspath(os.path.dirname(__file__))
class QueryDataIdsTest(unittest.TestCase, ButlerTestHelper):
    """Test the query-data-ids command-line."""

    mockFuncName = "lsst.daf.butler.cli.cmd.commands.script.queryDataIds"

    @staticmethod
    def _queryDataIds(repo, dimensions=(), collections=(), datasets=None, where=""):
        """Call script.queryDataIds, allowing for default values."""
        return script.queryDataIds(
            repo=repo,
            dimensions=dimensions,
            collections=collections,
            datasets=datasets,
            where=where,
            order_by=None,
            limit=0,
            offset=0,
        )

    def setUp(self):
        self.root = makeTestTempDir(TESTDIR)
        self.repo = Butler.makeRepo(self.root)

    def tearDown(self):
        removeTestTempDir(self.root)

    def loadData(self, *filenames: str) -> Butler:
        """Load registry test data from ``TESTDIR/data/registry/<filename>``,
        which should be a YAML import/export file.
        """
        butler = Butler.from_config(self.repo, writeable=True)
        assert isinstance(butler, DirectButler), "Test expects DirectButler"
        for filename in filenames:
            with open(os.path.join(TESTDIR, "data", "registry", filename)) as stream:
                # Go behind the back of the import code a bit to deal with
                # the fact that this is just registry content with no actual
                # files for the datastore.
                backend = YamlRepoImportBackend(stream, butler._registry)
                backend.register()
                backend.load(datastore=None)
        return butler

    def testDimensions(self):
        """Test getting a dimension."""
        self.loadData("base.yaml")
        res, msg = self._queryDataIds(self.root, dimensions=("detector",))
        expected = AstropyTable(
            array((("Cam1", 1), ("Cam1", 2), ("Cam1", 3), ("Cam1", 4))), names=("instrument", "detector")
        )
        self.assertFalse(msg)
        self.assertAstropyTablesEqual(res, expected)

    def testNoDimensions(self):
        """Test asking for no dimensions."""
        res, msg = self._queryDataIds(self.root)
        self.assertIsNone(res, msg)
        self.assertEqual(
            msg, "Result has one logical row but no columns because no dimensions were requested."
        )

    def testNoResultsEasy(self):
        """Test getting no results in a way that's detectable without having
        to execute the full query.
        """
        self.loadData("base.yaml", "spatial.yaml")
        res, msg = self._queryDataIds(
            self.root,
            dimensions=("visit", "tract"),
            where="instrument='Cam1' AND skymap='SkyMap1' AND visit=1 AND tract=1",
        )
        self.assertIsNone(res, msg)
        self.assertIn("yields no results when applied to", msg)

    def testNoResultsHard(self):
        """Test getting no results in a way that can't be detected unless we
        run the whole query.
        """
        self.loadData("base.yaml", "spatial.yaml")
        res, msg = self._queryDataIds(
            self.root,
            dimensions=("visit", "tract"),
            where="instrument='Cam1' AND skymap='SkyMap1' AND visit=1 AND tract=0 AND patch=5",
        )
        self.assertIsNone(res, msg)
        self.assertIn("Post-query region filtering removed all rows", msg)

    def testWhere(self):
        """Test with a WHERE constraint."""
        self.loadData("base.yaml")
        res, msg = self._queryDataIds(
            self.root, dimensions=("detector",), where="instrument='Cam1' AND detector=2"
        )
        expected = AstropyTable(
            array((("Cam1", 2),)),
            names=(
                "instrument",
                "detector",
            ),
        )
        self.assertAstropyTablesEqual(res, expected)
        self.assertIsNone(msg)

    def testDatasetsAndCollections(self):
        """Test constraining via datasets and collections."""
        butler = self.loadData("base.yaml", "datasets.yaml")
        # See that the data IDs returned are constrained by that collection's
        # contents.
        res, msg = self._queryDataIds(
            repo=self.root, dimensions=("detector",), collections=("imported_g",), datasets="bias"
        )
        expected = AstropyTable(
            array((("Cam1", 1), ("Cam1", 2), ("Cam1", 3))),
            names=(
                "instrument",
                "detector",
            ),
        )
        self.assertAstropyTablesEqual(res, expected)
        self.assertIsNone(msg)

        # Check that the dimensions are inferred when not provided.
        with self.assertLogs("lsst.daf.butler.script.queryDataIds", "INFO") as cm:
            res, msg = self._queryDataIds(repo=self.root, collections=("imported_g",), datasets="bias")
        self.assertIn("Determined dimensions", "\n".join(cm.output))
        self.assertAstropyTablesEqual(res, expected)
        self.assertIsNone(msg)

        # Check that we get a reason if no dimensions can be inferred.
        new_dataset_type = DatasetType(
            "test_metric_dimensionless",
            (),
            "StructuredDataDict",
            universe=butler.dimensions,
        )
        butler.registry.registerDatasetType(new_dataset_type)
        res, msg = self._queryDataIds(repo=self.root, collections=("imported_g",), datasets=...)
        self.assertIsNone(res)
        self.assertIn("No dimensions in common", msg)

        # Check that we get a reason returned if no dataset is found in
        # collection.
        res, msg = self._queryDataIds(
            repo=self.root,
            dimensions=("detector",),
            collections=("imported_g",),
            datasets="test_metric_dimensionless",
        )
        self.assertIsNone(res)
        self.assertIn("No datasets of type test_metric_dimensionless", msg)


if __name__ == "__main__":
    unittest.main()