Coverage for python/lsst/daf/butler/registry/tests/_registry.py : 6%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
21from __future__ import annotations
23__all__ = ["RegistryTests"]
25from abc import ABC, abstractmethod
26import os
28import astropy.time
29import sqlalchemy
30from typing import Optional
32from ...core import (
33 DataCoordinate,
34 DatasetType,
35 DimensionGraph,
36 StorageClass,
37 ddl,
38 YamlRepoImportBackend
39)
40from .._registry import (
41 CollectionType,
42 ConflictingDefinitionError,
43 ConsistentDataIds,
44 OrphanedRecordError,
45 Registry,
46 RegistryConfig,
47)
48from ..wildcards import DatasetTypeRestriction
49from ..interfaces import MissingCollectionError
class RegistryTests(ABC):
    """Generic tests for the `Registry` class that can be subclassed to
    generate tests for different configurations.

    Concrete subclasses must implement the abstract `getDataDir` and
    `makeRegistry` methods; the ``test*`` methods defined here then run
    against the registry those hooks provide.
    """

    collectionsManager: Optional[str] = None
    """Name of the collections manager class; if a subclass provides a value
    for this member then it overrides the name specified in the default
    configuration (`str`).
    """
63 @classmethod
64 @abstractmethod
65 def getDataDir(cls) -> str:
66 """Return the root directory containing test data YAML files.
67 """
68 raise NotImplementedError()
70 def makeRegistryConfig(self) -> RegistryConfig:
71 """Create RegistryConfig used to create a registry.
73 This method should be called by a subclass from `makeRegistry`.
74 Returned instance will be pre-configured based on the values of class
75 members, and default-configured for all other parametrs. Subclasses
76 that need default configuration should just instantiate
77 `RegistryConfig` directly.
78 """
79 config = RegistryConfig()
80 if self.collectionsManager:
81 config["managers"]["collections"] = self.collectionsManager
82 return config
84 @abstractmethod
85 def makeRegistry(self) -> Registry:
86 """Return the Registry instance to be tested.
87 """
88 raise NotImplementedError()
90 def loadData(self, registry: Registry, filename: str):
91 """Load registry test data from ``getDataDir/<filename>``,
92 which should be a YAML import/export file.
93 """
94 with open(os.path.join(self.getDataDir(), filename), 'r') as stream:
95 backend = YamlRepoImportBackend(stream, registry)
96 backend.register()
97 backend.load(datastore=None)
    def assertRowCount(self, registry: Registry, table: str, count: int):
        """Check the number of rows in a registry table.

        Parameters
        ----------
        registry : `Registry`
            Registry whose backing database is queried directly.
        table : `str`
            Attribute name of the table object on ``registry._tables``.
        count : `int`
            Expected number of rows in that table.
        """
        # TODO: all tests that rely on this method should be rewritten, as it
        # needs to depend on Registry implementation details to have any chance
        # of working.
        # Build SELECT COUNT(*) FROM <table> and run it through the
        # registry's private database interface.
        sql = sqlalchemy.sql.select(
            [sqlalchemy.sql.func.count()]
        ).select_from(
            getattr(registry._tables, table)
        )
        self.assertEqual(registry._db.query(sql).scalar(), count)
    def testOpaque(self):
        """Tests for `Registry.registerOpaqueTable`,
        `Registry.insertOpaqueData`, `Registry.fetchOpaqueData`, and
        `Registry.deleteOpaqueData`.
        """
        registry = self.makeRegistry()
        table = "opaque_table_for_testing"
        # Create a small three-column table: integer primary key, required
        # string, and a nullable integer.
        registry.registerOpaqueTable(
            table,
            spec=ddl.TableSpec(
                fields=[
                    ddl.FieldSpec("id", dtype=sqlalchemy.BigInteger, primaryKey=True),
                    ddl.FieldSpec("name", dtype=sqlalchemy.String, length=16, nullable=False),
                    ddl.FieldSpec("count", dtype=sqlalchemy.SmallInteger, nullable=True),
                ],
            )
        )
        rows = [
            {"id": 1, "name": "one", "count": None},
            {"id": 2, "name": "two", "count": 5},
            {"id": 3, "name": "three", "count": 6},
        ]
        registry.insertOpaqueData(table, *rows)
        # Fetch with no filter returns everything (order not guaranteed).
        self.assertCountEqual(rows, list(registry.fetchOpaqueData(table)))
        # Keyword arguments act as equality filters on the named columns.
        self.assertEqual(rows[0:1], list(registry.fetchOpaqueData(table, id=1)))
        self.assertEqual(rows[1:2], list(registry.fetchOpaqueData(table, name="two")))
        # Filters are ANDed together, so this combination matches nothing.
        self.assertEqual([], list(registry.fetchOpaqueData(table, id=1, name="two")))
        # Delete with a filter removes only the matching row...
        registry.deleteOpaqueData(table, id=3)
        self.assertCountEqual(rows[:2], list(registry.fetchOpaqueData(table)))
        # ...and with no filter removes all remaining rows.
        registry.deleteOpaqueData(table)
        self.assertEqual([], list(registry.fetchOpaqueData(table)))
    def testDatasetType(self):
        """Tests for `Registry.registerDatasetType` and
        `Registry.getDatasetType`.
        """
        registry = self.makeRegistry()
        # Check valid insert
        datasetTypeName = "test"
        storageClass = StorageClass("testDatasetType")
        registry.storageClasses.registerStorageClass(storageClass)
        dimensions = registry.dimensions.extract(("instrument", "visit"))
        differentDimensions = registry.dimensions.extract(("instrument", "patch"))
        inDatasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        # Inserting for the first time should return True
        self.assertTrue(registry.registerDatasetType(inDatasetType))
        outDatasetType1 = registry.getDatasetType(datasetTypeName)
        self.assertEqual(outDatasetType1, inDatasetType)

        # Re-registering an identical definition is a no-op and returns False
        self.assertFalse(registry.registerDatasetType(inDatasetType))
        # Except when they are not identical: same name with different
        # dimensions is a conflict.
        with self.assertRaises(ConflictingDefinitionError):
            nonIdenticalDatasetType = DatasetType(datasetTypeName, differentDimensions, storageClass)
            registry.registerDatasetType(nonIdenticalDatasetType)

        # Template can be None
        datasetTypeName = "testNoneTemplate"
        storageClass = StorageClass("testDatasetType2")
        registry.storageClasses.registerStorageClass(storageClass)
        dimensions = registry.dimensions.extract(("instrument", "visit"))
        inDatasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        registry.registerDatasetType(inDatasetType)
        outDatasetType2 = registry.getDatasetType(datasetTypeName)
        self.assertEqual(outDatasetType2, inDatasetType)

        # Querying for all dataset types should return exactly the two we
        # registered above.
        allTypes = set(registry.queryDatasetTypes())
        self.assertEqual(allTypes, {outDatasetType1, outDatasetType2})
    def testDimensions(self):
        """Tests for `Registry.insertDimensionData`,
        `Registry.syncDimensionData`, and `Registry.expandDataId`.
        """
        registry = self.makeRegistry()
        dimensionName = "instrument"
        dimension = registry.dimensions[dimensionName]
        dimensionValue = {"name": "DummyCam", "visit_max": 10, "exposure_max": 10, "detector_max": 2,
                          "class_name": "lsst.obs.base.Instrument"}
        registry.insertDimensionData(dimensionName, dimensionValue)
        # Inserting the same value twice should fail
        with self.assertRaises(sqlalchemy.exc.IntegrityError):
            registry.insertDimensionData(dimensionName, dimensionValue)
        # expandDataId should retrieve the record we just inserted
        self.assertEqual(
            registry.expandDataId(
                instrument="DummyCam",
                graph=dimension.graph
            ).records[dimensionName].toDict(),
            dimensionValue
        )
        # expandDataId should raise if there is no record with the given ID.
        with self.assertRaises(LookupError):
            registry.expandDataId({"instrument": "Unknown"}, graph=dimension.graph)
        # abstract_filter doesn't have a table; insert should fail.
        with self.assertRaises(TypeError):
            registry.insertDimensionData("abstract_filter", {"abstract_filter": "i"})
        dimensionName2 = "physical_filter"
        dimension2 = registry.dimensions[dimensionName2]
        dimensionValue2 = {"name": "DummyCam_i", "abstract_filter": "i"}
        # Missing required dependency ("instrument") should fail
        with self.assertRaises(sqlalchemy.exc.IntegrityError):
            registry.insertDimensionData(dimensionName2, dimensionValue2)
        # Adding required dependency should fix the failure
        dimensionValue2["instrument"] = "DummyCam"
        registry.insertDimensionData(dimensionName2, dimensionValue2)
        # expandDataId should retrieve the record we just inserted.
        self.assertEqual(
            registry.expandDataId(
                instrument="DummyCam", physical_filter="DummyCam_i",
                graph=dimension2.graph
            ).records[dimensionName2].toDict(),
            dimensionValue2
        )
        # Use syncDimensionData to insert a new record successfully; it
        # returns True when it actually inserted something.
        dimensionName3 = "detector"
        dimensionValue3 = {"instrument": "DummyCam", "id": 1, "full_name": "one",
                           "name_in_raft": "zero", "purpose": "SCIENCE"}
        self.assertTrue(registry.syncDimensionData(dimensionName3, dimensionValue3))
        # Sync that again; an identical record is a no-op (returns False).
        # Note that one field ("raft") is NULL, and that should be okay.
        self.assertFalse(registry.syncDimensionData(dimensionName3, dimensionValue3))
        # Now try that sync with the same primary key but a different value.
        # This should fail.
        with self.assertRaises(ConflictingDefinitionError):
            registry.syncDimensionData(
                dimensionName3,
                {"instrument": "DummyCam", "id": 1, "full_name": "one",
                 "name_in_raft": "four", "purpose": "SCIENCE"}
            )
    def testDataIdRelationships(self):
        """Test `Registry.relateDataIds`.
        """
        registry = self.makeRegistry()
        self.loadData(registry, "base.yaml")
        # Simple cases where the dimension key-value pairs tell us everything.
        self.assertEqual(
            registry.relateDataIds(
                {"instrument": "Cam1"},
                {"instrument": "Cam1"},
            ),
            ConsistentDataIds(contains=True, within=True, overlaps=True)
        )
        # Two empty data IDs trivially contain each other but share no keys.
        self.assertEqual(
            registry.relateDataIds({}, {}),
            ConsistentDataIds(contains=True, within=True, overlaps=False)
        )
        self.assertEqual(
            registry.relateDataIds({"instrument": "Cam1"}, {}),
            ConsistentDataIds(contains=True, within=False, overlaps=False)
        )
        self.assertEqual(
            registry.relateDataIds({}, {"instrument": "Cam1"}),
            ConsistentDataIds(contains=False, within=True, overlaps=False)
        )
        self.assertEqual(
            registry.relateDataIds(
                {"instrument": "Cam1", "physical_filter": "Cam1-G"},
                {"instrument": "Cam1"},
            ),
            ConsistentDataIds(contains=True, within=False, overlaps=True)
        )
        self.assertEqual(
            registry.relateDataIds(
                {"instrument": "Cam1"},
                {"instrument": "Cam1", "physical_filter": "Cam1-G"},
            ),
            ConsistentDataIds(contains=False, within=True, overlaps=True)
        )
        # Same keys but different physical_filter values: inconsistent, so
        # relateDataIds returns None.
        self.assertIsNone(
            registry.relateDataIds(
                {"instrument": "Cam1", "physical_filter": "Cam1-G"},
                {"instrument": "Cam1", "physical_filter": "Cam1-R1"},
            )
        )
        # Trickier cases where we need to expand data IDs, but it's still just
        # required and implied dimension relationships.
        self.assertEqual(
            registry.relateDataIds(
                {"instrument": "Cam1", "physical_filter": "Cam1-G"},
                {"instrument": "Cam1", "abstract_filter": "g"},
            ),
            ConsistentDataIds(contains=True, within=False, overlaps=True)
        )
        self.assertEqual(
            registry.relateDataIds(
                {"instrument": "Cam1", "abstract_filter": "g"},
                {"instrument": "Cam1", "physical_filter": "Cam1-G"},
            ),
            ConsistentDataIds(contains=False, within=True, overlaps=True)
        )
        self.assertEqual(
            registry.relateDataIds(
                {"instrument": "Cam1"},
                {"htm7": 131073},
            ),
            ConsistentDataIds(contains=False, within=False, overlaps=False)
        )
        # Trickiest cases involve spatial or temporal overlaps or non-dimension
        # elements that relate things (of which visit_definition is our only
        # current example).
        #
        # These two HTM IDs at different levels have a "contains" relationship
        # spatially, but there is no overlap of dimension keys.  The exact
        # result of relateDataIds is unspecified for this case, but it's
        # guaranteed to be truthy (see relateDataIds docs.).
        self.assertTrue(
            registry.relateDataIds({"htm7": 131073}, {"htm9": 2097169})
        )
        # These two HTM IDs at different levels are disjoint spatially, which
        # means the data IDs are inconsistent.
        self.assertIsNone(
            registry.relateDataIds({"htm7": 131073}, {"htm9": 2097391})
        )
        # Insert a few more dimension records for the next test: two
        # exposures, a visit system, one visit, and a visit_definition row
        # linking exposure 1 (only) to visit 1.
        registry.insertDimensionData(
            "exposure",
            {"instrument": "Cam1", "id": 1, "name": "one", "physical_filter": "Cam1-G"},
        )
        registry.insertDimensionData(
            "exposure",
            {"instrument": "Cam1", "id": 2, "name": "two", "physical_filter": "Cam1-G"},
        )
        registry.insertDimensionData(
            "visit_system",
            {"instrument": "Cam1", "id": 0, "name": "one-to-one"},
        )
        registry.insertDimensionData(
            "visit",
            {"instrument": "Cam1", "id": 1, "name": "one", "physical_filter": "Cam1-G", "visit_system": 0},
        )
        registry.insertDimensionData(
            "visit_definition",
            {"instrument": "Cam1", "visit": 1, "exposure": 1, "visit_system": 0},
        )
        # Visit 1 and exposure 1 are related through visit_definition.
        self.assertEqual(
            registry.relateDataIds(
                {"instrument": "Cam1", "visit": 1},
                {"instrument": "Cam1", "exposure": 1},
            ),
            ConsistentDataIds(contains=False, within=False, overlaps=True)
        )
        # Visit 1 and exposure 2 are not related, so the IDs are inconsistent.
        self.assertIsNone(
            registry.relateDataIds(
                {"instrument": "Cam1", "visit": 1},
                {"instrument": "Cam1", "exposure": 2},
            )
        )
361 def testDataset(self):
362 """Basic tests for `Registry.insertDatasets`, `Registry.getDataset`,
363 and `Registry.removeDatasets`.
364 """
365 registry = self.makeRegistry()
366 self.loadData(registry, "base.yaml")
367 run = "test"
368 registry.registerRun(run)
369 datasetType = registry.getDatasetType("permabias")
370 dataId = {"instrument": "Cam1", "detector": 2}
371 ref, = registry.insertDatasets(datasetType, dataIds=[dataId], run=run)
372 outRef = registry.getDataset(ref.id)
373 self.assertIsNotNone(ref.id)
374 self.assertEqual(ref, outRef)
375 with self.assertRaises(ConflictingDefinitionError):
376 registry.insertDatasets(datasetType, dataIds=[dataId], run=run)
377 registry.removeDatasets([ref])
378 self.assertIsNone(registry.findDataset(datasetType, dataId, collections=[run]))
    def testComponents(self):
        """Tests for `Registry.attachComponents` and other dataset operations
        on composite datasets.
        """
        registry = self.makeRegistry()
        self.loadData(registry, "base.yaml")
        run = "test"
        registry.registerRun(run)
        # Parent composite and its two component dataset types.
        parentDatasetType = registry.getDatasetType("permabias")
        childDatasetType1 = registry.getDatasetType("permabias.image")
        childDatasetType2 = registry.getDatasetType("permabias.mask")
        dataId = {"instrument": "Cam1", "detector": 2}
        # Insert parent and children separately, then attach the children.
        parent, = registry.insertDatasets(parentDatasetType, dataIds=[dataId], run=run)
        children = {"image": registry.insertDatasets(childDatasetType1, dataIds=[dataId], run=run)[0],
                    "mask": registry.insertDatasets(childDatasetType2, dataIds=[dataId], run=run)[0]}
        parent = registry.attachComponents(parent, children)
        self.assertEqual(parent.components, children)
        # The component linkage should survive a round-trip through getDataset.
        outParent = registry.getDataset(parent.id)
        self.assertEqual(outParent.components, children)
        # Remove the parent; this should remove all children.
        registry.removeDatasets([parent])
        self.assertIsNone(registry.findDataset(parentDatasetType, dataId, collections=[run]))
        self.assertIsNone(registry.findDataset(childDatasetType1, dataId, collections=[run]))
        self.assertIsNone(registry.findDataset(childDatasetType2, dataId, collections=[run]))
    def testFindDataset(self):
        """Tests for `Registry.findDataset`.
        """
        registry = self.makeRegistry()
        self.loadData(registry, "base.yaml")
        run = "test"
        datasetType = registry.getDatasetType("permabias")
        dataId = {"instrument": "Cam1", "detector": 4}
        registry.registerRun(run)
        # A freshly-inserted dataset should be found via its run collection.
        inputRef, = registry.insertDatasets(datasetType, dataIds=[dataId], run=run)
        outputRef = registry.findDataset(datasetType, dataId, collections=[run])
        self.assertEqual(outputRef, inputRef)
        # Check that retrieval with an incomplete dataId raises
        with self.assertRaises(LookupError):
            dataId = {"instrument": "Cam1"}  # no detector
            registry.findDataset(datasetType, dataId, collections=run)
        # Check that different dataIds match to different datasets
        dataId1 = {"instrument": "Cam1", "detector": 1}
        inputRef1, = registry.insertDatasets(datasetType, dataIds=[dataId1], run=run)
        dataId2 = {"instrument": "Cam1", "detector": 2}
        inputRef2, = registry.insertDatasets(datasetType, dataIds=[dataId2], run=run)
        self.assertEqual(registry.findDataset(datasetType, dataId1, collections=run), inputRef1)
        self.assertEqual(registry.findDataset(datasetType, dataId2, collections=run), inputRef2)
        self.assertNotEqual(registry.findDataset(datasetType, dataId1, collections=run), inputRef2)
        self.assertNotEqual(registry.findDataset(datasetType, dataId2, collections=run), inputRef1)
        # Check that requesting a non-existing dataId returns None
        nonExistingDataId = {"instrument": "Cam1", "detector": 3}
        self.assertIsNone(registry.findDataset(datasetType, nonExistingDataId, collections=run))
    def testCollections(self):
        """Tests for registry methods that manage collections: TAGGED
        associate/disassociate, CHAINED collections with dataset-type
        restrictions, and collection removal.
        """
        registry = self.makeRegistry()
        self.loadData(registry, "base.yaml")
        self.loadData(registry, "datasets.yaml")
        run1 = "imported_g"
        run2 = "imported_r"
        datasetType = "permabias"
        # Find some datasets via their run's collection.
        dataId1 = {"instrument": "Cam1", "detector": 1}
        ref1 = registry.findDataset(datasetType, dataId1, collections=run1)
        self.assertIsNotNone(ref1)
        dataId2 = {"instrument": "Cam1", "detector": 2}
        ref2 = registry.findDataset(datasetType, dataId2, collections=run1)
        self.assertIsNotNone(ref2)
        # Associate those into a new TAGGED collection, then look for them
        # there.
        tag1 = "tag1"
        registry.registerCollection(tag1, type=CollectionType.TAGGED)
        registry.associate(tag1, [ref1, ref2])
        self.assertEqual(registry.findDataset(datasetType, dataId1, collections=tag1), ref1)
        self.assertEqual(registry.findDataset(datasetType, dataId2, collections=tag1), ref2)
        # Disassociate one and verify that we can't find it there anymore...
        registry.disassociate(tag1, [ref1])
        self.assertIsNone(registry.findDataset(datasetType, dataId1, collections=tag1))
        # ...but we can still find ref2 in tag1, and ref1 in the run.
        self.assertEqual(registry.findDataset(datasetType, dataId1, collections=run1), ref1)
        self.assertEqual(registry.findDataset(datasetType, dataId2, collections=tag1), ref2)
        collections = set(registry.queryCollections())
        self.assertEqual(collections, {run1, run2, tag1})
        # Associate both refs into tag1 again; ref2 is already there, but that
        # should be a harmless no-op.
        registry.associate(tag1, [ref1, ref2])
        self.assertEqual(registry.findDataset(datasetType, dataId1, collections=tag1), ref1)
        self.assertEqual(registry.findDataset(datasetType, dataId2, collections=tag1), ref2)
        # Get a different dataset (from a different run) that has the same
        # dataset type and data ID as ref2.
        ref2b = registry.findDataset(datasetType, dataId2, collections=run2)
        self.assertNotEqual(ref2, ref2b)
        # Attempting to associate that into tag1 should be an error.
        with self.assertRaises(ConflictingDefinitionError):
            registry.associate(tag1, [ref2b])
        # That error shouldn't have messed up what we had before.
        self.assertEqual(registry.findDataset(datasetType, dataId1, collections=tag1), ref1)
        self.assertEqual(registry.findDataset(datasetType, dataId2, collections=tag1), ref2)
        # Attempt to associate the conflicting dataset again, this time with
        # a dataset that isn't in the collection and won't cause a conflict.
        # Should also fail without modifying anything.
        dataId3 = {"instrument": "Cam1", "detector": 3}
        ref3 = registry.findDataset(datasetType, dataId3, collections=run1)
        with self.assertRaises(ConflictingDefinitionError):
            registry.associate(tag1, [ref3, ref2b])
        self.assertEqual(registry.findDataset(datasetType, dataId1, collections=tag1), ref1)
        self.assertEqual(registry.findDataset(datasetType, dataId2, collections=tag1), ref2)
        self.assertIsNone(registry.findDataset(datasetType, dataId3, collections=tag1))
        # Register a chained collection that searches:
        # 1. 'tag1'
        # 2. 'run1', but only for the permaflat dataset
        # 3. 'run2'
        chain1 = "chain1"
        registry.registerCollection(chain1, type=CollectionType.CHAINED)
        self.assertIs(registry.getCollectionType(chain1), CollectionType.CHAINED)
        # Chained collection exists, but has no collections in it.
        self.assertFalse(registry.getCollectionChain(chain1))
        # If we query for all collections, we should get the chained collection
        # only if we don't ask to flatten it (i.e. yield only its children).
        self.assertEqual(set(registry.queryCollections(flattenChains=False)), {tag1, run1, run2, chain1})
        self.assertEqual(set(registry.queryCollections(flattenChains=True)), {tag1, run1, run2})
        # Attempt to set its child collections to something circular; that
        # should fail.
        with self.assertRaises(ValueError):
            registry.setCollectionChain(chain1, [tag1, chain1])
        # Add the child collections.
        registry.setCollectionChain(chain1, [tag1, (run1, "permaflat"), run2])
        self.assertEqual(
            list(registry.getCollectionChain(chain1)),
            [(tag1, DatasetTypeRestriction.any),
             (run1, DatasetTypeRestriction.fromExpression("permaflat")),
             (run2, DatasetTypeRestriction.any)]
        )
        # Searching for dataId1 or dataId2 in the chain should return ref1 and
        # ref2, because both are in tag1.
        self.assertEqual(registry.findDataset(datasetType, dataId1, collections=chain1), ref1)
        self.assertEqual(registry.findDataset(datasetType, dataId2, collections=chain1), ref2)
        # Now disassociate ref2 from tag1.  The search (for permabias) with
        # dataId2 in chain1 should then:
        # 1. not find it in tag1
        # 2. not find it in run1, because that element is restricted to
        #    permaflat here
        # 3. find a different dataset in run2
        registry.disassociate(tag1, [ref2])
        ref2b = registry.findDataset(datasetType, dataId2, collections=chain1)
        self.assertNotEqual(ref2b, ref2)
        self.assertEqual(ref2b, registry.findDataset(datasetType, dataId2, collections=run2))
        # Look in the chain for a permaflat that is in run1; should get the
        # same ref as if we'd searched run1 directly.
        dataId3 = {"instrument": "Cam1", "detector": 2, "physical_filter": "Cam1-G"}
        self.assertEqual(registry.findDataset("permaflat", dataId3, collections=chain1),
                         registry.findDataset("permaflat", dataId3, collections=run1),)
        # Define a new chain so we can test recursive chains.
        chain2 = "chain2"
        registry.registerCollection(chain2, type=CollectionType.CHAINED)
        registry.setCollectionChain(chain2, [(run2, "permabias"), chain1])
        # Search for permabias with dataId1 should find it via tag1 in chain2,
        # recursing, because it is not in run2.
        self.assertIsNone(registry.findDataset(datasetType, dataId1, collections=run2))
        self.assertEqual(registry.findDataset(datasetType, dataId1, collections=chain2), ref1)
        # Search for permabias with dataId2 should find it in run2 (ref2b).
        self.assertEqual(registry.findDataset(datasetType, dataId2, collections=chain2), ref2b)
        # Search for a permaflat that is in run2.  That should not be found
        # at the front of chain2, because of the restriction to permabias
        # on run2 there, but it should be found at the end of chain1.
        dataId4 = {"instrument": "Cam1", "detector": 3, "physical_filter": "Cam1-R2"}
        ref4 = registry.findDataset("permaflat", dataId4, collections=run2)
        self.assertIsNotNone(ref4)
        self.assertEqual(ref4, registry.findDataset("permaflat", dataId4, collections=chain2))
        # Deleting a collection that's part of a CHAINED collection is not
        # allowed, and is exception-safe.
        with self.assertRaises(Exception):
            registry.removeCollection(run2)
        self.assertEqual(registry.getCollectionType(run2), CollectionType.RUN)
        with self.assertRaises(Exception):
            registry.removeCollection(chain1)
        self.assertEqual(registry.getCollectionType(chain1), CollectionType.CHAINED)
        # Actually remove chain2, test that it's gone by asking for its type.
        registry.removeCollection(chain2)
        with self.assertRaises(MissingCollectionError):
            registry.getCollectionType(chain2)
        # Actually remove run2 and chain1, which should work now.
        registry.removeCollection(chain1)
        registry.removeCollection(run2)
        with self.assertRaises(MissingCollectionError):
            registry.getCollectionType(run2)
        with self.assertRaises(MissingCollectionError):
            registry.getCollectionType(chain1)
        # Remove tag1 as well, just to test that we can remove TAGGED
        # collections.
        registry.removeCollection(tag1)
        with self.assertRaises(MissingCollectionError):
            registry.getCollectionType(tag1)
    def testDatasetLocations(self):
        """Tests for `Registry.insertDatasetLocations`,
        `Registry.getDatasetLocations`, and `Registry.removeDatasetLocation`.
        """
        registry = self.makeRegistry()
        self.loadData(registry, "base.yaml")
        self.loadData(registry, "datasets.yaml")
        run = "imported_g"
        ref = registry.findDataset("permabias", dataId={"instrument": "Cam1", "detector": 1}, collections=run)
        ref2 = registry.findDataset("permaflat",
                                    dataId={"instrument": "Cam1", "detector": 3, "physical_filter": "Cam1-G"},
                                    collections=run)
        datastoreName = "dummystore"
        datastoreName2 = "dummystore2"
        # Test adding location information about a new dataset.
        registry.insertDatasetLocations(datastoreName, [ref])
        addresses = registry.getDatasetLocations(ref)
        self.assertIn(datastoreName, addresses)
        self.assertEqual(len(addresses), 1)
        # A second datastore can also hold ref (and ref2).
        registry.insertDatasetLocations(datastoreName2, [ref, ref2])
        addresses = registry.getDatasetLocations(ref)
        self.assertEqual(len(addresses), 2)
        self.assertIn(datastoreName, addresses)
        self.assertIn(datastoreName2, addresses)
        # Removing one location leaves the other intact.
        registry.removeDatasetLocation(datastoreName, [ref])
        addresses = registry.getDatasetLocations(ref)
        self.assertEqual(len(addresses), 1)
        self.assertNotIn(datastoreName, addresses)
        self.assertIn(datastoreName2, addresses)
        # A dataset that still has a recorded location cannot be removed.
        with self.assertRaises(OrphanedRecordError):
            registry.removeDatasets([ref])
        registry.removeDatasetLocation(datastoreName2, [ref])
        addresses = registry.getDatasetLocations(ref)
        self.assertEqual(len(addresses), 0)
        self.assertNotIn(datastoreName2, addresses)
        registry.removeDatasets([ref])  # should not raise
        # ref2's location record is unaffected by everything above.
        addresses = registry.getDatasetLocations(ref2)
        self.assertEqual(len(addresses), 1)
        self.assertIn(datastoreName2, addresses)
    def testBasicTransaction(self):
        """Test that all operations within a single transaction block are
        rolled back if an exception propagates out of the block.
        """
        registry = self.makeRegistry()
        storageClass = StorageClass("testDatasetType")
        registry.storageClasses.registerStorageClass(storageClass)
        # A transaction that exits cleanly commits its work.
        with registry.transaction():
            registry.insertDimensionData("instrument", {"name": "Cam1", "class_name": "A"})
        # A transaction aborted by an exception rolls its work back.
        with self.assertRaises(ValueError):
            with registry.transaction():
                registry.insertDimensionData("instrument", {"name": "Cam2"})
                raise ValueError("Oops, something went wrong")
        # Cam1 should exist
        self.assertEqual(registry.expandDataId(instrument="Cam1").records["instrument"].class_name, "A")
        # But Cam2 and Cam3 should both not exist (Cam2 was rolled back;
        # Cam3 was never inserted at all).
        with self.assertRaises(LookupError):
            registry.expandDataId(instrument="Cam2")
        with self.assertRaises(LookupError):
            registry.expandDataId(instrument="Cam3")
    def testNestedTransaction(self):
        """Test that operations within a transaction block are not rolled back
        if an exception propagates out of an inner transaction block and is
        then caught.
        """
        registry = self.makeRegistry()
        dimension = registry.dimensions["instrument"]
        dataId1 = {"instrument": "DummyCam"}
        dataId2 = {"instrument": "DummyCam2"}
        # Flag proving the inner block really executed before the failure.
        checkpointReached = False
        with registry.transaction():
            # This should be added and (ultimately) committed.
            registry.insertDimensionData(dimension, dataId1)
            with self.assertRaises(sqlalchemy.exc.IntegrityError):
                with registry.transaction():
                    # This does not conflict, and should succeed (but not
                    # be committed).
                    registry.insertDimensionData(dimension, dataId2)
                    checkpointReached = True
                    # This should conflict and raise, triggering a rollback
                    # of the previous insertion within the same transaction
                    # context, but not the original insertion in the outer
                    # block.
                    registry.insertDimensionData(dimension, dataId1)
        self.assertTrue(checkpointReached)
        # dataId1 survived the outer commit; dataId2 was rolled back with
        # the inner transaction.
        self.assertIsNotNone(registry.expandDataId(dataId1, graph=dimension.graph))
        with self.assertRaises(LookupError):
            registry.expandDataId(dataId2, graph=dimension.graph)
664 def testInstrumentDimensions(self):
665 """Test queries involving only instrument dimensions, with no joins to
666 skymap."""
667 registry = self.makeRegistry()
669 # need a bunch of dimensions and datasets for test
670 registry.insertDimensionData(
671 "instrument",
672 dict(name="DummyCam", visit_max=25, exposure_max=300, detector_max=6)
673 )
674 registry.insertDimensionData(
675 "physical_filter",
676 dict(instrument="DummyCam", name="dummy_r", abstract_filter="r"),
677 dict(instrument="DummyCam", name="dummy_i", abstract_filter="i"),
678 )
679 registry.insertDimensionData(
680 "detector",
681 *[dict(instrument="DummyCam", id=i, full_name=str(i)) for i in range(1, 6)]
682 )
683 registry.insertDimensionData(
684 "visit_system",
685 dict(instrument="DummyCam", id=1, name="default"),
686 )
687 registry.insertDimensionData(
688 "visit",
689 dict(instrument="DummyCam", id=10, name="ten", physical_filter="dummy_i", visit_system=1),
690 dict(instrument="DummyCam", id=11, name="eleven", physical_filter="dummy_r", visit_system=1),
691 dict(instrument="DummyCam", id=20, name="twelve", physical_filter="dummy_r", visit_system=1),
692 )
693 registry.insertDimensionData(
694 "exposure",
695 dict(instrument="DummyCam", id=100, name="100", physical_filter="dummy_i"),
696 dict(instrument="DummyCam", id=101, name="101", physical_filter="dummy_i"),
697 dict(instrument="DummyCam", id=110, name="110", physical_filter="dummy_r"),
698 dict(instrument="DummyCam", id=111, name="111", physical_filter="dummy_r"),
699 dict(instrument="DummyCam", id=200, name="200", physical_filter="dummy_r"),
700 dict(instrument="DummyCam", id=201, name="201", physical_filter="dummy_r"),
701 )
702 registry.insertDimensionData(
703 "visit_definition",
704 dict(instrument="DummyCam", exposure=100, visit_system=1, visit=10),
705 dict(instrument="DummyCam", exposure=101, visit_system=1, visit=10),
706 dict(instrument="DummyCam", exposure=110, visit_system=1, visit=11),
707 dict(instrument="DummyCam", exposure=111, visit_system=1, visit=11),
708 dict(instrument="DummyCam", exposure=200, visit_system=1, visit=20),
709 dict(instrument="DummyCam", exposure=201, visit_system=1, visit=20),
710 )
711 # dataset types
712 run1 = "test1_r"
713 run2 = "test2_r"
714 tagged2 = "test2_t"
715 registry.registerRun(run1)
716 registry.registerRun(run2)
717 registry.registerCollection(tagged2)
718 storageClass = StorageClass("testDataset")
719 registry.storageClasses.registerStorageClass(storageClass)
720 rawType = DatasetType(name="RAW",
721 dimensions=registry.dimensions.extract(("instrument", "exposure", "detector")),
722 storageClass=storageClass)
723 registry.registerDatasetType(rawType)
724 calexpType = DatasetType(name="CALEXP",
725 dimensions=registry.dimensions.extract(("instrument", "visit", "detector")),
726 storageClass=storageClass)
727 registry.registerDatasetType(calexpType)
729 # add pre-existing datasets
730 for exposure in (100, 101, 110, 111):
731 for detector in (1, 2, 3):
732 # note that only 3 of 5 detectors have datasets
733 dataId = dict(instrument="DummyCam", exposure=exposure, detector=detector)
734 ref, = registry.insertDatasets(rawType, dataIds=[dataId], run=run1)
735 # exposures 100 and 101 appear in both run1 and tagged2.
736 # 100 has different datasets in the different collections
737 # 101 has the same dataset in both collections.
738 if exposure == 100:
739 ref, = registry.insertDatasets(rawType, dataIds=[dataId], run=run2)
740 if exposure in (100, 101):
741 registry.associate(tagged2, [ref])
742 # Add pre-existing datasets to tagged2.
743 for exposure in (200, 201):
744 for detector in (3, 4, 5):
745 # note that only 3 of 5 detectors have datasets
746 dataId = dict(instrument="DummyCam", exposure=exposure, detector=detector)
747 ref, = registry.insertDatasets(rawType, dataIds=[dataId], run=run2)
748 registry.associate(tagged2, [ref])
750 dimensions = DimensionGraph(
751 registry.dimensions,
752 dimensions=(rawType.dimensions.required | calexpType.dimensions.required)
753 )
754 # Test that single dim string works as well as list of str
755 rows = list(registry.queryDimensions("visit", datasets=rawType, collections=run1, expand=True))
756 rowsI = list(registry.queryDimensions(["visit"], datasets=rawType, collections=run1, expand=True))
757 self.assertEqual(rows, rowsI)
758 # with empty expression
759 rows = list(registry.queryDimensions(dimensions, datasets=rawType, collections=run1, expand=True))
760 self.assertEqual(len(rows), 4*3) # 4 exposures times 3 detectors
761 for dataId in rows:
762 self.assertCountEqual(dataId.keys(), ("instrument", "detector", "exposure", "visit"))
763 packer1 = registry.dimensions.makePacker("visit_detector", dataId)
764 packer2 = registry.dimensions.makePacker("exposure_detector", dataId)
765 self.assertEqual(packer1.unpack(packer1.pack(dataId)),
766 DataCoordinate.standardize(dataId, graph=packer1.dimensions))
767 self.assertEqual(packer2.unpack(packer2.pack(dataId)),
768 DataCoordinate.standardize(dataId, graph=packer2.dimensions))
769 self.assertNotEqual(packer1.pack(dataId), packer2.pack(dataId))
770 self.assertCountEqual(set(dataId["exposure"] for dataId in rows),
771 (100, 101, 110, 111))
772 self.assertCountEqual(set(dataId["visit"] for dataId in rows), (10, 11))
773 self.assertCountEqual(set(dataId["detector"] for dataId in rows), (1, 2, 3))
775 # second collection
776 rows = list(registry.queryDimensions(dimensions, datasets=rawType, collections=tagged2))
777 self.assertEqual(len(rows), 4*3) # 4 exposures times 3 detectors
778 for dataId in rows:
779 self.assertCountEqual(dataId.keys(), ("instrument", "detector", "exposure", "visit"))
780 self.assertCountEqual(set(dataId["exposure"] for dataId in rows),
781 (100, 101, 200, 201))
782 self.assertCountEqual(set(dataId["visit"] for dataId in rows), (10, 20))
783 self.assertCountEqual(set(dataId["detector"] for dataId in rows), (1, 2, 3, 4, 5))
785 # with two input datasets
786 rows = list(registry.queryDimensions(dimensions, datasets=rawType, collections=[run1, tagged2]))
787 self.assertEqual(len(set(rows)), 6*3) # 6 exposures times 3 detectors; set needed to de-dupe
788 for dataId in rows:
789 self.assertCountEqual(dataId.keys(), ("instrument", "detector", "exposure", "visit"))
790 self.assertCountEqual(set(dataId["exposure"] for dataId in rows),
791 (100, 101, 110, 111, 200, 201))
792 self.assertCountEqual(set(dataId["visit"] for dataId in rows), (10, 11, 20))
793 self.assertCountEqual(set(dataId["detector"] for dataId in rows), (1, 2, 3, 4, 5))
795 # limit to single visit
796 rows = list(registry.queryDimensions(dimensions, datasets=rawType, collections=run1,
797 where="visit = 10"))
798 self.assertEqual(len(rows), 2*3) # 2 exposures times 3 detectors
799 self.assertCountEqual(set(dataId["exposure"] for dataId in rows), (100, 101))
800 self.assertCountEqual(set(dataId["visit"] for dataId in rows), (10,))
801 self.assertCountEqual(set(dataId["detector"] for dataId in rows), (1, 2, 3))
803 # more limiting expression, using link names instead of Table.column
804 rows = list(registry.queryDimensions(dimensions, datasets=rawType, collections=run1,
805 where="visit = 10 and detector > 1"))
806 self.assertEqual(len(rows), 2*2) # 2 exposures times 2 detectors
807 self.assertCountEqual(set(dataId["exposure"] for dataId in rows), (100, 101))
808 self.assertCountEqual(set(dataId["visit"] for dataId in rows), (10,))
809 self.assertCountEqual(set(dataId["detector"] for dataId in rows), (2, 3))
811 # expression excludes everything
812 rows = list(registry.queryDimensions(dimensions, datasets=rawType, collections=run1,
813 where="visit > 1000"))
814 self.assertEqual(len(rows), 0)
816 # Selecting by physical_filter, this is not in the dimensions, but it
817 # is a part of the full expression so it should work too.
818 rows = list(registry.queryDimensions(dimensions, datasets=rawType, collections=run1,
819 where="physical_filter = 'dummy_r'"))
820 self.assertEqual(len(rows), 2*3) # 2 exposures times 3 detectors
821 self.assertCountEqual(set(dataId["exposure"] for dataId in rows), (110, 111))
822 self.assertCountEqual(set(dataId["visit"] for dataId in rows), (11,))
823 self.assertCountEqual(set(dataId["detector"] for dataId in rows), (1, 2, 3))
825 def testSkyMapDimensions(self):
826 """Tests involving only skymap dimensions, no joins to instrument."""
827 registry = self.makeRegistry()
829 # need a bunch of dimensions and datasets for test, we want
830 # "abstract_filter" in the test so also have to add physical_filter
831 # dimensions
832 registry.insertDimensionData(
833 "instrument",
834 dict(instrument="DummyCam")
835 )
836 registry.insertDimensionData(
837 "physical_filter",
838 dict(instrument="DummyCam", name="dummy_r", abstract_filter="r"),
839 dict(instrument="DummyCam", name="dummy_i", abstract_filter="i"),
840 )
841 registry.insertDimensionData(
842 "skymap",
843 dict(name="DummyMap", hash="sha!".encode("utf8"))
844 )
845 for tract in range(10):
846 registry.insertDimensionData("tract", dict(skymap="DummyMap", id=tract))
847 registry.insertDimensionData(
848 "patch",
849 *[dict(skymap="DummyMap", tract=tract, id=patch, cell_x=0, cell_y=0)
850 for patch in range(10)]
851 )
853 # dataset types
854 run = "test"
855 registry.registerRun(run)
856 storageClass = StorageClass("testDataset")
857 registry.storageClasses.registerStorageClass(storageClass)
858 calexpType = DatasetType(name="deepCoadd_calexp",
859 dimensions=registry.dimensions.extract(("skymap", "tract", "patch",
860 "abstract_filter")),
861 storageClass=storageClass)
862 registry.registerDatasetType(calexpType)
863 mergeType = DatasetType(name="deepCoadd_mergeDet",
864 dimensions=registry.dimensions.extract(("skymap", "tract", "patch")),
865 storageClass=storageClass)
866 registry.registerDatasetType(mergeType)
867 measType = DatasetType(name="deepCoadd_meas",
868 dimensions=registry.dimensions.extract(("skymap", "tract", "patch",
869 "abstract_filter")),
870 storageClass=storageClass)
871 registry.registerDatasetType(measType)
873 dimensions = DimensionGraph(
874 registry.dimensions,
875 dimensions=(calexpType.dimensions.required | mergeType.dimensions.required
876 | measType.dimensions.required)
877 )
879 # add pre-existing datasets
880 for tract in (1, 3, 5):
881 for patch in (2, 4, 6, 7):
882 dataId = dict(skymap="DummyMap", tract=tract, patch=patch)
883 registry.insertDatasets(mergeType, dataIds=[dataId], run=run)
884 for aFilter in ("i", "r"):
885 dataId = dict(skymap="DummyMap", tract=tract, patch=patch, abstract_filter=aFilter)
886 registry.insertDatasets(calexpType, dataIds=[dataId], run=run)
888 # with empty expression
889 rows = list(registry.queryDimensions(dimensions,
890 datasets=[calexpType, mergeType], collections=run))
891 self.assertEqual(len(rows), 3*4*2) # 4 tracts x 4 patches x 2 filters
892 for dataId in rows:
893 self.assertCountEqual(dataId.keys(), ("skymap", "tract", "patch", "abstract_filter"))
894 self.assertCountEqual(set(dataId["tract"] for dataId in rows), (1, 3, 5))
895 self.assertCountEqual(set(dataId["patch"] for dataId in rows), (2, 4, 6, 7))
896 self.assertCountEqual(set(dataId["abstract_filter"] for dataId in rows), ("i", "r"))
898 # limit to 2 tracts and 2 patches
899 rows = list(registry.queryDimensions(dimensions,
900 datasets=[calexpType, mergeType], collections=run,
901 where="tract IN (1, 5) AND patch IN (2, 7)"))
902 self.assertEqual(len(rows), 2*2*2) # 2 tracts x 2 patches x 2 filters
903 self.assertCountEqual(set(dataId["tract"] for dataId in rows), (1, 5))
904 self.assertCountEqual(set(dataId["patch"] for dataId in rows), (2, 7))
905 self.assertCountEqual(set(dataId["abstract_filter"] for dataId in rows), ("i", "r"))
907 # limit to single filter
908 rows = list(registry.queryDimensions(dimensions,
909 datasets=[calexpType, mergeType], collections=run,
910 where="abstract_filter = 'i'"))
911 self.assertEqual(len(rows), 3*4*1) # 4 tracts x 4 patches x 2 filters
912 self.assertCountEqual(set(dataId["tract"] for dataId in rows), (1, 3, 5))
913 self.assertCountEqual(set(dataId["patch"] for dataId in rows), (2, 4, 6, 7))
914 self.assertCountEqual(set(dataId["abstract_filter"] for dataId in rows), ("i",))
916 # expression excludes everything, specifying non-existing skymap is
917 # not a fatal error, it's operator error
918 rows = list(registry.queryDimensions(dimensions,
919 datasets=[calexpType, mergeType], collections=run,
920 where="skymap = 'Mars'"))
921 self.assertEqual(len(rows), 0)
923 def testSpatialMatch(self):
924 """Test involving spatial match using join tables.
926 Note that realistic test needs a reasonably-defined skypix and regions
927 in registry tables which is hard to implement in this simple test.
928 So we do not actually fill registry with any data and all queries will
929 return empty result, but this is still useful for coverage of the code
930 that generates query.
931 """
932 registry = self.makeRegistry()
934 # dataset types
935 collection = "test"
936 registry.registerRun(name=collection)
937 storageClass = StorageClass("testDataset")
938 registry.storageClasses.registerStorageClass(storageClass)
940 calexpType = DatasetType(name="CALEXP",
941 dimensions=registry.dimensions.extract(("instrument", "visit", "detector")),
942 storageClass=storageClass)
943 registry.registerDatasetType(calexpType)
945 coaddType = DatasetType(name="deepCoadd_calexp",
946 dimensions=registry.dimensions.extract(("skymap", "tract", "patch",
947 "abstract_filter")),
948 storageClass=storageClass)
949 registry.registerDatasetType(coaddType)
951 dimensions = DimensionGraph(
952 registry.dimensions,
953 dimensions=(calexpType.dimensions.required | coaddType.dimensions.required)
954 )
956 # without data this should run OK but return empty set
957 rows = list(registry.queryDimensions(dimensions, datasets=calexpType, collections=collection))
958 self.assertEqual(len(rows), 0)
960 def testCalibrationLabelIndirection(self):
961 """Test that we can look up datasets with calibration_label dimensions
962 from a data ID with exposure dimensions.
963 """
965 def _dt(iso_string):
966 return astropy.time.Time(iso_string, format="iso", scale="utc")
968 registry = self.makeRegistry()
970 flat = DatasetType(
971 "flat",
972 registry.dimensions.extract(
973 ["instrument", "detector", "physical_filter", "calibration_label"]
974 ),
975 "ImageU"
976 )
977 registry.registerDatasetType(flat)
978 registry.insertDimensionData("instrument", dict(name="DummyCam"))
979 registry.insertDimensionData(
980 "physical_filter",
981 dict(instrument="DummyCam", name="dummy_i", abstract_filter="i"),
982 )
983 registry.insertDimensionData(
984 "detector",
985 *[dict(instrument="DummyCam", id=i, full_name=str(i)) for i in (1, 2, 3, 4, 5)]
986 )
987 registry.insertDimensionData(
988 "exposure",
989 dict(instrument="DummyCam", id=100, name="100", physical_filter="dummy_i",
990 datetime_begin=_dt("2005-12-15 02:00:00"), datetime_end=_dt("2005-12-15 03:00:00")),
991 dict(instrument="DummyCam", id=101, name="101", physical_filter="dummy_i",
992 datetime_begin=_dt("2005-12-16 02:00:00"), datetime_end=_dt("2005-12-16 03:00:00")),
993 )
994 registry.insertDimensionData(
995 "calibration_label",
996 dict(instrument="DummyCam", name="first_night",
997 datetime_begin=_dt("2005-12-15 01:00:00"), datetime_end=_dt("2005-12-15 04:00:00")),
998 dict(instrument="DummyCam", name="second_night",
999 datetime_begin=_dt("2005-12-16 01:00:00"), datetime_end=_dt("2005-12-16 04:00:00")),
1000 dict(instrument="DummyCam", name="both_nights",
1001 datetime_begin=_dt("2005-12-15 01:00:00"), datetime_end=_dt("2005-12-16 04:00:00")),
1002 )
1003 # Different flats for different nights for detectors 1-3 in first
1004 # collection.
1005 run1 = "calibs1"
1006 registry.registerRun(run1)
1007 for detector in (1, 2, 3):
1008 registry.insertDatasets(flat, [dict(instrument="DummyCam", calibration_label="first_night",
1009 physical_filter="dummy_i", detector=detector)],
1010 run=run1)
1011 registry.insertDatasets(flat, [dict(instrument="DummyCam", calibration_label="second_night",
1012 physical_filter="dummy_i", detector=detector)],
1013 run=run1)
1014 # The same flat for both nights for detectors 3-5 (so detector 3 has
1015 # multiple valid flats) in second collection.
1016 run2 = "calib2"
1017 registry.registerRun(run2)
1018 for detector in (3, 4, 5):
1019 registry.insertDatasets(flat, [dict(instrument="DummyCam", calibration_label="both_nights",
1020 physical_filter="dummy_i", detector=detector)],
1021 run=run2)
1022 # Perform queries for individual exposure+detector combinations, which
1023 # should always return exactly one flat.
1024 for exposure in (100, 101):
1025 for detector in (1, 2, 3):
1026 with self.subTest(exposure=exposure, detector=detector):
1027 rows = list(registry.queryDatasets("flat", collections=[run1],
1028 instrument="DummyCam",
1029 exposure=exposure,
1030 detector=detector))
1031 self.assertEqual(len(rows), 1)
1032 for detector in (3, 4, 5):
1033 with self.subTest(exposure=exposure, detector=detector):
1034 rows = registry.queryDatasets("flat", collections=[run2],
1035 instrument="DummyCam",
1036 exposure=exposure,
1037 detector=detector)
1038 self.assertEqual(len(list(rows)), 1)
1039 for detector in (1, 2, 4, 5):
1040 with self.subTest(exposure=exposure, detector=detector):
1041 rows = registry.queryDatasets("flat", collections=[run1, run2],
1042 instrument="DummyCam",
1043 exposure=exposure,
1044 detector=detector)
1045 self.assertEqual(len(list(rows)), 1)
1046 for detector in (3,):
1047 with self.subTest(exposure=exposure, detector=detector):
1048 rows = registry.queryDatasets("flat", collections=[run1, run2],
1049 instrument="DummyCam",
1050 exposure=exposure,
1051 detector=detector)
1052 self.assertEqual(len(list(rows)), 2)
1054 def testAbstractFilterQuery(self):
1055 """Test that we can run a query that just lists the known
1056 abstract_filters. This is tricky because abstract_filter is
1057 backed by a query against physical_filter.
1058 """
1059 registry = self.makeRegistry()
1060 registry.insertDimensionData("instrument", dict(name="DummyCam"))
1061 registry.insertDimensionData(
1062 "physical_filter",
1063 dict(instrument="DummyCam", name="dummy_i", abstract_filter="i"),
1064 dict(instrument="DummyCam", name="dummy_i2", abstract_filter="i"),
1065 dict(instrument="DummyCam", name="dummy_r", abstract_filter="r"),
1066 )
1067 rows = list(registry.queryDimensions(["abstract_filter"]))
1068 self.assertCountEqual(
1069 rows,
1070 [DataCoordinate.standardize(abstract_filter="i", universe=registry.dimensions),
1071 DataCoordinate.standardize(abstract_filter="r", universe=registry.dimensions)]
1072 )