# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

__all__ = ("FitsCatalogDatasetsHelper", "DatasetTestHelper", "DatastoreTestHelper",
           "BadWriteFormatter", "BadNoWriteFormatter", "MultiDetectorFormatter")

import os

from lsst.daf.butler import DatasetType, DatasetRef
from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter


class FitsCatalogDatasetsHelper:
    """Helper methods for tests that read and compare FITS source catalogs."""

    def makeExampleCatalog(self):
        import lsst.afw.table
        catalogPath = os.path.join(self.testDir, "data", "basic", "source_catalog.fits")
        return lsst.afw.table.SourceCatalog.readFits(catalogPath)

    def assertCatalogEqual(self, inputCatalog, outputCatalog):
        import lsst.afw.table
        self.assertIsInstance(outputCatalog, lsst.afw.table.SourceCatalog)
        inputTable = inputCatalog.getTable()
        inputRecord = inputCatalog[0]
        outputTable = outputCatalog.getTable()
        outputRecord = outputCatalog[0]
        self.assertEqual(inputRecord.getPsfInstFlux(), outputRecord.getPsfInstFlux())
        self.assertEqual(inputRecord.getPsfFluxFlag(), outputRecord.getPsfFluxFlag())
        self.assertEqual(inputTable.getSchema().getAliasMap().get("slot_Centroid"),
                         outputTable.getSchema().getAliasMap().get("slot_Centroid"))
        self.assertEqual(inputRecord.getCentroid(), outputRecord.getCentroid())
        self.assertFloatsAlmostEqual(
            inputRecord.getCentroidErr()[0, 0],
            outputRecord.getCentroidErr()[0, 0], rtol=1e-6)
        self.assertFloatsAlmostEqual(
            inputRecord.getCentroidErr()[1, 1],
            outputRecord.getCentroidErr()[1, 1], rtol=1e-6)
        self.assertEqual(inputTable.getSchema().getAliasMap().get("slot_Shape"),
                         outputTable.getSchema().getAliasMap().get("slot_Shape"))
        self.assertFloatsAlmostEqual(
            inputRecord.getShapeErr()[0, 0],
            outputRecord.getShapeErr()[0, 0], rtol=1e-6)
        self.assertFloatsAlmostEqual(
            inputRecord.getShapeErr()[1, 1],
            outputRecord.getShapeErr()[1, 1], rtol=1e-6)
        self.assertFloatsAlmostEqual(
            inputRecord.getShapeErr()[2, 2],
            outputRecord.getShapeErr()[2, 2], rtol=1e-6)
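

# A minimal sketch of how the mix-in above is meant to be combined with a
# test case.  The class, its ``testDir`` value, and the round-trip step are
# illustrative assumptions, not part of this module; ``assertFloatsAlmostEqual``
# is supplied by ``lsst.utils.tests.TestCase``:
#
#     class ExampleCatalogTestCase(FitsCatalogDatasetsHelper,
#                                  lsst.utils.tests.TestCase):
#         testDir = os.path.dirname(__file__)
#
#         def testRoundTrip(self):
#             catalog = self.makeExampleCatalog()
#             # ... write ``catalog`` through a datastore, read it back ...
#             self.assertCatalogEqual(catalog, restoredCatalog)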


class DatasetTestHelper:
    """Helper methods for Datasets."""

    def makeDatasetRef(self, datasetTypeName, dimensions, storageClass, dataId, *, id=None, run=None,
                       conform=True):
        """Make a DatasetType and wrap it in a DatasetRef for a test."""
        return self._makeDatasetRef(datasetTypeName, dimensions, storageClass, dataId, id=id, run=run,
                                    conform=conform)

    def _makeDatasetRef(self, datasetTypeName, dimensions, storageClass, dataId, *, id=None, run=None,
                        conform=True):
        # Helper for makeDatasetRef.
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        if id is None:
            self.id += 1
            id = self.id
        if run is None:
            run = "dummy"
        return DatasetRef(datasetType, dataId, id=id, run=run, conform=conform)
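

# Example use of the helper above (a sketch: the dimension graph, storage
# class, and data ID are placeholders that a concrete test case supplies):
#
#     class ExampleRefTestCase(DatasetTestHelper, unittest.TestCase):
#         def setUp(self):
#             self.id = 0  # makeDatasetRef auto-increments this counter
#
#         def testRef(self):
#             ref = self.makeDatasetRef("rawDataset", dimensions, storageClass,
#                                       {"instrument": "DummyCam", "detector": 1},
#                                       run="test_run")
#             self.assertEqual(ref.run, "test_run")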


class DatastoreTestHelper:
    """Helper methods for Datastore tests."""

    def setUpDatastoreTests(self, registryClass, configClass):
        """Shared setUp code for all Datastore tests."""
        self.registry = registryClass()

        # Need to keep ID for each datasetRef since we have no butler
        # for these tests
        self.id = 1

        self.config = configClass(self.configFile)

        # Some subclasses override the working root directory
        if self.root is not None:
            self.datastoreType.setConfigRoot(self.root, self.config, self.config.copy())

    def makeDatastore(self, sub=None):
        """Make a new Datastore instance of the appropriate type.

        Parameters
        ----------
        sub : `str`, optional
            If not None, the returned Datastore will be distinct from any
            Datastore constructed with a different value of ``sub``.  For
            PosixDatastore, for example, the converse is also true, and
            ``sub`` is used as a subdirectory to form the new root.

        Returns
        -------
        datastore : `Datastore`
            Datastore constructed by this routine using the supplied
            optional subdirectory if supported.
        """
        config = self.config.copy()
        if sub is not None and self.root is not None:
            self.datastoreType.setConfigRoot(os.path.join(self.root, sub), config, self.config)
        if sub is not None:
            # Ensure that each datastore gets its own registry
            registryClass = type(self.registry)
            registry = registryClass()
        else:
            registry = self.registry
        return self.datastoreType(config=config, bridgeManager=registry.getDatastoreBridgeManager())
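

# Sketch of the wiring a concrete test case is expected to provide before
# calling setUpDatastoreTests (the class and file names here are
# hypothetical; real tests supply their own registry, config, and
# datastore classes via ``configFile``, ``root``, and ``datastoreType``):
#
#     class ExampleDatastoreTestCase(DatastoreTestHelper, unittest.TestCase):
#         configFile = "exampleDatastore.yaml"  # hypothetical config path
#         root = None                           # or a temporary directory
#         datastoreType = ExampleDatastore      # hypothetical Datastore class
#
#         def setUp(self):
#             self.setUpDatastoreTests(ExampleRegistry, ExampleConfig)
#
#         def testDistinctRoots(self):
#             # Datastores built with different ``sub`` values get their
#             # own roots and their own registries.
#             a = self.makeDatastore(sub="a")
#             b = self.makeDatastore(sub="b")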


class BadWriteFormatter(YamlFormatter):
    """A formatter that never works but does leave a file behind."""

    def _readFile(self, path, pytype=None):
        raise NotImplementedError("This formatter cannot read anything")

    def _writeFile(self, inMemoryDataset):
        """Write an empty file and then raise an exception."""
        with open(self.fileDescriptor.location.path, "wb"):
            pass
        raise RuntimeError("Did not succeed in writing file")


class BadNoWriteFormatter(BadWriteFormatter):
    """A formatter that always fails without writing anything."""

    def _writeFile(self, inMemoryDataset):
        raise RuntimeError("Did not write anything at all")
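

# These two formatters exist so tests can probe a datastore's error
# handling: BadWriteFormatter leaves a stray empty file behind (exercising
# cleanup of partial writes), while BadNoWriteFormatter fails before
# touching disk.  A test would typically register one of them for a storage
# class and then assert along these lines (a sketch; the exact checks
# depend on the datastore under test):
#
#     with self.assertRaises(RuntimeError):
#         datastore.put(inMemoryDataset, ref)
#     self.assertFalse(datastore.exists(ref))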


class MultiDetectorFormatter(YamlFormatter):
    """A read-only formatter that extracts a per-detector entry from a
    single YAML file, keyed by the ``detector`` value in the dataId."""

    def _writeFile(self, inMemoryDataset):
        raise NotImplementedError("Cannot write")

    def _fromBytes(self, serializedDataset, pytype=None):
        data = super()._fromBytes(serializedDataset)
        if self.dataId is None:
            raise RuntimeError("This formatter requires a dataId")
        if "detector" not in self.dataId:
            raise RuntimeError("This formatter requires detector to be present in dataId")
        key = f"detector{self.dataId['detector']}"
        if key in data:
            return pytype(data[key])
        raise RuntimeError(f"Could not find '{key}' in data file")
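

# The YAML payload this formatter expects holds one entry per detector,
# named by the key constructed above.  For example (an assumed layout), a
# dataId of {"detector": 1} read against a file such as
#
#     detector0:
#       gain: 1.1
#     detector1:
#       gain: 1.2
#
# would make _fromBytes return ``pytype(data["detector1"])``.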