Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

# Public names exported by ``from ... import *``.
__all__ = (
    "FitsCatalogDatasetsHelper",
    "DatasetTestHelper",
    "DatastoreTestHelper",
    "BadWriteFormatter",
    "BadNoWriteFormatter",
    "MultiDetectorFormatter",
)

24 

25import os 

26from lsst.daf.butler import DatasetType, DatasetRef 

27from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter 

28 

29 

class FitsCatalogDatasetsHelper:
    """Mixin with helpers for tests that read and compare afw source
    catalogs.

    Expects the host test case to provide ``self.testDir`` and the usual
    ``unittest``/``lsst.utils.tests`` assertion methods.
    """

    def makeExampleCatalog(self):
        """Return the example source catalog shipped with the test data."""
        import lsst.afw.table
        catalogPath = os.path.join(self.testDir, "data", "basic", "source_catalog.fits")
        return lsst.afw.table.SourceCatalog.readFits(catalogPath)

    def assertCatalogEqual(self, inputCatalog, outputCatalog):
        """Assert that two catalogs agree on a representative set of fields.

        Checks the PSF flux value and flag, the centroid and shape slot
        aliases, the centroid value, and the diagonal entries of the
        centroid and shape error matrices.
        """
        import lsst.afw.table
        self.assertIsInstance(outputCatalog, lsst.afw.table.SourceCatalog)
        recordIn = inputCatalog[0]
        recordOut = outputCatalog[0]
        schemaIn = inputCatalog.getTable().getSchema()
        schemaOut = outputCatalog.getTable().getSchema()
        # PSF flux value and flag must match exactly.
        self.assertEqual(recordIn.getPsfInstFlux(), recordOut.getPsfInstFlux())
        self.assertEqual(recordIn.getPsfFluxFlag(), recordOut.getPsfFluxFlag())
        # The centroid slot alias and value must survive a round trip.
        self.assertEqual(schemaIn.getAliasMap().get("slot_Centroid"),
                         schemaOut.getAliasMap().get("slot_Centroid"))
        self.assertEqual(recordIn.getCentroid(), recordOut.getCentroid())
        # Diagonal centroid-error entries compared with a relative tolerance.
        for i in (0, 1):
            self.assertFloatsAlmostEqual(recordIn.getCentroidErr()[i, i],
                                         recordOut.getCentroidErr()[i, i], rtol=1e-6)
        # Same checks for the shape slot and its error matrix diagonal.
        self.assertEqual(schemaIn.getAliasMap().get("slot_Shape"),
                         schemaOut.getAliasMap().get("slot_Shape"))
        for i in (0, 1, 2):
            self.assertFloatsAlmostEqual(recordIn.getShapeErr()[i, i],
                                         recordOut.getShapeErr()[i, i], rtol=1e-6)

66 

67 

class DatasetTestHelper:
    """Helper methods for Datasets"""

    def makeDatasetRef(self, datasetTypeName, dimensions, storageClass, dataId, *, id=None, run=None,
                       conform=True):
        """Make a DatasetType and wrap it in a DatasetRef for a test"""
        # Build one component ref per storage-class component; each component
        # gets a fresh auto-assigned ID (id=None) from _makeDatasetRef.
        componentRefs = {
            componentName: self._makeDatasetRef(
                DatasetType.nameWithComponent(datasetTypeName, componentName),
                dimensions, componentStorageClass, dataId,
                id=None, run=run, conform=conform)
            for componentName, componentStorageClass in storageClass.components.items()
        }
        return self._makeDatasetRef(datasetTypeName, dimensions, storageClass, dataId,
                                    id=id, run=run, conform=conform, components=componentRefs)

    def _makeDatasetRef(self, datasetTypeName, dimensions, storageClass, dataId, *, id=None, run=None,
                        conform=True, components=None):
        # Helper for makeDatasetRef: construct the DatasetType, then fill in
        # an auto-incremented ID and a default run when none were supplied.
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        if id is None:
            # self.id is the per-test counter (see DatastoreTestHelper).
            self.id += 1
            id = self.id
        if run is None:
            run = "dummy"
        return DatasetRef(datasetType, dataId, id=id, run=run, conform=conform, components=components)

94 

95 

class DatastoreTestHelper:
    """Helper methods for Datastore tests"""

    def setUpDatastoreTests(self, registryClass, configClass):
        """Shared setUp code for all Datastore tests"""
        self.registry = registryClass()

        # Need to keep ID for each datasetRef since we have no butler
        # for these tests
        self.id = 1

        self.config = configClass(self.configFile)

        # Some subclasses override the working root directory
        if self.root is not None:
            self.datastoreType.setConfigRoot(self.root, self.config, self.config.copy())

    def makeDatastore(self, sub=None):
        """Construct a new Datastore instance of the appropriate type.

        Parameters
        ----------
        sub : str, optional
            If not None, the returned Datastore will be distinct from any
            Datastore constructed with a different value of ``sub``. For
            PosixDatastore, for example, the converse is also true, and ``sub``
            is used as a subdirectory to form the new root.

        Returns
        -------
        datastore : `Datastore`
            Datastore constructed by this routine using the supplied
            optional subdirectory if supported.
        """
        config = self.config.copy()
        if sub is not None and self.root is not None:
            self.datastoreType.setConfigRoot(os.path.join(self.root, sub), config, self.config)
        if sub is None:
            registry = self.registry
        else:
            # Ensure that each datastore gets its own registry
            registry = type(self.registry)()
        return self.datastoreType(config=config, bridgeManager=registry.getDatastoreBridgeManager())

140 

141 

class BadWriteFormatter(YamlFormatter):
    """A formatter that never works but does leave a file behind."""

    def _readFile(self, path, pytype=None):
        # Reading is unconditionally unsupported for this test formatter.
        raise NotImplementedError("This formatter can not read anything")

    def _writeFile(self, inMemoryDataset):
        """Write an empty file and then raise an exception."""
        # Touch the output location so an (empty) artifact exists on disk,
        # then fail as if the write went wrong part-way through.
        open(self.fileDescriptor.location.path, "wb").close()
        raise RuntimeError("Did not succeed in writing file")

153 

154 

class BadNoWriteFormatter(BadWriteFormatter):
    """A formatter that always fails without writing anything."""

    def _writeFile(self, inMemoryDataset):
        """Raise without creating any output artifact.

        Parameters
        ----------
        inMemoryDataset : `object`
            Dataset that would have been written; ignored.

        Raises
        ------
        RuntimeError
            Always raised; unlike the base class, no file is created first.
        """
        # Bug fix: the original message was garbled English
        # ("Did not writing anything at all").
        raise RuntimeError("Did not write anything at all")

160 

161 

162class MultiDetectorFormatter(YamlFormatter): 

163 

164 def _writeFile(self, inMemoryDataset): 

165 raise NotImplementedError("Can not write") 

166 

167 def _fromBytes(self, serializedDataset, pytype=None): 

168 data = super()._fromBytes(serializedDataset) 

169 if self.dataId is None: 

170 raise RuntimeError("This formatter requires a dataId") 

171 if "detector" not in self.dataId: 

172 raise RuntimeError("This formatter requires detector to be present in dataId") 

173 key = f"detector{self.dataId['detector']}" 

174 if key in data: 

175 return pytype(data[key]) 

176 raise RuntimeError(f"Could not find '{key}' in data file")