
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ("FitsCatalogDatasetsHelper", "DatasetTestHelper", "DatastoreTestHelper",
           "BadWriteFormatter", "BadNoWriteFormatter", "MultiDetectorFormatter")

import os
from lsst.daf.butler import DatasetType, DatasetRef
from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter


class FitsCatalogDatasetsHelper:
    """Helper methods for tests that read and compare afw FITS source
    catalogs."""

    def makeExampleCatalog(self):
        import lsst.afw.table
        catalogPath = os.path.join(self.testDir, "data", "basic", "source_catalog.fits")
        return lsst.afw.table.SourceCatalog.readFits(catalogPath)

    def assertCatalogEqual(self, inputCatalog, outputCatalog):
        import lsst.afw.table
        self.assertIsInstance(outputCatalog, lsst.afw.table.SourceCatalog)
        inputTable = inputCatalog.getTable()
        inputRecord = inputCatalog[0]
        outputTable = outputCatalog.getTable()
        outputRecord = outputCatalog[0]
        self.assertEqual(inputRecord.getPsfInstFlux(), outputRecord.getPsfInstFlux())
        self.assertEqual(inputRecord.getPsfFluxFlag(), outputRecord.getPsfFluxFlag())
        self.assertEqual(inputTable.getSchema().getAliasMap().get("slot_Centroid"),
                         outputTable.getSchema().getAliasMap().get("slot_Centroid"))
        self.assertEqual(inputRecord.getCentroid(), outputRecord.getCentroid())
        self.assertFloatsAlmostEqual(
            inputRecord.getCentroidErr()[0, 0],
            outputRecord.getCentroidErr()[0, 0], rtol=1e-6)
        self.assertFloatsAlmostEqual(
            inputRecord.getCentroidErr()[1, 1],
            outputRecord.getCentroidErr()[1, 1], rtol=1e-6)
        self.assertEqual(inputTable.getSchema().getAliasMap().get("slot_Shape"),
                         outputTable.getSchema().getAliasMap().get("slot_Shape"))
        self.assertFloatsAlmostEqual(
            inputRecord.getShapeErr()[0, 0],
            outputRecord.getShapeErr()[0, 0], rtol=1e-6)
        self.assertFloatsAlmostEqual(
            inputRecord.getShapeErr()[1, 1],
            outputRecord.getShapeErr()[1, 1], rtol=1e-6)
        self.assertFloatsAlmostEqual(
            inputRecord.getShapeErr()[2, 2],
            outputRecord.getShapeErr()[2, 2], rtol=1e-6)
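
# A minimal usage sketch for the helper above (hedged: the base class and
# ``testDir`` value are illustrative assumptions; ``assertFloatsAlmostEqual``
# comes from ``lsst.utils.tests.TestCase``):
#
#     class ExampleCatalogTestCase(lsst.utils.tests.TestCase,
#                                  FitsCatalogDatasetsHelper):
#         testDir = os.path.dirname(__file__)
#
#         def testRoundTrip(self):
#             catalog = self.makeExampleCatalog()
#             self.assertCatalogEqual(catalog, catalog)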


class DatasetTestHelper:
    """Helper methods for Datasets."""

    def makeDatasetRef(self, datasetTypeName, dimensions, storageClass, dataId, *, id=None, run=None,
                       conform=True):
        """Make a DatasetType and wrap it in a DatasetRef for a test."""
        # Build a component ref for each component defined by the storage
        # class, then wrap them all in the composite ref.
        compRefs = {}
        for compName, sc in storageClass.components.items():
            compRefs[compName] = self._makeDatasetRef(DatasetType.nameWithComponent(datasetTypeName,
                                                                                    compName),
                                                      dimensions, sc, dataId, id=None, run=run,
                                                      conform=conform)
        return self._makeDatasetRef(datasetTypeName, dimensions, storageClass, dataId, id=id, run=run,
                                    conform=conform, components=compRefs)

    def _makeDatasetRef(self, datasetTypeName, dimensions, storageClass, dataId, *, id=None, run=None,
                        conform=True, components=None):
        # Helper for makeDatasetRef: allocate an ID from the test counter
        # if none was supplied, defaulting the run name to "dummy".
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        if id is None:
            self.id += 1
            id = self.id
        if run is None:
            run = "dummy"
        return DatasetRef(datasetType, dataId, id=id, run=run, conform=conform, components=components)
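
# A hedged usage note: makeDatasetRef draws dataset IDs from a mutable
# ``self.id`` counter on the test case (DatastoreTestHelper.setUpDatastoreTests
# initializes one), so a test mixing in only this class must set it first:
#
#     self.id = 0
#     ref = self.makeDatasetRef("metric", dimensions, storageClass,
#                               {"instrument": "dummy", "visit": 52})
#
# where ``dimensions`` and ``storageClass`` come from the test's registry
# configuration ("metric" and the dataId values are placeholders).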


class DatastoreTestHelper:
    """Helper methods for Datastore tests."""

    def setUpDatastoreTests(self, registryClass, configClass):
        """Shared setUp code for all Datastore tests."""
        self.registry = registryClass()

        # Need to keep an ID counter for each datasetRef since we have no
        # butler for these tests
        self.id = 1

        self.config = configClass(self.configFile)

        # Some subclasses override the working root directory
        if self.root is not None:
            self.datastoreType.setConfigRoot(self.root, self.config, self.config.copy())
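
    # Note, inferred from the attribute accesses above and in makeDatastore
    # below: classes mixing in DatastoreTestHelper are expected to define
    # ``configFile``, ``root``, and ``datastoreType`` (the Datastore
    # subclass under test) as class attributes.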


    def makeDatastore(self, sub=None):
        """Make a new Datastore instance of the appropriate type.

        Parameters
        ----------
        sub : str, optional
            If not None, the returned Datastore will be distinct from any
            Datastore constructed with a different value of ``sub``. For
            PosixDatastore, for example, the converse is also true, and
            ``sub`` is used as a subdirectory to form the new root.

        Returns
        -------
        datastore : `Datastore`
            Datastore constructed by this routine using the supplied
            optional subdirectory if supported.
        """
        config = self.config.copy()
        if sub is not None and self.root is not None:
            self.datastoreType.setConfigRoot(os.path.join(self.root, sub), config, self.config)
        if sub is not None:
            # Ensure that each datastore gets its own registry
            registryClass = type(self.registry)
            registry = registryClass()
        else:
            registry = self.registry
        return self.datastoreType(config=config, registry=registry)
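
# A hedged sketch of a concrete test case built from these helpers; the
# PosixDatastore, DummyRegistry, and DatastoreConfig names and the config
# filename are placeholders for whatever the real test suite provides:
#
#     class PosixDatastoreTestCase(unittest.TestCase, DatasetTestHelper,
#                                  DatastoreTestHelper):
#         configFile = "config/basic/butler.yaml"
#         root = None
#         datastoreType = PosixDatastore
#
#         def setUp(self):
#             self.setUpDatastoreTests(DummyRegistry, DatastoreConfig)
#             datastore = self.makeDatastore()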


class BadWriteFormatter(YamlFormatter):
    """A formatter that never works but does leave a file behind."""

    def _readFile(self, path, pytype=None):
        raise NotImplementedError("This formatter cannot read anything")

    def _writeFile(self, inMemoryDataset):
        """Write an empty file and then raise an exception."""
        with open(self.fileDescriptor.location.path, "wb"):
            pass
        raise RuntimeError("Did not succeed in writing file")


class BadNoWriteFormatter(BadWriteFormatter):
    """A formatter that always fails without writing anything."""

    def _writeFile(self, inMemoryDataset):
        raise RuntimeError("Did not write anything at all")
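
# These two formatters exist so that Datastore tests can exercise failure
# paths: substituting them for YamlFormatter in a test datastore's formatter
# configuration (an assumption about how they get wired up) lets a test
# verify that a failed put() cleans up partial files and registry entries,
# both when a stray file is left behind and when nothing was written.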


class MultiDetectorFormatter(YamlFormatter):
    """A read-only YAML formatter that extracts a single detector's entry
    from a dataset storing multiple detectors, keyed on the ``detector``
    value in the dataId.
    """

    def _writeFile(self, inMemoryDataset):
        raise NotImplementedError("Cannot write")

    def _fromBytes(self, serializedDataset, pytype=None):
        data = super()._fromBytes(serializedDataset)
        if self.dataId is None:
            raise RuntimeError("This formatter requires a dataId")
        if "detector" not in self.dataId:
            raise RuntimeError("This formatter requires detector to be present in dataId")
        key = f"detector{self.dataId['detector']}"
        if key in data:
            return pytype(data[key])
        raise RuntimeError(f"Could not find '{key}' in data file")
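
# A hedged sketch of the serialized layout _fromBytes expects, inferred from
# the key construction above (the payload values are placeholders):
#
#     data = {"detector0": {...}, "detector1": {...}}
#
# With dataId {"detector": 1}, the formatter returns pytype(data["detector1"]).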