Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

22__all__ = ("FitsCatalogDatasetsHelper", "DatasetTestHelper", "DatastoreTestHelper", 

23 "BadWriteFormatter", "BadNoWriteFormatter", "MultiDetectorFormatter") 

24 

25import os 

26from lsst.daf.butler import DatasetType, DatasetRef 

27from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter 

28 

29 

class FitsCatalogDatasetsHelper:
    """Helper methods for tests that read and compare afw source catalogs."""

    def makeExampleCatalog(self):
        """Read and return the example source catalog shipped with the tests."""
        import lsst.afw.table
        catalogPath = os.path.join(self.testDir, "data", "basic", "source_catalog.fits")
        return lsst.afw.table.SourceCatalog.readFits(catalogPath)

    def assertCatalogEqual(self, inputCatalog, outputCatalog):
        """Assert that ``outputCatalog`` matches ``inputCatalog`` on the
        fields exercised by these tests: PSF flux, slot aliases, centroid,
        and the diagonal elements of the centroid/shape error matrices."""
        import lsst.afw.table
        self.assertIsInstance(outputCatalog, lsst.afw.table.SourceCatalog)
        inSchema = inputCatalog.getTable().getSchema()
        outSchema = outputCatalog.getTable().getSchema()
        inRecord = inputCatalog[0]
        outRecord = outputCatalog[0]
        self.assertEqual(inRecord.getPsfInstFlux(), outRecord.getPsfInstFlux())
        self.assertEqual(inRecord.getPsfFluxFlag(), outRecord.getPsfFluxFlag())
        self.assertEqual(inSchema.getAliasMap().get("slot_Centroid"),
                         outSchema.getAliasMap().get("slot_Centroid"))
        self.assertEqual(inRecord.getCentroid(), outRecord.getCentroid())
        # Error matrices are compared element-by-element on the diagonal only.
        for i in (0, 1):
            self.assertFloatsAlmostEqual(
                inRecord.getCentroidErr()[i, i],
                outRecord.getCentroidErr()[i, i], rtol=1e-6)
        self.assertEqual(inSchema.getAliasMap().get("slot_Shape"),
                         outSchema.getAliasMap().get("slot_Shape"))
        for i in (0, 1, 2):
            self.assertFloatsAlmostEqual(
                inRecord.getShapeErr()[i, i],
                outRecord.getShapeErr()[i, i], rtol=1e-6)

66 

67 

class DatasetTestHelper:
    """Helper methods for Datasets"""

    def makeDatasetRef(self, datasetTypeName, dimensions, storageClass, dataId, *, id=None, run=None,
                       conform=True):
        """Make a DatasetType and wrap it in a DatasetRef for a test"""
        dsType = DatasetType(datasetTypeName, dimensions, storageClass)
        if id is None:
            # No explicit ID supplied: hand out the next value from the
            # counter maintained on the test case (there is no butler here).
            self.id += 1
            id = self.id
        run = "dummy" if run is None else run
        return DatasetRef(dsType, dataId, id=id, run=run, conform=conform)

81 

82 

class DatastoreTestHelper:
    """Helper methods for Datastore tests"""

    def setUpDatastoreTests(self, registryClass, configClass):
        """Shared setUp code for all Datastore tests"""
        self.registry = registryClass()

        # There is no butler in these tests, so dataset IDs are tracked
        # manually starting from 1.
        self.id = 1

        self.config = configClass(self.configFile)

        # Subclasses may supply their own working root directory.
        if self.root is not None:
            self.datastoreType.setConfigRoot(self.root, self.config, self.config.copy())

    def makeDatastore(self, sub=None):
        """Make a new Datastore instance of the appropriate type.

        Parameters
        ----------
        sub : str, optional
            If not None, the returned Datastore will be distinct from any
            Datastore constructed with a different value of ``sub``.  For
            PosixDatastore, for example, the converse is also true, and
            ``sub`` is used as a subdirectory to form the new root.

        Returns
        -------
        datastore : `Datastore`
            Datastore constructed by this routine using the supplied
            optional subdirectory if supported.
        """
        config = self.config.copy()
        if sub is not None and self.root is not None:
            self.datastoreType.setConfigRoot(os.path.join(self.root, sub), config, self.config)
        registry = self.registry
        if sub is not None:
            # Each sub-datastore gets its own independent registry instance.
            registry = type(self.registry)()
        return self.datastoreType(config=config, registry=registry)

127 

128 

class BadWriteFormatter(YamlFormatter):
    """A formatter that never works but does leave a file behind."""

    def _readFile(self, path, pytype=None):
        # Reading is unconditionally unsupported by this formatter.
        raise NotImplementedError("This formatter can not read anything")

    def _writeFile(self, inMemoryDataset):
        """Write an empty file and then raise an exception."""
        # Create (or truncate) the target file so an empty file is left
        # on disk, then report failure.
        open(self.fileDescriptor.location.path, "wb").close()
        raise RuntimeError("Did not succeed in writing file")

140 

141 

class BadNoWriteFormatter(BadWriteFormatter):
    """A formatter that always fails without writing anything."""

    def _writeFile(self, inMemoryDataset):
        """Raise without touching the file system.

        Parameters
        ----------
        inMemoryDataset : `object`
            Ignored; this formatter never writes.

        Raises
        ------
        RuntimeError
            Always, before any file is created (unlike the base class,
            which leaves an empty file behind).
        """
        # Fixed the garbled message ("Did not writing anything at all").
        raise RuntimeError("Did not write anything at all")

147 

148 

class MultiDetectorFormatter(YamlFormatter):
    """A read-only YAML formatter that selects one detector's entry from the
    deserialized data using the ``detector`` value in the dataId."""

    def _writeFile(self, inMemoryDataset):
        # Writing is deliberately unsupported.
        raise NotImplementedError("Can not write")

    def _fromBytes(self, serializedDataset, pytype=None):
        parsed = super()._fromBytes(serializedDataset)
        if self.dataId is None:
            raise RuntimeError("This formatter requires a dataId")
        if "detector" not in self.dataId:
            raise RuntimeError("This formatter requires detector to be present in dataId")
        # Entries in the data file are keyed as "detector<N>".
        key = f"detector{self.dataId['detector']}"
        if key not in parsed:
            raise RuntimeError(f"Could not find '{key}' in data file")
        return pytype(parsed[key])