Coverage for python/lsst/obs/subaru/_instrument.py: 21%

124 statements

coverage.py v6.4.4, created at 2022-09-13 03:16 -0700

# This file is part of obs_subaru.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Gen3 Butler registry declarations for Hyper Suprime-Cam.
"""

__all__ = ("HyperSuprimeCam",)

import os
import pickle
import logging

from functools import lru_cache

import astropy.time
from lsst.utils import getPackageDir
from lsst.afw.cameraGeom import makeCameraFromPath, CameraConfig
from lsst.daf.butler import (DatasetType, DataCoordinate, FileDataset, DatasetRef,
                             CollectionType, Timespan)
from lsst.utils.introspection import get_full_type_name
from lsst.obs.base import Instrument, VisitSystem

from ..hsc.hscPupil import HscPupilFactory
from ..hsc.hscFilters import HSC_FILTER_DEFINITIONS
from ..hsc.makeTransmissionCurves import (getSensorTransmission, getOpticsTransmission,
                                          getFilterTransmission, getAtmosphereTransmission)
from .strayLight.formatter import SubaruStrayLightDataFormatter

log = logging.getLogger(__name__)


class HyperSuprimeCam(Instrument):
    """Gen3 Butler specialization class for Subaru's Hyper Suprime-Cam.
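
    Examples
    --------
    A minimal registration sketch, assuming ``obs_subaru`` is set up and a
    writeable Gen3 repository exists at the placeholder path:

    >>> from lsst.daf.butler import Butler
    >>> butler = Butler("/path/to/repo", writeable=True)  # doctest: +SKIP
    >>> instrument = HyperSuprimeCam()
    >>> instrument.getName()
    'HSC'
    >>> instrument.register(butler.registry)  # doctest: +SKIP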
    """

    policyName = "hsc"
    obsDataPackage = "obs_subaru_data"
    filterDefinitions = HSC_FILTER_DEFINITIONS
    additionalCuratedDatasetTypes = ("bfKernel", "transmission_optics", "transmission_sensor",
                                     "transmission_filter", "transmission_atmosphere", "yBackground")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        packageDir = getPackageDir("obs_subaru")
        self.configPaths = [os.path.join(packageDir, "config"),
                            os.path.join(packageDir, "config", self.policyName)]

    @classmethod
    def getName(cls):
        # Docstring inherited from Instrument.getName
        return "HSC"

    def register(self, registry, update=False):
        # Docstring inherited from Instrument.register
        camera = self.getCamera()
        # The maximum values below make Gen3's ObservationDataIdPacker produce
        # outputs that match Gen2's ccdExposureId.
        obsMax = 21474800
        with registry.transaction():
            registry.syncDimensionData(
                "instrument",
                {
                    "name": self.getName(),
                    "detector_max": 200,
                    "visit_max": obsMax,
                    "exposure_max": obsMax,
                    "class_name": get_full_type_name(self),
                    # Some schemas support default visit_system
                    "visit_system": VisitSystem.ONE_TO_ONE.value,
                },
                update=update
            )
            for detector in camera:
                registry.syncDimensionData(
                    "detector",
                    {
                        "instrument": self.getName(),
                        "id": detector.getId(),
                        "full_name": detector.getName(),
                        # TODO: make sure these definitions are consistent with
                        # those extracted by astro_metadata_translator, and
                        # test that they remain consistent somehow.
                        "name_in_raft": detector.getName().split("_")[1],
                        "raft": detector.getName().split("_")[0],
                        "purpose": str(detector.getType()).split(".")[-1],
                    },
                    update=update
                )
            self._registerFilters(registry, update=update)

    def getRawFormatter(self, dataId):
        # Docstring inherited from Instrument.getRawFormatter
        # Import the formatter here to prevent a circular dependency.
        from .rawFormatter import HyperSuprimeCamRawFormatter, HyperSuprimeCamCornerRawFormatter
        if dataId["detector"] in (100, 101, 102, 103):
            return HyperSuprimeCamCornerRawFormatter
        else:
            return HyperSuprimeCamRawFormatter

    def getCamera(self):
        """Retrieve the cameraGeom representation of HSC.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned cameras to a Gen3 repo.
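
        Examples
        --------
        A minimal sketch, assuming the ``obs_subaru`` package is set up so
        `~lsst.utils.getPackageDir` can locate it:

        >>> camera = HyperSuprimeCam().getCamera()  # doctest: +SKIP
        >>> for detector in camera:  # doctest: +SKIP
        ...     print(detector.getId(), detector.getName())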
        """
        path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "camera")
        return self._getCameraFromPath(path)

    @staticmethod
    @lru_cache()
    def _getCameraFromPath(path):
        """Return the camera geometry given solely the path to the location
        of that definition."""
        config = CameraConfig()
        config.load(os.path.join(path, "camera.py"))
        return makeCameraFromPath(
            cameraConfig=config,
            ampInfoPath=path,
            shortNameFunc=lambda name: name.replace(" ", "_"),
            pupilFactoryClass=HscPupilFactory
        )

    def getBrighterFatterKernel(self):
        """Return the brighter-fatter kernel for HSC as a `numpy.ndarray`.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned kernels to a Gen3 repo.
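
        Examples
        --------
        A minimal sketch, assuming the ``obs_subaru`` package is set up:

        >>> kernel = HyperSuprimeCam().getBrighterFatterKernel()  # doctest: +SKIP
        >>> kernel.shape  # doctest: +SKIP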
        """
        path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "brighter_fatter_kernel.pkl")
        with open(path, "rb") as fd:
            kernel = pickle.load(fd, encoding='latin1')  # encoding for pickle written with Python 2
        return kernel

    def writeAdditionalCuratedCalibrations(self, butler, collection=None, labels=()):
        # Register the CALIBRATION collection that adds validity ranges.
        # This does nothing if it is already registered.
        if collection is None:
            collection = self.makeCalibrationCollectionName(*labels)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)

        # Register the RUN collection that holds these datasets directly. We
        # only need one because all of these datasets have the same (unbounded)
        # validity range right now.
        run = self.makeUnboundedCalibrationRunName(*labels)
        butler.registry.registerRun(run)
        baseDataId = butler.registry.expandDataId(instrument=self.getName())
        refs = []

        # Write brighter-fatter kernel, with an infinite validity range.
        datasetType = DatasetType("bfKernel", ("instrument",), "NumpyArray",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)

        # Load and then put instead of just moving the file in part to ensure
        # the version in-repo is written with Python 3 and does not need
        # `encoding='latin1'` to be read.
        bfKernel = self.getBrighterFatterKernel()
        refs.append(butler.put(bfKernel, datasetType, baseDataId, run=run))

        # The following loops iterate over the values of the dictionaries
        # returned by the transmission functions and ignore the date that is
        # supplied. This is because the dates are single dates rather than
        # ranges, so they do not give a proper notion of validity. As such,
        # unbounded calibration labels are used when inserting into the
        # database. In the future these could and probably should be updated
        # to properly account for which ranges are considered valid.

        # Write optical transmissions
        opticsTransmissions = getOpticsTransmission()
        datasetType = DatasetType("transmission_optics",
                                  ("instrument",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in opticsTransmissions.values():
            if entry is None:
                continue
            refs.append(butler.put(entry, datasetType, baseDataId, run=run))

        # Write sensor transmissions
        sensorTransmissions = getSensorTransmission()
        datasetType = DatasetType("transmission_sensor",
                                  ("instrument", "detector",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in sensorTransmissions.values():
            if entry is None:
                continue
            for sensor, curve in entry.items():
                dataId = DataCoordinate.standardize(baseDataId, detector=sensor)
                refs.append(butler.put(curve, datasetType, dataId, run=run))

        # Write filter transmissions
        filterTransmissions = getFilterTransmission()
        datasetType = DatasetType("transmission_filter",
                                  ("instrument", "physical_filter",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in filterTransmissions.values():
            if entry is None:
                continue
            for band, curve in entry.items():
                dataId = DataCoordinate.standardize(baseDataId, physical_filter=band)
                refs.append(butler.put(curve, datasetType, dataId, run=run))

        # Write atmospheric transmissions
        atmosphericTransmissions = getAtmosphereTransmission()
        datasetType = DatasetType("transmission_atmosphere", ("instrument",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in atmosphericTransmissions.values():
            if entry is None:
                continue
            refs.append(butler.put(entry, datasetType, {"instrument": self.getName()}, run=run))

        # Associate all datasets with the unbounded validity range.
        butler.registry.certify(collection, refs, Timespan(begin=None, end=None))

    def ingestStrayLightData(self, butler, directory, *, transfer=None, collection=None, labels=()):
        """Ingest externally-produced y-band stray light data files into
        a data repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to write with. Any collections associated with it are
            ignored in favor of ``collection`` and/or ``labels``.
        directory : `str`
            Directory containing yBackground-*.fits files.
        transfer : `str`, optional
            If not `None`, must be one of 'move', 'copy', 'hardlink', or
            'symlink', indicating how to transfer the files.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range. If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method. If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCalibrationCollectionName``, but this default name
            may not work well for long-lived repositories unless ``labels`` is
            also provided (and changed every time curated calibrations are
            ingested).
        labels : `Sequence` [ `str` ], optional
            Extra strings to include in collection names, after concatenating
            them with the standard collection name delimiter. If provided,
            these are inserted into the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well as
            the `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``). Usually this is
            just the name of the ticket on which the calibration collection is
            being created.
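
        Examples
        --------
        A minimal invocation sketch; the repository path, data directory, and
        ticket label below are placeholders:

        >>> from lsst.daf.butler import Butler
        >>> butler = Butler("/path/to/repo", writeable=True)  # doctest: +SKIP
        >>> HyperSuprimeCam().ingestStrayLightData(  # doctest: +SKIP
        ...     butler, "/path/to/ybackground", transfer="copy",
        ...     labels=("DM-NNNNN",))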
        """
        # Register the CALIBRATION collection that adds validity ranges.
        # This does nothing if it is already registered.
        if collection is None:
            collection = self.makeCalibrationCollectionName(*labels)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)

        # Register the RUN collection that holds these datasets directly. We
        # only need one because there is only one validity range and hence no
        # data ID conflicts even when there are no validity ranges.
        run = self.makeUnboundedCalibrationRunName(*labels)
        butler.registry.registerRun(run)

        # LEDs covered up around 2018-01-01, no need for correction after that
        # date.
        timespan = Timespan(begin=None, end=astropy.time.Time("2018-01-01", format="iso", scale="tai"))
        datasets = []
        # TODO: should we use a more generic name for the dataset type?
        # This is just the (rather HSC-specific) name used in Gen2, and while
        # the instances of this dataset are camera-specific, the datasetType
        # (which is used in the generic IsrTask) should not be.
        datasetType = DatasetType("yBackground",
                                  dimensions=("physical_filter", "detector",),
                                  storageClass="StrayLightData",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        for detector in self.getCamera():
            path = os.path.join(directory, f"ybackground-{detector.getId():03d}.fits")
            if not os.path.exists(path):
                log.warning("No stray light data found for detector %s @ %s.", detector.getId(), path)
                continue
            ref = DatasetRef(datasetType, dataId={"instrument": self.getName(),
                                                  "detector": detector.getId(),
                                                  "physical_filter": "HSC-Y"})
            datasets.append(FileDataset(refs=ref, path=path, formatter=SubaruStrayLightDataFormatter))
        butler.registry.registerDatasetType(datasetType)
        with butler.transaction():
            butler.ingest(*datasets, transfer=transfer, run=run)
            refs = []
            for dataset in datasets:
                refs.extend(dataset.refs)
            butler.registry.certify(collection, refs, timespan)