# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ("FitsExposureFormatter", "FitsImageFormatter", "FitsMaskFormatter",
           "FitsMaskedImageFormatter")

from astro_metadata_translator import fix_header
from lsst.daf.base import PropertySet
from lsst.daf.butler import Formatter
# Do not use ExposureFitsReader.readMetadata because that strips
# out lots of headers and there is no way to recover them
from lsst.afw.fits import readMetadata
from lsst.afw.image import ExposureFitsReader, ImageFitsReader, MaskFitsReader, MaskedImageFitsReader
# Needed for ApCorrMap to resolve properly
from lsst.afw.math import BoundedField  # noqa: F401


class FitsExposureFormatter(Formatter):
    """Interface for reading and writing Exposures to and from FITS files.

    This Formatter supports write recipes.

    Each ``FitsExposureFormatter`` recipe for FITS compression should
    define ``image``, ``mask`` and ``variance`` entries, each of which may
    contain ``compression`` and ``scaling`` entries. Defaults will be
    provided for any missing elements under ``compression`` and
    ``scaling``.

    The allowed entries under ``compression`` are:

    * ``algorithm`` (`str`): compression algorithm to use
    * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
    * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
    * ``quantizeLevel`` (`float`): cfitsio quantization level

    The allowed entries under ``scaling`` are:

    * ``algorithm`` (`str`): scaling algorithm to use
    * ``bitpix`` (`int`): bits per pixel (0, 8, 16, 32, 64, -32, -64)
    * ``fuzz`` (`bool`): fuzz the values when quantising floating-point values?
    * ``seed`` (`int`): seed for random number generator when fuzzing
    * ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing
      statistics
    * ``quantizeLevel`` (`float`): divisor of the standard deviation for
      ``STDEV_*`` scaling
    * ``quantizePad`` (`float`): number of stdev to allow on the low side (for
      ``STDEV_POSITIVE``/``NEGATIVE``)
    * ``bscale`` (`float`): manually specified ``BSCALE``
      (for ``MANUAL`` scaling)
    * ``bzero`` (`float`): manually specified ``BZERO``
      (for ``MANUAL`` scaling)

    A very simple example YAML recipe:

    .. code-block:: yaml

        lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default

    """
    supportedExtensions = frozenset({".fits", ".fits.gz", ".fits.fz", ".fz", ".fit"})
    extension = ".fits"
    _metadata = None
    supportedWriteParameters = frozenset({"recipe"})
    _readerClass = ExposureFitsReader

    unsupportedParameters = {}
    """Support all parameters."""

    @property
    def metadata(self):
        """The metadata read from this file. It will be stripped as
        components are extracted from it
        (`lsst.daf.base.PropertyList`).
        """
        if self._metadata is None:
            self._metadata = self.readMetadata()
        return self._metadata

    def readMetadata(self):
        """Read all header metadata directly into a PropertyList.

        Returns
        -------
        metadata : `~lsst.daf.base.PropertyList`
            Header metadata.
        """
        md = readMetadata(self.fileDescriptor.location.path)
        fix_header(md)
        return md

    def stripMetadata(self):
        """Remove metadata entries that are parsed into components.

        This is only called when just the metadata is requested; stripping
        entries there forces code that wants other components to ask for those
        components directly rather than trying to extract them from the
        metadata manually, which is fragile. This behavior is an intentional
        change from Gen2.

        Notes
        -----
        The cached header metadata (`~lsst.daf.base.PropertyList`) held by
        this formatter is modified in place; the method takes no arguments.
        """
        # TODO: make sure this covers everything, by delegating to something
        # that doesn't yet exist in afw.image.ExposureInfo.
        from lsst.afw.image import bboxFromMetadata
        from lsst.afw.geom import makeSkyWcs

        # Protect against the metadata being missing
        try:
            bboxFromMetadata(self.metadata)  # always strips
        except LookupError:
            pass
        try:
            makeSkyWcs(self.metadata, strip=True)
        except Exception:
            pass

    def readComponent(self, component, parameters=None):
        """Read a component held by the Exposure.

        Parameters
        ----------
        component : `str`
            Component to read from the file.
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that
            overrides those in ``fileDescriptor``.

        Returns
        -------
        obj : component-dependent
            In-memory component object.

        Raises
        ------
        KeyError
            Raised if the requested component cannot be handled.
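
        Examples
        --------
        A minimal, illustrative sketch (``formatter`` stands for a fully
        constructed instance, which is normally created by the butler rather
        than by hand; the component names come from the mapping below):

        .. code-block:: python

            psf = formatter.readComponent("psf")
            bbox = formatter.readComponent("bbox")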

164 """ 

165 

166 # Metadata is handled explicitly elsewhere 

167 componentMap = {'wcs': ('readWcs', False), 

168 'coaddInputs': ('readCoaddInputs', False), 

169 'psf': ('readPsf', False), 

170 'image': ('readImage', True), 

171 'mask': ('readMask', True), 

172 'variance': ('readVariance', True), 

173 'photoCalib': ('readPhotoCalib', False), 

174 'bbox': ('readBBox', True), 

175 'dimensions': ('readBBox', True), 

176 'xy0': ('readXY0', True), 

177 # TODO: deprecate in DM-27170, remove in DM-27177 

178 'filter': ('readFilter', False), 

179 # TODO: deprecate in DM-27177, remove in DM-27811 

180 'filterLabel': ('readFilterLabel', False), 

181 'validPolygon': ('readValidPolygon', False), 

182 'apCorrMap': ('readApCorrMap', False), 

183 'visitInfo': ('readVisitInfo', False), 

184 'transmissionCurve': ('readTransmissionCurve', False), 

185 'detector': ('readDetector', False), 

186 'extras': ('readExtraComponents', False), 

187 'exposureInfo': ('readExposureInfo', False), 

188 } 

189 method, hasParams = componentMap.get(component, (None, False)) 

190 

191 if method: 

192 # This reader can read standalone Image/Mask files as well 

193 # when dealing with components. 

194 reader = self._readerClass(self.fileDescriptor.location.path) 

195 caller = getattr(reader, method, None) 

196 

197 if caller: 

198 if parameters is None: 

199 parameters = self.fileDescriptor.parameters 

200 if parameters is None: 

201 parameters = {} 

202 self.fileDescriptor.storageClass.validateParameters(parameters) 

203 

204 if hasParams and parameters: 

205 thisComponent = caller(**parameters) 

206 else: 

207 thisComponent = caller() 

208 if component == "dimensions" and thisComponent is not None: 

209 thisComponent = thisComponent.getDimensions() 

210 return thisComponent 

211 else: 

212 raise KeyError(f"Unknown component requested: {component}") 


    def readFull(self, parameters=None):
        """Read the full Exposure object.

        Parameters
        ----------
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that overrides
            those in ``fileDescriptor``.

        Returns
        -------
        exposure : `~lsst.afw.image.Exposure`
            Complete in-memory exposure.
        """
        fileDescriptor = self.fileDescriptor
        if parameters is None:
            parameters = fileDescriptor.parameters
        if parameters is None:
            parameters = {}
        fileDescriptor.storageClass.validateParameters(parameters)
        reader = self._readerClass(fileDescriptor.location.path)
        return reader.read(**parameters)

    def read(self, component=None):
        """Read data from a file.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differs from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.

        Raises
        ------
        ValueError
            Raised if the read and write storage classes differ but no
            component was requested.
        KeyError
            Raised when parameters passed with ``fileDescriptor`` are not
            supported.
        """
        fileDescriptor = self.fileDescriptor
        if fileDescriptor.readStorageClass != fileDescriptor.storageClass:
            if component == "metadata":
                self.stripMetadata()
                return self.metadata
            elif component is not None:
                return self.readComponent(component)
            else:
                raise ValueError("Storage class inconsistency ({} vs {}) but no"
                                 " component requested".format(fileDescriptor.readStorageClass.name,
                                                               fileDescriptor.storageClass.name))
        return self.readFull()

    def write(self, inMemoryDataset):
        """Write a Python object to a file.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to store.
        """
        # Update the location with the formatter-preferred file extension
        self.fileDescriptor.location.updateExtension(self.extension)
        outputPath = self.fileDescriptor.location.path

        # Check to see if we have a recipe requested
        recipeName = self.writeParameters.get("recipe")
        recipe = self.getImageCompressionSettings(recipeName)
        if recipe:
            # Can not construct a PropertySet from a hierarchical
            # dict but can update one.
            ps = PropertySet()
            ps.update(recipe)
            inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
        else:
            inMemoryDataset.writeFits(outputPath)

    def getImageCompressionSettings(self, recipeName):
        """Retrieve the relevant compression settings for this recipe.

        Parameters
        ----------
        recipeName : `str`
            Label associated with the collection of compression parameters
            to select.

        Returns
        -------
        settings : `dict`
            The selected settings.
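
        Notes
        -----
        If the selected recipe requests a ``seed`` of 0 under any ``scaling``
        entry, that seed is replaced by a value derived from the data ID, as
        in this sketch (``dataId`` stands in for the formatter's data ID):

        .. code-block:: python

            seed = hash(tuple(dataId.items())) % 2**31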

312 """ 

313 # if no recipe has been provided and there is no default 

314 # return immediately 

315 if not recipeName: 

316 if "default" not in self.writeRecipes: 

317 return {} 

318 recipeName = "default" 

319 

320 if recipeName not in self.writeRecipes: 

321 raise RuntimeError(f"Unrecognized recipe option given for compression: {recipeName}") 

322 

323 recipe = self.writeRecipes[recipeName] 

324 

325 # Set the seed based on dataId 

326 seed = hash(tuple(self.dataId.items())) % 2**31 

327 for plane in ("image", "mask", "variance"): 

328 if plane in recipe and "scaling" in recipe[plane]: 

329 scaling = recipe[plane]["scaling"] 

330 if "seed" in scaling and scaling["seed"] == 0: 

331 scaling["seed"] = seed 

332 

333 return recipe 


    @classmethod
    def validateWriteRecipes(cls, recipes):
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        TODO: replace this custom validation code with Cerberus (DM-11846)

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate. Can be empty dict or `None`.

        Returns
        -------
        validated : `dict`
            Validated recipes. Returns what was given if there are no
            recipes listed.

        Raises
        ------
        RuntimeError
            Raised if validation fails.
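
        Examples
        --------
        A minimal sketch of how defaults are filled in (the recipe content is
        illustrative rather than a recommended configuration):

        .. code-block:: python

            recipes = {
                "default": {
                    plane: {"compression": {"algorithm": "GZIP_SHUFFLE"}}
                    for plane in ("image", "mask", "variance")
                }
            }
            validated = FitsExposureFormatter.validateWriteRecipes(recipes)
            # Unspecified compression keys pick up the schema defaults below,
            # e.g. validated["default"]["image"]["compression"]["rows"] == 1,
            # and a complete default ``scaling`` entry is added per plane.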

358 """ 

359 # Schemas define what should be there, and the default values (and by 

360 # the default value, the expected type). 

361 compressionSchema = { 

362 "algorithm": "NONE", 

363 "rows": 1, 

364 "columns": 0, 

365 "quantizeLevel": 0.0, 

366 } 

367 scalingSchema = { 

368 "algorithm": "NONE", 

369 "bitpix": 0, 

370 "maskPlanes": ["NO_DATA"], 

371 "seed": 0, 

372 "quantizeLevel": 4.0, 

373 "quantizePad": 5.0, 

374 "fuzz": True, 

375 "bscale": 1.0, 

376 "bzero": 0.0, 

377 } 


        if not recipes:
            # We can not insist on recipes being specified
            return recipes

        def checkUnrecognized(entry, allowed, description):
            """Check to see if the entry contains unrecognised keywords"""
            unrecognized = set(entry) - set(allowed)
            if unrecognized:
                raise RuntimeError(
                    f"Unrecognized entries when parsing image compression recipe {description}: "
                    f"{unrecognized}")

        validated = {}
        for name in recipes:
            checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
            validated[name] = {}
            for plane in ("image", "mask", "variance"):
                checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                                  f"{name}->{plane}")

                np = {}
                validated[name][plane] = np
                for settings, schema in (("compression", compressionSchema),
                                         ("scaling", scalingSchema)):
                    np[settings] = {}
                    if settings not in recipes[name][plane]:
                        for key in schema:
                            np[settings][key] = schema[key]
                        continue
                    entry = recipes[name][plane][settings]
                    checkUnrecognized(entry, schema.keys(), f"{name}->{plane}->{settings}")
                    for key in schema:
                        value = type(schema[key])(entry[key]) if key in entry else schema[key]
                        np[settings][key] = value
        return validated


class FitsImageFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.Image` reading.
    """

    _readerClass = ImageFitsReader


class FitsMaskFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.Mask` reading.
    """

    _readerClass = MaskFitsReader


class FitsMaskedImageFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.MaskedImage` reading.
    """

    _readerClass = MaskedImageFitsReader
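

# The sketch below is illustrative only and is not part of the formatter
# API: it shows how the reader class and header utilities imported above can
# be exercised directly, which is essentially what ``readMetadata`` and
# ``readFull`` do for a dataset. The path is a placeholder and must point at
# a FITS exposure written by the LSST stack.
def _exampleDirectRead(path="example.fits"):
    md = readMetadata(path)   # full header, as in ``readMetadata`` above
    fix_header(md)            # apply astro_metadata_translator fixes
    reader = ExposureFitsReader(path)
    exposure = reader.read()  # equivalent to ``readFull`` with no parameters
    return md, exposure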