Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of obs_base. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

# Public API of this module.
__all__ = ("FitsExposureFormatter", )

23 

24from astro_metadata_translator import fix_header 

25from lsst.daf.butler import Formatter 

26from lsst.afw.image import ExposureFitsReader 

27from lsst.daf.base import PropertySet 

28 

29 

class FitsExposureFormatter(Formatter):
    """Interface for reading and writing Exposures to and from FITS files.

    This Formatter supports write recipes.

    Each ``FitsExposureFormatter`` recipe for FITS compression should
    define ``image``, ``mask`` and ``variance`` entries, each of which may
    contain ``compression`` and ``scaling`` entries. Defaults will be
    provided for any missing elements under ``compression`` and
    ``scaling``.

    The allowed entries under ``compression`` are:

    * ``algorithm`` (`str`): compression algorithm to use
    * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
    * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
    * ``quantizeLevel`` (`float`): cfitsio quantization level

    The allowed entries under ``scaling`` are:

    * ``algorithm`` (`str`): scaling algorithm to use
    * ``bitpix`` (`int`): bits per pixel (0,8,16,32,64,-32,-64)
    * ``fuzz`` (`bool`): fuzz the values when quantising floating-point values?
    * ``seed`` (`int`): seed for random number generator when fuzzing
    * ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing
      statistics
    * ``quantizeLevel`` (`float`): divisor of the standard deviation for
      ``STDEV_*`` scaling
    * ``quantizePad`` (`float`): number of stdev to allow on the low side (for
      ``STDEV_POSITIVE``/``NEGATIVE``)
    * ``bscale`` (`float`): manually specified ``BSCALE``
      (for ``MANUAL`` scaling)
    * ``bzero`` (`float`): manually specified ``BZERO``
      (for ``MANUAL`` scaling)

    A very simple example YAML recipe:

    .. code-block:: yaml

        lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default

    """
    # File extensions this formatter is willing to read.
    supportedExtensions = frozenset({".fits", ".fits.gz", ".fits.fz"})
    # Extension applied to files written by this formatter.
    extension = ".fits"
    # Lazily-populated header cache; see the ``metadata`` property.
    _metadata = None
    # Write parameters understood by this formatter; "recipe" selects a
    # compression recipe by name (see class docstring).
    supportedWriteParameters = frozenset({"recipe"})

82 

83 @property 

84 def metadata(self): 

85 """The metadata read from this file. It will be stripped as 

86 components are extracted from it 

87 (`lsst.daf.base.PropertyList`). 

88 """ 

89 if self._metadata is None: 

90 self._metadata = self.readMetadata() 

91 return self._metadata 

92 

93 def readMetadata(self): 

94 """Read all header metadata directly into a PropertyList. 

95 

96 Returns 

97 ------- 

98 metadata : `~lsst.daf.base.PropertyList` 

99 Header metadata. 

100 """ 

101 # Do not use ExposureFitsReader.readMetadata because that strips 

102 # out lots of headers and there is no way to recover them 

103 from lsst.afw.image import readMetadata 

104 md = readMetadata(self.fileDescriptor.location.path) 

105 fix_header(md) 

106 return md 

107 

    def stripMetadata(self):
        """Remove metadata entries that are parsed into components.

        This is only called when just the metadata is requested; stripping
        entries there forces code that wants other components to ask for those
        components directly rather than trying to extract them from the
        metadata manually, which is fragile. This behavior is an intentional
        change from Gen2.

        Notes
        -----
        Operates in place on the cached ``self.metadata`` object; takes no
        parameters and returns nothing.
        """
        # TODO: make sure this covers everything, by delegating to something
        # that doesn't yet exist in afw.image.ExposureInfo.
        from lsst.afw.image import bboxFromMetadata
        from lsst.afw.geom import makeSkyWcs

        # Protect against the metadata being missing
        try:
            bboxFromMetadata(self.metadata)  # always strips
        except LookupError:
            pass
        try:
            # Best-effort stripping of WCS cards: makeSkyWcs can raise for
            # incomplete or absent WCS headers, which is fine here.
            makeSkyWcs(self.metadata, strip=True)
        except Exception:
            pass

136 

    def readComponent(self, component, parameters=None):
        """Read a component held by the Exposure.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file.
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that
            overrides those in ``fileDescriptor``.

        Returns
        -------
        obj : component-dependent
            In-memory component object.

        Raises
        ------
        KeyError
            Raised if the requested component cannot be handled.
        """
        # Metadata is handled explicitly elsewhere.
        # Maps component name to (ExposureFitsReader method name, whether
        # that method accepts slicing parameters).
        componentMap = {'wcs': ('readWcs', False),
                        'coaddInputs': ('readCoaddInputs', False),
                        'psf': ('readPsf', False),
                        'image': ('readImage', True),
                        'mask': ('readMask', True),
                        'variance': ('readVariance', True),
                        'photoCalib': ('readPhotoCalib', False),
                        'bbox': ('readBBox', True),
                        'dimensions': ('readBBox', True),
                        'xy0': ('readXY0', True),
                        'filter': ('readFilter', False),
                        'validPolygon': ('readValidPolygon', False),
                        'apCorrMap': ('readApCorrMap', False),
                        'visitInfo': ('readVisitInfo', False),
                        'transmissionCurve': ('readTransmissionCurve', False),
                        'detector': ('readDetector', False),
                        'extras': ('readExtraComponents', False),
                        'exposureInfo': ('readExposureInfo', False),
                        }
        method, hasParams = componentMap.get(component, (None, False))

        if method:
            # This reader can read standalone Image/Mask files as well
            # when dealing with components.
            reader = ExposureFitsReader(self.fileDescriptor.location.path)
            caller = getattr(reader, method, None)

            if caller:
                # Fall back to fileDescriptor parameters, then to none at all.
                if parameters is None:
                    parameters = self.fileDescriptor.parameters
                if parameters is None:
                    parameters = {}
                self.fileDescriptor.storageClass.validateParameters(parameters)

                if hasParams and parameters:
                    thisComponent = caller(**parameters)
                else:
                    thisComponent = caller()
                # "dimensions" shares the bbox reader; reduce the box to its
                # dimensions before returning.
                if component == "dimensions" and thisComponent is not None:
                    thisComponent = thisComponent.getDimensions()
                return thisComponent
            # NOTE(review): if the reader lacks the mapped method this falls
            # through and returns None silently instead of raising — confirm
            # this is intentional.
        else:
            raise KeyError(f"Unknown component requested: {component}")

202 

203 def readFull(self, parameters=None): 

204 """Read the full Exposure object. 

205 

206 Parameters 

207 ---------- 

208 parameters : `dict`, optional 

209 If specified a dictionary of slicing parameters that overrides 

210 those in ``fileDescriptor``. 

211 

212 Returns 

213 ------- 

214 exposure : `~lsst.afw.image.Exposure` 

215 Complete in-memory exposure. 

216 """ 

217 fileDescriptor = self.fileDescriptor 

218 if parameters is None: 

219 parameters = fileDescriptor.parameters 

220 if parameters is None: 

221 parameters = {} 

222 fileDescriptor.storageClass.validateParameters(parameters) 

223 try: 

224 output = fileDescriptor.storageClass.pytype(fileDescriptor.location.path, **parameters) 

225 except TypeError: 

226 reader = ExposureFitsReader(fileDescriptor.location.path) 

227 output = reader.read(**parameters) 

228 return output 

229 

230 def read(self, component=None): 

231 """Read data from a file. 

232 

233 Parameters 

234 ---------- 

235 component : `str`, optional 

236 Component to read from the file. Only used if the `StorageClass` 

237 for reading differed from the `StorageClass` used to write the 

238 file. 

239 

240 Returns 

241 ------- 

242 inMemoryDataset : `object` 

243 The requested data as a Python object. The type of object 

244 is controlled by the specific formatter. 

245 

246 Raises 

247 ------ 

248 ValueError 

249 Component requested but this file does not seem to be a concrete 

250 composite. 

251 KeyError 

252 Raised when parameters passed with fileDescriptor are not 

253 supported. 

254 """ 

255 fileDescriptor = self.fileDescriptor 

256 if fileDescriptor.readStorageClass != fileDescriptor.storageClass: 

257 if component == "metadata": 

258 self.stripMetadata() 

259 return self.metadata 

260 elif component is not None: 

261 return self.readComponent(component) 

262 else: 

263 raise ValueError("Storage class inconsistency ({} vs {}) but no" 

264 " component requested".format(fileDescriptor.readStorageClass.name, 

265 fileDescriptor.storageClass.name)) 

266 return self.readFull() 

267 

268 def write(self, inMemoryDataset): 

269 """Write a Python object to a file. 

270 

271 Parameters 

272 ---------- 

273 inMemoryDataset : `object` 

274 The Python object to store. 

275 

276 Returns 

277 ------- 

278 path : `str` 

279 The `URI` where the primary file is stored. 

280 """ 

281 # Update the location with the formatter-preferred file extension 

282 self.fileDescriptor.location.updateExtension(self.extension) 

283 outputPath = self.fileDescriptor.location.path 

284 

285 # check to see if we have a recipe requested 

286 recipeName = self.writeParameters.get("recipe") 

287 recipe = self.getImageCompressionSettings(recipeName) 

288 if recipe: 

289 # Can not construct a PropertySet from a hierarchical 

290 # dict but can update one. 

291 ps = PropertySet() 

292 ps.update(recipe) 

293 inMemoryDataset.writeFitsWithOptions(outputPath, options=ps) 

294 else: 

295 inMemoryDataset.writeFits(outputPath) 

296 return self.fileDescriptor.location.pathInStore 

297 

298 def getImageCompressionSettings(self, recipeName): 

299 """Retrieve the relevant compression settings for this recipe. 

300 

301 Parameters 

302 ---------- 

303 recipeName : `str` 

304 Label associated with the collection of compression parameters 

305 to select. 

306 

307 Returns 

308 ------- 

309 settings : `dict` 

310 The selected settings. 

311 """ 

312 # if no recipe has been provided and there is no default 

313 # return immediately 

314 if not recipeName: 

315 if "default" not in self.writeRecipes: 

316 return {} 

317 recipeName = "default" 

318 

319 if recipeName not in self.writeRecipes: 

320 raise RuntimeError(f"Unrecognized recipe option given for compression: {recipeName}") 

321 

322 recipe = self.writeRecipes[recipeName] 

323 

324 # Set the seed based on dataId 

325 seed = hash(tuple(self.dataId.items())) % 2**31 

326 for plane in ("image", "mask", "variance"): 

327 if plane in recipe and "scaling" in recipe[plane]: 

328 scaling = recipe[plane]["scaling"] 

329 if "seed" in scaling and scaling["seed"] == 0: 

330 scaling["seed"] = seed 

331 

332 return recipe 

333 

334 @classmethod 

335 def validateWriteRecipes(cls, recipes): 

336 """Validate supplied recipes for this formatter. 

337 

338 The recipes are supplemented with default values where appropriate. 

339 

340 TODO: replace this custom validation code with Cerberus (DM-11846) 

341 

342 Parameters 

343 ---------- 

344 recipes : `dict` 

345 Recipes to validate. Can be empty dict or `None`. 

346 

347 Returns 

348 ------- 

349 validated : `dict` 

350 Validated recipes. Returns what was given if there are no 

351 recipes listed. 

352 

353 Raises 

354 ------ 

355 RuntimeError 

356 Raised if validation fails. 

357 """ 

358 # Schemas define what should be there, and the default values (and by the default 

359 # value, the expected type). 

360 compressionSchema = { 

361 "algorithm": "NONE", 

362 "rows": 1, 

363 "columns": 0, 

364 "quantizeLevel": 0.0, 

365 } 

366 scalingSchema = { 

367 "algorithm": "NONE", 

368 "bitpix": 0, 

369 "maskPlanes": ["NO_DATA"], 

370 "seed": 0, 

371 "quantizeLevel": 4.0, 

372 "quantizePad": 5.0, 

373 "fuzz": True, 

374 "bscale": 1.0, 

375 "bzero": 0.0, 

376 } 

377 

378 if not recipes: 

379 # We can not insist on recipes being specified 

380 return recipes 

381 

382 def checkUnrecognized(entry, allowed, description): 

383 """Check to see if the entry contains unrecognised keywords""" 

384 unrecognized = set(entry) - set(allowed) 

385 if unrecognized: 

386 raise RuntimeError( 

387 f"Unrecognized entries when parsing image compression recipe {description}: " 

388 f"{unrecognized}") 

389 

390 validated = {} 

391 for name in recipes: 

392 checkUnrecognized(recipes[name], ["image", "mask", "variance"], name) 

393 validated[name] = {} 

394 for plane in ("image", "mask", "variance"): 

395 checkUnrecognized(recipes[name][plane], ["compression", "scaling"], 

396 f"{name}->{plane}") 

397 

398 np = {} 

399 validated[name][plane] = np 

400 for settings, schema in (("compression", compressionSchema), 

401 ("scaling", scalingSchema)): 

402 np[settings] = {} 

403 if settings not in recipes[name][plane]: 

404 for key in schema: 

405 np[settings][key] = schema[key] 

406 continue 

407 entry = recipes[name][plane][settings] 

408 checkUnrecognized(entry, schema.keys(), f"{name}->{plane}->{settings}") 

409 for key in schema: 

410 value = type(schema[key])(entry[key]) if key in entry else schema[key] 

411 np[settings][key] = value 

412 return validated