Coverage for python/lsst/obs/base/fitsExposureFormatter.py : 11%

# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ("FitsExposureFormatter", )

from astro_metadata_translator import fix_header
from lsst.daf.butler import Formatter
from lsst.afw.image import ExposureFitsReader
from lsst.daf.base import PropertySet


class FitsExposureFormatter(Formatter):
    """Interface for reading and writing Exposures to and from FITS files.

    This Formatter supports write recipes.

    Each ``FitsExposureFormatter`` recipe for FITS compression should
    define ``image``, ``mask`` and ``variance`` entries, each of which may
    contain ``compression`` and ``scaling`` entries. Defaults will be
    provided for any missing elements under ``compression`` and
    ``scaling``.

    The allowed entries under ``compression`` are:

    * ``algorithm`` (`str`): compression algorithm to use
    * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
    * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
    * ``quantizeLevel`` (`float`): cfitsio quantization level

    The allowed entries under ``scaling`` are:

    * ``algorithm`` (`str`): scaling algorithm to use
    * ``bitpix`` (`int`): bits per pixel (0, 8, 16, 32, 64, -32, -64)
    * ``fuzz`` (`bool`): fuzz the values when quantising floating-point values?
    * ``seed`` (`int`): seed for random number generator when fuzzing
    * ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing
      statistics
    * ``quantizeLevel`` (`float`): divisor of the standard deviation for
      ``STDEV_*`` scaling
    * ``quantizePad`` (`float`): number of stdev to allow on the low side (for
      ``STDEV_POSITIVE``/``NEGATIVE``)
    * ``bscale`` (`float`): manually specified ``BSCALE``
      (for ``MANUAL`` scaling)
    * ``bzero`` (`float`): manually specified ``BZERO``
      (for ``MANUAL`` scaling)

    A very simple example YAML recipe:

    .. code-block:: yaml

        lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
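
    For illustration only, a hypothetical recipe that also fills in the
    ``scaling`` entries (the recipe name is arbitrary; the key names are
    taken from the lists above, and the values are examples, not
    recommendations):

    .. code-block:: yaml

        lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
          lossyFloat:
            image: &lossy
              compression:
                algorithm: GZIP_SHUFFLE
              scaling:
                algorithm: STDEV_POSITIVE
                bitpix: 32
                quantizeLevel: 4.0
                quantizePad: 5.0
                fuzz: true
                seed: 0  # 0 means: derive the seed from the dataId
            mask:
              compression:
                algorithm: GZIP_SHUFFLE
            variance: *lossy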
77 """
78 extension = ".fits"
79 _metadata = None
80 supportedWriteParameters = frozenset({"recipe"})

    @property
    def metadata(self):
        """The metadata read from this file. It will be stripped as
        components are extracted from it
        (`lsst.daf.base.PropertyList`).
        """
        if self._metadata is None:
            self._metadata = self.readMetadata()
        return self._metadata

    def readMetadata(self):
        """Read all header metadata directly into a PropertyList.

        Returns
        -------
        metadata : `~lsst.daf.base.PropertyList`
            Header metadata.
        """
        from lsst.afw.image import readMetadata
        md = readMetadata(self.fileDescriptor.location.path)
        fix_header(md)
        return md

    def stripMetadata(self):
        """Remove metadata entries that are parsed into components.

        This is only called when just the metadata is requested; stripping
        entries there forces code that wants other components to ask for
        those components directly rather than trying to extract them from
        the metadata manually, which is fragile. This behavior is an
        intentional change from Gen2.

        The cached `metadata` property is modified in place.
        """
        # TODO: make sure this covers everything, by delegating to something
        # that doesn't yet exist in afw.image.ExposureInfo.
        from lsst.afw.image import bboxFromMetadata
        from lsst.afw.geom import makeSkyWcs
        bboxFromMetadata(self.metadata)  # always strips
        makeSkyWcs(self.metadata, strip=True)

    def readComponent(self, component, parameters=None):
        """Read a component held by the Exposure.

        Parameters
        ----------
        component : `str`
            Component to read from the file.
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that
            overrides those in ``fileDescriptor``.

        Returns
        -------
        obj : component-dependent
            In-memory component object.

        Raises
        ------
        KeyError
            Raised if the requested component cannot be handled.
        """
        componentMap = {'wcs': ('readWcs', False),
                        'coaddInputs': ('readCoaddInputs', False),
                        'psf': ('readPsf', False),
                        'image': ('readImage', True),
                        'mask': ('readMask', True),
                        'variance': ('readVariance', True),
                        'photoCalib': ('readPhotoCalib', False),
                        'bbox': ('readBBox', True),
                        'xy0': ('readXY0', True),
                        'metadata': ('readMetadata', False),
                        'filter': ('readFilter', False),
                        'polygon': ('readValidPolygon', False),
                        'apCorrMap': ('readApCorrMap', False),
                        'visitInfo': ('readVisitInfo', False),
                        'transmissionCurve': ('readTransmissionCurve', False),
                        'detector': ('readDetector', False),
                        'extras': ('readExtraComponents', False),
                        'exposureInfo': ('readExposureInfo', False),
                        }
        method, hasParams = componentMap.get(component, (None, False))

        if method:
            reader = ExposureFitsReader(self.fileDescriptor.location.path)
            caller = getattr(reader, method, None)

            if caller:
                if parameters is None:
                    parameters = self.fileDescriptor.parameters
                if parameters is None:
                    parameters = {}
                self.fileDescriptor.storageClass.validateParameters(parameters)

                if hasParams and parameters:
                    return caller(**parameters)
                else:
                    return caller()
        else:
            raise KeyError(f"Unknown component requested: {component}")

    def readFull(self, parameters=None):
        """Read the full Exposure object.

        Parameters
        ----------
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that overrides
            those in ``fileDescriptor``.

        Returns
        -------
        exposure : `~lsst.afw.image.Exposure`
            Complete in-memory exposure.
        """
        fileDescriptor = self.fileDescriptor
        if parameters is None:
            parameters = fileDescriptor.parameters
        if parameters is None:
            parameters = {}
        fileDescriptor.storageClass.validateParameters(parameters)
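        # Try the storage class's Python type first: some pytypes can be
        # constructed directly from a file path. If the constructor does
        # not accept these arguments, fall back to ExposureFitsReader.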
        try:
            output = fileDescriptor.storageClass.pytype(fileDescriptor.location.path, **parameters)
        except TypeError:
            reader = ExposureFitsReader(fileDescriptor.location.path)
            output = reader.read(**parameters)
        return output

    def read(self, component=None, parameters=None):
        """Read data from a file.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differs from the `StorageClass` used to write the
            file.
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that
            overrides those in ``fileDescriptor``.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.

        Raises
        ------
        ValueError
            Raised if a component is requested but this file does not seem
            to be a concrete composite.
        KeyError
            Raised when parameters passed with ``fileDescriptor`` are not
            supported.
        """
        fileDescriptor = self.fileDescriptor
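        # A read storage class that differs from the write storage class
        # signals a component read (or, for "metadata", a read of the
        # stripped header); otherwise read the full Exposure.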
        if fileDescriptor.readStorageClass != fileDescriptor.storageClass:
            if component == "metadata":
                self.stripMetadata()
                return self.metadata
            elif component is not None:
                return self.readComponent(component, parameters)
            else:
                raise ValueError("Storage class inconsistency ({} vs {}) but no"
                                 " component requested".format(fileDescriptor.readStorageClass.name,
                                                               fileDescriptor.storageClass.name))
        return self.readFull(parameters=parameters)

    def write(self, inMemoryDataset):
        """Write a Python object to a file.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to store.

        Returns
        -------
        path : `str`
            The `URI` where the primary file is stored.
        """
        # Update the location with the formatter-preferred file extension
        self.fileDescriptor.location.updateExtension(self.extension)
        outputPath = self.fileDescriptor.location.path

        # Check to see if we have a recipe requested
        recipeName = self.writeParameters.get("recipe")
        recipe = self.getImageCompressionSettings(recipeName)
        if recipe:
            # Can not construct a PropertySet from a hierarchical
            # dict but can update one.
            ps = PropertySet()
            ps.update(recipe)
            inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
        else:
            inMemoryDataset.writeFits(outputPath)
        return self.fileDescriptor.location.pathInStore

    def getImageCompressionSettings(self, recipeName):
        """Retrieve the relevant compression settings for this recipe.

        Parameters
        ----------
        recipeName : `str`
            Label associated with the collection of compression parameters
            to select.

        Returns
        -------
        settings : `dict`
            The selected settings.
        """
        # If no recipe has been provided and there is no default,
        # return immediately
        if not recipeName:
            if "default" not in self.writeRecipes:
                return {}
            recipeName = "default"

        if recipeName not in self.writeRecipes:
            raise RuntimeError(f"Unrecognized recipe option given for compression: {recipeName}")

        recipe = self.writeRecipes[recipeName]

        # Set the seed based on dataId
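        # (a recipe ``seed`` of 0 is replaced below with this deterministic
        # per-dataset value, so fuzzing is reproducible for a given dataId)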
        seed = hash(tuple(self.dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if plane in recipe and "scaling" in recipe[plane]:
                scaling = recipe[plane]["scaling"]
                if "seed" in scaling and scaling["seed"] == 0:
                    scaling["seed"] = seed

        return recipe

    @classmethod
    def validateWriteRecipes(cls, recipes):
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        TODO: replace this custom validation code with Cerberus (DM-11846)

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate. Can be empty dict or `None`.

        Returns
        -------
        validated : `dict`
            Validated recipes. Returns what was given if there are no
            recipes listed.

        Raises
        ------
        RuntimeError
            Raised if validation fails.
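
        Examples
        --------
        A sketch of the defaulting behaviour; the recipe name here is
        arbitrary, and the value shown is filled in from the schemas
        defined below:

        >>> recipes = {"default": {plane: {}
        ...                        for plane in ("image", "mask", "variance")}}
        >>> validated = FitsExposureFormatter.validateWriteRecipes(recipes)
        >>> validated["default"]["image"]["compression"]["algorithm"]
        'NONE'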
343 """
344 # Schemas define what should be there, and the default values (and by the default
345 # value, the expected type).
346 compressionSchema = {
347 "algorithm": "NONE",
348 "rows": 1,
349 "columns": 0,
350 "quantizeLevel": 0.0,
351 }
352 scalingSchema = {
353 "algorithm": "NONE",
354 "bitpix": 0,
355 "maskPlanes": ["NO_DATA"],
356 "seed": 0,
357 "quantizeLevel": 4.0,
358 "quantizePad": 5.0,
359 "fuzz": True,
360 "bscale": 1.0,
361 "bzero": 0.0,
362 }

        if not recipes:
            # We can not insist on recipes being specified
            return recipes

        def checkUnrecognized(entry, allowed, description):
            """Check to see if the entry contains unrecognized keywords"""
            unrecognized = set(entry) - set(allowed)
            if unrecognized:
                raise RuntimeError(
                    f"Unrecognized entries when parsing image compression recipe {description}: "
                    f"{unrecognized}")

        validated = {}
        for name in recipes:
            checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
            validated[name] = {}
            for plane in ("image", "mask", "variance"):
                checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                                  f"{name}->{plane}")

                np = {}
                validated[name][plane] = np
                for settings, schema in (("compression", compressionSchema),
                                         ("scaling", scalingSchema)):
                    np[settings] = {}
                    if settings not in recipes[name][plane]:
                        for key in schema:
                            np[settings][key] = schema[key]
                        continue
                    entry = recipes[name][plane][settings]
                    checkUnrecognized(entry, schema.keys(), f"{name}->{plane}->{settings}")
                    for key in schema:
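                        # Coerce the supplied value to the type of the
                        # schema default; absent keys take the default.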
                        value = type(schema[key])(entry[key]) if key in entry else schema[key]
                        np[settings][key] = value
        return validated