# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ("FitsExposureFormatter",)

from astro_metadata_translator import fix_header
from lsst.daf.butler import Formatter
from lsst.afw.image import ExposureFitsReader
from lsst.daf.base import PropertySet


class FitsExposureFormatter(Formatter):
31 """Interface for reading and writing Exposures to and from FITS files.
33 This Formatter supports write recipes.
35 Each ``FitsExposureFormatter`` recipe for FITS compression should
36 define ``image``, ``mask`` and ``variance`` entries, each of which may
37 contain ``compression`` and ``scaling`` entries. Defaults will be
38 provided for any missing elements under ``compression`` and
39 ``scaling``.
41 The allowed entries under ``compression`` are:
43 * ``algorithm`` (`str`): compression algorithm to use
44 * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
45 * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
46 * ``quantizeLevel`` (`float`): cfitsio quantization level
48 The allowed entries under ``scaling`` are:
50 * ``algorithm`` (`str`): scaling algorithm to use
51 * ``bitpix`` (`int`): bits per pixel (0,8,16,32,64,-32,-64)
52 * ``fuzz`` (`bool`): fuzz the values when quantising floating-point values?
53 * ``seed`` (`int`): seed for random number generator when fuzzing
54 * ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing
55 statistics
56 * ``quantizeLevel`` (`float`): divisor of the standard deviation for
57 ``STDEV_*`` scaling
58 * ``quantizePad`` (`float`): number of stdev to allow on the low side (for
59 ``STDEV_POSITIVE``/``NEGATIVE``)
60 * ``bscale`` (`float`): manually specified ``BSCALE``
61 (for ``MANUAL`` scaling)
62 * ``bzero`` (`float`): manually specified ``BSCALE``
63 (for ``MANUAL`` scaling)

    A very simple example YAML recipe:

    .. code-block:: yaml

        lsst.obs.base.formatters.fitsExposure.FitsExposureFormatter:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
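
    The recipe used at write time is selected with the ``recipe`` write
    parameter (the only entry in ``supportedWriteParameters``); when none is
    given, the ``default`` recipe is used if present. A minimal sketch,
    assuming ``formatter`` is a butler-constructed instance whose
    ``writeRecipes`` holds the validated YAML above (the instance name is
    illustrative):

    .. code-block:: python

        settings = formatter.getImageCompressionSettings("default")
        assert settings["image"]["compression"]["algorithm"] == "GZIP_SHUFFLE"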
77 """

    supportedExtensions = frozenset({".fits", ".fits.gz", ".fits.fz"})
    extension = ".fits"
    _metadata = None
    supportedWriteParameters = frozenset({"recipe"})

    @property
    def metadata(self):
        """The metadata read from this file. It will be stripped as
        components are extracted from it
        (`lsst.daf.base.PropertyList`).
        """
        if self._metadata is None:
            self._metadata = self.readMetadata()
        return self._metadata

    def readMetadata(self):
        """Read all header metadata directly into a PropertyList.

        Returns
        -------
        metadata : `~lsst.daf.base.PropertyList`
            Header metadata.
        """
        from lsst.afw.image import readMetadata
        md = readMetadata(self.fileDescriptor.location.path)
        fix_header(md)
        return md

    def stripMetadata(self):
        """Remove metadata entries that are parsed into components.

        This is only called when just the metadata is requested; stripping
        entries there forces code that wants other components to ask for
        those components directly rather than trying to extract them from
        the metadata manually, which is fragile. This behavior is an
        intentional change from Gen2.
        """
        # TODO: make sure this covers everything, by delegating to something
        # that doesn't yet exist in afw.image.ExposureInfo.
        from lsst.afw.image import bboxFromMetadata
        from lsst.afw.geom import makeSkyWcs
        bboxFromMetadata(self.metadata)  # always strips
        makeSkyWcs(self.metadata, strip=True)

    def readComponent(self, component, parameters=None):
        """Read a component held by the Exposure.

        Parameters
        ----------
        component : `str`
            Component to read from the file.
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that
            overrides those in ``fileDescriptor``.

        Returns
        -------
        obj : component-dependent
            In-memory component object.

        Raises
        ------
        KeyError
            Raised if the requested component cannot be handled.
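
        Examples
        --------
        A minimal sketch, assuming ``formatter`` is an instance the butler
        has constructed for an existing FITS file (the name is illustrative):

        .. code-block:: python

            wcs = formatter.readComponent("wcs")
            bbox = formatter.readComponent("bbox")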
147 """
        componentMap = {'wcs': ('readWcs', False),
                        'coaddInputs': ('readCoaddInputs', False),
                        'psf': ('readPsf', False),
                        'image': ('readImage', True),
                        'mask': ('readMask', True),
                        'variance': ('readVariance', True),
                        'photoCalib': ('readPhotoCalib', False),
                        'bbox': ('readBBox', True),
                        'xy0': ('readXY0', True),
                        'metadata': ('readMetadata', False),
                        'filter': ('readFilter', False),
                        'polygon': ('readValidPolygon', False),
                        'apCorrMap': ('readApCorrMap', False),
                        'visitInfo': ('readVisitInfo', False),
                        'transmissionCurve': ('readTransmissionCurve', False),
                        'detector': ('readDetector', False),
                        'extras': ('readExtraComponents', False),
                        'exposureInfo': ('readExposureInfo', False),
                        }
        # Use a (None, False) default so an unknown component falls through
        # to the KeyError below instead of failing to unpack.
        method, hasParams = componentMap.get(component, (None, False))

        if method:
            reader = ExposureFitsReader(self.fileDescriptor.location.path)
            caller = getattr(reader, method, None)

            if caller:
                if parameters is None:
                    parameters = self.fileDescriptor.parameters
                if parameters is None:
                    parameters = {}
                self.fileDescriptor.storageClass.validateParameters(parameters)

                if hasParams and parameters:
                    return caller(**parameters)
                else:
                    return caller()
        else:
            raise KeyError(f"Unknown component requested: {component}")

    def readFull(self, parameters=None):
        """Read the full Exposure object.

        Parameters
        ----------
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that overrides
            those in ``fileDescriptor``.

        Returns
        -------
        exposure : `~lsst.afw.image.Exposure`
            Complete in-memory exposure.
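
        Examples
        --------
        A minimal sketch, assuming ``formatter`` was constructed by the
        butler and that the storage class accepts afw-style slicing
        parameters such as ``bbox`` (an assumption; the allowed keys come
        from the storage class definition):

        .. code-block:: python

            from lsst.geom import Box2I, Extent2I, Point2I

            cutout = formatter.readFull(
                parameters={"bbox": Box2I(Point2I(0, 0), Extent2I(100, 100))})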
200 """
        fileDescriptor = self.fileDescriptor
        if parameters is None:
            parameters = fileDescriptor.parameters
        if parameters is None:
            parameters = {}
        fileDescriptor.storageClass.validateParameters(parameters)
        try:
            output = fileDescriptor.storageClass.pytype(fileDescriptor.location.path, **parameters)
        except TypeError:
            # Not every pytype can be constructed directly from a file path;
            # fall back to the explicit FITS reader.
            reader = ExposureFitsReader(fileDescriptor.location.path)
            output = reader.read(**parameters)
        return output

    def read(self, component=None, parameters=None):
        """Read data from a file.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differs from the `StorageClass` used to write the
            file.
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that
            overrides those in ``fileDescriptor``.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.

        Raises
        ------
        ValueError
            Raised if a component is requested but this file does not seem
            to be a concrete composite.
        KeyError
            Raised when parameters passed with ``fileDescriptor`` are not
            supported.
        """
        fileDescriptor = self.fileDescriptor
        if fileDescriptor.readStorageClass != fileDescriptor.storageClass:
            if component == "metadata":
                self.stripMetadata()
                return self.metadata
            elif component is not None:
                return self.readComponent(component, parameters)
            else:
                raise ValueError(f"Storage class inconsistency "
                                 f"({fileDescriptor.readStorageClass.name} vs "
                                 f"{fileDescriptor.storageClass.name}) "
                                 f"but no component requested")
        return self.readFull(parameters=parameters)

    def write(self, inMemoryDataset):
        """Write a Python object to a file.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to store.

        Returns
        -------
        path : `str`
            The `URI` where the primary file is stored.
        """
        # Update the location with the formatter-preferred file extension
        self.fileDescriptor.location.updateExtension(self.extension)
        outputPath = self.fileDescriptor.location.path

        # Check to see if a recipe was requested.
        recipeName = self.writeParameters.get("recipe")
        recipe = self.getImageCompressionSettings(recipeName)
        if recipe:
            # Cannot construct a PropertySet from a hierarchical
            # dict, but can update one.
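            # For illustration (an assumption about PropertySet semantics):
            # a recipe such as
            #     {"image": {"compression": {"algorithm": "GZIP_SHUFFLE"}}}
            # presumably ends up under dotted hierarchical names, e.g.
            # "image.compression.algorithm".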
            ps = PropertySet()
            ps.update(recipe)
            inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
        else:
            inMemoryDataset.writeFits(outputPath)
        return self.fileDescriptor.location.pathInStore

    def getImageCompressionSettings(self, recipeName):
        """Retrieve the relevant compression settings for this recipe.

        Parameters
        ----------
        recipeName : `str`
            Label associated with the collection of compression parameters
            to select.

        Returns
        -------
        settings : `dict`
            The selected settings.

        Raises
        ------
        RuntimeError
            Raised if the requested recipe is not present.
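
        Examples
        --------
        A scaling ``seed`` of 0 in a recipe is replaced by a deterministic
        per-dataId value, so repeated writes of the same dataset are fuzzed
        identically. The substitution mirrors the implementation below
        (the dataId here is illustrative; any mapping with stable
        ``items()`` works):

        .. code-block:: python

            dataId = {"visit": 903334, "detector": 22}
            seed = hash(tuple(dataId.items())) % 2**31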
298 """
        # If no recipe has been provided and there is no default,
        # return immediately.
        if not recipeName:
            if "default" not in self.writeRecipes:
                return {}
            recipeName = "default"

        if recipeName not in self.writeRecipes:
            raise RuntimeError(f"Unrecognized recipe option given for compression: {recipeName}")

        recipe = self.writeRecipes[recipeName]

        # Set the seed based on dataId
        seed = hash(tuple(self.dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if plane in recipe and "scaling" in recipe[plane]:
                scaling = recipe[plane]["scaling"]
                if "seed" in scaling and scaling["seed"] == 0:
                    scaling["seed"] = seed

        return recipe

    @classmethod
    def validateWriteRecipes(cls, recipes):
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        TODO: replace this custom validation code with Cerberus (DM-11846)

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate. Can be an empty dict or `None`.

        Returns
        -------
        validated : `dict`
            Validated recipes. Returns what was given if there are no
            recipes listed.

        Raises
        ------
        RuntimeError
            Raised if validation fails.
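
        Examples
        --------
        A minimal sketch: a recipe that only names a compression algorithm
        for each plane is padded out with the schema defaults:

        .. code-block:: python

            recipe = {
                "default": {
                    plane: {"compression": {"algorithm": "GZIP_SHUFFLE"}}
                    for plane in ("image", "mask", "variance")
                }
            }
            validated = FitsExposureFormatter.validateWriteRecipes(recipe)
            # Missing keys are filled from the schemas, e.g.:
            assert validated["default"]["image"]["compression"]["rows"] == 1
            assert validated["default"]["image"]["scaling"]["algorithm"] == "NONE"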
344 """
        # Schemas define what should be there, and the default values
        # (and by the default value, the expected type).
        compressionSchema = {
            "algorithm": "NONE",
            "rows": 1,
            "columns": 0,
            "quantizeLevel": 0.0,
        }
        scalingSchema = {
            "algorithm": "NONE",
            "bitpix": 0,
            "maskPlanes": ["NO_DATA"],
            "seed": 0,
            "quantizeLevel": 4.0,
            "quantizePad": 5.0,
            "fuzz": True,
            "bscale": 1.0,
            "bzero": 0.0,
        }

        if not recipes:
            # We cannot insist on recipes being specified.
            return recipes

        def checkUnrecognized(entry, allowed, description):
            """Check whether the entry contains unrecognized keywords."""
            unrecognized = set(entry) - set(allowed)
            if unrecognized:
                raise RuntimeError(
                    f"Unrecognized entries when parsing image compression recipe {description}: "
                    f"{unrecognized}")

        validated = {}
        for name in recipes:
            checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
            validated[name] = {}
            for plane in ("image", "mask", "variance"):
                checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                                  f"{name}->{plane}")

                planeSettings = {}
                validated[name][plane] = planeSettings
                for settings, schema in (("compression", compressionSchema),
                                         ("scaling", scalingSchema)):
                    planeSettings[settings] = {}
                    if settings not in recipes[name][plane]:
                        # Nothing supplied for this section: copy the
                        # schema defaults wholesale.
                        for key in schema:
                            planeSettings[settings][key] = schema[key]
                        continue
                    entry = recipes[name][plane][settings]
                    checkUnrecognized(entry, schema.keys(), f"{name}->{plane}->{settings}")
                    for key in schema:
                        # Coerce supplied values to the schema's type,
                        # falling back to the default when absent.
                        value = type(schema[key])(entry[key]) if key in entry else schema[key]
                        planeSettings[settings][key] = value
        return validated