lsst.obs.base  20.0.0-73-gf477d90+2054c0bfca
fitsExposure.py
# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ("FitsExposureFormatter", "FitsImageFormatter", "FitsMaskFormatter",
           "FitsMaskedImageFormatter")

from astro_metadata_translator import fix_header
from lsst.daf.base import PropertySet
from lsst.daf.butler import Formatter
# Do not use ExposureFitsReader.readMetadata because that strips
# out lots of headers and there is no way to recover them
from lsst.afw.fits import readMetadata
from lsst.afw.image import ExposureFitsReader, ImageFitsReader, MaskFitsReader, MaskedImageFitsReader
# Needed for ApCorrMap to resolve properly
from lsst.afw.math import BoundedField  # noqa: F401


class FitsExposureFormatter(Formatter):
    """Interface for reading and writing Exposures to and from FITS files.

    This Formatter supports write recipes.

    Each ``FitsExposureFormatter`` recipe for FITS compression should
    define ``image``, ``mask`` and ``variance`` entries, each of which may
    contain ``compression`` and ``scaling`` entries. Defaults will be
    provided for any missing elements under ``compression`` and
    ``scaling``.

    The allowed entries under ``compression`` are:

    * ``algorithm`` (`str`): compression algorithm to use
    * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
    * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
    * ``quantizeLevel`` (`float`): cfitsio quantization level

    The allowed entries under ``scaling`` are:

    * ``algorithm`` (`str`): scaling algorithm to use
    * ``bitpix`` (`int`): bits per pixel (0, 8, 16, 32, 64, -32, -64)
    * ``fuzz`` (`bool`): fuzz the values when quantising floating-point values?
    * ``seed`` (`int`): seed for random number generator when fuzzing
    * ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing
      statistics
    * ``quantizeLevel`` (`float`): divisor of the standard deviation for
      ``STDEV_*`` scaling
    * ``quantizePad`` (`float`): number of stdev to allow on the low side (for
      ``STDEV_POSITIVE``/``NEGATIVE``)
    * ``bscale`` (`float`): manually specified ``BSCALE``
      (for ``MANUAL`` scaling)
    * ``bzero`` (`float`): manually specified ``BZERO``
      (for ``MANUAL`` scaling)

    A very simple example YAML recipe:

    .. code-block:: yaml

        lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default

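    A fuller recipe can also set ``scaling`` for lossy compression. The
    following sketch is illustrative only: the keys come from the schemas
    above, but the algorithm names and numeric values are examples rather
    than recommended defaults (see ``lsst.afw.fits`` for the supported
    algorithms):

    .. code-block:: yaml

        lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
          lossyExample:
            image: &lossy
              compression:
                algorithm: RICE
                rows: 1
              scaling:
                algorithm: STDEV_POSITIVE
                bitpix: 32
                quantizeLevel: 10.0
            mask:
              compression:
                algorithm: GZIP_SHUFFLE
            variance: *lossy
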
    """
    supportedExtensions = frozenset({".fits", ".fits.gz", ".fits.fz", ".fz", ".fit"})
    extension = ".fits"
    _metadata = None
    supportedWriteParameters = frozenset({"recipe"})
    _readerClass = ExposureFitsReader

    unsupportedParameters = {}
    """Support all parameters."""

    @property
    def metadata(self):
        """The metadata read from this file. It will be stripped as
        components are extracted from it
        (`lsst.daf.base.PropertyList`).
        """
        if self._metadata is None:
            self._metadata = self.readMetadata()
        return self._metadata

    def readMetadata(self):
        """Read all header metadata directly into a PropertyList.

        Returns
        -------
        metadata : `~lsst.daf.base.PropertyList`
            Header metadata.
        """
        md = readMetadata(self.fileDescriptor.location.path)
        fix_header(md)
        return md

    def stripMetadata(self):
        """Remove metadata entries that are parsed into components.

        This is only called when just the metadata is requested; stripping
        entries there forces code that wants other components to ask for those
        components directly rather than trying to extract them from the
        metadata manually, which is fragile. This behavior is an intentional
        change from Gen2.

        The header in ``self.metadata`` is modified in-place.
        """
        # TODO: make sure this covers everything, by delegating to something
        # that doesn't yet exist in afw.image.ExposureInfo.
        from lsst.afw.image import bboxFromMetadata
        from lsst.afw.geom import makeSkyWcs

        # Protect against the metadata being missing
        try:
            bboxFromMetadata(self.metadata)  # always strips
        except LookupError:
            pass
        try:
            makeSkyWcs(self.metadata, strip=True)
        except Exception:
            pass

    def readComponent(self, component, parameters=None):
        """Read a component held by the Exposure.

        Parameters
        ----------
        component : `str`
            Component to read from the file.
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that
            overrides those in ``fileDescriptor``.

        Returns
        -------
        obj : component-dependent
            In-memory component object.

        Raises
        ------
        KeyError
            Raised if the requested component cannot be handled.
        """

        # Metadata is handled explicitly elsewhere
        componentMap = {'wcs': ('readWcs', False),
                        'coaddInputs': ('readCoaddInputs', False),
                        'psf': ('readPsf', False),
                        'image': ('readImage', True),
                        'mask': ('readMask', True),
                        'variance': ('readVariance', True),
                        'photoCalib': ('readPhotoCalib', False),
                        'bbox': ('readBBox', True),
                        'dimensions': ('readBBox', True),
                        'xy0': ('readXY0', True),
                        'filter': ('readFilter', False),
                        'validPolygon': ('readValidPolygon', False),
                        'apCorrMap': ('readApCorrMap', False),
                        'visitInfo': ('readVisitInfo', False),
                        'transmissionCurve': ('readTransmissionCurve', False),
                        'detector': ('readDetector', False),
                        'extras': ('readExtraComponents', False),
                        'exposureInfo': ('readExposureInfo', False),
                        }
        method, hasParams = componentMap.get(component, (None, False))

        if method:
            # This reader can read standalone Image/Mask files as well
            # when dealing with components.
            reader = self._readerClass(self.fileDescriptor.location.path)
            caller = getattr(reader, method, None)

            if caller:
                if parameters is None:
                    parameters = self.fileDescriptor.parameters
                if parameters is None:
                    parameters = {}
                self.fileDescriptor.storageClass.validateParameters(parameters)

                if hasParams and parameters:
                    thisComponent = caller(**parameters)
                else:
                    thisComponent = caller()
                if component == "dimensions" and thisComponent is not None:
                    thisComponent = thisComponent.getDimensions()
                return thisComponent
        else:
            raise KeyError(f"Unknown component requested: {component}")

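    # Example (illustrative) for readComponent(): components flagged with
    # hasParams above, such as "image", forward slicing parameters, so a
    # request with parameters={"bbox": box} becomes roughly
    # ExposureFitsReader(path).readImage(bbox=box); components such as "wcs"
    # take no parameters and any supplied ones are ignored for that call.
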
    def readFull(self, parameters=None):
        """Read the full Exposure object.

        Parameters
        ----------
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that overrides
            those in ``fileDescriptor``.

        Returns
        -------
        exposure : `~lsst.afw.image.Exposure`
            Complete in-memory exposure.
        """
        fileDescriptor = self.fileDescriptor
        if parameters is None:
            parameters = fileDescriptor.parameters
        if parameters is None:
            parameters = {}
        fileDescriptor.storageClass.validateParameters(parameters)
        reader = self._readerClass(fileDescriptor.location.path)
        return reader.read(**parameters)

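    # Example (illustrative) for readFull(): a parameterised Butler read such
    # as butler.get("calexp", dataId, parameters={"bbox": box}) typically
    # arrives here, with the parameters validated against the storage class
    # and then forwarded to the reader's read() method to load only that
    # region of the exposure.
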
    def read(self, component=None):
        """Read data from a file.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.

        Raises
        ------
        ValueError
            Component requested but this file does not seem to be a concrete
            composite.
        KeyError
            Raised when parameters passed with fileDescriptor are not
            supported.
        """
        fileDescriptor = self.fileDescriptor
        if fileDescriptor.readStorageClass != fileDescriptor.storageClass:
            if component == "metadata":
                self.stripMetadata()
                return self.metadata
            elif component is not None:
                return self.readComponent(component)
            else:
                raise ValueError("Storage class inconsistency ({} vs {}) but no"
                                 " component requested".format(fileDescriptor.readStorageClass.name,
                                                               fileDescriptor.storageClass.name))
        return self.readFull()

    def write(self, inMemoryDataset):
        """Write a Python object to a file.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to store.
        """
        # Update the location with the formatter-preferred file extension
        self.fileDescriptor.location.updateExtension(self.extension)
        outputPath = self.fileDescriptor.location.path

        # check to see if we have a recipe requested
        recipeName = self.writeParameters.get("recipe")
        recipe = self.getImageCompressionSettings(recipeName)
        if recipe:
            # Can not construct a PropertySet from a hierarchical
            # dict but can update one.
            ps = PropertySet()
            ps.update(recipe)
            inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
        else:
            inMemoryDataset.writeFits(outputPath)

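    # Example (illustrative) for write(): with a write parameter such as
    # {"recipe": "lossless"}, the selected recipe dict is converted to a
    # nested PropertySet and passed to the dataset's writeFitsWithOptions();
    # without a usable recipe the dataset is written with plain writeFits()
    # defaults.
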
    def getImageCompressionSettings(self, recipeName):
        """Retrieve the relevant compression settings for this recipe.

        Parameters
        ----------
        recipeName : `str`
            Label associated with the collection of compression parameters
            to select.

        Returns
        -------
        settings : `dict`
            The selected settings.
        """
        # if no recipe has been provided and there is no default
        # return immediately
        if not recipeName:
            if "default" not in self.writeRecipes:
                return {}
            recipeName = "default"

        if recipeName not in self.writeRecipes:
            raise RuntimeError(f"Unrecognized recipe option given for compression: {recipeName}")

        recipe = self.writeRecipes[recipeName]

        # Set the seed based on dataId
        seed = hash(tuple(self.dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if plane in recipe and "scaling" in recipe[plane]:
                scaling = recipe[plane]["scaling"]
                if "seed" in scaling and scaling["seed"] == 0:
                    scaling["seed"] = seed

        return recipe

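    # Note (illustrative) for getImageCompressionSettings(): a recipe whose
    # scaling block leaves ``seed: 0`` has the dataId-derived value computed
    # above substituted, so the fuzzing seed varies between datasets instead
    # of being identical for every output file.
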
    @classmethod
    def validateWriteRecipes(cls, recipes):
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        TODO: replace this custom validation code with Cerberus (DM-11846)

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate. Can be empty dict or `None`.

        Returns
        -------
        validated : `dict`
            Validated recipes. Returns what was given if there are no
            recipes listed.

        Raises
        ------
        RuntimeError
            Raised if validation fails.
        """
        # Schemas define what should be there, and the default values (and by
        # the default value, the expected type).
        compressionSchema = {
            "algorithm": "NONE",
            "rows": 1,
            "columns": 0,
            "quantizeLevel": 0.0,
        }
        scalingSchema = {
            "algorithm": "NONE",
            "bitpix": 0,
            "maskPlanes": ["NO_DATA"],
            "seed": 0,
            "quantizeLevel": 4.0,
            "quantizePad": 5.0,
            "fuzz": True,
            "bscale": 1.0,
            "bzero": 0.0,
        }

        if not recipes:
            # We can not insist on recipes being specified
            return recipes

        def checkUnrecognized(entry, allowed, description):
            """Check to see if the entry contains unrecognised keywords"""
            unrecognized = set(entry) - set(allowed)
            if unrecognized:
                raise RuntimeError(
                    f"Unrecognized entries when parsing image compression recipe {description}: "
                    f"{unrecognized}")

        validated = {}
        for name in recipes:
            checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
            validated[name] = {}
            for plane in ("image", "mask", "variance"):
                checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                                  f"{name}->{plane}")

                np = {}
                validated[name][plane] = np
                for settings, schema in (("compression", compressionSchema),
                                         ("scaling", scalingSchema)):
                    np[settings] = {}
                    if settings not in recipes[name][plane]:
                        for key in schema:
                            np[settings][key] = schema[key]
                        continue
                    entry = recipes[name][plane][settings]
                    checkUnrecognized(entry, schema.keys(), f"{name}->{plane}->{settings}")
                    for key in schema:
                        value = type(schema[key])(entry[key]) if key in entry else schema[key]
                        np[settings][key] = value
        return validated
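    # Example (illustrative) for validateWriteRecipes(): given a minimal
    # recipe such as
    #     {"default": {plane: {"compression": {"algorithm": "GZIP_SHUFFLE"}}
    #                  for plane in ("image", "mask", "variance")}}
    # the returned structure has the remaining compression keys and a full
    # scaling block filled in from the schema defaults above.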


class FitsImageFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.Image` reading.
    """

    _readerClass = ImageFitsReader


class FitsMaskFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.Mask` reading.
    """

    _readerClass = MaskFitsReader


class FitsMaskedImageFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.MaskedImage` reading.
    """

    _readerClass = MaskedImageFitsReader