lsst.obs.base  20.0.0-27-g22bab1b+96b59cb0f5
fitsExposure.py
# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

__all__ = ("FitsExposureFormatter", "FitsImageFormatter", "FitsMaskFormatter",
           "FitsMaskedImageFormatter")

from astro_metadata_translator import fix_header
from lsst.daf.base import PropertySet
from lsst.daf.butler import Formatter
from lsst.afw.image import ExposureFitsReader, ImageFitsReader, MaskFitsReader, MaskedImageFitsReader
# Needed for ApCorrMap to resolve properly
from lsst.afw.math import BoundedField  # noqa: F401


class FitsExposureFormatter(Formatter):
    """Interface for reading and writing Exposures to and from FITS files.

    This Formatter supports write recipes.

    Each ``FitsExposureFormatter`` recipe for FITS compression should
    define ``image``, ``mask`` and ``variance`` entries, each of which may
    contain ``compression`` and ``scaling`` entries. Defaults will be
    provided for any missing elements under ``compression`` and
    ``scaling``.

    The allowed entries under ``compression`` are:

    * ``algorithm`` (`str`): compression algorithm to use
    * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
    * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
    * ``quantizeLevel`` (`float`): cfitsio quantization level

    The allowed entries under ``scaling`` are:

    * ``algorithm`` (`str`): scaling algorithm to use
    * ``bitpix`` (`int`): bits per pixel (0, 8, 16, 32, 64, -32, -64)
    * ``fuzz`` (`bool`): fuzz the values when quantising floating-point values?
    * ``seed`` (`int`): seed for the random number generator when fuzzing
    * ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing
      statistics
    * ``quantizeLevel`` (`float`): divisor of the standard deviation for
      ``STDEV_*`` scaling
    * ``quantizePad`` (`float`): number of stdev to allow on the low side (for
      ``STDEV_POSITIVE``/``NEGATIVE``)
    * ``bscale`` (`float`): manually specified ``BSCALE``
      (for ``MANUAL`` scaling)
    * ``bzero`` (`float`): manually specified ``BZERO``
      (for ``MANUAL`` scaling)

    A very simple example YAML recipe:

    .. code-block:: yaml

        lsst.obs.base.formatters.fitsExposure.FitsExposureFormatter:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default

    """
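    # Illustrative only: a fuller recipe than the one in the docstring, showing
    # both ``compression`` and ``scaling`` entries.  The recipe name and values
    # here are made up for demonstration; anything omitted is filled in from the
    # defaults in `validateWriteRecipes`.
    #
    # .. code-block:: yaml
    #
    #     lsst.obs.base.formatters.fitsExposure.FitsExposureFormatter:
    #       lossyBasic:
    #         image:
    #           compression:
    #             algorithm: GZIP_SHUFFLE
    #           scaling:
    #             algorithm: STDEV_POSITIVE
    #             bitpix: 32
    #             quantizeLevel: 10.0
    #             fuzz: true
    #             seed: 0          # 0 means "derive the seed from the dataId"
    #         mask:
    #           compression:
    #             algorithm: GZIP_SHUFFLE
    #         variance:
    #           compression:
    #             algorithm: GZIP_SHUFFLE
    #           scaling:
    #             algorithm: STDEV_POSITIVE
    #             bitpix: 32
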
    supportedExtensions = frozenset({".fits", ".fits.gz", ".fits.fz"})
    extension = ".fits"
    _metadata = None
    supportedWriteParameters = frozenset({"recipe"})
    _readerClass = ExposureFitsReader

    unsupportedParameters = {}
    """Support all parameters."""

    @property
    def metadata(self):
        """The metadata read from this file. It will be stripped as
        components are extracted from it
        (`lsst.daf.base.PropertyList`).
        """
        if self._metadata is None:
            self._metadata = self.readMetadata()
        return self._metadata

    def readMetadata(self):
        """Read all header metadata directly into a PropertyList.

        Returns
        -------
        metadata : `~lsst.daf.base.PropertyList`
            Header metadata.
        """
        # Do not use ExposureFitsReader.readMetadata because that strips
        # out lots of headers and there is no way to recover them.
        from lsst.afw.image import readMetadata
        md = readMetadata(self.fileDescriptor.location.path)
        fix_header(md)
        return md

    def stripMetadata(self):
        """Remove metadata entries that are parsed into components.

        This is only called when just the metadata is requested; stripping
        entries there forces code that wants other components to ask for those
        components directly rather than trying to extract them from the
        metadata manually, which is fragile. This behavior is an intentional
        change from Gen2.

        The entries are removed from ``self.metadata`` in place.
        """
        # TODO: make sure this covers everything, by delegating to something
        # that doesn't yet exist in afw.image.ExposureInfo.
        from lsst.afw.image import bboxFromMetadata
        from lsst.afw.geom import makeSkyWcs

        # Protect against the metadata being missing
        try:
            bboxFromMetadata(self.metadata)  # always strips
        except LookupError:
            pass
        try:
            makeSkyWcs(self.metadata, strip=True)
        except Exception:
            pass

    def readComponent(self, component, parameters=None):
        """Read a component held by the Exposure.

        Parameters
        ----------
        component : `str`
            Component to read from the file.
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that
            overrides those in ``fileDescriptor``.

        Returns
        -------
        obj : component-dependent
            In-memory component object.

        Raises
        ------
        KeyError
            Raised if the requested component cannot be handled.
        """

        # Metadata is handled explicitly elsewhere
        componentMap = {'wcs': ('readWcs', False),
                        'coaddInputs': ('readCoaddInputs', False),
                        'psf': ('readPsf', False),
                        'image': ('readImage', True),
                        'mask': ('readMask', True),
                        'variance': ('readVariance', True),
                        'photoCalib': ('readPhotoCalib', False),
                        'bbox': ('readBBox', True),
                        'dimensions': ('readBBox', True),
                        'xy0': ('readXY0', True),
                        'filter': ('readFilter', False),
                        'validPolygon': ('readValidPolygon', False),
                        'apCorrMap': ('readApCorrMap', False),
                        'visitInfo': ('readVisitInfo', False),
                        'transmissionCurve': ('readTransmissionCurve', False),
                        'detector': ('readDetector', False),
                        'extras': ('readExtraComponents', False),
                        'exposureInfo': ('readExposureInfo', False),
                        }
        method, hasParams = componentMap.get(component, (None, False))

        if method:
            # This reader can read standalone Image/Mask files as well
            # when dealing with components.
            reader = self._readerClass(self.fileDescriptor.location.path)
            caller = getattr(reader, method, None)

            if caller:
                if parameters is None:
                    parameters = self.fileDescriptor.parameters
                if parameters is None:
                    parameters = {}
                self.fileDescriptor.storageClass.validateParameters(parameters)

                if hasParams and parameters:
                    thisComponent = caller(**parameters)
                else:
                    thisComponent = caller()
                if component == "dimensions" and thisComponent is not None:
                    thisComponent = thisComponent.getDimensions()
                return thisComponent
        else:
            raise KeyError(f"Unknown component requested: {component}")

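    # A rough sketch (illustrative only; the file path is made up) of what the
    # dispatch above amounts to for a couple of components.  Components marked
    # ``True`` in the map forward any slicing parameters to the reader method;
    # the ``bbox`` keyword here is an assumption based on the afw reader API.
    #
    #     reader = ExposureFitsReader("/path/to/calexp.fits")
    #     wcs = reader.readWcs()               # 'wcs': no parameters accepted
    #     image = reader.readImage(bbox=bbox)  # 'image': slicing parameters forwarded
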
    def readFull(self, parameters=None):
        """Read the full Exposure object.

        Parameters
        ----------
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that overrides
            those in ``fileDescriptor``.

        Returns
        -------
        exposure : `~lsst.afw.image.Exposure`
            Complete in-memory exposure.
        """
        fileDescriptor = self.fileDescriptor
        if parameters is None:
            parameters = fileDescriptor.parameters
        if parameters is None:
            parameters = {}
        fileDescriptor.storageClass.validateParameters(parameters)
        reader = self._readerClass(fileDescriptor.location.path)
        return reader.read(**parameters)

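    # Illustrative cutout read (the bbox values are made up and the ``bbox``
    # parameter name is an assumption based on the afw reader API): parameters
    # supplied here, or via ``fileDescriptor.parameters``, are forwarded
    # unchanged to the reader.
    #
    #     from lsst.geom import Box2I, Point2I, Extent2I
    #     parameters = {"bbox": Box2I(Point2I(100, 100), Extent2I(64, 64))}
    #     exposure = ExposureFitsReader("/path/to/calexp.fits").read(**parameters)
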
    def read(self, component=None):
        """Read data from a file.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differs from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.

        Raises
        ------
        ValueError
            Raised if a component is requested but this file does not appear
            to be a concrete composite.
        KeyError
            Raised when parameters passed with ``fileDescriptor`` are not
            supported.
        """
        fileDescriptor = self.fileDescriptor
        if fileDescriptor.readStorageClass != fileDescriptor.storageClass:
            if component == "metadata":
                self.stripMetadata()
                return self.metadata
            elif component is not None:
                return self.readComponent(component)
            else:
                raise ValueError("Storage class inconsistency ({} vs {}) but no"
                                 " component requested".format(fileDescriptor.readStorageClass.name,
                                                               fileDescriptor.storageClass.name))
        return self.readFull()

    def write(self, inMemoryDataset):
        """Write a Python object to a file.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to store.

        Returns
        -------
        path : `str`
            The `URI` where the primary file is stored.
        """
        # Update the location with the formatter-preferred file extension
        self.fileDescriptor.location.updateExtension(self.extension)
        outputPath = self.fileDescriptor.location.path

        # Check to see if we have a recipe requested
        recipeName = self.writeParameters.get("recipe")
        recipe = self.getImageCompressionSettings(recipeName)
        if recipe:
            # Can not construct a PropertySet from a hierarchical
            # dict but can update one.
            ps = PropertySet()
            ps.update(recipe)
            inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
        else:
            inMemoryDataset.writeFits(outputPath)
        return self.fileDescriptor.location.pathInStore

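    # The recipe-to-options step above in isolation, as a rough sketch (the
    # recipe contents are illustrative): a nested dict cannot be passed to the
    # PropertySet constructor, but PropertySet.update accepts one.
    #
    #     from lsst.daf.base import PropertySet
    #     ps = PropertySet()
    #     ps.update({"image": {"compression": {"algorithm": "GZIP_SHUFFLE"}}})
    #     exposure.writeFitsWithOptions("output.fits", options=ps)
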
    def getImageCompressionSettings(self, recipeName):
        """Retrieve the relevant compression settings for this recipe.

        Parameters
        ----------
        recipeName : `str`
            Label associated with the collection of compression parameters
            to select.

        Returns
        -------
        settings : `dict`
            The selected settings.
        """
        # If no recipe has been provided and there is no default,
        # return immediately.
        if not recipeName:
            if "default" not in self.writeRecipes:
                return {}
            recipeName = "default"

        if recipeName not in self.writeRecipes:
            raise RuntimeError(f"Unrecognized recipe option given for compression: {recipeName}")

        recipe = self.writeRecipes[recipeName]

        # Set the seed based on dataId
        seed = hash(tuple(self.dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if plane in recipe and "scaling" in recipe[plane]:
                scaling = recipe[plane]["scaling"]
                if "seed" in scaling and scaling["seed"] == 0:
                    scaling["seed"] = seed

        return recipe

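    # Worked example of the seed substitution above (dataId values are made
    # up): only a scaling ``seed`` left at 0 in the recipe is replaced; an
    # explicit non-zero seed is preserved.
    #
    #     dataId = {"instrument": "HSC", "visit": 12345, "detector": 42}
    #     seed = hash(tuple(dataId.items())) % 2**31  # deterministic per dataId within a process
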
    @classmethod
    def validateWriteRecipes(cls, recipes):
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        TODO: replace this custom validation code with Cerberus (DM-11846)

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate. Can be empty dict or `None`.

        Returns
        -------
        validated : `dict`
            Validated recipes. Returns what was given if there are no
            recipes listed.

        Raises
        ------
        RuntimeError
            Raised if validation fails.
        """
        # Schemas define what should be there, and the default values (and
        # by the default value, the expected type).
        compressionSchema = {
            "algorithm": "NONE",
            "rows": 1,
            "columns": 0,
            "quantizeLevel": 0.0,
        }
        scalingSchema = {
            "algorithm": "NONE",
            "bitpix": 0,
            "maskPlanes": ["NO_DATA"],
            "seed": 0,
            "quantizeLevel": 4.0,
            "quantizePad": 5.0,
            "fuzz": True,
            "bscale": 1.0,
            "bzero": 0.0,
        }

        if not recipes:
            # We can not insist on recipes being specified
            return recipes

        def checkUnrecognized(entry, allowed, description):
            """Check to see if the entry contains unrecognised keywords"""
            unrecognized = set(entry) - set(allowed)
            if unrecognized:
                raise RuntimeError(
                    f"Unrecognized entries when parsing image compression recipe {description}: "
                    f"{unrecognized}")

        validated = {}
        for name in recipes:
            checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
            validated[name] = {}
            for plane in ("image", "mask", "variance"):
                checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                                  f"{name}->{plane}")

                np = {}
                validated[name][plane] = np
                for settings, schema in (("compression", compressionSchema),
                                         ("scaling", scalingSchema)):
                    np[settings] = {}
                    if settings not in recipes[name][plane]:
                        for key in schema:
                            np[settings][key] = schema[key]
                        continue
                    entry = recipes[name][plane][settings]
                    checkUnrecognized(entry, schema.keys(), f"{name}->{plane}->{settings}")
                    for key in schema:
                        value = type(schema[key])(entry[key]) if key in entry else schema[key]
                        np[settings][key] = value
        return validated

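    # Illustrative round trip through `validateWriteRecipes` (recipe contents
    # are made up): sparse entries come back with every compression/scaling
    # field filled from the schema defaults above.
    #
    #     recipes = {
    #         "default": {
    #             "image": {"compression": {"algorithm": "GZIP_SHUFFLE"}},
    #             "mask": {},
    #             "variance": {},
    #         }
    #     }
    #     validated = FitsExposureFormatter.validateWriteRecipes(recipes)
    #     assert validated["default"]["image"]["compression"]["algorithm"] == "GZIP_SHUFFLE"
    #     assert validated["default"]["image"]["compression"]["rows"] == 1       # schema default
    #     assert validated["default"]["mask"]["scaling"]["algorithm"] == "NONE"  # schema default
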

class FitsImageFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.Image` reading.
    """

    _readerClass = ImageFitsReader


class FitsMaskFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.Mask` reading.
    """

    _readerClass = MaskFitsReader


class FitsMaskedImageFormatter(FitsExposureFormatter):
    """Specialisation for `~lsst.afw.image.MaskedImage` reading.
    """

    _readerClass = MaskedImageFitsReader
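
# Hypothetical end-to-end usage (repository path, collection and dataId are
# invented): the Gen3 Butler looks up the formatter from its datastore
# configuration and calls ``read``/``readComponent`` behind the scenes, so a
# component read like ``calexp.wcs`` ends up in ``readComponent("wcs")``.
#
#     from lsst.daf.butler import Butler
#     butler = Butler("/repo/example", collections="HSC/runs/example")
#     exposure = butler.get("calexp", instrument="HSC", visit=12345, detector=42)
#     wcs = butler.get("calexp.wcs", instrument="HSC", visit=12345, detector=42)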