__all__ = ("FitsExposureFormatter", )

from astro_metadata_translator import fix_header
from lsst.daf.butler import Formatter
from lsst.afw.image import ExposureFitsReader
from lsst.daf.base import PropertySet
31 """Interface for reading and writing Exposures to and from FITS files.
33 This Formatter supports write recipes.
35 Each ``FitsExposureFormatter`` recipe for FITS compression should
36 define ``image``, ``mask`` and ``variance`` entries, each of which may
37 contain ``compression`` and ``scaling`` entries. Defaults will be
38 provided for any missing elements under ``compression`` and
41 The allowed entries under ``compression`` are:
43 * ``algorithm`` (`str`): compression algorithm to use
44 * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
45 * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
46 * ``quantizeLevel`` (`float`): cfitsio quantization level
48 The allowed entries under ``scaling`` are:
50 * ``algorithm`` (`str`): scaling algorithm to use
51 * ``bitpix`` (`int`): bits per pixel (0,8,16,32,64,-32,-64)
52 * ``fuzz`` (`bool`): fuzz the values when quantising floating-point values?
53 * ``seed`` (`int`): seed for random number generator when fuzzing
54 * ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing
56 * ``quantizeLevel`` (`float`): divisor of the standard deviation for
58 * ``quantizePad`` (`float`): number of stdev to allow on the low side (for
59 ``STDEV_POSITIVE``/``NEGATIVE``)
60 * ``bscale`` (`float`): manually specified ``BSCALE``
61 (for ``MANUAL`` scaling)
62 * ``bzero`` (`float`): manually specified ``BSCALE``
63 (for ``MANUAL`` scaling)
65 A very simple example YAML recipe:
69 lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
73 algorithm: GZIP_SHUFFLE
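
    The same recipe, expressed as the Python dictionary this formatter
    validates (a sketch; `validateWriteRecipes` fills in defaults for any
    ``compression`` or ``scaling`` entries that are omitted):

    .. code-block:: python

        recipe = {"default": {plane: {"compression": {"algorithm": "GZIP_SHUFFLE"}}
                              for plane in ("image", "mask", "variance")}}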
    """

    supportedExtensions = frozenset({".fits", ".fits.gz", ".fits.fz"})
    extension = ".fits"
    _metadata = None
    supportedWriteParameters = frozenset({"recipe"})
85 """The metadata read from this file. It will be stripped as
86 components are extracted from it
87 (`lsst.daf.base.PropertyList`).
94 """Read all header metadata directly into a PropertyList.
98 metadata : `~lsst.daf.base.PropertyList`
101 from lsst.afw.image
import readMetadata
107 """Remove metadata entries that are parsed into components.
109 This is only called when just the metadata is requested; stripping
110 entries there forces code that wants other components to ask for those
111 components directly rather than trying to extract them from the
112 metadata manually, which is fragile. This behavior is an intentional
117 metadata : `~lsst.daf.base.PropertyList`
118 Header metadata, to be modified in-place.
122 from lsst.afw.image
import bboxFromMetadata
123 from lsst.afw.geom
import makeSkyWcs
125 makeSkyWcs(self.
metadata, strip=
True)
128 """Read a component held by the Exposure.
132 component : `str`, optional
133 Component to read from the file.
134 parameters : `dict`, optional
135 If specified, a dictionary of slicing parameters that
136 overrides those in ``fileDescriptor``.
140 obj : component-dependent
141 In-memory component object.
146 Raised if the requested component cannot be handled.
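
        Examples
        --------
        Components are normally requested through the butler rather than by
        calling this method directly. A minimal sketch, assuming a configured
        Gen3 butler with a ``calexp`` dataset (names are illustrative):

        .. code-block:: python

            wcs = butler.get("calexp.wcs", dataId=dataId)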
        """
        componentMap = {'wcs': ('readWcs', False),
                        'coaddInputs': ('readCoaddInputs', False),
                        'psf': ('readPsf', False),
                        'image': ('readImage', True),
                        'mask': ('readMask', True),
                        'variance': ('readVariance', True),
                        'photoCalib': ('readPhotoCalib', False),
                        'bbox': ('readBBox', True),
                        'xy0': ('readXY0', True),
                        'metadata': ('readMetadata', False),
                        'filter': ('readFilter', False),
                        'polygon': ('readValidPolygon', False),
                        'apCorrMap': ('readApCorrMap', False),
                        'visitInfo': ('readVisitInfo', False),
                        'transmissionCurve': ('readTransmissionCurve', False),
                        'detector': ('readDetector', False),
                        'extras': ('readExtraComponents', False),
                        'exposureInfo': ('readExposureInfo', False),
                        }
        method, hasParams = componentMap.get(component, (None, False))
        if method:
            reader = ExposureFitsReader(self.fileDescriptor.location.path)
            caller = getattr(reader, method, None)

            if caller:
                if parameters is None:
                    parameters = self.fileDescriptor.parameters
                if parameters is None:
                    parameters = {}
                self.fileDescriptor.storageClass.validateParameters(parameters)

                if hasParams and parameters:
                    return caller(**parameters)
                else:
                    return caller()
        else:
            raise KeyError(f"Unknown component requested: {component}")
188 """Read the full Exposure object.
192 parameters : `dict`, optional
193 If specified a dictionary of slicing parameters that overrides
194 those in ``fileDescriptor``.
198 exposure : `~lsst.afw.image.Exposure`
199 Complete in-memory exposure.
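
        Examples
        --------
        A sketch of a partial read through a Gen3 butler (names are
        illustrative); ``bbox`` and ``origin`` are the slicing parameters
        typically supported for Exposures:

        .. code-block:: python

            from lsst.geom import Box2I, Point2I, Extent2I

            bbox = Box2I(Point2I(0, 0), Extent2I(100, 100))
            cutout = butler.get("calexp", dataId=dataId,
                                parameters={"bbox": bbox})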
        """
        fileDescriptor = self.fileDescriptor
        if parameters is None:
            parameters = fileDescriptor.parameters
        if parameters is None:
            parameters = {}
        fileDescriptor.storageClass.validateParameters(parameters)
        try:
            output = fileDescriptor.storageClass.pytype(fileDescriptor.location.path, **parameters)
        except TypeError:
            reader = ExposureFitsReader(fileDescriptor.location.path)
            output = reader.read(**parameters)
        return output
    def read(self, component=None, parameters=None):
        """Read data from a file.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.
        parameters : `dict`, optional
            If specified, a dictionary of slicing parameters that
            overrides those in ``fileDescriptor``.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.

        Raises
        ------
        ValueError
            Raised if a component is requested but this file does not seem
            to be a concrete composite.
        KeyError
            Raised when parameters passed with fileDescriptor are not
            supported.
        """
        fileDescriptor = self.fileDescriptor
        if fileDescriptor.readStorageClass != fileDescriptor.storageClass:
            if component == "metadata":
                self.stripMetadata()
                return self.metadata
            elif component is not None:
                return self.readComponent(component, parameters)
            else:
                raise ValueError("Storage class inconsistency ({} vs {}) but no"
                                 " component requested".format(fileDescriptor.readStorageClass.name,
                                                               fileDescriptor.storageClass.name))
        return self.readFull(parameters=parameters)
256 """Write a Python object to a file.
260 inMemoryDataset : `object`
261 The Python object to store.
266 The `URI` where the primary file is stored.
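
        Notes
        -----
        A `~lsst.daf.base.PropertySet` cannot be constructed directly from a
        hierarchical recipe dictionary, but an empty one can be updated from
        one, which is how the compression options are passed below. A minimal
        sketch of that conversion (the recipe content is illustrative):

        .. code-block:: python

            from lsst.daf.base import PropertySet

            ps = PropertySet()
            ps.update({"image": {"compression": {"algorithm": "GZIP_SHUFFLE"}}})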
        """
        # Update the location with the file extension expected by this
        # formatter.
        self.fileDescriptor.location.updateExtension(self.extension)
        outputPath = self.fileDescriptor.location.path

        # Check to see if a compression recipe was requested.
        recipeName = self.writeParameters.get("recipe")
        recipe = self.getImageCompressionSettings(recipeName)
        if recipe:
            # A PropertySet cannot be constructed from a hierarchical dict,
            # but an empty one can be updated from it.
            ps = PropertySet()
            ps.update(recipe)
            inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
        else:
            inMemoryDataset.writeFits(outputPath)
        return self.fileDescriptor.location.pathInStore
286 """Retrieve the relevant compression settings for this recipe.
291 Label associated with the collection of compression parameters
297 The selected settings.
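
        Notes
        -----
        A ``seed`` of 0 in a plane's ``scaling`` entry is replaced by a
        deterministic value derived from the data ID, so repeated writes of
        the same dataset fuzz identically. A sketch of the substitution
        performed below:

        .. code-block:: python

            seed = hash(tuple(dataId.items())) % 2**31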
302 if "default" not in self.writeRecipes:
304 recipeName =
"default"
306 if recipeName
not in self.writeRecipes:
307 raise RuntimeError(f
"Unrecognized recipe option given for compression: {recipeName}")
309 recipe = self.writeRecipes[recipeName]
312 seed = hash(tuple(self.dataId.items())) % 2**31
313 for plane
in (
"image",
"mask",
"variance"):
314 if plane
in recipe
and "scaling" in recipe[plane]:
315 scaling = recipe[plane][
"scaling"]
316 if "seed" in scaling
and scaling[
"seed"] == 0:
317 scaling[
"seed"] = seed
323 """Validate supplied recipes for this formatter.
325 The recipes are supplemented with default values where appropriate.
327 TODO: replace this custom validation code with Cerberus (DM-11846)
332 Recipes to validate. Can be empty dict or `None`.
337 Validated recipes. Returns what was given if there are no
343 Raised if validation fails.
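
        Examples
        --------
        A minimal sketch: every plane must be supplied, and validation fills
        in defaults for all documented ``compression`` keys:

        >>> recipes = {"default": {plane: {"compression": {"algorithm": "GZIP_SHUFFLE"}}
        ...                        for plane in ("image", "mask", "variance")}}
        >>> validated = FitsExposureFormatter.validateWriteRecipes(recipes)
        >>> sorted(validated["default"]["image"]["compression"])
        ['algorithm', 'columns', 'quantizeLevel', 'rows']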
        """
        # Schemas define what should be there, and the default values (and
        # the types of the default values) are used for validation.
        compressionSchema = {
            "algorithm": "NONE",
            "rows": 1,
            "columns": 0,
            "quantizeLevel": 0.0,
        }
        scalingSchema = {
            "algorithm": "NONE",
            "bitpix": 0,
            "maskPlanes": ["NO_DATA"],
            "seed": 0,
            "quantizeLevel": 4.0,
            "quantizePad": 5.0,
            "fuzz": True,
            "bscale": 0.0,
            "bzero": 0.0,
        }

        if not recipes:
            # We cannot insist on recipes being specified.
            return recipes
        def checkUnrecognized(entry, allowed, description):
            """Check to see if the entry contains unrecognised keywords."""
            unrecognized = set(entry) - set(allowed)
            if unrecognized:
                raise RuntimeError(
                    f"Unrecognized entries when parsing image compression recipe {description}: "
                    f"{unrecognized}")
        validated = {}
        for name in recipes:
            checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
            validated[name] = {}
            for plane in ("image", "mask", "variance"):
                checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                                  f"{name}->{plane}")

                np = {"compression": {}, "scaling": {}}
                validated[name][plane] = np
                for settings, schema in (("compression", compressionSchema),
                                         ("scaling", scalingSchema)):
                    # A missing section is filled entirely with defaults.
                    if settings not in recipes[name][plane]:
                        for key in schema:
                            np[settings][key] = schema[key]
                        continue
                    entry = recipes[name][plane][settings]
                    checkUnrecognized(entry, schema.keys(), f"{name}->{plane}->{settings}")
                    for key in schema:
                        value = type(schema[key])(entry[key]) if key in entry else schema[key]
                        np[settings][key] = value
        return validated