__all__ = ("FitsExposureFormatter", )

from astro_metadata_translator import fix_header
from lsst.daf.butler import Formatter
from lsst.afw.image import ExposureFitsReader
from lsst.daf.base import PropertySet


class FitsExposureFormatter(Formatter):
31 """Interface for reading and writing Exposures to and from FITS files.
33 This Formatter supports write recipes.
35 Each ``FitsExposureFormatter`` recipe for FITS compression should
36 define ``image``, ``mask`` and ``variance`` entries, each of which may
37 contain ``compression`` and ``scaling`` entries. Defaults will be
38 provided for any missing elements under ``compression`` and
    The allowed entries under ``compression`` are:

    * ``algorithm`` (`str`): compression algorithm to use
    * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
    * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
    * ``quantizeLevel`` (`float`): cfitsio quantization level
    The allowed entries under ``scaling`` are:

    * ``algorithm`` (`str`): scaling algorithm to use
    * ``bitpix`` (`int`): bits per pixel (0, 8, 16, 32, 64, -32, -64)
    * ``fuzz`` (`bool`): fuzz the values when quantising floating-point values?
    * ``seed`` (`int`): seed for the random number generator when fuzzing
    * ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing
      statistics
    * ``quantizeLevel`` (`float`): divisor of the standard deviation for
      ``STDEV_*`` scaling
    * ``quantizePad`` (`float`): number of stdev to allow on the low side (for
      ``STDEV_POSITIVE``/``NEGATIVE``)
    * ``bscale`` (`float`): manually specified ``BSCALE``
      (for ``MANUAL`` scaling)
    * ``bzero`` (`float`): manually specified ``BZERO``
      (for ``MANUAL`` scaling)
    A very simple example YAML recipe:

    .. code-block:: yaml

        lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
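
    A fuller recipe may also set ``scaling`` entries. The recipe below is
    illustrative only (a sketch built from the entries listed above, not a
    recommended configuration):

    .. code-block:: yaml

        lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
          lossyBasic:
            image:
              compression:
                algorithm: RICE
              scaling:
                algorithm: STDEV_POSITIVE
                bitpix: 32
                quantizeLevel: 4.0
                seed: 0
            mask:
              compression:
                algorithm: GZIP_SHUFFLE
            variance:
              compression:
                algorithm: RICE
              scaling:
                algorithm: STDEV_POSITIVE
                bitpix: 32
    """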
    supportedExtensions = frozenset({".fits", ".fits.gz", ".fits.fz"})
    extension = ".fits"
    _metadata = None
    supportedWriteParameters = frozenset({"recipe"})
85 """The metadata read from this file. It will be stripped as
86 components are extracted from it
87 (`lsst.daf.base.PropertyList`).
94 """Read all header metadata directly into a PropertyList.
98 metadata : `~lsst.daf.base.PropertyList`
103 from lsst.afw.image
import readMetadata
109 """Remove metadata entries that are parsed into components.
111 This is only called when just the metadata is requested; stripping
112 entries there forces code that wants other components to ask for those
113 components directly rather than trying to extract them from the
114 metadata manually, which is fragile. This behavior is an intentional
119 metadata : `~lsst.daf.base.PropertyList`
120 Header metadata, to be modified in-place.
124 from lsst.afw.image
import bboxFromMetadata
125 from lsst.afw.geom
import makeSkyWcs
133 makeSkyWcs(self.
metadata, strip=
True)
138 """Read a component held by the Exposure.
142 component : `str`, optional
143 Component to read from the file.
144 parameters : `dict`, optional
145 If specified, a dictionary of slicing parameters that
146 overrides those in ``fileDescriptor``.
150 obj : component-dependent
151 In-memory component object.
156 Raised if the requested component cannot be handled.
        componentMap = {'wcs': ('readWcs', False),
                        'coaddInputs': ('readCoaddInputs', False),
                        'psf': ('readPsf', False),
                        'image': ('readImage', True),
                        'mask': ('readMask', True),
                        'variance': ('readVariance', True),
                        'photoCalib': ('readPhotoCalib', False),
                        'bbox': ('readBBox', True),
                        'dimensions': ('readBBox', True),
                        'xy0': ('readXY0', True),
                        'filter': ('readFilter', False),
                        'validPolygon': ('readValidPolygon', False),
                        'apCorrMap': ('readApCorrMap', False),
                        'visitInfo': ('readVisitInfo', False),
                        'transmissionCurve': ('readTransmissionCurve', False),
                        'detector': ('readDetector', False),
                        'extras': ('readExtraComponents', False),
                        'exposureInfo': ('readExposureInfo', False),
                        }
        # Each entry maps a component name to the ExposureFitsReader method
        # that reads it and whether that method accepts slicing parameters.
        method, hasParams = componentMap.get(component, (None, False))
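        # For example (a sketch; the accepted parameter names are defined by
        # the Exposure StorageClass, not here): component="image" with
        # parameters={"bbox": someBox} resolves below to
        # reader.readImage(bbox=someBox), while component="wcs" resolves to
        # reader.readWcs() because WCS reads take no slicing parameters.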
        if method:
            reader = ExposureFitsReader(self.fileDescriptor.location.path)
            caller = getattr(reader, method, None)

            if caller:
                if parameters is None:
                    parameters = self.fileDescriptor.parameters
                if parameters is None:
                    parameters = {}
                self.fileDescriptor.storageClass.validateParameters(parameters)

                if hasParams and parameters:
                    thisComponent = caller(**parameters)
                else:
                    thisComponent = caller()
                if component == "dimensions" and thisComponent is not None:
                    thisComponent = thisComponent.getDimensions()
                return thisComponent
        else:
            raise KeyError(f"Unknown component requested: {component}")
204 """Read the full Exposure object.
208 parameters : `dict`, optional
209 If specified a dictionary of slicing parameters that overrides
210 those in ``fileDescriptor``.
214 exposure : `~lsst.afw.image.Exposure`
215 Complete in-memory exposure.
217 fileDescriptor = self.fileDescriptor
218 if parameters
is None:
219 parameters = fileDescriptor.parameters
220 if parameters
is None:
222 fileDescriptor.storageClass.validateParameters(parameters)
224 output = fileDescriptor.storageClass.pytype(fileDescriptor.location.path, **parameters)
226 reader = ExposureFitsReader(fileDescriptor.location.path)
227 output = reader.read(**parameters)
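
    # readFull() usage sketch (illustrative; the accepted slicing parameters
    # are defined by the Exposure StorageClass, e.g. possibly a "bbox" key):
    #
    #     exposure = formatter.readFull(parameters={"bbox": someBox})
    #
    # The try/except above falls back to ExposureFitsReader when the storage
    # class python type cannot be constructed directly from a file path.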
    def read(self, component=None):
        """Read data from a file.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differs from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.

        Raises
        ------
        ValueError
            Raised if a component is requested but this file does not seem to
            be a concrete composite.
        KeyError
            Raised when parameters passed with fileDescriptor are not
            supported.
        """
        fileDescriptor = self.fileDescriptor
        if fileDescriptor.readStorageClass != fileDescriptor.storageClass:
            if component == "metadata":
                self.stripMetadata()
                return self.metadata
            elif component is not None:
                return self.readComponent(component)
            else:
                raise ValueError("Storage class inconsistency ({} vs {}) but no"
                                 " component requested".format(fileDescriptor.readStorageClass.name,
                                                               fileDescriptor.storageClass.name))
        return self.readFull()
269 """Write a Python object to a file.
273 inMemoryDataset : `object`
274 The Python object to store.
279 The `URI` where the primary file is stored.
282 self.fileDescriptor.location.updateExtension(self.
extension)
283 outputPath = self.fileDescriptor.location.path
286 recipeName = self.writeParameters.get(
"recipe")
293 inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
295 inMemoryDataset.writeFits(outputPath)
296 return self.fileDescriptor.location.pathInStore
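
    # Recipe selection sketch (the recipe name is illustrative): with write
    # parameters such as {"recipe": "lossyBasic"} supplied through butler
    # configuration, write() looks up self.writeRecipes["lossyBasic"] and
    # passes the expanded settings to writeFitsWithOptions(); with no recipe
    # and no "default" entry, the exposure is written uncompressed via
    # writeFits().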
299 """Retrieve the relevant compression settings for this recipe.
304 Label associated with the collection of compression parameters
310 The selected settings.
315 if "default" not in self.writeRecipes:
317 recipeName =
"default"
319 if recipeName
not in self.writeRecipes:
320 raise RuntimeError(f
"Unrecognized recipe option given for compression: {recipeName}")
322 recipe = self.writeRecipes[recipeName]
        # Set the fuzzing seed from the dataId so it is deterministic for a
        # given dataset but differs between datasets.
        seed = hash(tuple(self.dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if plane in recipe and "scaling" in recipe[plane]:
                scaling = recipe[plane]["scaling"]
                if "seed" in scaling and scaling["seed"] == 0:
                    scaling["seed"] = seed

        return recipe
336 """Validate supplied recipes for this formatter.
338 The recipes are supplemented with default values where appropriate.
340 TODO: replace this custom validation code with Cerberus (DM-11846)
345 Recipes to validate. Can be empty dict or `None`.
350 Validated recipes. Returns what was given if there are no
356 Raised if validation fails.
360 compressionSchema = {
364 "quantizeLevel": 0.0,
369 "maskPlanes": [
"NO_DATA"],
371 "quantizeLevel": 4.0,
        def checkUnrecognized(entry, allowed, description):
            """Check that the entry contains only recognized keywords."""
            unrecognized = set(entry) - set(allowed)
            if unrecognized:
                raise RuntimeError(
                    f"Unrecognized entries when parsing image compression recipe {description}: "
                    f"{unrecognized}")

        validated = {}
        for name in recipes:
            checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
            validated[name] = {}
            for plane in ("image", "mask", "variance"):
                checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                                  f"{name}->{plane}")

                np = {}
                validated[name][plane] = np
                for settings, schema in (("compression", compressionSchema),
                                         ("scaling", scalingSchema)):
                    np[settings] = {}
                    if settings not in recipes[name][plane]:
                        # Nothing supplied for these settings; copy in the
                        # schema defaults wholesale.
                        for key in schema:
                            np[settings][key] = schema[key]
                        continue
                    entry = recipes[name][plane][settings]
                    checkUnrecognized(entry, schema.keys(), f"{name}->{plane}->{settings}")
                    for key in schema:
                        # Coerce the supplied value to the type of the schema
                        # default, falling back to the default if absent.
                        value = type(schema[key])(entry[key]) if key in entry else schema[key]
                        np[settings][key] = value
        return validated
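
    # A minimal sketch of a recipes dict that passes validation (names and
    # expected values are illustrative, given the schema defaults above):
    #
    #     recipes = {"default": {plane: {"compression": {"algorithm": "GZIP_SHUFFLE"}}
    #                            for plane in ("image", "mask", "variance")}}
    #     validated = FitsExposureFormatter.validateWriteRecipes(recipes)
    #     # validated["default"]["image"]["compression"]["rows"] == 1
    #     # validated["default"]["image"]["scaling"]["algorithm"] == "NONE"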