__all__ = ("FitsExposureFormatter",)
24 from astro_metadata_translator
import fix_header
25 from lsst.daf.butler
import Formatter
26 from lsst.afw.image
import ExposureFitsReader
27 from lsst.daf.base
import PropertySet
31 """Interface for reading and writing Exposures to and from FITS files.
33 This Formatter supports write recipes.
35 Each ``FitsExposureFormatter`` recipe for FITS compression should
36 define ``image``, ``mask`` and ``variance`` entries, each of which may
37 contain ``compression`` and ``scaling`` entries. Defaults will be
38 provided for any missing elements under ``compression`` and
41 The allowed entries under ``compression`` are:
43 * ``algorithm`` (`str`): compression algorithm to use
44 * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
45 * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
46 * ``quantizeLevel`` (`float`): cfitsio quantization level
48 The allowed entries under ``scaling`` are:
50 * ``algorithm`` (`str`): scaling algorithm to use
51 * ``bitpix`` (`int`): bits per pixel (0,8,16,32,64,-32,-64)
52 * ``fuzz`` (`bool`): fuzz the values when quantising floating-point values?
53 * ``seed`` (`int`): seed for random number generator when fuzzing
54 * ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing
56 * ``quantizeLevel`` (`float`): divisor of the standard deviation for
58 * ``quantizePad`` (`float`): number of stdev to allow on the low side (for
59 ``STDEV_POSITIVE``/``NEGATIVE``)
60 * ``bscale`` (`float`): manually specified ``BSCALE``
61 (for ``MANUAL`` scaling)
* ``bzero`` (`float`): manually specified ``BZERO``
  (for ``MANUAL`` scaling)
65 A very simple example YAML recipe:
69 lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
73 algorithm: GZIP_SHUFFLE
# Write-time parameters this formatter understands (currently only the
# name of a compression recipe to apply).
supportedWriteParameters = frozenset(("recipe",))
84 """The metadata read from this file. It will be stripped as
85 components are extracted from it
86 (`lsst.daf.base.PropertyList`).
93 """Read all header metadata directly into a PropertyList.
97 metadata : `~lsst.daf.base.PropertyList`
100 from lsst.afw.image
import readMetadata
106 """Remove metadata entries that are parsed into components.
108 This is only called when just the metadata is requested; stripping
109 entries there forces code that wants other components to ask for those
110 components directly rather than trying to extract them from the
111 metadata manually, which is fragile. This behavior is an intentional
116 metadata : `~lsst.daf.base.PropertyList`
117 Header metadata, to be modified in-place.
121 from lsst.afw.image
import bboxFromMetadata
122 from lsst.afw.geom
import makeSkyWcs
124 makeSkyWcs(self.
metadata, strip=
True)
127 """Read a component held by the Exposure.
131 component : `str`, optional
132 Component to read from the file.
133 parameters : `dict`, optional
134 If specified, a dictionary of slicing parameters that
135 overrides those in ``fileDescriptor``.
139 obj : component-dependent
140 In-memory component object.
145 Raised if the requested component cannot be handled.
147 componentMap = {
'wcs': (
'readWcs',
False),
148 'coaddInputs': (
'readCoaddInputs',
False),
149 'psf': (
'readPsf',
False),
150 'image': (
'readImage',
True),
151 'mask': (
'readMask',
True),
152 'variance': (
'readVariance',
True),
153 'photoCalib': (
'readPhotoCalib',
False),
154 'bbox': (
'readBBox',
True),
155 'xy0': (
'readXY0',
True),
156 'metadata': (
'readMetadata',
False),
157 'filter': (
'readFilter',
False),
158 'polygon': (
'readValidPolygon',
False),
159 'apCorrMap': (
'readApCorrMap',
False),
160 'visitInfo': (
'readVisitInfo',
False),
161 'transmissionCurve': (
'readTransmissionCurve',
False),
162 'detector': (
'readDetector',
False),
163 'extras': (
'readExtraComponents',
False),
164 'exposureInfo': (
'readExposureInfo',
False),
166 method, hasParams = componentMap.get(component,
None)
169 reader = ExposureFitsReader(self.fileDescriptor.location.path)
170 caller = getattr(reader, method,
None)
173 if parameters
is None:
174 parameters = self.fileDescriptor.parameters
175 if parameters
is None:
177 self.fileDescriptor.storageClass.validateParameters(parameters)
179 if hasParams
and parameters:
180 return caller(**parameters)
184 raise KeyError(f
"Unknown component requested: {component}")
187 """Read the full Exposure object.
191 parameters : `dict`, optional
192 If specified a dictionary of slicing parameters that overrides
193 those in ``fileDescriptor``.
197 exposure : `~lsst.afw.image.Exposure`
198 Complete in-memory exposure.
200 fileDescriptor = self.fileDescriptor
201 if parameters
is None:
202 parameters = fileDescriptor.parameters
203 if parameters
is None:
205 fileDescriptor.storageClass.validateParameters(parameters)
207 output = fileDescriptor.storageClass.pytype(fileDescriptor.location.path, **parameters)
209 reader = ExposureFitsReader(fileDescriptor.location.path)
210 output = reader.read(**parameters)
def read(self, component=None, parameters=None):
    """Read data from a file.

    Parameters
    ----------
    component : `str`, optional
        Component to read from the file. Only used if the `StorageClass`
        for reading differed from the `StorageClass` used to write the
        file.
    parameters : `dict`, optional
        If specified, a dictionary of slicing parameters that
        overrides those in ``fileDescriptor``.

    Returns
    -------
    inMemoryDataset : `object`
        The requested data as a Python object. The type of object
        is controlled by the specific formatter.

    Raises
    ------
    ValueError
        Component requested but this file does not seem to be a concrete
        composite.
    KeyError
        Raised when parameters passed with fileDescriptor are not
        supported.
    """
    fileDescriptor = self.fileDescriptor
    # A storage-class mismatch means the caller wants a component, not
    # the full exposure.
    if fileDescriptor.readStorageClass != fileDescriptor.storageClass:
        if component == "metadata":
            # BUG FIX: these branches fell through without returning.
            self.stripMetadata()
            return self.metadata
        elif component is not None:
            return self.readComponent(component, parameters)
        else:
            raise ValueError("Storage class inconsistency ({} vs {}) but no"
                             " component requested".format(fileDescriptor.readStorageClass.name,
                                                           fileDescriptor.storageClass.name))
    return self.readFull(parameters=parameters)
255 """Write a Python object to a file.
259 inMemoryDataset : `object`
260 The Python object to store.
265 The `URI` where the primary file is stored.
268 self.fileDescriptor.location.updateExtension(self.
extension)
269 outputPath = self.fileDescriptor.location.path
272 recipeName = self.writeParameters.get(
"recipe")
279 inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
281 inMemoryDataset.writeFits(outputPath)
282 return self.fileDescriptor.location.pathInStore
285 """Retrieve the relevant compression settings for this recipe.
290 Label associated with the collection of compression parameters
296 The selected settings.
301 if "default" not in self.writeRecipes:
303 recipeName =
"default"
305 if recipeName
not in self.writeRecipes:
306 raise RuntimeError(f
"Unrecognized recipe option given for compression: {recipeName}")
308 recipe = self.writeRecipes[recipeName]
311 seed = hash(tuple(self.dataId.items())) % 2**31
312 for plane
in (
"image",
"mask",
"variance"):
313 if plane
in recipe
and "scaling" in recipe[plane]:
314 scaling = recipe[plane][
"scaling"]
315 if "seed" in scaling
and scaling[
"seed"] == 0:
316 scaling[
"seed"] = seed
322 """Validate supplied recipes for this formatter.
324 The recipes are supplemented with default values where appropriate.
326 TODO: replace this custom validation code with Cerberus (DM-11846)
331 Recipes to validate. Can be empty dict or `None`.
336 Validated recipes. Returns what was given if there are no
342 Raised if validation fails.
346 compressionSchema = {
350 "quantizeLevel": 0.0,
355 "maskPlanes": [
"NO_DATA"],
357 "quantizeLevel": 4.0,
368 def checkUnrecognized(entry, allowed, description):
369 """Check to see if the entry contains unrecognised keywords"""
370 unrecognized = set(entry) - set(allowed)
373 f
"Unrecognized entries when parsing image compression recipe {description}: "
378 checkUnrecognized(recipes[name], [
"image",
"mask",
"variance"], name)
380 for plane
in (
"image",
"mask",
"variance"):
381 checkUnrecognized(recipes[name][plane], [
"compression",
"scaling"],
385 validated[name][plane] = np
386 for settings, schema
in ((
"compression", compressionSchema),
387 (
"scaling", scalingSchema)):
389 if settings
not in recipes[name][plane]:
391 np[settings][key] = schema[key]
393 entry = recipes[name][plane][settings]
394 checkUnrecognized(entry, schema.keys(), f
"{name}->{plane}->{settings}")
396 value = type(schema[key])(entry[key])
if key
in entry
else schema[key]
397 np[settings][key] = value