Coverage for python/astro_metadata_translator/headers.py : 7%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
# This file is part of astro_metadata_translator. # # Developed for the LSST Data Management System. # This product includes software developed by the LSST Project # (http://www.lsst.org). # See the LICENSE file at the top-level directory of this distribution # for details of code ownership. # # Use of this source code is governed by a 3-clause BSD-style # license that can be found in the LICENSE file.
"""Name of environment variable containing search path for header fix up."""
def merge_headers(headers, mode="overwrite", sort=False, first=None, last=None):
    """Combine a collection of headers into one dict.

    Parameters
    ----------
    headers : `list` of `dict` (or `dict`-like)
        Headers to combine. `~lsst.daf.base.PropertyList` is supported.
    mode : `str`
        Policy applied when two headers share a key but disagree on the
        value:

        - ``'overwrite'`` : Later header wins.
        - ``'drop'`` : Conflicting key is removed from the result.
        - ``'first'`` : Earliest value encountered is kept.
        - ``'append'`` : Value becomes a list with one entry per header
          (`None` where a header lacked the key). If every header that has
          the key agrees on the value, that single value is stored instead.
    sort : `bool`, optional
        If `True`, attempt to sort the headers into date order before
        merging; this changes the outcome for order-sensitive modes.
    first : `list` or `tuple`, optional
        Keys whose merged value should always be the first one encountered,
        regardless of ``mode`` (ignored for ``append``). Useful for
        range-defining headers such as ``DATE-OBS`` and ``AZSTART``.
        Missing keys are silently skipped so one list can serve several
        instruments.
    last : `list` or `tuple`, optional
        Keys whose merged value should always be the last one encountered,
        regardless of ``mode`` (ignored for ``append``). Useful for
        range-defining headers such as ``DATE-END`` and ``AZEND``.
        Missing keys are silently skipped.

    Returns
    -------
    merged : `dict`
        Single `dict` combining all the headers under the requested mode.

    Notes
    -----
    When both ``first`` and ``last`` are given, ``first`` keys are applied
    before ``last`` keys; overlapping keys are not checked for.
    """
    if not headers:
        raise ValueError("No headers supplied.")

    # Normalize PropertyList-like objects to plain dicts; since Python 3.7
    # ordinary dicts preserve insertion order.
    headers = [h.toOrderedDict() if hasattr(h, "toOrderedDict") else h for h in headers]

    # Nothing to merge for a single header -- return an independent copy.
    if len(headers) == 1:
        return copy.deepcopy(headers[0])

    if sort:
        def _begin_date(hdr):
            # Prefer the instrument-specific translator; fall back to the
            # generic FITS translator when none claims the header.
            try:
                cls = MetadataTranslator.determine_translator(hdr)
            except ValueError:
                cls = FitsTranslator
            return cls(hdr).to_datetime_begin()

        headers = sorted(headers, key=_begin_date)

    log.debug("Received %d headers for merging", len(headers))

    # Seed the result from the first header; remaining headers are folded in.
    first_hdr = headers.pop(0)
    merged = copy.deepcopy(first_hdr)

    if mode == "overwrite":
        for hdr in headers:
            merged.update(hdr)

    elif mode == "first":
        # Deliberately not implemented as reversed-overwrite: that would
        # make the key ordering of the result depend on the mode.
        for hdr in headers:
            for key in hdr:
                if key not in merged:
                    merged[key] = hdr[key]

    elif mode == "drop":
        conflicted = set()
        for hdr in headers:
            for key in hdr:
                if key not in merged:
                    merged[key] = hdr[key]
                elif merged[key] != hdr[key]:
                    # Defer the deletion: removing the key mid-loop would
                    # let a later header re-insert it.
                    conflicted.add(key)
        for key in conflicted:
            del merged[key]

    elif mode == "append":
        multi_valued = set()
        for hdr in headers:
            for key in hdr:
                if key not in merged:
                    merged[key] = hdr[key]
                elif not isinstance(merged[key], list) and merged[key] != hdr[key]:
                    # Conflict detected: mark the slot and build the full
                    # per-header list at the end so earlier values (and
                    # None placeholders) are captured correctly.
                    merged[key] = []
                    multi_valued.add(key)
        for key in multi_valued:
            merged[key] = [
                h[key] if key in h else None
                for h in itertools.chain([first_hdr], headers)
            ]

    else:
        raise ValueError(f"Unsupported value of '{mode}' for mode parameter.")

    # NOTE(review): the docstring says first/last are ignored for append
    # mode, but they are applied unconditionally here (an "if mode !=
    # 'append'" guard appears to have been commented out upstream) --
    # behavior preserved as-is; confirm against the project's intent.
    def _pin_values(target, keys, candidates):
        # Copy into target the first value found for each key, scanning the
        # candidate headers in order; keys absent everywhere are skipped.
        if keys:
            for k in keys:
                for h in candidates:
                    if k in h:
                        target[k] = h[k]
                        break

    everything = (first_hdr, *headers)
    _pin_values(merged, first, everything)
    _pin_values(merged, last, tuple(reversed(everything)))

    return merged
def fix_header(header, search_path=None, translator_class=None, filename=None):
    """Update, in place, the supplied header with known corrections.

    Parameters
    ----------
    header : `dict`-like
        Header to correct.
    search_path : `list`, optional
        Explicit directory paths to search for correction files.
    translator_class : `MetadataTranslator`-class, optional
        If not `None`, the class to use to translate the supplied headers
        into standard form. Otherwise each registered translator class will
        be asked in turn if it knows how to translate the supplied header.
    filename : `str`, optional
        Name of the file whose header is being translated. For some
        datasets with missing header information this can sometimes allow
        for some fixups in translations.

    Returns
    -------
    fixed : `bool`
        `True` if the header was updated.

    Raises
    ------
    ValueError
        Raised if the supplied header is not understood by any registered
        translation classes.
    TypeError
        Raised if the supplied translation class is not a
        `MetadataTranslator`.

    Notes
    -----
    In order to determine that a header update is required it is necessary
    for the header to be handled by the supplied translator class or else
    support automatic translation class determination. It is also required
    that the ``observation_id`` and ``instrument`` be calculable prior to
    header fix up.

    Correction files use names of the form ``instrument-obsid.yaml`` (for
    example ``LATISS-AT_O_20190329_000022.yaml``). The YAML file should
    have the format of:

    .. code-block:: yaml

        EXPTIME: 30.0
        IMGTYPE: bias

    where each key/value pair is copied directly into the supplied header,
    overwriting any previous values.

    This function searches a number of locations for such a correction
    file. The search order is:

    - Any paths explicitly supplied through ``search_path``.
    - The contents of the PATH-like environment variable
      ``$METADATA_CORRECTIONS_PATH``.
    - Any search paths supplied by the matching translator class.

    The first file located in the search path is used for the correction.
    """
    # PropertyList is not dict-like so force to a dict here to allow the
    # translation classes to work. We update the original header though.
    if hasattr(header, "toOrderedDict"):
        header_to_translate = header.toOrderedDict()
    else:
        header_to_translate = header

    if translator_class is None:
        translator_class = MetadataTranslator.determine_translator(header_to_translate, filename=filename)
    elif not issubclass(translator_class, MetadataTranslator):
        raise TypeError(f"Translator class must be a MetadataTranslator, not {translator_class}")

    # Create an instance for this header
    translator = translator_class(header_to_translate, filename=filename)

    # To determine the file look up we need the observation_id and instrument
    try:
        obsid = translator.to_observation_id()
        instrument = translator.to_instrument()
    except Exception:
        # Deliberate best-effort: without these two values the correction
        # file cannot be named, so report "no fix" without comment.
        return False

    target_file = f"{instrument}-{obsid}.yaml"
    log.debug("Checking for header correction file named %s", target_file)

    # Assemble the search path in priority order: explicit paths first,
    # then the environment variable, then translator-provided paths.
    paths = []
    if search_path is not None:
        paths.extend(search_path)
    if ENV_VAR_NAME in os.environ and os.environ[ENV_VAR_NAME]:
        paths.extend(os.environ[ENV_VAR_NAME].split(os.path.pathsep))

    paths.extend(translator.search_paths())

    for p in paths:
        correction_file = os.path.join(p, target_file)
        if os.path.exists(correction_file):
            with open(correction_file) as fh:
                log.debug("Applying header corrections from file %s", correction_file)
                corrections = yaml.safe_load(fh)

                # BUGFIX: yaml.safe_load returns None for an empty (or
                # all-comment) document; calling .items() on it would raise
                # AttributeError. An empty correction file now applies no
                # changes but still counts as the located correction file.
                if corrections:
                    # Apply corrections (PropertyList does not yet have
                    # update())
                    for k, v in corrections.items():
                        header[k] = v

            return True

    return False