Coverage for python / lsst / cell_coadds / _fits.py: 15%
276 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-23 08:47 +0000
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-23 08:47 +0000
1# This file is part of cell_coadds.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22"""Module to handle FITS serialization and de-serialization.
24The routines to write and read the files are in the same module, as a change to
25one is typically accompanied by a corresponding change to another. Code changes
26relating to writing the file must bump to the version number denoted by the
27module constant FILE_FORMAT_VERSION.
29Although the typical use case is for newer versions of the code to read files
30written by an older version, for the purposes of deciding the newer version
31string, it is helpful to think about an older version of the reader attempting
32to read a newer version of the file on disk. The policy for bumping the version
33is as follows:
351. When the on-disk file format written by this module changes such that the
36previous version of the reader can still read files written by the newer
37version, then there should be a minor bump.
392. When the on-disk format written by this module changes in a way that will
40prevent the previous version of the reader from reading a file produced by the
41current version of the module, then there should be a major bump. This usually
means that the new version of the reader cannot read older files either,
save for temporary support with deprecation warnings, possibly until a new
44release of the Science Pipelines is made.
46Examples
47--------
481. A file with VERSION=1.3 should still be readable by the reader in
49this module when the module-level constant FILE_FORMAT_VERSION=1.4. A file
50written with VERSION=1.4 will typically be readable by a reader when the
51module-level FILE_FORMAT_VERSION=1.3, although such a use case is not expected.
52A concrete example of change
53that requires only a minor bump is adding another BinTable that keeps track of
54the input visits.
562. An example of major change would be migrating from using
57BinTableHDU to ImageHDU to save data. Even if the reader supports reading
either of these formats based on the value of VERSION from the header, it should
59be a major change because the previous version of the reader cannot read data
60from ImageHDUs.
62Unit tests only check that a file written can be read by the concurrent version
63of the module, but not by any of the previous ones. Hence, bumping
64FILE_FORMAT_VERSION to the appropriate value is ultimately at the discretion of
65the developers.
67A major bump must also be recorded in the `isCompatibleWith` method.
68It is plausible that different (non-consequent) major format versions can be
69read by the same reader (due to reverting back to an earlier format, or to
70something very similar). `isCompatibleWith` method offers the convenience of
71checking if a particular format version can be read by the current reader.
73Note that major version 0 is considered unstable and experimental and none of
the guarantees above apply.
75"""
77from __future__ import annotations
__all__ = ("CellCoaddFitsFormatter", "CellCoaddFitsReader", "IncompatibleVersionError", "writeMultipleCellCoaddAsFits")
86import logging
87import os
88from collections.abc import Mapping
89from dataclasses import dataclass
90from typing import Any
92import numpy as np
93from astropy.io import fits
94from astropy.table import Table
95from frozendict import frozendict
96from numpy.typing import DTypeLike
97from packaging import version
99import lsst.afw.geom as afwGeom
100import lsst.afw.image as afwImage
101from lsst.afw.image import ImageD, ImageF
102from lsst.daf.base import PropertySet
103from lsst.geom import Box2I, Extent2I, Point2D, Point2I
104from lsst.obs.base.formatters.fitsGeneric import FitsGenericFormatter
105from lsst.skymap import Index2D
107from ._coadd_ap_corr_map import EMPTY_AP_CORR_MAP
108from ._common_components import CoaddUnits, CommonComponents
109from ._grid_container import GridContainer
110from ._identifiers import CellIdentifiers, ObservationIdentifiers, PatchIdentifiers
111from ._image_planes import OwnedImagePlanes
112from ._multiple_cell_coadd import MultipleCellCoadd
113from ._single_cell_coadd import CoaddInputs, SingleCellCoadd
114from ._uniform_grid import UniformGrid
115from .typing_helpers import SingleCellCoaddApCorrMap
FILE_FORMAT_VERSION = "0.8"
"""Version number for the file format as persisted, presented as a string of
the form M.m, where M is the major version, m is the minor version.
"""

MAX_POLYGON_VERTICES = 8
"""Maximum number of vertices the overlap polygon region between a
per-detector warp and the patch bounding box can have."""
# The sketch below shows a detector footprint crossing the patch bounding
# box; the intersection of two convex quadrilaterals can have at most
# eight vertices:
# --------------
# |  /      \  |
# | /        \ |
# |/          \|
# |            |
# |            |
# |\          /|
# | \        / |
# |  \      /  |
# --------------

logger = logging.getLogger(__name__)
class IncompatibleVersionError(RuntimeError):
    """Raised when a FITS file was written with a format version that this
    version of `CellCoaddFitsReader` cannot read.
    """
@dataclass
class VisitRecord:
    """Relevant information about a single visit.

    This is a module-internal convenience container used while
    de-serializing the per-visit metadata tables.
    """

    # NOTE: field order is part of the (positional) construction contract.
    visit: int
    day_obs: int
    physical_filter: str
class CellCoaddFitsFormatter(FitsGenericFormatter):
    """Butler formatter for writing and reading cell coadds to/from FITS.

    Relies on the presence of ``readFits`` and ``writeFits`` methods on the
    persisted objects (for now).
    """
class CellCoaddFitsReader:
    """A reader class to read from a FITS file and produce cell-based coadds.

    This reader class has read methods that can either return a single
    component without reading the entire file (e.g., readBBox, readWcs)
    and read methods that return a full coadd (e.g.,
    readAsMultipleCellCoadd, readAsExplodedCellCoadd, readAsStitchedCoadd).

    Parameters
    ----------
    filename : `str`
        The name of the FITS file to read.

    Raises
    ------
    FileNotFoundError
        Raised if ``filename`` does not exist.
    """

    # Minimum and maximum compatible file format versions are listed as
    # iterables so as to allow for discontiguous intervals.
    MINIMUM_FILE_FORMAT_VERSIONS = ("0.1",)
    MAXIMUM_FILE_FORMAT_VERSIONS = ("1.0",)

    def __init__(self, filename: str) -> None:
        if not os.path.exists(filename):
            # Include the offending path so callers can diagnose the failure.
            raise FileNotFoundError(f"File {filename} not found")

        self.filename = filename

    @classmethod
    def isCompatibleWith(cls, written_version: str, /) -> bool:
        """Check if the serialization version is compatible with the reader.

        This is a convenience method to ask if the current version of this
        class can read a file, based on the VERSION in its header.

        Parameters
        ----------
        written_version : `str`
            The VERSION of the file to be read.

        Returns
        -------
        compatible : `bool`
            Whether the reader can read a file whose VERSION is
            ``written_version``.

        Notes
        -----
        This accepts the other version as a positional argument only.
        """
        written_version_object = version.parse(written_version)
        # Compatible intervals are half-open: [min_version, max_version).
        for min_version, max_version in zip(
            cls.MINIMUM_FILE_FORMAT_VERSIONS,
            cls.MAXIMUM_FILE_FORMAT_VERSIONS,
            strict=True,
        ):
            if version.parse(min_version) <= written_version_object < version.parse(max_version):
                return True

        return False

    def readAsMultipleCellCoadd(self) -> MultipleCellCoadd:
        """Read the FITS file as a MultipleCellCoadd object.

        Raises
        ------
        IncompatibleVersionError
            Raised if the version of this module that wrote the file is
            incompatible with this module that is reading it in.
        ValueError
            Raised if the outer cell size is not padded equally in either
            direction or by an even number of pixels.
        """
        with fits.open(self.filename) as hdu_list:
            header = hdu_list[1].header
            # Files written before VERSION was recorded are treated as 0.1.
            written_version = header.get("VERSION", "0.1")
            if not self.isCompatibleWith(written_version):
                raise IncompatibleVersionError(
                    f"{self.filename} was written with version {written_version} "
                    f"but attempting to read it with a reader designed for {FILE_FORMAT_VERSION}"
                )
            if written_version != FILE_FORMAT_VERSION:
                logger.info(
                    "Reading %s having version %s with reader designed for %s",
                    self.filename,
                    written_version,
                    FILE_FORMAT_VERSION,
                )

            written_version = version.parse(written_version)

            # TODO: Remove this when FILE_FORMAT_VERSION is bumped to 1.0
            if written_version < version.parse("0.3"):
                header.rename_keyword("BAND", "FILTER")

            data = hdu_list[1].data

            # Read in WCS
            ps = PropertySet()
            ps.update(hdu_list[0].header)
            wcs = afwGeom.makeSkyWcs(ps)

            # Build the quantities needed to construct a MultipleCellCoadd.
            if written_version >= version.parse("0.4"):
                coadd_units = header["TUNIT2"]
            else:
                logger.info("Attempting to set pixel units from TUNIT1.")
                coadd_units = header.get("TUNIT1", CoaddUnits.legacy.name)

            outer_cell_size = Extent2I(header["OCELL1"], header["OCELL2"])
            psf_image_size = Extent2I(header["PSFSIZE1"], header["PSFSIZE2"])

            grid_cell_size = Extent2I(header["GRCELL1"], header["GRCELL2"])  # Inner size of a single cell.
            grid_shape = Extent2I(header["GRSHAPE1"], header["GRSHAPE2"])
            grid_min = Point2I(header["GRMIN1"], header["GRMIN2"])
            grid_padding_extent = outer_cell_size - grid_cell_size
            # In hindsight, it would have been easier to store the padding as a
            # separate keyword in the header instead of OCELL1 and OCELL2.
            # This can be done when the file format is bumped to 1.0.
            if grid_padding_extent.x != grid_padding_extent.y:
                raise ValueError(
                    "Outer cell size is not padded equally in either directions. "
                    f"Got {outer_cell_size} and {grid_cell_size}."
                )
            if grid_padding_extent.x % 2 != 0:
                raise ValueError(
                    "Outer cell size is not padded by an even number of pixels. "
                    f"Got {outer_cell_size} and {grid_cell_size}."
                )
            grid_padding = grid_padding_extent.x // 2
            grid = UniformGrid(cell_size=grid_cell_size, shape=grid_shape, padding=grid_padding, min=grid_min)

            # This is the inner bounding box for the multiple cell coadd
            inner_bbox = Box2I(
                Point2I(header["INBBOX11"], header["INBBOX12"]),
                Point2I(header["INBBOX21"], header["INBBOX22"]),
            )

            # Attempt to get inputs for each cell.
            inputs = GridContainer[dict[ObservationIdentifiers, CoaddInputs]](shape=grid.shape)
            coadd_input = CoaddInputs(  # default version for written_version <= 0.5.
                overlaps_center=True,
                overlap_fraction=1.0,
                weight=0.0,
                psf_shape=afwGeom.Quadrupole(),
                psf_shape_flag=True,
            )
            visit_polygons = {}
            # Default for files written before 0.3, which carry no mask
            # definitions at all; the raw mask array is then used verbatim.
            mask_plane_dict = None
            if written_version >= version.parse("0.3"):
                if written_version >= version.parse("0.6"):
                    visit_hdu = hdu_list[hdu_list.index_of("VISIT_SUMMARY")].data
                else:
                    visit_hdu = hdu_list[hdu_list.index_of("VISIT")].data

                visit_dict = {
                    row["visit"]: VisitRecord(
                        visit=row["visit"],
                        physical_filter=(
                            row["physical_filter"]
                            if written_version >= version.parse("0.8")
                            else header["FILTER"]
                        ),
                        # day_obs was not persisted before 0.8; use a
                        # placeholder value of 0.
                        day_obs=row["day_obs"] if written_version >= version.parse("0.8") else 0,
                    )
                    for row in visit_hdu
                }
                link_table = hdu_list[hdu_list.index_of("CELL")].data

                if written_version >= version.parse("0.6"):
                    mask_definitions = hdu_list[hdu_list.index_of("MASKDEF")].data
                    mask_plane_dict = {}
                    for mask_name, mask_value in mask_definitions:
                        afwImage.Mask.addMaskPlane(mask_name)
                        mask_plane_dict[mask_name] = mask_value
                else:
                    logger.warning(
                        "Mask definitions can be incorrect since this was written with "
                        "FILE_FORMAT_VERSION %s < 0.6",
                        header["VERSION"],
                    )
                    mask_plane_dict = None

                if written_version >= version.parse("0.6"):
                    visit_summary_hdu = hdu_list[hdu_list.index_of("VISIT_SUMMARY")]
                    for row in visit_summary_hdu.data:
                        visit = int(row["visit"])
                        obs_id = ObservationIdentifiers(
                            instrument=header["INSTRUME"],
                            visit=visit,
                            detector=row["detector"],
                            day_obs=visit_dict[visit].day_obs,
                            physical_filter=visit_dict[visit].physical_filter,
                        )
                        if written_version >= version.parse("0.7"):
                            num_vertices = row["num_vertices"]
                        else:
                            num_vertices = None  # li[:None] returns the entire list.
                        visit_polygons[obs_id] = afwGeom.Polygon(
                            [Point2D(vertex) for vertex in row["polygon_vertices"][:num_vertices]]
                        )

                for link_row in link_table:
                    cell_id = Index2D(link_row["cell_x"], link_row["cell_y"])
                    visit = link_row["visit"]
                    obs_id = ObservationIdentifiers(
                        instrument=header["INSTRUME"],
                        visit=visit,
                        detector=link_row["detector"],
                        day_obs=visit_dict[visit].day_obs,
                        physical_filter=visit_dict[visit].physical_filter,
                    )
                    if written_version >= version.parse("0.6"):
                        coadd_input = CoaddInputs(
                            overlaps_center=link_row["overlaps_center"],
                            overlap_fraction=link_row["overlap_fraction"],
                            weight=link_row["weight"],
                            psf_shape=afwGeom.Quadrupole(
                                ixx=link_row["psf_shape_ixx"],
                                iyy=link_row["psf_shape_iyy"],
                                ixy=link_row["psf_shape_ixy"],
                            ),
                            psf_shape_flag=link_row["psf_shape_flag"],
                        )

                    if cell_id in inputs:
                        inputs[cell_id][obs_id] = coadd_input
                    else:
                        inputs[cell_id] = {obs_id: coadd_input}
            else:
                logger.info(
                    "Cell inputs are available for VERSION=0.3 or later. The file provided has "
                    "VERSION = %s",
                    written_version,
                )

            try:
                aperture_correction_hdu = hdu_list[hdu_list.index_of("APCORR")].data
                if len(aperture_correction_hdu) < len(data):
                    logger.warning("Aperture correction map is not available for all cells.")
                aperture_correction_grid = self._readApCorr(aperture_correction_hdu, grid_shape)
            except KeyError:
                # The APCORR extension is optional; only warn for versions
                # that are expected to carry it.
                if written_version >= version.parse("0.4"):
                    logger.warning("Unable to read aperture correction map from the file.")
                # Create an empty grid container regardless.
                aperture_correction_grid = GridContainer[SingleCellCoaddApCorrMap](shape=grid_shape)

            common = CommonComponents(
                units=CoaddUnits(coadd_units),
                wcs=wcs,
                band=header["FILTER"],
                identifiers=PatchIdentifiers(
                    skymap=header["SKYMAP"],
                    tract=header["TRACT"],
                    patch=Index2D(x=header["PATCH_X"], y=header["PATCH_Y"]),
                    band=header["FILTER"],
                ),
                visit_polygons=visit_polygons,
            )

            coadd = MultipleCellCoadd(
                (
                    self._readSingleCellCoadd(
                        data=data[row_id],
                        header=header,
                        common=common,
                        inputs=inputs.get(
                            Index2D(int(data[row_id]["cell_id"][0]), int(data[row_id]["cell_id"][1])),
                            {},
                        ),
                        outer_cell_size=outer_cell_size,
                        psf_image_size=psf_image_size,
                        inner_cell_size=grid_cell_size,
                        mask_plane_dict=mask_plane_dict,
                        aperture_correction_map=aperture_correction_grid.get(
                            Index2D(
                                data[row_id]["cell_id"][0].tolist(),
                                data[row_id]["cell_id"][1].tolist(),
                            ),
                            EMPTY_AP_CORR_MAP,
                        ),
                    )
                    for row_id in range(len(data))
                ),
                grid=grid,
                outer_cell_size=outer_cell_size,
                psf_image_size=psf_image_size,
                inner_bbox=inner_bbox,
                common=common,
            )

        return coadd

    @staticmethod
    def _readSingleCellCoadd(
        data: Mapping[str, Any],
        common: CommonComponents,
        header: Mapping[str, Any],
        *,
        inputs: Mapping[ObservationIdentifiers, CoaddInputs],
        outer_cell_size: Extent2I,
        inner_cell_size: Extent2I,
        psf_image_size: Extent2I,
        mask_plane_dict: Mapping[str, int] | None,
        aperture_correction_map: SingleCellCoaddApCorrMap = EMPTY_AP_CORR_MAP,
    ) -> SingleCellCoadd:
        """Read a coadd from a FITS file.

        Parameters
        ----------
        data : `Mapping`
            The data from the FITS file. Usually, a single row from the binary
            table representation.
        common : `CommonComponents`
            The common components of the coadd.
        header : `Mapping`
            The header of the FITS file as a dictionary.
        inputs : `Mapping` [`ObservationIdentifiers`, `CoaddInputs`]
            A mapping of ObservationIdentifiers that contributed to this cell
            to their corresponding CoaddInputs.
        outer_cell_size : `Extent2I`
            The size of the outer cell.
        psf_image_size : `Extent2I`
            The size of the PSF image.
        inner_cell_size : `Extent2I`
            The size of the inner cell.
        mask_plane_dict : `Mapping` [`str`, `int`] | None
            A mapping defining the mask planes to their bit value.
        aperture_correction_map : `Mapping` [`str`, `float`], optional
            Mapping of algorithm name to aperture correction value.
            If None, no aperture correction is applied.

        Returns
        -------
        coadd : `SingleCellCoadd`
            The coadd read from the file.
        """
        buffer = (outer_cell_size - inner_cell_size) // 2

        n_noise_realizations = header.get("NNOISE", 0)

        psf = ImageD(
            array=data["psf"].astype(np.float64),
            xy0=(-(psf_image_size // 2)).asPoint(),  # integer division and negation do not commute.
        )
        xy0 = Point2I(
            inner_cell_size.x * data["cell_id"][0] - buffer.x + header["GRMIN1"],
            inner_cell_size.y * data["cell_id"][1] - buffer.y + header["GRMIN2"],
        )

        # Parse the mask array carefully, accounting for a change in
        # the definitions.
        mask = afwImage.Mask(
            bbox=Box2I(corner=xy0, dimensions=Extent2I(outer_cell_size.x, outer_cell_size.y)),
            initialValue=0,
        )
        mask_array = data["mask"].astype(np.int32)
        if mask_plane_dict is not None:
            # Re-map each persisted bit to the bit currently registered for
            # that plane name, which may differ from the on-disk layout.
            for mask_name, bit_value in mask_plane_dict.items():
                mask.array[(mask_array & 2**bit_value) > 0] |= afwImage.Mask.getPlaneBitMask(mask_name)
        else:
            mask.array[:, :] = mask_array

        # The maskfrac column is optional; older files do not have it.
        try:
            maskfrac = data["maskfrac"]
            mask_fractions = ImageF(maskfrac.astype(np.float32), xy0=xy0)
        except KeyError:
            mask_fractions = None

        image_planes = OwnedImagePlanes(
            image=ImageF(
                data["image"].astype(np.float32),
                xy0=xy0,
            ),
            mask=mask,
            variance=ImageF(data["variance"].astype(np.float32), xy0=xy0),
            noise_realizations=[
                ImageF(data[f"noise_{n:02}"].astype(np.float32), xy0=xy0) for n in range(n_noise_realizations)
            ],
            mask_fractions=mask_fractions,
        )

        identifiers = CellIdentifiers(
            cell=Index2D(data["cell_id"][0], data["cell_id"][1]),
            skymap=common.identifiers.skymap,
            tract=common.identifiers.tract,
            patch=common.identifiers.patch,
            band=common.identifiers.band,
        )

        return SingleCellCoadd(
            outer=image_planes,
            psf=psf,
            inner_bbox=Box2I(
                corner=Point2I(
                    inner_cell_size.x * data["cell_id"][0] + header["GRMIN1"],
                    inner_cell_size.y * data["cell_id"][1] + header["GRMIN2"],
                ),
                dimensions=inner_cell_size,
            ),
            common=common,
            identifiers=identifiers,
            inputs=inputs,
            aperture_correction_map=aperture_correction_map,
        )

    def readWcs(self) -> afwGeom.SkyWcs:
        """Read the WCS information from the FITS file.

        Returns
        -------
        wcs : `~lsst.afw.geom.SkyWcs`
            The WCS information read from the FITS file.
        """
        # Read in WCS
        ps = PropertySet()
        with fits.open(self.filename) as hdu_list:
            ps.update(hdu_list[0].header)
        wcs = afwGeom.makeSkyWcs(ps)
        return wcs

    def _readApCorr(
        self, aperture_correction_hdu: fits.fitsrec.FITS_rec, grid_shape: Index2D
    ) -> GridContainer[SingleCellCoaddApCorrMap]:
        """Read the aperture correction map from the FITS file."""
        # Copy before removing the positional columns so we never mutate a
        # list that may be shared with the FITS record.
        column_names = list(aperture_correction_hdu.names)
        column_names.remove("x")
        column_names.remove("y")

        gc = GridContainer[SingleCellCoaddApCorrMap](shape=grid_shape)
        for row_idx in range(len(aperture_correction_hdu)):
            cell_id = Index2D(
                x=aperture_correction_hdu["x"][row_idx], y=aperture_correction_hdu["y"][row_idx]
            )
            ap_corr_map_dict = {name: float(aperture_correction_hdu[name][row_idx]) for name in column_names}
            gc[cell_id] = frozendict(ap_corr_map_dict)

        return gc
def to_numpy_record(ap_corr_map: Mapping[str, float], xy: Index2D, dtypes: DTypeLike) -> np.record:
    """Convert the aperture correction map to a numpy record with
    appropriate data types.

    Parameters
    ----------
    ap_corr_map : `Mapping` [`str`, `float`]
        Aperture correction values for each algorithm.
    xy : `~lsst.skymap.Index2D`
        An object identifying the cell.
    dtypes : `~numpy.typing.DTypeLike`
        An iterable of tuples describing the name and data type. It must
        contain ("x", int) and ("y", int) to accommodate ``xy``.

    Returns
    -------
    record : `np.record`
        Per-cell aperture correction map as a record. Fields listed in
        ``dtypes`` but absent from ``ap_corr_map`` are filled with NaN.
    """
    record = np.recarray((1,), dtype=dtypes)[0]
    # Iterate over the record's own fields (not the map's keys) so that any
    # algorithm missing from this cell's map is explicitly set to NaN rather
    # than being left as uninitialized memory.
    for field_name in record.dtype.names:
        if field_name in ("x", "y"):
            continue
        record[field_name] = ap_corr_map.get(field_name, np.nan)

    record["x"] = xy.x
    record["y"] = xy.y

    return record
def writeMultipleCellCoaddAsFits(
    multiple_cell_coadd: MultipleCellCoadd,
    filename: str,
    overwrite: bool = False,
    metadata: PropertySet | None = None,
) -> fits.HDUList:
    """Write a MultipleCellCoadd object to a FITS file.

    Parameters
    ----------
    multiple_cell_coadd : `MultipleCellCoadd`
        The multiple cell coadd to write to a FITS file.
    filename : `str`
        The name of the file to write to.
    overwrite : `bool`, optional
        Whether to overwrite the file if it already exists?
    metadata : `~lsst.daf.base.PropertySet`, optional
        Additional metadata to write to the FITS file.

    Returns
    -------
    hdu_list : `~astropy.io.fits.HDUList`
        The FITS file as an HDUList.

    Raises
    ------
    ValueError
        Raised if the cells do not all come from a single instrument.

    Notes
    -----
    Changes to this function that modify the way the file is written to disk
    must be accompanied with a change to FILE_FORMAT_VERSION.
    """
    # Create metadata tables:
    # 1. Visit table containing information about the visits.
    # 2. Cell table containing info about the visit+detector for each cell.
    visit_records: list[Any] = []
    cell_records: list[Any] = []
    instrument_set = set()
    for cell_id, single_cell_coadd in multiple_cell_coadd.cells.items():
        for observation_id, coadd_input in single_cell_coadd.inputs.items():
            visit_records.append(
                (observation_id.visit, observation_id.physical_filter, observation_id.day_obs)
            )
            cell_records.append(
                (
                    cell_id.x,
                    cell_id.y,
                    observation_id.visit,
                    observation_id.detector,
                    coadd_input.overlaps_center,
                    coadd_input.overlap_fraction,
                    coadd_input.weight,
                    coadd_input.psf_shape.getIxx(),
                    coadd_input.psf_shape.getIyy(),
                    coadd_input.psf_shape.getIxy(),
                    coadd_input.psf_shape_flag,
                )
            )
            instrument_set.add(observation_id.instrument)

    # An explicit raise (instead of an assert, which is stripped under -O)
    # so the invariant is always enforced.
    if len(instrument_set) != 1:
        raise ValueError(f"All cells must have the same instrument; got {instrument_set}.")
    instrument = instrument_set.pop()

    # Create visit_summary equivalent table
    visit_column = fits.Column(
        name="visit",
        format="K",
        array=[obs_id.visit for obs_id in multiple_cell_coadd.common.visit_polygons],
    )
    detector_column = fits.Column(
        name="detector",
        format="I",
        array=[obs_id.detector for obs_id in multiple_cell_coadd.common.visit_polygons],
    )
    physical_filter_column = fits.Column(
        name="physical_filter",
        format="32A",
        array=[obs_id.physical_filter for obs_id in multiple_cell_coadd.common.visit_polygons],
    )
    day_obs_column = fits.Column(
        name="day_obs",
        format="K",
        array=[obs_id.day_obs for obs_id in multiple_cell_coadd.common.visit_polygons],
    )
    number_of_vertices = []
    polygon_vertices_array = []
    for obs_id, poly in multiple_cell_coadd.common.visit_polygons.items():
        # Polygons are closed, so the first and last vertices are the same.
        if (num_vertices := len(poly.getVertices())) > MAX_POLYGON_VERTICES + 1:
            logger.warning(
                "Visit %d, detector %d has a polygon with %d vertices. "
                "This geometry should be impossible for two intersecting "
                "convex quadrilaterals. Only the first %d will be stored in "
                "the FITS file.",
                obs_id.visit,
                obs_id.detector,
                num_vertices,
                MAX_POLYGON_VERTICES,
            )
        number_of_vertices.append(num_vertices)
        # Pad to the fixed column width by wrapping the vertex list around;
        # the reader truncates back to num_vertices.
        vertices = poly.getVertices() + poly.getVertices()
        vertices = vertices[:MAX_POLYGON_VERTICES]
        polygon_vertices_array.append(np.array(vertices))
    polygon_column = fits.Column(
        name="polygon_vertices",
        format=f"{2 * MAX_POLYGON_VERTICES}E",
        dim=f"(2,{MAX_POLYGON_VERTICES})",
        array=polygon_vertices_array,
    )
    number_of_vertices_column = fits.Column(
        name="num_vertices",
        format="I",
        array=number_of_vertices,
    )
    visit_summary_hdu = fits.BinTableHDU.from_columns(
        [
            visit_column,
            detector_column,
            physical_filter_column,
            day_obs_column,
            number_of_vertices_column,
            polygon_column,
        ],
        name="VISIT_SUMMARY",
    )
    visit_summary_hdu.header["POLYVERT"] = MAX_POLYGON_VERTICES

    visit_recarray = np.rec.fromrecords(
        recList=sorted(set(visit_records), key=lambda x: x[0]),  # Sort by visit.
        formats=None,  # formats was specified to please mypy. See numpy#26376.
        names=(
            "visit",
            "physical_filter",
            "day_obs",
        ),
    )
    cell_recarray = np.rec.fromrecords(
        recList=cell_records,
        formats=None,  # formats was specified to please mypy. See numpy#26376.
        names=(
            "cell_x",
            "cell_y",
            "visit",
            "detector",
            "overlaps_center",
            "overlap_fraction",
            "weight",
            "psf_shape_ixx",
            "psf_shape_iyy",
            "psf_shape_ixy",
            "psf_shape_flag",
        ),
    )

    visit_hdu = fits.BinTableHDU.from_columns(visit_recarray, name="VISIT")
    cell_hdu = fits.BinTableHDU.from_columns(cell_recarray, name="CELL")

    cell_id = fits.Column(
        name="cell_id",
        format="2I",
        array=[cell.identifiers.cell for cell in multiple_cell_coadd.cells.values()],
    )

    image_array = [cell.outer.image.array for cell in multiple_cell_coadd.cells.values()]
    unit_array = [cell.common.units.name for cell in multiple_cell_coadd.cells.values()]
    image = fits.Column(
        name="image",
        unit=unit_array[0],
        format=f"{image_array[0].size}E",
        dim=f"({image_array[0].shape[1]}, {image_array[0].shape[0]})",
        array=image_array,
    )

    mask_array = [cell.outer.mask.array for cell in multiple_cell_coadd.cells.values()]
    mask = fits.Column(
        name="mask",
        format=f"{mask_array[0].size}J",
        dim=f"({mask_array[0].shape[1]}, {mask_array[0].shape[0]})",
        array=mask_array,
    )
    # Add in mask bit definitions as its own table.
    mask_definition_table = Table(names=("MASK_NAME", "BIT_VALUE"), dtype=(str, np.uint32))
    for name, value in multiple_cell_coadd.cells.arbitrary.outer.mask.getMaskPlaneDict().items():
        mask_definition_table.add_row((name, value))
    mask_definition_hdu = fits.BinTableHDU.from_columns(mask_definition_table.as_array(), name="MASKDEF")

    variance_array = [cell.outer.variance.array for cell in multiple_cell_coadd.cells.values()]
    variance = fits.Column(
        name="variance",
        format=f"{variance_array[0].size}E",
        dim=f"({variance_array[0].shape[1]}, {variance_array[0].shape[0]})",
        array=variance_array,
    )

    psf_array = [cell.psf_image.array for cell in multiple_cell_coadd.cells.values()]
    psf = fits.Column(
        name="psf",
        format=f"{psf_array[0].size}D",
        dim=f"({psf_array[0].shape[1]}, {psf_array[0].shape[0]})",
        # Reuse the list built above instead of traversing the cells again.
        array=psf_array,
    )

    columns = [cell_id, image, mask, variance, psf]
    maskfrac_array = [
        cell.outer.mask_fractions.array
        for cell in multiple_cell_coadd.cells.values()
        if cell.outer.mask_fractions
    ]
    if maskfrac_array:
        maskfrac = fits.Column(
            name="maskfrac",
            format=f"{maskfrac_array[0].size}E",
            dim=f"({maskfrac_array[0].shape[1]}, {maskfrac_array[0].shape[0]})",
            array=maskfrac_array,
        )
        columns.append(maskfrac)

    n_noise_realizations = multiple_cell_coadd.n_noise_realizations
    for n in range(n_noise_realizations):
        noise_array = [cell.outer.noise_realizations[n].array for cell in multiple_cell_coadd.cells.values()]
        columns.append(
            fits.Column(
                name=f"noise_{n:02}",
                format=f"{noise_array[0].size}E",
                dim=f"({noise_array[0].shape[1]}, {noise_array[0].shape[0]})",
                array=noise_array,
            )
        )

    if multiple_cell_coadd.cells.arbitrary.aperture_correction_map:
        # Collect the union of algorithm names across all cells so every
        # record has the same fields; missing values become NaN.
        apcorr_fields: set[str] = set()
        for cell in multiple_cell_coadd.cells.values():
            if cell.aperture_correction_map:
                apcorr_fields.update(cell.aperture_correction_map)
        dtypes = [("x", int), ("y", int)] + [(key, float) for key in apcorr_fields]
        aperture_correction_recarray = np.rec.fromrecords(
            recList=[
                to_numpy_record(cell.aperture_correction_map, cell.identifiers.cell, dtypes)
                for cell in multiple_cell_coadd.cells.values()
                if cell.aperture_correction_map
            ],
            dtype=dtypes,
        )
        aperture_correction_hdu = fits.BinTableHDU.from_columns(aperture_correction_recarray, name="APCORR")
    else:
        aperture_correction_hdu = None

    col_defs = fits.ColDefs(columns)
    hdu = fits.BinTableHDU.from_columns(col_defs)

    grid_cell_size = multiple_cell_coadd.grid.cell_size
    grid_shape = multiple_cell_coadd.grid.shape
    grid_min = multiple_cell_coadd.grid.bbox.getMin()
    grid_cards = {
        "GRCELL1": grid_cell_size.x,
        "GRCELL2": grid_cell_size.y,
        "GRSHAPE1": grid_shape.x,
        "GRSHAPE2": grid_shape.y,
        "GRMIN1": grid_min.x,
        "GRMIN2": grid_min.y,
    }
    hdu.header.extend(grid_cards)

    outer_cell_size_cards = {
        "OCELL1": multiple_cell_coadd.outer_cell_size.x,
        "OCELL2": multiple_cell_coadd.outer_cell_size.y,
    }
    hdu.header.extend(outer_cell_size_cards)

    psf_image_size_cards = {
        "PSFSIZE1": multiple_cell_coadd.psf_image_size.x,
        "PSFSIZE2": multiple_cell_coadd.psf_image_size.y,
    }
    hdu.header.extend(psf_image_size_cards)

    inner_bbox_cards = {
        "INBBOX11": multiple_cell_coadd.inner_bbox.minX,
        "INBBOX12": multiple_cell_coadd.inner_bbox.minY,
        "INBBOX21": multiple_cell_coadd.inner_bbox.maxX,
        "INBBOX22": multiple_cell_coadd.inner_bbox.maxY,
    }
    hdu.header.extend(inner_bbox_cards)

    wcs = multiple_cell_coadd.common.wcs
    wcs_cards = wcs.getFitsMetadata().toDict()
    primary_hdu = fits.PrimaryHDU()
    primary_hdu.header.extend(wcs_cards)

    hdu.header["VERSION"] = FILE_FORMAT_VERSION
    hdu.header["TUNIT2"] = multiple_cell_coadd.common.units.name
    hdu.header["TUNIT4"] = multiple_cell_coadd.common.units.name + "**2"
    # This is assumed to be the same as multiple_cell_coadd.common.identifiers.band
    # See DM-38843.
    hdu.header["INSTRUME"] = instrument
    hdu.header["FILTER"] = multiple_cell_coadd.common.band
    hdu.header["SKYMAP"] = multiple_cell_coadd.common.identifiers.skymap
    hdu.header["TRACT"] = multiple_cell_coadd.common.identifiers.tract
    hdu.header["PATCH_X"] = multiple_cell_coadd.common.identifiers.patch.x
    hdu.header["PATCH_Y"] = multiple_cell_coadd.common.identifiers.patch.y
    hdu.header["NNOISE"] = n_noise_realizations

    if metadata is not None:
        hdu.header.extend(metadata.toDict())

    hdu_list = fits.HDUList([primary_hdu, hdu, cell_hdu, visit_hdu, mask_definition_hdu, visit_summary_hdu])
    if aperture_correction_hdu:
        hdu_list.append(aperture_correction_hdu)

    hdu_list.writeto(filename, overwrite=overwrite, checksum=True)

    return hdu_list