Coverage for python/lsst/drp/tasks/update_visit_summary.py: 22%
235 statements

# This file is part of drp_tasks.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = (
    "UpdateVisitSummaryConnections",
    "UpdateVisitSummaryConfig",
    "UpdateVisitSummaryTask",
    "PossiblyMultipleInput",
    "PerTractInput",
    "GlobalInput",
)

import dataclasses
from abc import ABC, abstractmethod
from collections.abc import Iterable, Mapping
from typing import Any

import astropy.table
import lsst.pipe.base.connectionTypes as cT
from lsst.afw.geom import SkyWcs
from lsst.afw.image import ExposureSummaryStats
from lsst.afw.math import BackgroundList
from lsst.afw.table import ExposureCatalog, ExposureRecord, SchemaMapper
from lsst.daf.butler import Butler, DatasetRef, DeferredDatasetHandle
from lsst.daf.butler.formatters.parquet import pandas_to_astropy
from lsst.geom import Angle, Box2I, SpherePoint, degrees
from lsst.pex.config import ChoiceField, ConfigurableField
from lsst.pipe.base import (
    InputQuantizedConnection,
    InvalidQuantumError,
    OutputQuantizedConnection,
    PipelineTask,
    PipelineTaskConfig,
    PipelineTaskConnections,
    QuantumContext,
    Struct,
)
from lsst.pipe.tasks.computeExposureSummaryStats import ComputeExposureSummaryStatsTask
from lsst.skymap import BaseSkyMap, TractInfo
from lsst.skymap.detail import makeSkyPolygonFromBBox


def compute_center_for_detector_record(
    record: ExposureRecord, bbox: Box2I | None = None, wcs: SkyWcs | None = None
) -> SpherePoint | None:
    """Compute the sky coordinate center for a detector to be used when
    testing distance to tract center.

    Parameters
    ----------
    record : `lsst.afw.table.ExposureRecord`
        Exposure record to obtain WCS and bbox from if not provided.
    bbox : `lsst.geom.Box2I`, optional
        Bounding box for the detector in its own pixel coordinates.
    wcs : `lsst.afw.geom.SkyWcs`, optional
        WCS that maps the detector's pixel coordinate system to celestial
        coordinates.

    Returns
    -------
    center : `lsst.geom.SpherePoint` or `None`
        Center of the detector in sky coordinates, or `None` if no WCS was
        given or present in the given record.
    """
    if bbox is None:
        bbox = record.getBBox()
    if wcs is None:
        wcs = record.getWcs()
    if wcs is None:
        return None
    region = makeSkyPolygonFromBBox(bbox, wcs)
    return SpherePoint(region.getCentroid())
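
# A minimal usage sketch of the helper above (``record`` and ``tract_info``
# are assumed to be an ExposureRecord carrying a valid SkyWcs and a
# lsst.skymap.TractInfo, respectively; both names are illustrative):
#
#   center = compute_center_for_detector_record(record)
#   if center is not None:
#       distance = tract_info.ctr_coord.separation(center)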


class PossiblyMultipleInput(ABC):
    """A helper ABC for handling input `~lsst.afw.table.ExposureCatalog`
    datasets that may be multiple (one per tract/visit combination) or
    unique/global (one per visit).
    """

    @abstractmethod
    def best_for_detector(
        self,
        detector_id: int,
        center: SpherePoint | None = None,
        bbox: Box2I | None = None,
    ) -> tuple[int, ExposureRecord | None]:
        """Return the exposure record that is the best match for this
        detector.

        Parameters
        ----------
        detector_id : `int`
            Detector ID; used to find the right row in the catalog or catalogs.
        center : `lsst.geom.SpherePoint` or `None`
            Center of the detector in sky coordinates. If not provided, one
            will be computed via `compute_center_for_detector_record`.
        bbox : `lsst.geom.Box2I`, optional
            Bounding box for the detector in its own pixel coordinates.

        Returns
        -------
        tract_id : `int`
            ID of the tract that supplied this record, or `-1` if ``record`` is
            `None` or if the input was not per-tract.
        record : `lsst.afw.table.ExposureRecord` or `None`
            Best record for this detector, or `None` if there either were no
            records for this detector or no WCS available to compute a center.
        """
        raise NotImplementedError()


@dataclasses.dataclass
class PerTractInput(PossiblyMultipleInput):
    """Wrapper class for input `~lsst.afw.table.ExposureCatalog` datasets
    that are per-tract.

    This selects the best tract via the minimum average distance (on the sky)
    from the detector's corners to the tract center.
    """

    catalogs_by_tract: list[tuple[TractInfo, ExposureCatalog]]
    """List of tuples of tract information and the catalogs they correspond to
    (`list` [`tuple` [`lsst.skymap.TractInfo`,
    `lsst.afw.table.ExposureCatalog`]]).
    """

    @classmethod
    def load(
        cls,
        butler: QuantumContext | Butler,
        sky_map: BaseSkyMap,
        refs: Iterable[DatasetRef],
    ) -> PerTractInput:
        """Load and wrap input catalogs.

        Parameters
        ----------
        butler : `lsst.pipe.base.QuantumContext` or `lsst.daf.butler.Butler`
            Butler proxy used in `~lsst.pipe.base.PipelineTask.runQuantum`,
            or a full butler.
        sky_map : `lsst.skymap.BaseSkyMap`
            Definition of tracts and patches.
        refs : `~collections.abc.Iterable` [`lsst.daf.butler.DatasetRef`]
            References to the catalog datasets to load.

        Returns
        -------
        wrapper : `PerTractInput`
            Wrapper object for the loaded catalogs.
        """
        catalogs_by_tract = []
        for ref in refs:
            tract_id = ref.dataId["tract"]
            tract_info = sky_map[tract_id]
            catalogs_by_tract.append(
                (
                    tract_info,
                    butler.get(ref),
                )
            )
        return cls(catalogs_by_tract)

    def best_for_detector(
        self,
        detector_id: int,
        center: SpherePoint | None = None,
        bbox: Box2I | None = None,
    ) -> tuple[int, ExposureRecord | None]:
        # Docstring inherited.
        best_result: tuple[int, ExposureRecord | None] = (-1, None)
        best_distance: Angle = float("inf") * degrees
        for tract_info, catalog in self.catalogs_by_tract:
            record = catalog.find(detector_id)
            if record is None:
                continue
            if center is None:
                center_for_record = compute_center_for_detector_record(record, bbox=bbox)
                if center_for_record is None:
                    continue
            else:
                center_for_record = center
            center_distance = tract_info.ctr_coord.separation(center_for_record)
            if best_distance > center_distance:
                best_result = (tract_info.tract_id, record)
                best_distance = center_distance
        return best_result


@dataclasses.dataclass
class GlobalInput(PossiblyMultipleInput):
    """Wrapper class for input `~lsst.afw.table.ExposureCatalog` datasets
    that are not per-tract.
    """

    catalog: ExposureCatalog
    """Loaded per-visit catalog dataset (`lsst.afw.table.ExposureCatalog`).
    """

    def best_for_detector(
        self,
        detector_id: int,
        center: SpherePoint | None = None,
        bbox: Box2I | None = None,
    ) -> tuple[int, ExposureRecord | None]:
        # Docstring inherited.
        return -1, self.catalog.find(detector_id)
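
# A minimal usage sketch for the wrappers above (dataset name and dataId
# values are illustrative; "fgcmPhotoCalibCatalog" is just the default
# template expansion used by the connections below):
#
#   catalog = butler.get("fgcmPhotoCalibCatalog", instrument="HSC", visit=1228)
#   overrides = GlobalInput(catalog)
#   tract_id, record = overrides.best_for_detector(detector_id=42)
#   # tract_id is always -1 for GlobalInput; record is None if the detector
#   # has no row in the catalog.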


class UpdateVisitSummaryConnections(
    PipelineTaskConnections,
    dimensions=("instrument", "visit"),
    defaultTemplates={
        "skyWcsName": "gbdesAstrometricFit",
        "photoCalibName": "fgcm",
    },
):
    sky_map = cT.Input(
        doc="Description of tract/patch geometry.",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        dimensions=("skymap",),
        storageClass="SkyMap",
    )
    input_summary_schema = cT.InitInput(
        doc="Schema for input_summary_catalog.",
        name="visitSummary_schema",
        storageClass="ExposureCatalog",
    )
    input_summary_catalog = cT.Input(
        doc="Visit summary table to load and modify.",
        name="visitSummary",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
    )
    input_exposures = cT.Input(
        doc=(
            "Per-detector images to obtain image, mask, and variance from "
            "(embedded summary stats and other components are ignored)."
        ),
        name="calexp",
        dimensions=("instrument", "detector", "visit"),
        storageClass="ExposureF",
        multiple=True,
        deferLoad=True,
        deferGraphConstraint=True,
    )
    psf_overrides = cT.Input(
        doc="Visit-level catalog of updated PSFs to use.",
        name="finalized_psf_ap_corr_catalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
        deferGraphConstraint=True,
    )
    psf_star_catalog = cT.Input(
        doc="Per-visit table of PSF reserved- and used-star measurements.",
        name="finalized_src_table",
        dimensions=("instrument", "visit"),
        storageClass="DataFrame",
        deferGraphConstraint=True,
    )
    ap_corr_overrides = cT.Input(
        doc="Visit-level catalog of updated aperture correction maps to use.",
        name="finalized_psf_ap_corr_catalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
        deferGraphConstraint=True,
    )
    photo_calib_overrides_tract = cT.Input(
        doc="Per-tract visit-level catalog of updated photometric calibration objects to use.",
        name="{photoCalibName}PhotoCalibCatalog",
        dimensions=("instrument", "visit", "tract"),
        storageClass="ExposureCatalog",
        multiple=True,
        deferGraphConstraint=True,
    )
    photo_calib_overrides_global = cT.Input(
        doc="Global visit-level catalog of updated photometric calibration objects to use.",
        name="{photoCalibName}PhotoCalibCatalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
        deferGraphConstraint=True,
    )
    wcs_overrides_tract = cT.Input(
        doc="Per-tract visit-level catalog of updated astrometric calibration objects to use.",
        name="{skyWcsName}SkyWcsCatalog",
        dimensions=("instrument", "visit", "tract"),
        storageClass="ExposureCatalog",
        multiple=True,
        deferGraphConstraint=True,
    )
    wcs_overrides_global = cT.Input(
        doc="Global visit-level catalog of updated astrometric calibration objects to use.",
        name="{skyWcsName}SkyWcsCatalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
        deferGraphConstraint=True,
    )
    background_originals = cT.Input(
        doc="Per-detector original background that has already been subtracted from 'input_exposures'.",
        name="calexpBackground",
        dimensions=("instrument", "visit", "detector"),
        storageClass="Background",
        multiple=True,
        deferLoad=True,
        deferGraphConstraint=True,
    )
    background_overrides = cT.Input(
        doc="Per-detector background that can be subtracted directly from 'input_exposures'.",
        name="skyCorr",
        dimensions=("instrument", "visit", "detector"),
        storageClass="Background",
        multiple=True,
        deferLoad=True,
        deferGraphConstraint=True,
    )
    output_summary_schema = cT.InitOutput(
        doc="Schema of the output visit summary catalog.",
        name="finalVisitSummary_schema",
        storageClass="ExposureCatalog",
    )
    output_summary_catalog = cT.Output(
        doc="Visit-level catalog summarizing all image characterizations and calibrations.",
        name="finalVisitSummary",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
    )

    def __init__(self, *, config: UpdateVisitSummaryConfig | None = None):
        super().__init__(config=config)
        match self.config.wcs_provider:
            case "input_summary":
                self.inputs.remove("wcs_overrides_tract")
                self.inputs.remove("wcs_overrides_global")
            case "tract":
                self.inputs.remove("wcs_overrides_global")
            case "global":
                self.inputs.remove("wcs_overrides_tract")
            case bad:
                raise ValueError(f"Invalid value wcs_provider={bad!r}; config was not validated.")
        match self.config.photo_calib_provider:
            case "input_summary":
                self.inputs.remove("photo_calib_overrides_tract")
                self.inputs.remove("photo_calib_overrides_global")
            case "tract":
                self.inputs.remove("photo_calib_overrides_global")
            case "global":
                self.inputs.remove("photo_calib_overrides_tract")
            case bad:
                raise ValueError(f"Invalid value photo_calib_provider={bad!r}; config was not validated.")
        match self.config.background_provider:
            case "input_summary":
                self.inputs.remove("background_originals")
                self.inputs.remove("background_overrides")
            case "replacement":
                pass
            case bad:
                raise ValueError(f"Invalid value background_provider={bad!r}; config was not validated.")


class UpdateVisitSummaryConfig(PipelineTaskConfig, pipelineConnections=UpdateVisitSummaryConnections):
    """Configuration for UpdateVisitSummaryTask.

    Notes
    -----
    The configuration defaults for this task reflect a simple or "least common
    denominator" pipeline, not the more complete, more sophisticated pipeline
    we run on the instruments we support best. The expectation is that the
    full pipeline definitions will import this simpler definition and override
    the defaults there; making the defaults correspond to any full pipeline
    would just force the simple pipeline to set them back to simple-pipeline
    values while the full pipelines still override them anyway.
    """

    compute_summary_stats = ConfigurableField(
        doc="Subtask that computes summary statistics from Exposure components.",
        target=ComputeExposureSummaryStatsTask,
    )
    wcs_provider = ChoiceField(
        doc="Which connection and behavior to use when applying WCS overrides.",
        dtype=str,
        allowed={
            "input_summary": (
                "Propagate the WCS from the input visit summary catalog "
                "and do not recompute WCS-based summary statistics."
            ),
            "tract": (
                "Use the 'wcs_overrides_tract' connection to load an "
                "`ExposureCatalog` with {visit, tract} dimensions and per-"
                "detector rows, and recompute WCS-based summary statistics."
            ),
            "global": (
                "Use the 'wcs_overrides_global' connection to load an "
                "`ExposureCatalog` with {visit} dimensions and per-"
                "detector rows, and recompute WCS-based summary statistics."
            ),
            # If needed, we could add options here to propagate the WCS from
            # the input exposures and/or transfer WCS-based summary statistics
            # from them as well. Right now there's no use case for that, since
            # the input visit summary is always produced after the last time we
            # write a new Exposure.
        },
        default="input_summary",
        optional=False,
    )
    photo_calib_provider = ChoiceField(
        doc="Which connection and behavior to use when applying photometric calibration overrides.",
        dtype=str,
        allowed={
            "input_summary": (
                "Propagate the PhotoCalib from the input visit summary catalog "
                "and do not recompute photometric calibration summary "
                "statistics."
            ),
            "tract": (
                "Use the 'photo_calib_overrides_tract' connection to load an "
                "`ExposureCatalog` with {visit, tract} dimensions and per-"
                "detector rows, and recompute photometric calibration summary "
                "statistics."
            ),
            "global": (
                "Use the 'photo_calib_overrides_global' connection to load an "
                "`ExposureCatalog` with {visit} dimensions and per-"
                "detector rows, and recompute photometric calibration summary "
                "statistics."
            ),
            # If needed, we could add options here to propagate the PhotoCalib
            # from the input exposures and/or transfer photometric calibration
            # summary statistics from them as well. Right now there's no use
            # case for that, since the input visit summary is always produced
            # after the last time we write a new Exposure.
        },
        default="input_summary",
        optional=False,
    )
    background_provider = ChoiceField(
        doc="Which connection(s) and behavior to use when applying background overrides.",
        dtype=str,
        allowed={
            "input_summary": (
                "The input visit summary catalog already includes summary "
                "statistics for the final backgrounds that can be used as-is."
            ),
            "replacement": (
                "The 'background_originals' connection refers to a background "
                "model that has been superseded by the model referred to by "
                "the 'background_overrides' connection."
            ),
            # Could also imagine an option in which there is no original
            # background and the new one stands alone; can add later if needed.
        },
        default="input_summary",
        optional=False,
    )
    # Could imagine an option here to say that the original background has not
    # been subtracted from the input exposures, allowing postISRCCD to be used
    # as input exposures. Can add later if needed.
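
# A minimal sketch of how a fuller pipeline definition might override the
# defaults above from its config block (the particular combination of
# providers is illustrative, not a recommendation):
#
#   config.wcs_provider = "tract"
#   config.photo_calib_provider = "global"
#   config.background_provider = "replacement"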


class UpdateVisitSummaryTask(PipelineTask):
    """A pipeline task that creates a new visit-summary table after all
    `lsst.afw.image.Exposure` components have been finalized.

    Notes
    -----
    This task is designed to be run just prior to making warps for coaddition,
    as it aggregates all inputs other than the images and backgrounds into a
    single ``ExposureCatalog`` dataset and recomputes summary statistics that
    are useful in selecting which images should go into a coadd. Its output
    can also be used to reconstruct a final processed visit image when
    combined with a post-ISR image, the background model, and the final mask.
    """

    # The `run` method of this task can conditionally apply overrides for PSFs
    # and aperture corrections, but its `PipelineTask` interface always applies
    # them. We can always add the config options to make them optional later,
    # if that turns out to be useful.

    _DefaultName = "updateVisitSummary"
    ConfigClass = UpdateVisitSummaryConfig

    compute_summary_stats: ComputeExposureSummaryStatsTask

    def __init__(self, *, initInputs: dict[str, Any] | None = None, **kwargs: Any):
        super().__init__(initInputs=initInputs, **kwargs)
        self.makeSubtask("compute_summary_stats")
        if initInputs is None or "input_summary_schema" not in initInputs:
            raise RuntimeError("Task requires 'input_summary_schema' in initInputs.")
        input_summary_schema = initInputs["input_summary_schema"].schema
        self.schema_mapper = SchemaMapper(input_summary_schema)
        self.schema_mapper.addMinimalSchema(input_summary_schema)
        self.schema = self.schema_mapper.getOutputSchema()
        if self.config.wcs_provider == "tract":
            self.schema.addField("wcsTractId", type="L", doc="ID of the tract that provided the WCS.")
        if self.config.photo_calib_provider == "tract":
            self.schema.addField(
                "photoCalibTractId",
                type="L",
                doc="ID of the tract that provided the PhotoCalib.",
            )
        self.output_summary_schema = ExposureCatalog(self.schema)

    def runQuantum(
        self,
        butlerQC: QuantumContext,
        inputRefs: InputQuantizedConnection,
        outputRefs: OutputQuantizedConnection,
    ) -> None:
        # Docstring inherited.
        sky_map = butlerQC.get(inputRefs.sky_map)
        del inputRefs.sky_map
        inputs = {}
        # Collapse the wcs_overrides_* and photo_calib_overrides_* connection
        # pairs into individual inputs (PerTractInput or GlobalInput objects,
        # or None).
        match self.config.wcs_provider:
            case "tract":
                inputs["wcs_overrides"] = PerTractInput.load(butlerQC, sky_map, inputRefs.wcs_overrides_tract)
                del inputRefs.wcs_overrides_tract
            case "global":
                inputs["wcs_overrides"] = GlobalInput(butlerQC.get(inputRefs.wcs_overrides_global))
                del inputRefs.wcs_overrides_global
            case "input_summary":
                inputs["wcs_overrides"] = None
        match self.config.photo_calib_provider:
            case "tract":
                inputs["photo_calib_overrides"] = PerTractInput.load(
                    butlerQC, sky_map, inputRefs.photo_calib_overrides_tract
                )
                del inputRefs.photo_calib_overrides_tract
            case "global":
                inputs["photo_calib_overrides"] = GlobalInput(
                    butlerQC.get(inputRefs.photo_calib_overrides_global)
                )
                del inputRefs.photo_calib_overrides_global
            case "input_summary":
                inputs["photo_calib_overrides"] = None
        # Load or make DeferredDatasetHandles for everything else.
        inputs.update(butlerQC.get(inputRefs))
        deferred_dataset_types = ["input_exposures"]
        # Handle whether to look for background originals and overrides at all.
        match self.config.background_provider:
            case "replacement":
                deferred_dataset_types.append("background_originals")
                deferred_dataset_types.append("background_overrides")
        # Transform the lists of DeferredDatasetHandles for the multiple=True,
        # deferLoad=True connections into mappings keyed by detector ID.
        for name in deferred_dataset_types:
            handles_list = inputs[name]
            inputs[name] = {handle.dataId["detector"]: handle for handle in handles_list}
            for record in inputs["input_summary_catalog"]:
                detector_id = record.getId()
                if detector_id not in inputs[name]:
                    raise InvalidQuantumError(
                        f"No {name!r} with detector {detector_id} for visit "
                        f"{butlerQC.quantum.dataId['visit']} even though this detector is present "
                        "in the input visit summary catalog. "
                        "This is most likely to occur when the QuantumGraph that includes this task "
                        "was incorrectly generated with an explicit or implicit (from datasets) tract "
                        "constraint."
                    )
        # Convert the psf_star_catalog datasets from DataFrame to Astropy so
        # they can be handled by ComputeExposureSummaryStatsTask (which was
        # actually written to work with afw.table, but Astropy is similar
        # enough that it works, too). Ideally this would be handled by just
        # using ArrowAstropy as the storage class in the connection, but QG
        # generation apparently doesn't fully support those yet, as it leads
        # to problems in ci_hsc.
        inputs["psf_star_catalog"] = pandas_to_astropy(inputs["psf_star_catalog"])
        # Actually run the task and write the results.
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(
        self,
        input_summary_catalog: ExposureCatalog,
        input_exposures: Mapping[int, DeferredDatasetHandle],
        psf_overrides: ExposureCatalog | None = None,
        psf_star_catalog: astropy.table.Table | None = None,
        ap_corr_overrides: ExposureCatalog | None = None,
        photo_calib_overrides: PossiblyMultipleInput | None = None,
        wcs_overrides: PossiblyMultipleInput | None = None,
        background_originals: Mapping[int, DeferredDatasetHandle] | None = None,
        background_overrides: Mapping[int, DeferredDatasetHandle] | None = None,
    ):
        """Build an updated version of a visit summary catalog.

        Parameters
        ----------
        input_summary_catalog : `lsst.afw.table.ExposureCatalog`
            Input catalog. Each row in this catalog will be used to produce
            a row in the output catalog. Any override parameter that is `None`
            will leave the corresponding values unchanged from those in this
            input catalog.
        input_exposures : `collections.abc.Mapping` [`int`, \
                `lsst.daf.butler.DeferredDatasetHandle`]
            Deferred-load objects that fetch `lsst.afw.image.Exposure`
            instances. Only the image, mask, and variance are used; all other
            components are assumed to be superseded by at least
            ``input_summary_catalog`` and probably some ``_overrides``
            arguments as well. This usually corresponds to the ``calexp``
            dataset.
        psf_overrides : `lsst.afw.table.ExposureCatalog`, optional
            Catalog with attached `lsst.afw.detection.Psf` objects that
            supersede the input catalog's PSFs.
        psf_star_catalog : `astropy.table.Table`, optional
            Table containing PSF stars for use in computing PSF summary
            statistics. Must be provided if ``psf_overrides`` is.
        ap_corr_overrides : `lsst.afw.table.ExposureCatalog`, optional
            Catalog with attached `lsst.afw.image.ApCorrMap` objects that
            supersede the input catalog's aperture corrections.
        photo_calib_overrides : `PossiblyMultipleInput`, optional
            Catalog wrappers with attached `lsst.afw.image.PhotoCalib`
            objects that supersede the input catalog's photometric
            calibrations.
        wcs_overrides : `PossiblyMultipleInput`, optional
            Catalog wrappers with attached `lsst.afw.geom.SkyWcs` objects
            that supersede the input catalog's astrometric calibrations.
        background_originals : `collections.abc.Mapping` [`int`, \
                `lsst.daf.butler.DeferredDatasetHandle`], optional
            Deferred-load objects that fetch `lsst.afw.math.BackgroundList`
            instances. These should correspond to the background already
            subtracted from ``input_exposures``. If not provided and
            ``background_overrides`` is, it is assumed that the background in
            ``input_exposures`` has not been subtracted. If provided, all keys
            in ``background_overrides`` must also be present in
            ``background_originals``.
        background_overrides : `collections.abc.Mapping` [`int`, \
                `lsst.daf.butler.DeferredDatasetHandle`], optional
            Deferred-load objects that fetch `lsst.afw.math.BackgroundList`
            instances. These should correspond to the background that should
            now be subtracted from ``input_exposures`` to yield the final
            background-subtracted image.

        Returns
        -------
        output_summary_catalog : `lsst.afw.table.ExposureCatalog`
            Output visit summary catalog.

        Notes
        -----
        If any override parameter is provided but does not have a value for a
        particular detector, that component will be set to `None` in the
        returned catalog for that detector and all summary statistics derived
        from that component will be reset (usually to ``NaN``) as well. Not
        passing an override parameter at all will instead pass through the
        original component and values from the input catalog unchanged.
        """
        output_summary_catalog = ExposureCatalog(self.schema)
        output_summary_catalog.setMetadata(input_summary_catalog.getMetadata())
        for input_record in input_summary_catalog:
            detector_id = input_record.getId()
            output_record = output_summary_catalog.addNew()

            # Make a new ExposureSummaryStats from the input record.
            summary_stats = ExposureSummaryStats.from_record(input_record)

            # Also copy the input record values to output record; this copies
            # many of the same values just copied into `summary_stats` (which
            # will be overridden later by summary_stats.update_record), but it
            # also copies fields that aren't part of summary_stats, including
            # the actual components like Psf, Wcs, etc.
            output_record.assign(input_record, self.schema_mapper)

            exposure = input_exposures[detector_id].get()
            bbox = exposure.getBBox()

            if wcs_overrides:
                wcs_tract, wcs_record = wcs_overrides.best_for_detector(detector_id, bbox=bbox)
                if wcs_record is not None:
                    wcs = wcs_record.getWcs()
                else:
                    wcs = None
                if self.config.wcs_provider == "tract":
                    output_record["wcsTractId"] = wcs_tract
                output_record.setWcs(wcs)
                self.compute_summary_stats.update_wcs_stats(
                    summary_stats, wcs, bbox, output_record.getVisitInfo()
                )
            else:
                wcs = input_record.getWcs()

            if psf_overrides:
                if (psf_record := psf_overrides.find(detector_id)) is not None:
                    psf = psf_record.getPsf()
                else:
                    psf = None
                output_record.setPsf(psf)
                sources = psf_star_catalog[psf_star_catalog["detector"] == detector_id]
                if len(sources) == 0:
                    sources = None
                self.compute_summary_stats.update_psf_stats(
                    summary_stats,
                    psf,
                    bbox,
                    sources,
                    image_mask=exposure.mask,
                    sources_is_astropy=True,
                )

            if ap_corr_overrides:
                if (ap_corr_record := ap_corr_overrides.find(detector_id)) is not None:
                    ap_corr = ap_corr_record.getApCorrMap()
                else:
                    ap_corr = None
                output_record.setApCorrMap(ap_corr)

            if photo_calib_overrides:
                center = compute_center_for_detector_record(output_record, bbox, wcs)
                (
                    photo_calib_tract,
                    photo_calib_record,
                ) = photo_calib_overrides.best_for_detector(detector_id, center=center)
                if photo_calib_record is not None:
                    photo_calib = photo_calib_record.getPhotoCalib()
                else:
                    photo_calib = None
                if self.config.photo_calib_provider == "tract":
                    output_record["photoCalibTractId"] = photo_calib_tract
                output_record.setPhotoCalib(photo_calib)
                self.compute_summary_stats.update_photo_calib_stats(summary_stats, photo_calib)

            if background_overrides is not None:
                if (handle := background_overrides.get(detector_id)) is not None:
                    new_bkg = handle.get()
                    if background_originals is not None:
                        orig_bkg = background_originals[detector_id].get()
                    else:
                        orig_bkg = BackgroundList()

                    full_bkg = orig_bkg.clone()
                    for layer in new_bkg:
                        full_bkg.append(layer)
                    exposure.image -= new_bkg.getImage()
                    self.compute_summary_stats.update_background_stats(summary_stats, full_bkg)
                    self.compute_summary_stats.update_masked_image_stats(
                        summary_stats, exposure.getMaskedImage()
                    )

            # Update the effective exposure time calculation
            self.compute_summary_stats.update_effective_time_stats(summary_stats, exposure)

            summary_stats.update_record(output_record)
            del exposure

        return Struct(output_summary_catalog=output_summary_catalog)
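

# A minimal sketch of building the ``run`` arguments directly from a Butler,
# outside the usual PipelineTask execution (dataset names, dataIds, and the
# collection setup are illustrative, not authoritative):
#
#   schema_cat = butler.get("visitSummary_schema")
#   task = UpdateVisitSummaryTask(initInputs={"input_summary_schema": schema_cat})
#   summary = butler.get("visitSummary", instrument="HSC", visit=1228)
#   refs = butler.registry.queryDatasets("calexp", instrument="HSC", visit=1228)
#   exposures = {ref.dataId["detector"]: butler.getDeferred(ref) for ref in refs}
#   result = task.run(input_summary_catalog=summary, input_exposures=exposures)
#   final_catalog = result.output_summary_catalog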