Coverage for python/lsst/drp/tasks/update_visit_summary.py: 21%
232 statements
coverage.py v7.2.4, created at 2023-04-29 04:12 -0700
1# This file is part of drp_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22from __future__ import annotations
24__all__ = (
25 "UpdateVisitSummaryConnections",
26 "UpdateVisitSummaryConfig",
27 "UpdateVisitSummaryTask",
28 "PossiblyMultipleInput",
29 "PerTractInput",
30 "GlobalInput",
31)
33import dataclasses
34from abc import ABC, abstractmethod
35from collections.abc import Iterable, Mapping
36from typing import Any
38import astropy.table
39import lsst.pipe.base.connectionTypes as cT
40from lsst.afw.geom import SkyWcs
41from lsst.afw.image import ExposureSummaryStats
42from lsst.afw.math import BackgroundList
43from lsst.afw.table import ExposureCatalog, ExposureRecord, SchemaMapper
44from lsst.daf.butler import Butler, DatasetRef, DeferredDatasetHandle
45from lsst.daf.butler.formatters.parquet import pandas_to_astropy
46from lsst.geom import Angle, Box2I, SpherePoint, degrees
47from lsst.pex.config import ChoiceField, ConfigurableField
48from lsst.pipe.base import (
49 ButlerQuantumContext,
50 InputQuantizedConnection,
51 InvalidQuantumError,
52 OutputQuantizedConnection,
53 PipelineTask,
54 PipelineTaskConfig,
55 PipelineTaskConnections,
56 Struct,
57)
58from lsst.pipe.tasks.computeExposureSummaryStats import ComputeExposureSummaryStatsTask
59from lsst.skymap import BaseSkyMap, TractInfo
60from lsst.skymap.detail import makeSkyPolygonFromBBox
63def compute_center_for_detector_record(
64 record: ExposureRecord, bbox: Box2I | None = None, wcs: SkyWcs | None = None
65) -> SpherePoint | None:
66 """Compute the sky coordinate center for a detector to be used when
67 testing distance to tract center.
69 Parameters
70 ----------
71 record : `lsst.afw.table.ExposureRecord`
72 Exposure record to obtain WCS and bbox from if not provided.
73 bbox : `lsst.geom.Box2I`, optional
74 Bounding box for the detector in its own pixel coordinates.
75 wcs : `lsst.afw.geom.SkyWcs`, optional
76 WCS that maps the detector's pixel coordinate system to celestial
77 coordinates.
79 Returns
80 -------
81 center : `lsst.geom.SpherePoint` or `None`
82 Center of the detector in sky coordinates, or `None` if no WCS was
83 given or present in the given record.
84 """
85 if bbox is None:
86 bbox = record.getBBox()
87 if wcs is None:
88 wcs = record.getWcs()
89 if wcs is None:
90 return None
91 region = makeSkyPolygonFromBBox(bbox, wcs)
92 return SpherePoint(region.getCentroid())
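# Illustrative usage sketch (not from the original module): computing the sky
# center for one row of an already-loaded, visitSummary-style ExposureCatalog.
# The ``summary_catalog`` argument and the detector ID are hypothetical
# placeholders; the helper below is not part of the task's API.
def _example_detector_center(summary_catalog: ExposureCatalog) -> SpherePoint | None:
    record = summary_catalog.find(42)  # hypothetical detector ID
    if record is None:
        return None  # catalog has no row for this detector
    # Falls back to the record's own bbox and WCS; returns None if the
    # record carries no WCS.
    return compute_center_for_detector_record(record)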
95class PossiblyMultipleInput(ABC):
96 """A helper ABC for handling input `~lsst.afw.table.ExposureCatalog`
97 datasets that may be multiple (one per tract/visit combination) or
98 unique/global (one per visit).
99 """
101 @abstractmethod
102 def best_for_detector(
103 self,
104 detector_id: int,
105 center: SpherePoint | None = None,
106 bbox: Box2I | None = None,
107 ) -> tuple[int, ExposureRecord | None]:
108 """Return the exposure record for this detector that is the best match
109 for this detector.
111 Parameters
112 ----------
113 detector_id : `int`
114 Detector ID; used to find the right row in the catalog or catalogs.
115 center : `lsst.geom.SpherePoint`, optional
116 Center of the detector in sky coordinates. If not provided, one
117 will be computed via `compute_center_for_detector_record`.
118 bbox : `lsst.geom.Box2I`, optional
119 Bounding box for the detector in its own pixel coordinates.
121 Returns
122 -------
123 tract_id : `int`
124 ID of the tract that supplied this record, or `-1` if ``record`` is
125 `None` or if the input was not per-tract.
126 record : `lsst.afw.table.ExposureRecord` or `None`
127 Best record for this detector, or `None` if there were no records
128 for this detector or no WCS was available to compute a center.
129 """
130 raise NotImplementedError()
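# Illustrative sketch of the calling convention described above: the
# (tract_id, record) pair lets callers treat per-tract and global override
# inputs uniformly.  ``overrides`` is a hypothetical, already-constructed
# PossiblyMultipleInput; the helper is not part of the task's API.
def _example_extract_wcs(overrides: PossiblyMultipleInput, detector_id: int) -> SkyWcs | None:
    tract_id, record = overrides.best_for_detector(detector_id)
    if record is None:
        # No usable override for this detector (tract_id is -1 here).
        return None
    # tract_id is also -1 when the input was not per-tract (GlobalInput).
    return record.getWcs()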
133@dataclasses.dataclass
134class PerTractInput(PossiblyMultipleInput):
135 """Wrapper class for input `~lsst.afw.table.ExposureCatalog` datasets
136 that are per-tract.
138 This selects the best tract via the minimum average distance (on the sky)
139 from the detector's corners to the tract center.
140 """
142 catalogs_by_tract: list[tuple[TractInfo, ExposureCatalog]]
143 """List of tuples of catalogs and the tracts they correspond to
144 (`list` [`tuple` [`lsst.skymap.TractInfo`,
145 `lsst.afw.table.ExposureCatalog`]]).
146 """
148 @classmethod
149 def load(
150 cls,
151 butler: ButlerQuantumContext | Butler,
152 sky_map: BaseSkyMap,
153 refs: Iterable[DatasetRef],
154 ) -> PerTractInput:
155 """Load and wrap input catalogs.
157 Parameters
158 ----------
159 butler : `lsst.pipe.base.ButlerQuantumContext` or `lsst.daf.butler.Butler`
160 Butler or butler proxy used in `~lsst.pipe.base.PipelineTask.runQuantum`.
161 sky_map : `lsst.skymap.BaseSkyMap`
162 Definition of tracts and patches.
163 refs : `~collections.abc.Iterable` [`lsst.daf.butler.DatasetRef`]
164 References to the catalog datasets to load.
166 Returns
167 -------
168 wrapper : `PerTractInput`
169 Wrapper object for the loaded catalogs.
170 """
171 catalogs_by_tract = []
172 for ref in refs:
173 tract_id = ref.dataId["tract"]
174 tract_info = sky_map[tract_id]
175 catalogs_by_tract.append(
176 (
177 tract_info,
178 butler.get(ref),
179 )
180 )
181 return cls(catalogs_by_tract)
183 def best_for_detector(
184 self,
185 detector_id: int,
186 center: SpherePoint | None = None,
187 bbox: Box2I | None = None,
188 ) -> tuple[int, ExposureRecord | None]:
189 # Docstring inherited.
190 best_result: tuple[int, ExposureRecord | None] = (-1, None)
191 best_distance: Angle = float("inf") * degrees
192 for tract_info, catalog in self.catalogs_by_tract:
193 record = catalog.find(detector_id)
194 if record is None:
195 continue
196 if center is None:
197 center_for_record = compute_center_for_detector_record(
198 record, bbox=bbox
199 )
200 if center_for_record is None:
201 continue
202 else:
203 center_for_record = center
204 center_distance = tract_info.ctr_coord.separation(center_for_record)
205 if best_distance > center_distance:
206 best_result = (tract_info.tract_id, record)
207 best_distance = center_distance
208 return best_result
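# Illustrative sketch: because PerTractInput is a plain dataclass, it can be
# built directly from in-memory (TractInfo, ExposureCatalog) pairs, e.g. in
# tests, instead of going through `PerTractInput.load`.  The ``sky_map`` and
# ``catalogs_by_tract_id`` arguments are hypothetical, already-loaded objects.
def _example_per_tract_selection(
    sky_map: BaseSkyMap,
    catalogs_by_tract_id: Mapping[int, ExposureCatalog],
    detector_id: int,
) -> tuple[int, ExposureRecord | None]:
    wrapper = PerTractInput(
        [(sky_map[tract_id], catalog) for tract_id, catalog in catalogs_by_tract_id.items()]
    )
    # Picks the record from the tract whose center is closest on the sky to
    # the detector center, or (-1, None) if no catalog has this detector.
    return wrapper.best_for_detector(detector_id)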
211@dataclasses.dataclass
212class GlobalInput(PossiblyMultipleInput):
213 """Wrapper class for input `~lsst.afw.table.ExposureCatalog` datasets
214 that are not per-tract.
215 """
217 catalog: ExposureCatalog
218 """Loaded per-visit catalog dataset (`lsst.afw.table.ExposureCatalog`).
219 """
221 def best_for_detector(
222 self,
223 detector_id: int,
224 center: SpherePoint | None = None,
225 bbox: Box2I | None = None,
226 ) -> tuple[int, ExposureRecord | None]:
227 # Docstring inherited.
228 return -1, self.catalog.find(detector_id)
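# Illustrative sketch: wrapping a single visit-level override catalog.  The
# repository path, collection, and data ID below are hypothetical; the dataset
# type name matches the default for the 'photo_calib_overrides_global'
# connection defined below ("fgcmPhotoCalibCatalog").
def _example_global_input() -> GlobalInput:
    butler = Butler("/path/to/repo", collections=["hypothetical/collection"])
    catalog = butler.get("fgcmPhotoCalibCatalog", instrument="HSC", visit=12345)
    return GlobalInput(catalog)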
231class UpdateVisitSummaryConnections(
232 PipelineTaskConnections,
233 dimensions=("instrument", "visit"),
234 defaultTemplates={
235 "skyWcsName": "gbdesAstrometricFit",
236 "photoCalibName": "fgcm",
237 },
238):
239 sky_map = cT.Input(
240 doc="Description of tract/patch geometry.",
241 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
242 dimensions=("skymap",),
243 storageClass="SkyMap",
244 )
245 input_summary_schema = cT.InitInput(
246 doc="Schema for input_summary_catalog.",
247 name="visitSummary_schema",
248 storageClass="ExposureCatalog",
249 )
250 input_summary_catalog = cT.Input(
251 doc="Visit summary table to load and modify.",
252 name="visitSummary",
253 dimensions=("instrument", "visit"),
254 storageClass="ExposureCatalog",
255 )
256 input_exposures = cT.Input(
257 doc=(
258 "Per-detector images to obtain image, mask, and variance from "
259 "(embedded summary stats and other components are ignored)."
260 ),
261 name="calexp",
262 dimensions=("instrument", "detector", "visit"),
263 storageClass="ExposureF",
264 multiple=True,
265 deferLoad=True,
266 deferGraphConstraint=True,
267 )
268 psf_overrides = cT.Input(
269 doc="Visit-level catalog of updated PSFs to use.",
270 name="finalized_psf_ap_corr_catalog",
271 dimensions=("instrument", "visit"),
272 storageClass="ExposureCatalog",
273 deferGraphConstraint=True,
274 )
275 psf_star_catalog = cT.Input(
276 doc="Per-visit table of PSF reserved- and used-star measurements.",
277 name="finalized_src_table",
278 dimensions=("instrument", "visit"),
279 storageClass="DataFrame",
280 deferGraphConstraint=True,
281 )
282 ap_corr_overrides = cT.Input(
283 doc="Visit-level catalog of updated aperture correction maps to use.",
284 name="finalized_psf_ap_corr_catalog",
285 dimensions=("instrument", "visit"),
286 storageClass="ExposureCatalog",
287 deferGraphConstraint=True,
288 )
289 photo_calib_overrides_tract = cT.Input(
290 doc="Per-tract visit-level catalog of updated photometric calibration objects to use.",
291 name="{photoCalibName}PhotoCalibCatalog",
292 dimensions=("instrument", "visit", "tract"),
293 storageClass="ExposureCatalog",
294 multiple=True,
295 deferGraphConstraint=True,
296 )
297 photo_calib_overrides_global = cT.Input(
298 doc="Global visit-level catalog of updated photometric calibration objects to use.",
299 name="{photoCalibName}PhotoCalibCatalog",
300 dimensions=("instrument", "visit"),
301 storageClass="ExposureCatalog",
302 deferGraphConstraint=True,
303 )
304 wcs_overrides_tract = cT.Input(
305 doc="Per-tract visit-level catalog of updated astrometric calibration objects to use.",
306 name="{skyWcsName}SkyWcsCatalog",
307 dimensions=("instrument", "visit", "tract"),
308 storageClass="ExposureCatalog",
309 multiple=True,
310 deferGraphConstraint=True,
311 )
312 wcs_overrides_global = cT.Input(
313 doc="Global visit-level catalog of updated astrometric calibration objects to use.",
314 name="{skyWcsName}SkyWcsCatalog",
315 dimensions=("instrument", "visit"),
316 storageClass="ExposureCatalog",
317 deferGraphConstraint=True,
318 )
319 background_originals = cT.Input(
320 doc="Per-detector original background that has already been subtracted from 'input_exposures'.",
321 name="calexpBackground",
322 dimensions=("instrument", "visit", "detector"),
323 storageClass="Background",
324 multiple=True,
325 deferLoad=True,
326 deferGraphConstraint=True,
327 )
328 background_overrides = cT.Input(
329 doc="Per-detector background that can be subtracted directly from 'input_exposures'.",
330 name="skyCorr",
331 dimensions=("instrument", "visit", "detector"),
332 storageClass="Background",
333 multiple=True,
334 deferLoad=True,
335 deferGraphConstraint=True,
336 )
337 output_summary_schema = cT.InitOutput(
338 doc="Schema of the output visit summary catalog.",
339 name="finalVisitSummary_schema",
340 storageClass="ExposureCatalog",
341 )
342 output_summary_catalog = cT.Output(
343 doc="Visit-level catalog summarizing all image characterizations and calibrations.",
344 name="finalVisitSummary",
345 dimensions=("instrument", "visit"),
346 storageClass="ExposureCatalog",
347 )
349 def __init__(self, *, config: UpdateVisitSummaryConfig | None = None):
350 super().__init__(config=config)
351 match self.config.wcs_provider:
352 case "input_summary":
353 self.inputs.remove("wcs_overrides_tract")
354 self.inputs.remove("wcs_overrides_global")
355 case "tract":
356 self.inputs.remove("wcs_overrides_global")
357 case "global":
358 self.inputs.remove("wcs_overrides_tract")
359 case bad:
360 raise ValueError(
361 f"Invalid value wcs_provider={bad!r}; config was not validated."
362 )
363 match self.config.photo_calib_provider:
364 case "input_summary":
365 self.inputs.remove("photo_calib_overrides_tract")
366 self.inputs.remove("photo_calib_overrides_global")
367 case "tract":
368 self.inputs.remove("photo_calib_overrides_global")
369 case "global":
370 self.inputs.remove("photo_calib_overrides_tract")
371 case bad:
372 raise ValueError(
373 f"Invalid value photo_calib_provider={bad!r}; config was not validated."
374 )
375 match self.config.background_provider:
376 case "input_summary":
377 self.inputs.remove("background_originals")
378 self.inputs.remove("background_overrides")
379 case "replacement":
380 pass
381 case bad:
382 raise ValueError(
383 f"Invalid value background_provider={bad!r}; config was not validated."
384 )
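# Illustrative sketch of the pruning performed in __init__ above: the set of
# input connections adapts to the provider configuration.  This assumes the
# standard PipelineTaskConnections behavior in which ``connections.inputs``
# holds the surviving input connection names (the same attribute mutated
# above); UpdateVisitSummaryConfig is defined further down in this module.
def _example_pruned_connections() -> None:
    config = UpdateVisitSummaryConfig()
    config.wcs_provider = "tract"
    config.photo_calib_provider = "global"
    # background_provider stays at its default, "input_summary".
    connections = UpdateVisitSummaryConnections(config=config)
    assert "wcs_overrides_tract" in connections.inputs
    assert "wcs_overrides_global" not in connections.inputs
    assert "photo_calib_overrides_global" in connections.inputs
    assert "photo_calib_overrides_tract" not in connections.inputs
    assert "background_overrides" not in connections.inputs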
387class UpdateVisitSummaryConfig(
388 PipelineTaskConfig, pipelineConnections=UpdateVisitSummaryConnections
389):
390 """Configuration for UpdateVisitSummaryTask.
392 Notes
393 -----
394 The configuration defaults for this task reflect a simple or "least common
395 denominator" pipeline, not the more complete, more sophisticated pipeline
396 we run on the instruments we support best. The expectation is that the
397 various full pipeline definitions will generally import the simpler
398 definition, so making the defaults correspond to any full pipeline would
399 just force the simple pipeline to set them back to the simple-pipeline
400 values, while the full pipeline would still have to override them to the
401 full-pipeline values.
402 """
404 compute_summary_stats = ConfigurableField(
405 doc="Subtask that computes summary statistics from Exposure components.",
406 target=ComputeExposureSummaryStatsTask,
407 )
408 wcs_provider = ChoiceField(
409 doc="Which connection and behavior to use when applying WCS overrides.",
410 dtype=str,
411 allowed={
412 "input_summary": (
413 "Propagate the WCS from the input visit summary catalog "
414 "and do not recompute WCS-based summary statistics."
415 ),
416 "tract": {
417 "Use the 'wcs_overrides_tract' connection to load an "
418 "`ExposureCatalog` with {visit, tract} dimensions and per-"
419 "detector rows, and recommpute WCS-based summary statistics."
420 },
421 "global": {
422 "Use the 'wcs_overrides_global' connection to load an "
423 "`ExposureCatalog` with {visit} dimensions and per-"
424 "detector rows, and recommpute WCS-based summary statistics."
425 },
426 # If needed, we could add options here to propagate the WCS from
427 # the input exposures and/or transfer WCS-based summary statistics
428 # from them as well. Right now there's no use case for that, since
429 # the input visit summary is always produced after the last time we
430 # write a new Exposure.
431 },
432 default="input_summary",
433 optional=False,
434 )
435 photo_calib_provider = ChoiceField(
436 doc="Which connection and behavior to use when applying photometric calibration overrides.",
437 dtype=str,
438 allowed={
439 "input_summary": (
440 "Propagate the PhotoCalib from the input visit summary catalog "
441 "and do not recompute photometric calibration summary "
442 "statistics."
443 ),
444 "tract": {
445 "Use the 'photo_calib_overrides_tract' connection to load an "
446 "`ExposureCatalog` with {visit, tract} dimensions and per-"
447 "detector rows, and recommpute photometric calibration summary "
448 "statistics."
449 },
450 "global": {
451 "Use the 'photo_calib_overrides_global' connection to load an "
452 "`ExposureCatalog` with {visit} dimensions and per-"
453 "detector rows, and recommpute photometric calibration summary "
454 "statistics."
455 },
456 # If needed, we could add options here to propagate the PhotoCalib
457 # from the input exposures and/or transfer photometric calibration
458 # summary statistics from them as well. Right now there's no use case
459 # for that, since the input visit summary is always produced after
460 # the last time we write a new Exposure.
461 },
462 default="input_summary",
463 optional=False,
464 )
465 background_provider = ChoiceField(
466 doc="Which connection(s) and behavior to use when applying background overrides.",
467 dtype=str,
468 allowed={
469 "input_summary": (
470 "The input visit summary catalog already includes summary "
471 "statistics for the final backgrounds that can be used as-is."
472 ),
473 "replacement": {
474 "The 'background_originals' connection refers to a background "
475 "model that has been superseded by the model referred to by "
476 "the 'background_overrides' connection."
477 },
478 # Could also imagine an option in which there is no original
479 # background and the new one stands alone; can add later if needed.
480 },
481 default="input_summary",
482 optional=False,
483 )
484 # Could imagine an option here to say that the original background has not
485 # been subtracted from the input exposures, allowing postISRCCD to be used
486 # as input exposures. Can add later if needed.
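# Illustrative sketch: the three provider choice fields default to
# "input_summary", i.e. a pure pass-through of the WCS, PhotoCalib, and
# background information already in the input visit summary; fuller pipelines
# are expected to override them.  The overrides chosen here are hypothetical.
def _example_config() -> UpdateVisitSummaryConfig:
    config = UpdateVisitSummaryConfig()
    assert config.wcs_provider == "input_summary"
    assert config.photo_calib_provider == "input_summary"
    assert config.background_provider == "input_summary"
    config.wcs_provider = "tract"
    config.photo_calib_provider = "global"
    config.background_provider = "replacement"
    config.validate()  # standard pex_config validation
    return config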
489class UpdateVisitSummaryTask(PipelineTask):
490 """A pipeline task that creates a new visit-summary table after all
491 `lsst.afw.image.Exposure` components have been finalized.
493 Notes
494 -----
495 This task is designed to be run just prior to making warps for coaddition,
496 as it aggregates all inputs other than the images and backgrounds into a
497 single ``ExposureCatalog`` dataset and recomputes summary statistics that
498 are useful in selecting which images should go into a coadd. Its output
499 can also be used to reconstruct a final processed visit image when combined
500 with a post-ISR image, the background model, and the final mask.
501 """
503 # The `run` method of this task can conditionally apply overrides for PSFs
504 # and aperture corrections, but its `PipelineTask` interface always applies
505 # them. We can always add the config options to make them optional later,
506 # if that turns out to be useful.
508 _DefaultName = "updateVisitSummary"
509 ConfigClass = UpdateVisitSummaryConfig
511 compute_summary_stats: ComputeExposureSummaryStatsTask
513 def __init__(self, *, initInputs: dict[str, Any] | None = None, **kwargs: Any):
514 super().__init__(initInputs=initInputs, **kwargs)
515 self.makeSubtask("compute_summary_stats")
516 if initInputs is None or "input_summary_schema" not in initInputs:
517 raise RuntimeError("Task requires 'input_summary_schema' in initInputs.")
518 input_summary_schema = initInputs["input_summary_schema"].schema
519 self.schema_mapper = SchemaMapper(input_summary_schema)
520 self.schema_mapper.addMinimalSchema(input_summary_schema)
521 self.schema = self.schema_mapper.getOutputSchema()
522 if self.config.wcs_provider == "tract":
523 self.schema.addField(
524 "wcsTractId", type="L", doc="ID of the tract that provided the WCS."
525 )
526 if self.config.photo_calib_provider == "tract":
527 self.schema.addField(
528 "photoCalibTractId",
529 type="L",
530 doc="ID of the tract that provided the PhotoCalib.",
531 )
532 self.output_summary_schema = ExposureCatalog(self.schema)
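# Illustrative sketch (kept as a comment so the class body is unchanged):
# constructing the task standalone requires the input visit-summary schema in
# ``initInputs``.  The repository, collection, and use of the hypothetical
# _example_config() helper above are illustrative only.
#
#     butler = Butler("/path/to/repo", collections=["hypothetical/collection"])
#     input_schema = butler.get("visitSummary_schema")
#     task = UpdateVisitSummaryTask(
#         config=_example_config(),
#         initInputs={"input_summary_schema": input_schema},
#     )
#     # Because that config sets wcs_provider="tract", the output schema
#     # gains the "wcsTractId" field added above.
#     assert "wcsTractId" in task.schema.getNames()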
534 def runQuantum(
535 self,
536 butlerQC: ButlerQuantumContext,
537 inputRefs: InputQuantizedConnection,
538 outputRefs: OutputQuantizedConnection,
539 ) -> None:
540 # Docstring inherited.
541 sky_map = butlerQC.get(inputRefs.sky_map)
542 del inputRefs.sky_map
543 inputs = {}
544 # Collapse the wcs_overrides_* and photo_calib_overrides_* connection pairs
545 # into individual inputs (either ExposureCatalog or PerTractInput
546 # objects).
547 match self.config.wcs_provider:
548 case "tract":
549 inputs["wcs_overrides"] = PerTractInput.load(
550 butlerQC, sky_map, inputRefs.wcs_overrides_tract
551 )
552 del inputRefs.wcs_overrides_tract
553 case "global":
554 inputs["wcs_overrides"] = GlobalInput(
555 butlerQC.get(inputRefs.wcs_overrides_global)
556 )
557 del inputRefs.wcs_overrides_global
558 case "input_summary":
559 inputs["wcs_overrides"] = None
560 match self.config.photo_calib_provider:
561 case "tract":
562 inputs["photo_calib_overrides"] = PerTractInput.load(
563 butlerQC, sky_map, inputRefs.photo_calib_overrides_tract
564 )
565 del inputRefs.photo_calib_overrides_tract
566 case "global":
567 inputs["photo_calib_overrides"] = GlobalInput(
568 butlerQC.get(inputRefs.photo_calib_overrides_global)
569 )
570 del inputRefs.photo_calib_overrides_global
571 case "input_summary":
572 inputs["photo_calib_overrides"] = None
573 # Load or make DeferredDatasetHandles for everything else.
574 inputs.update(butlerQC.get(inputRefs))
575 deferred_dataset_types = ["input_exposures"]
576 # Handle whether to look for background originals and overrides at all.
577 match self.config.background_provider:
578 case "replacement":
579 deferred_dataset_types.append("background_originals")
580 deferred_dataset_types.append("background_overrides")
581 # Transform the lists of DeferredDatasetHandles for the multiple=True,
582 # deferLoad=True connections into mappings keyed by detector ID.
583 for name in deferred_dataset_types:
584 handles_list = inputs[name]
585 inputs[name] = {
586 handle.dataId["detector"]: handle for handle in handles_list
587 }
588 for record in inputs["input_summary_catalog"]:
589 detector_id = record.getId()
590 if detector_id not in inputs[name]:
591 raise InvalidQuantumError(
592 f"No {name!r} with detector {detector_id} for visit "
593 f"{butlerQC.quantum.dataId['visit']} even though this detector is present "
594 "in the input visit summary catalog. "
595 "This is most likely to occur when the QuantumGraph that includes this task "
596 "was incorrectly generated with an explicit or implicit (from datasets) tract "
597 "constraint."
598 )
599 # Convert the psf_star_catalog datasets from DataFrame to Astropy so
600 # they can be handled by ComputeExposureSummaryStatsTask (which was
601 # actually written to work with afw.table, but Astropy is similar
602 # enough that it works, too). Ideally this would be handled by just
603 # using ArrowAstropy as the storage class in the connection, but QG
604 # generation apparently doesn't fully support those yet, as it leads to
605 # problems in ci_hsc.
606 inputs["psf_star_catalog"] = pandas_to_astropy(inputs["psf_star_catalog"])
607 # Actually run the task and write the results.
608 outputs = self.run(**inputs)
609 butlerQC.put(outputs, outputRefs)
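# Illustrative sketch: `run` can also be called directly, in which case the
# deferred-load inputs are ordinary mappings from detector ID to
# DeferredDatasetHandle.  The butler query below is hypothetical and mirrors
# the reorganization performed in runQuantum above.
#
#     exposure_handles = {
#         ref.dataId["detector"]: butler.getDeferred(ref)
#         for ref in butler.registry.queryDatasets(
#             "calexp", instrument="HSC", visit=12345
#         )
#     }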
611 def run(
612 self,
613 input_summary_catalog: ExposureCatalog,
614 input_exposures: Mapping[int, DeferredDatasetHandle],
615 psf_overrides: ExposureCatalog | None = None,
616 psf_star_catalog: astropy.table.Table | None = None,
617 ap_corr_overrides: ExposureCatalog | None = None,
618 photo_calib_overrides: PossiblyMultipleInput | None = None,
619 wcs_overrides: PossiblyMultipleInput | None = None,
620 background_originals: Mapping[int, DeferredDatasetHandle] | None = None,
621 background_overrides: Mapping[int, DeferredDatasetHandle] | None = None,
622 ):
623 """Build an updated version of a visit summary catalog.
625 Parameters
626 ----------
627 input_summary_catalog : `lsst.afw.table.ExposureCatalog`
628 Input catalog. Each row in this catalog will be used to produce
629 a row in the output catalog. Any override parameter that is `None`
630 will leave the corresponding values unchanged from those in this
631 input catalog.
632 input_exposures : `collections.abc.Mapping` [`int`,
633 `lsst.daf.butler.DeferredDatasetHandle`]
634 Deferred-load objects that fetch `lsst.afw.image.Exposure`
635 instances. Only the image, mask, and variance are used; all other
636 components are assumed to be superseded by at least
637 ``input_summary_catalog`` and probably some ``_overrides``
638 arguments as well. This usually corresponds to the ``calexp``
639 dataset.
640 psf_overrides : `lsst.afw.table.ExposureCatalog`, optional
641 Catalog with attached `lsst.afw.detection.Psf` objects that
642 supersede the input catalog's PSFs.
643 psf_star_catalog : `astropy.table.Table`, optional
644 Table containing PSF stars for use in computing PSF summary
645 statistics. Must be provided if ``psf_overrides`` is.
646 ap_corr_overrides : `lsst.afw.table.ExposureCatalog`, optional
647 Catalog with attached `lsst.afw.image.ApCorrMap` objects that
648 supersede the input catalog's aperture corrections.
649 photo_calib_overrides : `PossiblyMultipleInput`, optional
650 Catalog wrappers with attached `lsst.afw.image.PhotoCalib`
651 objects that supersede the input catalog's photometric
652 calibrations.
653 wcs_overrides : `PossiblyMultipleInput`, optional
654 Catalog wrappers with attached `lsst.afw.geom.SkyWcs` objects
655 that supersede the input catalog's astrometric calibrations.
656 background_originals : `collections.abc.Mapping` [`int`,
657 `lsst.daf.butler.DeferredDatasetHandle`], optional
658 Deferred-load objects that fetch `lsst.afw.math.BackgroundList`
659 instances. These should correspond to the background already
660 subtracted from ``input_exposures``. If not provided and
661 ``background_overrides`` is, it is assumed that the background in
662 ``input_exposures`` has not been subtracted. If provided, all keys
663 in ``background_overrides`` must also be present in
664 ``background_originals``.
665 background_overrides : `collections.abc.Mapping` [`int`,
666 `lsst.daf.butler.DeferredDatasetHandle`], optional
667 Deferred-load objects that fetch `lsst.afw.math.BackgroundList`
668 instances. These should correspond to the background that should
669 now be subtracted from ``input_exposures`` to yield the final
670 background-subtracted image.
672 Returns
673 -------
674 output_summary_catalog : `lsst.afw.table.ExposureCatalog`
675 Output visit summary catalog.
677 Notes
678 -----
679 If any override parameter is provided but does not have a value for a
680 particular detector, that component will be set to `None` in the
681 returned catalog for that detector and all summary statistics derived
682 from that component will be reset (usually to ``NaN``) as well. Not
683 passing an override parameter at all will instead pass through the
684 original component and values from the input catalog unchanged.
685 """
686 output_summary_catalog = ExposureCatalog(self.schema)
687 output_summary_catalog.setMetadata(input_summary_catalog.getMetadata())
688 for input_record in input_summary_catalog:
689 detector_id = input_record.getId()
690 output_record = output_summary_catalog.addNew()
692 # Make a new ExposureSummaryStats from the input record.
693 summary_stats = ExposureSummaryStats.from_record(input_record)
695 # Also copy the input record values to output record; this copies
696 # many of the same values just copied into `summary_stats` (which
697 # will be overridden later by summary_stats.update_record), but it
698 # also copies fields that aren't part of summary_stats, including
699 # the actual components like Psf, Wcs, etc.
700 output_record.assign(input_record, self.schema_mapper)
702 exposure = input_exposures[detector_id].get()
703 bbox = exposure.getBBox()
705 if wcs_overrides:
706 wcs_tract, wcs_record = wcs_overrides.best_for_detector(
707 detector_id, bbox=bbox
708 )
709 if wcs_record is not None:
710 wcs = wcs_record.getWcs()
711 else:
712 wcs = None
713 if self.config.wcs_provider == "tract":
714 output_record["wcsTractId"] = wcs_tract
715 output_record.setWcs(wcs)
716 self.compute_summary_stats.update_wcs_stats(
717 summary_stats, wcs, bbox, output_record.getVisitInfo()
718 )
719 else:
720 wcs = input_record.getWcs()
722 if psf_overrides:
723 if (psf_record := psf_overrides.find(detector_id)) is not None:
724 psf = psf_record.getPsf()
725 else:
726 psf = None
727 output_record.setPsf(psf)
728 sources = psf_star_catalog[psf_star_catalog["detector"] == detector_id]
729 self.compute_summary_stats.update_psf_stats(
730 summary_stats,
731 psf,
732 bbox,
733 sources,
734 image_mask=exposure.mask,
735 sources_is_astropy=True,
736 )
738 if ap_corr_overrides:
739 if (ap_corr_record := ap_corr_overrides.find(detector_id)) is not None:
740 ap_corr = ap_corr_record.getApCorrMap()
741 else:
742 ap_corr = None
743 output_record.setApCorrMap(ap_corr)
745 if photo_calib_overrides:
746 center = compute_center_for_detector_record(output_record, bbox, wcs)
747 (
748 photo_calib_tract,
749 photo_calib_record,
750 ) = photo_calib_overrides.best_for_detector(detector_id, center=center)
751 if photo_calib_record is not None:
752 photo_calib = photo_calib_record.getPhotoCalib()
753 else:
754 photo_calib = None
755 if self.config.photo_calib_provider == "tract":
756 output_record["photoCalibTractId"] = photo_calib_tract
757 output_record.setPhotoCalib(photo_calib)
758 self.compute_summary_stats.update_photo_calib_stats(
759 summary_stats, photo_calib
760 )
762 if background_overrides is not None:
763 if (handle := background_overrides.get(detector_id)) is not None:
764 new_bkg = handle.get()
765 if background_originals is not None:
766 orig_bkg = background_originals[detector_id].get()
767 else:
768 orig_bkg = BackgroundList()
770 full_bkg = orig_bkg.clone()
771 for layer in new_bkg:
772 full_bkg.append(layer)
773 exposure.image -= new_bkg.getImage()
774 self.compute_summary_stats.update_background_stats(
775 summary_stats, full_bkg
776 )
777 self.compute_summary_stats.update_masked_image_stats(
778 summary_stats, exposure.getMaskedImage()
779 )
781 summary_stats.update_record(output_record)
782 del exposure
784 return Struct(output_summary_catalog=output_summary_catalog)