Coverage for python/lsst/drp/tasks/update_visit_summary.py: 22%

228 statements  

coverage.py v6.5.0, created at 2023-01-13 12:06 +0000

# This file is part of drp_tasks.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = (
    "UpdateVisitSummaryConnections",
    "UpdateVisitSummaryConfig",
    "UpdateVisitSummaryTask",
    "PossiblyMultipleInput",
    "PerTractInput",
    "GlobalInput",
)

import dataclasses
from abc import ABC, abstractmethod
from collections.abc import Iterable, Mapping
from typing import Any

import astropy.table
import lsst.pipe.base.connectionTypes as cT
from lsst.afw.geom import SkyWcs
from lsst.afw.image import ExposureSummaryStats
from lsst.afw.math import BackgroundList
from lsst.afw.table import ExposureCatalog, ExposureRecord, SchemaMapper
from lsst.daf.butler import Butler, DatasetRef, DeferredDatasetHandle
from lsst.daf.butler.formatters.parquet import pandas_to_astropy
from lsst.geom import Angle, Box2I, SpherePoint, degrees
from lsst.pex.config import ChoiceField, ConfigurableField
from lsst.pipe.base import (
    ButlerQuantumContext,
    InputQuantizedConnection,
    OutputQuantizedConnection,
    PipelineTask,
    PipelineTaskConfig,
    PipelineTaskConnections,
    Struct,
)
from lsst.pipe.tasks.computeExposureSummaryStats import ComputeExposureSummaryStatsTask
from lsst.skymap import BaseSkyMap, TractInfo
from lsst.skymap.detail import makeSkyPolygonFromBBox


def compute_center_for_detector_record(
    record: ExposureRecord, bbox: Box2I | None = None, wcs: SkyWcs | None = None
) -> SpherePoint | None:
    """Compute the sky coordinate center for a detector to be used when
    testing distance to tract center.

    Parameters
    ----------
    record : `lsst.afw.table.ExposureRecord`
        Exposure record to obtain WCS and bbox from if not provided.
    bbox : `lsst.geom.Box2I`, optional
        Bounding box for the detector in its own pixel coordinates.
    wcs : `lsst.afw.geom.SkyWcs`, optional
        WCS that maps the detector's pixel coordinate system to celestial
        coordinates.

    Returns
    -------
    center : `lsst.geom.SpherePoint` or `None`
        Center of the detector in sky coordinates, or `None` if no WCS was
        given or present in the given record.
    """
    if bbox is None:
        bbox = record.getBBox()
    if wcs is None:
        wcs = record.getWcs()
        if wcs is None:
            return None
    region = makeSkyPolygonFromBBox(bbox, wcs)
    return SpherePoint(region.getCentroid())
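

# A minimal usage sketch for the function above (``record`` stands in for an
# `lsst.afw.table.ExposureRecord` row from a visit summary catalog; it is not
# defined in this module)::
#
#     center = compute_center_for_detector_record(record)
#     if center is not None:
#         print(center.getRa().asDegrees(), center.getDec().asDegrees())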


class PossiblyMultipleInput(ABC):
    """A helper ABC for handling input `~lsst.afw.table.ExposureCatalog`
    datasets that may be multiple (one per tract/visit combination) or
    unique/global (one per visit).
    """

    @abstractmethod
    def best_for_detector(
        self,
        detector_id: int,
        center: SpherePoint | None = None,
        bbox: Box2I | None = None,
    ) -> tuple[int, ExposureRecord | None]:
107 """Return the exposure record for this detector that is the best match 

108 for this detector. 


        Parameters
        ----------
        detector_id : `int`
            Detector ID; used to find the right row in the catalog or catalogs.
        center : `lsst.geom.SpherePoint` or `None`
            Center of the detector in sky coordinates. If not provided, one
            will be computed via `compute_center_for_detector_record`.
        bbox : `lsst.geom.Box2I`, optional
            Bounding box for the detector in its own pixel coordinates.

        Returns
        -------
        tract_id : `int`
            ID of the tract that supplied this record, or `-1` if ``record`` is
            `None` or if the input was not per-tract.
        record : `lsst.afw.table.ExposureRecord` or `None`
            Best record for this detector, or `None` if there either were no
            records for this detector or no WCS available to compute a center.
        """
        raise NotImplementedError()
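
    # Usage contract sketch (``wrapper``, ``detector_id``, and ``bbox`` are
    # placeholders for a concrete subclass instance and values the caller
    # already has)::
    #
    #     tract_id, record = wrapper.best_for_detector(detector_id, bbox=bbox)
    #     wcs = record.getWcs() if record is not None else None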


@dataclasses.dataclass
class PerTractInput(PossiblyMultipleInput):
    """Wrapper class for input `~lsst.afw.table.ExposureCatalog` datasets
    that are per-tract.

    This selects the best tract via the minimum average distance (on the sky)
    from the detector's corners to the tract center.
    """

    catalogs_by_tract: list[tuple[TractInfo, ExposureCatalog]]
    """List of tuples of catalogs and the tracts they correspond to
    (`list` [`tuple` [`lsst.skymap.TractInfo`,
    `lsst.afw.table.ExposureCatalog`]]).
    """

    @classmethod
    def load(
        cls,
        butler: ButlerQuantumContext | Butler,
        sky_map: BaseSkyMap,
        refs: Iterable[DatasetRef],
    ) -> PerTractInput:
        """Load and wrap input catalogs.

        Parameters
        ----------
        butler : `lsst.pipe.base.ButlerQuantumContext` or `lsst.daf.butler.Butler`
            Butler proxy used in `~lsst.pipe.base.PipelineTask.runQuantum`,
            or a full butler.
        sky_map : `lsst.skymap.BaseSkyMap`
            Definition of tracts and patches.
        refs : `~collections.abc.Iterable` [`lsst.daf.butler.DatasetRef`]
            References to the catalog datasets to load.

        Returns
        -------
        wrapper : `PerTractInput`
            Wrapper object for the loaded catalogs.
        """
        catalogs_by_tract = []
        for ref in refs:
            tract_id = ref.dataId["tract"]
            tract_info = sky_map[tract_id]
            catalogs_by_tract.append(
                (
                    tract_info,
                    butler.get(ref),
                )
            )
        return cls(catalogs_by_tract)

    def best_for_detector(
        self,
        detector_id: int,
        center: SpherePoint | None = None,
        bbox: Box2I | None = None,
    ) -> tuple[int, ExposureRecord | None]:
        # Docstring inherited.
        best_result: tuple[int, ExposureRecord | None] = (-1, None)
        best_distance: Angle = float("inf") * degrees
        for tract_info, catalog in self.catalogs_by_tract:
            record = catalog.find(detector_id)
            if record is None:
                continue
            if center is None:
                center_for_record = compute_center_for_detector_record(
                    record, bbox=bbox
                )
                if center_for_record is None:
                    continue
            else:
                center_for_record = center
            center_distance = tract_info.ctr_coord.separation(center_for_record)
            if best_distance > center_distance:
                best_result = (tract_info.tract_id, record)
                best_distance = center_distance
        return best_result
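
    # Selection sketch (``butler``, ``sky_map``, ``refs``, and ``detector_id``
    # are placeholders for objects the caller already has)::
    #
    #     per_tract = PerTractInput.load(butler, sky_map, refs)
    #     tract_id, record = per_tract.best_for_detector(detector_id)
    #
    # The record returned comes from the tract whose center is closest on the
    # sky to the detector center computed from that record's WCS and bbox.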


@dataclasses.dataclass
class GlobalInput(PossiblyMultipleInput):
    """Wrapper class for input `~lsst.afw.table.ExposureCatalog` datasets
    that are not per-tract.
    """

    catalog: ExposureCatalog
    """Loaded per-visit catalog dataset (`lsst.afw.table.ExposureCatalog`).
    """

    def best_for_detector(
        self,
        detector_id: int,
        center: SpherePoint | None = None,
        bbox: Box2I | None = None,
    ) -> tuple[int, ExposureRecord | None]:
        # Docstring inherited.
        return -1, self.catalog.find(detector_id)
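
    # Usage sketch (``visit_catalog`` and ``detector_id`` are placeholders for
    # a loaded visit-level `~lsst.afw.table.ExposureCatalog` and a detector)::
    #
    #     tract_id, record = GlobalInput(visit_catalog).best_for_detector(detector_id)
    #     assert tract_id == -1  # global inputs never identify a tract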


class UpdateVisitSummaryConnections(
    PipelineTaskConnections,
    dimensions=("instrument", "visit"),
    defaultTemplates={
        "skyWcsName": "jointcal",
        "photoCalibName": "fgcm",
    },
):
    sky_map = cT.Input(
        doc="Description of tract/patch geometry.",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        dimensions=("skymap",),
        storageClass="SkyMap",
    )
    input_summary_schema = cT.InitInput(
        doc="Schema for input_summary_catalog.",
        name="visitSummary_schema",
        storageClass="ExposureCatalog",
    )
    input_summary_catalog = cT.Input(
        doc="Visit summary table to load and modify.",
        name="visitSummary",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
    )
    input_exposures = cT.Input(
        doc=(
            "Per-detector images to obtain image, mask, and variance from "
            "(embedded summary stats and other components are ignored)."
        ),
        name="calexp",
        dimensions=("instrument", "detector", "visit"),
        storageClass="ExposureF",
        multiple=True,
        deferLoad=True,
    )
    psf_overrides = cT.Input(
        doc="Visit-level catalog of updated PSFs to use.",
        name="finalized_psf_ap_corr_catalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
    )
    psf_star_catalog = cT.Input(
        doc="Per-visit table of PSF reserved- and used-star measurements.",
        name="finalized_src_table",
        dimensions=("instrument", "visit"),
        storageClass="DataFrame",
    )
    ap_corr_overrides = cT.Input(
        doc="Visit-level catalog of updated aperture correction maps to use.",
        name="finalized_psf_ap_corr_catalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
    )
    photo_calib_overrides_tract = cT.Input(
285 doc="Per-Tract visit-level catalog of updated photometric calibration objects to use.", 

        name="{photoCalibName}PhotoCalibCatalog",
        dimensions=("instrument", "visit", "tract"),
        storageClass="ExposureCatalog",
        multiple=True,
    )
    photo_calib_overrides_global = cT.Input(
        doc="Global visit-level catalog of updated photometric calibration objects to use.",
        name="{photoCalibName}PhotoCalibCatalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
    )
    wcs_overrides_tract = cT.Input(
        doc="Per-tract visit-level catalog of updated astrometric calibration objects to use.",
        name="{skyWcsName}SkyWcsCatalog",
        dimensions=("instrument", "visit", "tract"),
        storageClass="ExposureCatalog",
        multiple=True,
    )
    wcs_overrides_global = cT.Input(
        doc="Global visit-level catalog of updated astrometric calibration objects to use.",
        name="{skyWcsName}SkyWcsCatalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
    )
    background_originals = cT.Input(
        doc="Per-detector original background that has already been subtracted from 'input_exposures'.",
        name="calexpBackground",
        dimensions=("instrument", "visit", "detector"),
        storageClass="Background",
        multiple=True,
        deferLoad=True,
    )
    background_overrides = cT.Input(
        doc="Per-detector background that can be subtracted directly from 'input_exposures'.",
        name="skyCorr",
        dimensions=("instrument", "visit", "detector"),
        storageClass="Background",
        multiple=True,
        deferLoad=True,
    )
    output_summary_schema = cT.InitOutput(
        doc="Schema of the output visit summary catalog.",
        name="finalVisitSummary_schema",
        storageClass="ExposureCatalog",
    )
    output_summary_catalog = cT.Output(
        doc="Visit-level catalog summarizing all image characterizations and calibrations.",
        name="finalVisitSummary",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
    )

    def __init__(self, *, config: UpdateVisitSummaryConfig | None = None):
        super().__init__(config=config)
        match self.config.wcs_provider:
            case "input_summary":
                self.inputs.remove("wcs_overrides_tract")
                self.inputs.remove("wcs_overrides_global")
            case "tract":
                self.inputs.remove("wcs_overrides_global")
            case "global":
                self.inputs.remove("wcs_overrides_tract")
            case bad:
                raise ValueError(
                    f"Invalid value wcs_provider={bad!r}; config was not validated."
                )
        match self.config.photo_calib_provider:
            case "input_summary":
                self.inputs.remove("photo_calib_overrides_tract")
                self.inputs.remove("photo_calib_overrides_global")
            case "tract":
                self.inputs.remove("photo_calib_overrides_global")
            case "global":
                self.inputs.remove("photo_calib_overrides_tract")
            case bad:
                raise ValueError(
                    f"Invalid value photo_calib_provider={bad!r}; config was not validated."
                )
        match self.config.background_provider:
            case "input_summary":
                self.inputs.remove("background_originals")
                self.inputs.remove("background_overrides")
            case "replacement":
                pass
            case bad:
                raise ValueError(
                    f"Invalid value background_provider={bad!r}; config was not validated."
                )
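
    # Sketch of the pruning above: with ``wcs_provider == "global"`` only the
    # visit-level WCS catalog is requested (an illustrative example, not code
    # used by this task)::
    #
    #     config = UpdateVisitSummaryConfig()
    #     config.wcs_provider = "global"
    #     connections = UpdateVisitSummaryConnections(config=config)
    #     assert "wcs_overrides_tract" not in connections.inputs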


class UpdateVisitSummaryConfig(
    PipelineTaskConfig, pipelineConnections=UpdateVisitSummaryConnections
):
    """Configuration for UpdateVisitSummaryTask.

    Notes
    -----
    The configuration defaults for this task reflect a simple or "least common
    denominator" pipeline, not the more complete, more sophisticated pipeline
    we run on the instruments we support best. The expectation is that the
    various full pipeline definitions will generally import the simpler
    definition, so making the defaults correspond to any full pipeline would
    just lead to the simple pipeline setting them back to the simple-pipeline
    values and the full pipeline still having to then override them to the
    full-pipeline values.
    """

    compute_summary_stats = ConfigurableField(
        doc="Subtask that computes summary statistics from Exposure components.",
        target=ComputeExposureSummaryStatsTask,
    )
    wcs_provider = ChoiceField(
        doc="Which connection and behavior to use when applying WCS overrides.",
        dtype=str,
        allowed={
            "input_summary": (
                "Propagate the WCS from the input visit summary catalog "
                "and do not recompute WCS-based summary statistics."
            ),
405 "tract": { 

406 "Use the 'wcs_overrides_tract' connection to load an " 

407 "`ExposureCatalog` with {visit, tract} dimensions and per-" 

408 "detector rows, and recommpute WCS-based summary statistics." 

409 }, 

410 "global": { 

411 "Use the 'wcs_overrides_global' connection to load an " 

412 "`ExposureCatalog` with {visit} dimensions and per-" 

413 "detector rows, and recommpute WCS-based summary statistics." 

414 }, 

            # If needed, we could add options here to propagate the WCS from
            # the input exposures and/or transfer WCS-based summary statistics
            # from them as well. Right now there's no use case for that, since
            # the input visit summary is always produced after the last time we
            # write a new Exposure.
        },
        default="input_summary",
        optional=False,
    )
    photo_calib_provider = ChoiceField(
        doc="Which connection and behavior to use when applying photometric calibration overrides.",
        dtype=str,
        allowed={
            "input_summary": (
                "Propagate the PhotoCalib from the input visit summary catalog "
                "and do not recompute photometric calibration summary "
                "statistics."
            ),
433 "tract": { 

434 "Use the 'photo_calib_overrides_tract' connection to load an " 

435 "`ExposureCatalog` with {visit, tract} dimensions and per-" 

436 "detector rows, and recommpute photometric calibration summary " 

437 "statistics." 

438 }, 

439 "global": { 

440 "Use the 'photo_calib_overrides_global' connection to load an " 

441 "`ExposureCatalog` with {visit} dimensions and per-" 

442 "detector rows, and recommpute photometric calibration summary " 

443 "statistics." 

444 }, 

445 # If needed, we could add options here to propagate the PhotoCalib 

446 # from the input exposures and/or transfer photometric calibration 

447 # summary statistics them as well. Right now there's no use case 

448 # for that, since the input visit summary is always produced after 

449 # the last time we write a new Exposure. 

        },
        default="input_summary",
        optional=False,
    )
    background_provider = ChoiceField(
        doc="Which connection(s) and behavior to use when applying background overrides.",
        dtype=str,
        allowed={
            "input_summary": (
                "The input visit summary catalog already includes summary "
                "statistics for the final backgrounds that can be used as-is."
            ),
462 "replacement": { 

463 "The 'background_originals' connection refers to a background " 

464 "model that has been superseded by the model referred to by " 

465 "the 'background_overrides' connection." 

466 }, 

            # Could also imagine an option in which there is no original
            # background and the new one stands alone; can add later if needed.
        },
        default="input_summary",
        optional=False,
    )
    # Could imagine an option here to say that the original background has not
    # been subtracted from the input exposures, allowing postISRCCD to be used
    # as input exposures. Can add later if needed.
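
    # For reference, a fuller pipeline might override the defaults above in
    # its task configuration, e.g. (an illustrative sketch)::
    #
    #     config.wcs_provider = "tract"
    #     config.photo_calib_provider = "global"
    #     config.background_provider = "replacement"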


class UpdateVisitSummaryTask(PipelineTask):
    """A pipeline task that creates a new visit-summary table after all
    `lsst.afw.image.Exposure` components have been finalized.

    Notes
    -----
    This task is designed to be run just prior to making warps for coaddition,
    as it aggregates all inputs other than the images and backgrounds into a
    single ``ExposureCatalog`` dataset and recomputes summary statistics that
    are useful in selecting which images should go into a coadd. Its output
    can also be used to reconstruct a final processed visit image when combined
    with a post-ISR image, the background model, and the final mask.
    """

    # The `run` method of this task can conditionally apply overrides for PSFs
    # and aperture corrections, but its `PipelineTask` interface always applies
    # them. We can always add the config options to make them optional later,
    # if that turns out to be useful.

    _DefaultName = "updateVisitSummary"
    ConfigClass = UpdateVisitSummaryConfig

    compute_summary_stats: ComputeExposureSummaryStatsTask

    def __init__(self, *, initInputs: dict[str, Any] | None = None, **kwargs: Any):
        super().__init__(initInputs=initInputs, **kwargs)
        self.makeSubtask("compute_summary_stats")
        if initInputs is None or "input_summary_schema" not in initInputs:
            raise RuntimeError("Task requires 'input_summary_schema' in initInputs.")
        input_summary_schema = initInputs["input_summary_schema"].schema
        self.schema_mapper = SchemaMapper(input_summary_schema)
        self.schema_mapper.addMinimalSchema(input_summary_schema)
        self.schema = self.schema_mapper.getOutputSchema()
        if self.config.wcs_provider == "tract":
            self.schema.addField(
                "wcsTractId", type="L", doc="ID of the tract that provided the WCS."
            )
        if self.config.photo_calib_provider == "tract":
            self.schema.addField(
                "photoCalibTractId",
                type="L",
                doc="ID of the tract that provided the PhotoCalib.",
            )
        self.output_summary_schema = ExposureCatalog(self.schema)
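
    # Construction sketch for use outside the pipeline framework (e.g. in a
    # test); ``input_schema_catalog`` is a placeholder for the
    # ``visitSummary_schema`` init-input dataset::
    #
    #     task = UpdateVisitSummaryTask(
    #         initInputs={"input_summary_schema": input_schema_catalog}
    #     )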

    def runQuantum(
        self,
        butlerQC: ButlerQuantumContext,
        inputRefs: InputQuantizedConnection,
        outputRefs: OutputQuantizedConnection,
    ) -> None:
        # Docstring inherited.
        sky_map = butlerQC.get(inputRefs.sky_map)
        del inputRefs.sky_map
        inputs = {}
        # Collapse the wcs_overrides_ and photo_calib_overrides_ connection
        # pairs into individual inputs (either ExposureCatalog or PerTractInput
        # objects).
        match self.config.wcs_provider:
            case "tract":
                inputs["wcs_overrides"] = PerTractInput.load(
                    butlerQC, sky_map, inputRefs.wcs_overrides_tract
                )
                del inputRefs.wcs_overrides_tract
            case "global":
                inputs["wcs_overrides"] = GlobalInput(
                    butlerQC.get(inputRefs.wcs_overrides_global)
                )
                del inputRefs.wcs_overrides_global
            case "input_summary":
                inputs["wcs_overrides"] = None
        match self.config.photo_calib_provider:
            case "tract":
                inputs["photo_calib_overrides"] = PerTractInput.load(
                    butlerQC, sky_map, inputRefs.photo_calib_overrides_tract
                )
                del inputRefs.photo_calib_overrides_tract
            case "global":
                inputs["photo_calib_overrides"] = GlobalInput(
                    butlerQC.get(inputRefs.photo_calib_overrides_global)
                )
                del inputRefs.photo_calib_overrides_global
            case "input_summary":
                inputs["photo_calib_overrides"] = None
        # Load or make DeferredDatasetHandles for everything else.
        inputs.update(butlerQC.get(inputRefs))
        deferred_dataset_types = ["input_exposures"]
        # Handle whether to look for background originals and overrides at all.
        match self.config.background_provider:
            case "replacement":
                deferred_dataset_types.append("background_originals")
                deferred_dataset_types.append("background_overrides")
        # Transform the lists of DeferredDatasetHandles for the multiple=True,
        # deferLoad=True connections into mappings keyed by detector ID.
        for name in deferred_dataset_types:
            handles_list = inputs[name]
            inputs[name] = {
                handle.dataId["detector"]: handle for handle in handles_list
            }
        # Convert the psf_star_catalog datasets from DataFrame to Astropy so
        # they can be handled by ComputeExposureSummaryStatsTask (which was
        # actually written to work with afw.table, but Astropy is similar
        # enough that it works, too). Ideally this would be handled by just
        # using ArrowAstropy as the storage class in the connection, but QG
        # generation apparently doesn't fully support those yet, as it leads to
        # problems in ci_hsc.
        inputs["psf_star_catalog"] = pandas_to_astropy(inputs["psf_star_catalog"])
        # Actually run the task and write the results.
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
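
    # Sketch of calling `run` directly with butler-loaded inputs; ``butler``,
    # ``data_id``, and ``detector_data_ids`` are placeholders for whatever the
    # caller has in hand::
    #
    #     summary = butler.get("visitSummary", data_id)
    #     exposures = {
    #         d["detector"]: butler.getDeferred("calexp", d)
    #         for d in detector_data_ids
    #     }
    #     result = task.run(input_summary_catalog=summary, input_exposures=exposures)
    #     final_catalog = result.output_summary_catalog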

    def run(
        self,
        input_summary_catalog: ExposureCatalog,
        input_exposures: Mapping[int, DeferredDatasetHandle],
        psf_overrides: ExposureCatalog | None = None,
        psf_star_catalog: astropy.table.Table | None = None,
        ap_corr_overrides: ExposureCatalog | None = None,
        photo_calib_overrides: PossiblyMultipleInput | None = None,
        wcs_overrides: PossiblyMultipleInput | None = None,
        background_originals: Mapping[int, DeferredDatasetHandle] | None = None,
        background_overrides: Mapping[int, DeferredDatasetHandle] | None = None,
    ):
        """Build an updated version of a visit summary catalog.

        Parameters
        ----------
        input_summary_catalog : `lsst.afw.table.ExposureCatalog`
            Input catalog. Each row in this catalog will be used to produce
            a row in the output catalog. Any override parameter that is `None`
            will leave the corresponding values unchanged from those in this
            input catalog.
        input_exposures : `collections.abc.Mapping` [`int`,
            `lsst.daf.butler.DeferredDatasetHandle`]
            Deferred-load objects that fetch `lsst.afw.image.Exposure`
            instances. Only the image, mask, and variance are used; all other
            components are assumed to be superseded by at least
            ``input_summary_catalog`` and probably some ``_overrides``
            arguments as well. This usually corresponds to the ``calexp``
            dataset.
        psf_overrides : `lsst.afw.table.ExposureCatalog`, optional
            Catalog with attached `lsst.afw.detection.Psf` objects that
            supersede the input catalog's PSFs.
        psf_star_catalog : `astropy.table.Table`, optional
            Table containing PSF stars for use in computing PSF summary
            statistics. Must be provided if ``psf_overrides`` is.
        ap_corr_overrides : `lsst.afw.table.ExposureCatalog`, optional
            Catalog with attached `lsst.afw.image.ApCorrMap` objects that
            supersede the input catalog's aperture corrections.
        photo_calib_overrides : `PossiblyMultipleInput`, optional
            Catalog wrappers with attached `lsst.afw.image.PhotoCalib`
            objects that supersede the input catalog's photometric
            calibrations.
        wcs_overrides : `PossiblyMultipleInput`, optional
            Catalog wrappers with attached `lsst.afw.geom.SkyWcs` objects
            that supersede the input catalog's astrometric calibrations.
        background_originals : `collections.abc.Mapping` [`int`,
            `lsst.daf.butler.DeferredDatasetHandle`], optional
            Deferred-load objects that fetch `lsst.afw.math.BackgroundList`
            instances. These should correspond to the background already
            subtracted from ``input_exposures``. If not provided and
            ``background_overrides`` is, it is assumed that the background in
            ``input_exposures`` has not been subtracted. If provided, all keys
            in ``background_overrides`` must also be present in
            ``background_originals``.
        background_overrides : `collections.abc.Mapping` [`int`,
            `lsst.daf.butler.DeferredDatasetHandle`], optional
            Deferred-load objects that fetch `lsst.afw.math.BackgroundList`
            instances. These should correspond to the background that should
            now be subtracted from ``input_exposures`` to yield the final
            background-subtracted image.

        Returns
        -------
        output_summary_catalog : `lsst.afw.table.ExposureCatalog`
            Output visit summary catalog.

        Notes
        -----
        If any override parameter is provided but does not have a value for a
        particular detector, that component will be set to `None` in the
        returned catalog for that detector and all summary statistics derived
        from that component will be reset (usually to ``NaN``) as well. Not
        passing an override parameter at all will instead pass through the
        original component and values from the input catalog unchanged.
        """
        output_summary_catalog = ExposureCatalog(self.schema)
        output_summary_catalog.setMetadata(input_summary_catalog.getMetadata())
        for input_record in input_summary_catalog:
            detector_id = input_record.getId()
            output_record = output_summary_catalog.addNew()

            # Make a new ExposureSummaryStats from the input record.
            summary_stats = ExposureSummaryStats.from_record(input_record)

            # Also copy the input record values to output record; this copies
            # many of the same values just copied into `summary_stats` (which
            # will be overridden later by summary_stats.update_record), but it
            # also copies fields that aren't part of summary_stats, including
            # the actual components like Psf, Wcs, etc.
            output_record.assign(input_record, self.schema_mapper)

            exposure = input_exposures[detector_id].get()
            bbox = exposure.getBBox()

            if wcs_overrides:
                wcs_tract, wcs_record = wcs_overrides.best_for_detector(
                    detector_id, bbox=bbox
                )
                if wcs_record is not None:
                    wcs = wcs_record.getWcs()
                else:
                    wcs = None
                if self.config.wcs_provider == "tract":
                    output_record["wcsTractId"] = wcs_tract
                output_record.setWcs(wcs)
                self.compute_summary_stats.update_wcs_stats(
                    summary_stats, wcs, bbox, output_record.getVisitInfo()
                )
            else:
                wcs = input_record.getWcs()

            if psf_overrides:
                if (psf_record := psf_overrides.find(detector_id)) is not None:
                    psf = psf_record.getPsf()
                else:
                    psf = None
                output_record.setPsf(psf)
                sources = psf_star_catalog[psf_star_catalog["detector"] == detector_id]
                self.compute_summary_stats.update_psf_stats(
                    summary_stats,
                    psf,
                    bbox,
                    sources,
                    image_mask=exposure.mask,
                    sources_is_astropy=True,
                )

            if ap_corr_overrides:
                if (ap_corr_record := ap_corr_overrides.find(detector_id)) is not None:
                    ap_corr = ap_corr_record.getApCorrMap()
                else:
                    ap_corr = None
                output_record.setApCorrMap(ap_corr)
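                # Unlike the other override blocks, no summary statistics are
                # recomputed here; the aperture correction map is only swapped
                # in as a component.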

            if photo_calib_overrides:
                center = compute_center_for_detector_record(output_record, bbox, wcs)
                (
                    photo_calib_tract,
                    photo_calib_record,
                ) = photo_calib_overrides.best_for_detector(detector_id, center=center)
                if photo_calib_record is not None:
                    photo_calib = photo_calib_record.getPhotoCalib()
                else:
                    photo_calib = None
                if self.config.photo_calib_provider == "tract":
                    output_record["photoCalibTractId"] = photo_calib_tract
                output_record.setPhotoCalib(photo_calib)
                self.compute_summary_stats.update_photo_calib_stats(
                    summary_stats, photo_calib
                )

            if background_overrides is not None:
                if (handle := background_overrides.get(detector_id)) is not None:
                    new_bkg = handle.get()
                    if background_originals is not None:
                        orig_bkg = background_originals[detector_id].get()
                    else:
                        orig_bkg = BackgroundList()

                    full_bkg = orig_bkg.clone()
                    for layer in new_bkg:
                        full_bkg.append(layer)
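                    # ``full_bkg`` now holds the already-subtracted original
                    # background plus the override layers; only the override is
                    # subtracted from the pixels below, while the combined
                    # model is what the summary statistics describe.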

                    exposure.image -= new_bkg.getImage()
                    self.compute_summary_stats.update_background_stats(
                        summary_stats, full_bkg
                    )
                    self.compute_summary_stats.update_masked_image_stats(
                        summary_stats, exposure.getMaskedImage()
                    )

            summary_stats.update_record(output_record)
            del exposure

        return Struct(output_summary_catalog=output_summary_catalog)