Coverage for python/lsst/pipe/tasks/update_visit_summary.py: 21% (231 statements)

coverage.py v7.2.2, created at 2023-03-23 17:26 -0700

# This file is part of pipe_tasks.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = (
    "UpdateVisitSummaryConnections",
    "UpdateVisitSummaryConfig",
    "UpdateVisitSummaryTask",
    "PossiblyMultipleInput",
    "PerTractInput",
    "GlobalInput",
)

import dataclasses
from abc import ABC, abstractmethod
from collections.abc import Iterable, Mapping
from typing import Any

import astropy.table
import lsst.pipe.base.connectionTypes as cT
from lsst.afw.geom import SkyWcs
from lsst.afw.image import ExposureSummaryStats
from lsst.afw.math import BackgroundList
from lsst.afw.table import ExposureCatalog, ExposureRecord, SchemaMapper
from lsst.daf.butler import Butler, DatasetRef, DeferredDatasetHandle
from lsst.geom import Angle, Box2I, SpherePoint, degrees
from lsst.pex.config import ChoiceField, ConfigurableField
from lsst.pipe.base import (
    ButlerQuantumContext,
    InputQuantizedConnection,
    InvalidQuantumError,
    OutputQuantizedConnection,
    PipelineTask,
    PipelineTaskConfig,
    PipelineTaskConnections,
    Struct,
)
from lsst.skymap import BaseSkyMap, TractInfo
from lsst.skymap.detail import makeSkyPolygonFromBBox
from .computeExposureSummaryStats import ComputeExposureSummaryStatsTask


def compute_center_for_detector_record(
    record: ExposureRecord, bbox: Box2I | None = None, wcs: SkyWcs | None = None
) -> SpherePoint | None:
    """Compute the sky coordinate center for a detector to be used when
    testing distance to tract center.

    Parameters
    ----------
    record : `lsst.afw.table.ExposureRecord`
        Exposure record to obtain WCS and bbox from if not provided.
    bbox : `lsst.geom.Box2I`, optional
        Bounding box for the detector in its own pixel coordinates.
    wcs : `lsst.afw.geom.SkyWcs`, optional
        WCS that maps the detector's pixel coordinate system to celestial
        coordinates.

    Returns
    -------
    center : `lsst.geom.SpherePoint` or `None`
        Center of the detector in sky coordinates, or `None` if no WCS was
        given or present in the given record.
    """
    if bbox is None:
        bbox = record.getBBox()
    if wcs is None:
        wcs = record.getWcs()
        if wcs is None:
            return None
    region = makeSkyPolygonFromBBox(bbox, wcs)
    return SpherePoint(region.getCentroid())

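# Example (illustrative only; names are hypothetical): compute a detector's
# sky center from a visit-summary row, falling back to the record's own bbox
# and WCS when they are not supplied, and skip detectors with no usable WCS.
#
#     center = compute_center_for_detector_record(record)
#     if center is None:
#         ...  # no WCS available for this detector; skip it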


class PossiblyMultipleInput(ABC):
    """A helper ABC for handling input `~lsst.afw.table.ExposureCatalog`
    datasets that may be multiple (one per tract/visit combination) or
    unique/global (one per visit).
    """

    @abstractmethod
    def best_for_detector(
        self,
        detector_id: int,
        center: SpherePoint | None = None,
        bbox: Box2I | None = None,
    ) -> tuple[int, ExposureRecord | None]:
        """Return the exposure record that is the best match for this
        detector.

        Parameters
        ----------
        detector_id : `int`
            Detector ID; used to find the right row in the catalog or
            catalogs.
        center : `lsst.geom.SpherePoint` or `None`
            Center of the detector in sky coordinates. If not provided, one
            will be computed via `compute_center_for_detector_record`.
        bbox : `lsst.geom.Box2I`, optional
            Bounding box for the detector in its own pixel coordinates.

        Returns
        -------
        tract_id : `int`
            ID of the tract that supplied this record, or `-1` if ``record``
            is `None` or if the input was not per-tract.
        record : `lsst.afw.table.ExposureRecord` or `None`
            Best record for this detector, or `None` if there either were no
            records for this detector or no WCS available to compute a
            center.
        """
        raise NotImplementedError()


@dataclasses.dataclass
class PerTractInput(PossiblyMultipleInput):
    """Wrapper class for input `~lsst.afw.table.ExposureCatalog` datasets
    that are per-tract.

    This selects the best tract via the minimum average distance (on the sky)
    from the detector's corners to the tract center.
    """

    catalogs_by_tract: list[tuple[TractInfo, ExposureCatalog]]
    """List of tuples of catalogs and the tracts they correspond to
    (`list` [`tuple` [`lsst.skymap.TractInfo`,
    `lsst.afw.table.ExposureCatalog`]]).
    """

    @classmethod
    def load(
        cls,
        butler: ButlerQuantumContext | Butler,
        sky_map: BaseSkyMap,
        refs: Iterable[DatasetRef],
    ) -> PerTractInput:
        """Load and wrap input catalogs.

        Parameters
        ----------
        butler : `lsst.pipe.base.ButlerQuantumContext`
            Butler proxy used in `~lsst.pipe.base.PipelineTask.runQuantum`.
        sky_map : `lsst.skymap.BaseSkyMap`
            Definition of tracts and patches.
        refs : `~collections.abc.Iterable` [`lsst.daf.butler.DatasetRef`]
            References to the catalog datasets to load.

        Returns
        -------
        wrapper : `PerTractInput`
            Wrapper object for the loaded catalogs.
        """
        catalogs_by_tract = []
        for ref in refs:
            tract_id = ref.dataId["tract"]
            tract_info = sky_map[tract_id]
            catalogs_by_tract.append(
                (
                    tract_info,
                    butler.get(ref),
                )
            )
        return cls(catalogs_by_tract)

    def best_for_detector(
        self,
        detector_id: int,
        center: SpherePoint | None = None,
        bbox: Box2I | None = None,
    ) -> tuple[int, ExposureRecord | None]:
        # Docstring inherited.
        best_result: tuple[int, ExposureRecord | None] = (-1, None)
        best_distance: Angle = float("inf") * degrees
        for tract_info, catalog in self.catalogs_by_tract:
            record = catalog.find(detector_id)
            if record is None:
                continue
            if center is None:
                center_for_record = compute_center_for_detector_record(
                    record, bbox=bbox
                )
                if center_for_record is None:
                    continue
            else:
                center_for_record = center
            center_distance = tract_info.ctr_coord.separation(center_for_record)
            if best_distance > center_distance:
                best_result = (tract_info.tract_id, record)
                best_distance = center_distance
        return best_result

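# Example (illustrative only; names are hypothetical): wrap per-tract visit
# summary catalogs and pick the row to use for one detector.
#
#     wcs_inputs = PerTractInput.load(butlerQC, sky_map, wcs_refs)
#     tract_id, record = wcs_inputs.best_for_detector(detector_id, bbox=bbox)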


@dataclasses.dataclass
class GlobalInput(PossiblyMultipleInput):
    """Wrapper class for input `~lsst.afw.table.ExposureCatalog` datasets
    that are not per-tract.
    """

    catalog: ExposureCatalog
    """Loaded per-visit catalog dataset (`lsst.afw.table.ExposureCatalog`).
    """

    def best_for_detector(
        self,
        detector_id: int,
        center: SpherePoint | None = None,
        bbox: Box2I | None = None,
    ) -> tuple[int, ExposureRecord | None]:
        # Docstring inherited.
        return -1, self.catalog.find(detector_id)


class UpdateVisitSummaryConnections(
    PipelineTaskConnections,
    dimensions=("instrument", "visit"),
    defaultTemplates={
        "skyWcsName": "jointcal",
        "photoCalibName": "fgcm",
    },
):
    sky_map = cT.Input(
        doc="Description of tract/patch geometry.",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        dimensions=("skymap",),
        storageClass="SkyMap",
    )
    input_summary_schema = cT.InitInput(
        doc="Schema for input_summary_catalog.",
        name="visitSummary_schema",
        storageClass="ExposureCatalog",
    )
    input_summary_catalog = cT.Input(
        doc="Visit summary table to load and modify.",
        name="visitSummary",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
    )
    input_exposures = cT.Input(
        doc=(
            "Per-detector images to obtain image, mask, and variance from "
            "(embedded summary stats and other components are ignored)."
        ),
        name="calexp",
        dimensions=("instrument", "detector", "visit"),
        storageClass="ExposureF",
        multiple=True,
        deferLoad=True,
        deferGraphConstraint=True,
    )
    psf_overrides = cT.Input(
        doc="Visit-level catalog of updated PSFs to use.",
        name="finalized_psf_ap_corr_catalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
        deferGraphConstraint=True,
    )
    psf_star_catalog = cT.Input(
        doc="Per-visit table of PSF reserved- and used-star measurements.",
        name="finalized_src_table",
        dimensions=("instrument", "visit"),
        storageClass="DataFrame",
        deferGraphConstraint=True,
    )
    ap_corr_overrides = cT.Input(
        doc="Visit-level catalog of updated aperture correction maps to use.",
        name="finalized_psf_ap_corr_catalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
        deferGraphConstraint=True,
    )
    photo_calib_overrides_tract = cT.Input(
        doc="Per-tract visit-level catalog of updated photometric calibration objects to use.",
        name="{photoCalibName}PhotoCalibCatalog",
        dimensions=("instrument", "visit", "tract"),
        storageClass="ExposureCatalog",
        multiple=True,
        deferGraphConstraint=True,
    )
    photo_calib_overrides_global = cT.Input(
        doc="Global visit-level catalog of updated photometric calibration objects to use.",
        name="{photoCalibName}PhotoCalibCatalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
        deferGraphConstraint=True,
    )
    wcs_overrides_tract = cT.Input(
        doc="Per-tract visit-level catalog of updated astrometric calibration objects to use.",
        name="{skyWcsName}SkyWcsCatalog",
        dimensions=("instrument", "visit", "tract"),
        storageClass="ExposureCatalog",
        multiple=True,
        deferGraphConstraint=True,
    )
    wcs_overrides_global = cT.Input(
        doc="Global visit-level catalog of updated astrometric calibration objects to use.",
        name="{skyWcsName}SkyWcsCatalog",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
        deferGraphConstraint=True,
    )
    background_originals = cT.Input(
        doc="Per-detector original background that has already been subtracted from 'input_exposures'.",
        name="calexpBackground",
        dimensions=("instrument", "visit", "detector"),
        storageClass="Background",
        multiple=True,
        deferLoad=True,
        deferGraphConstraint=True,
    )
    background_overrides = cT.Input(
        doc="Per-detector background that can be subtracted directly from 'input_exposures'.",
        name="skyCorr",
        dimensions=("instrument", "visit", "detector"),
        storageClass="Background",
        multiple=True,
        deferLoad=True,
        deferGraphConstraint=True,
    )
    output_summary_schema = cT.InitOutput(
        doc="Schema of the output visit summary catalog.",
        name="finalVisitSummary_schema",
        storageClass="ExposureCatalog",
    )
    output_summary_catalog = cT.Output(
        doc="Visit-level catalog summarizing all image characterizations and calibrations.",
        name="finalVisitSummary",
        dimensions=("instrument", "visit"),
        storageClass="ExposureCatalog",
    )
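    # The {skyWcsName} and {photoCalibName} templates above default to
    # "jointcal" and "fgcm" (see ``defaultTemplates``), so e.g.
    # ``wcs_overrides_tract`` resolves to the ``jointcalSkyWcsCatalog``
    # dataset type unless the template is overridden in the pipeline.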

    def __init__(self, *, config: UpdateVisitSummaryConfig | None = None):
        super().__init__(config=config)
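        # Removing a connection from ``self.inputs`` here drops it from the
        # task's quantum entirely, so only the datasets implied by the
        # configured providers are required when the graph is built.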

        match self.config.wcs_provider:
            case "input_summary":
                self.inputs.remove("wcs_overrides_tract")
                self.inputs.remove("wcs_overrides_global")
            case "tract":
                self.inputs.remove("wcs_overrides_global")
            case "global":
                self.inputs.remove("wcs_overrides_tract")
            case bad:
                raise ValueError(
                    f"Invalid value wcs_provider={bad!r}; config was not validated."
                )
        match self.config.photo_calib_provider:
            case "input_summary":
                self.inputs.remove("photo_calib_overrides_tract")
                self.inputs.remove("photo_calib_overrides_global")
            case "tract":
                self.inputs.remove("photo_calib_overrides_global")
            case "global":
                self.inputs.remove("photo_calib_overrides_tract")
            case bad:
                raise ValueError(
                    f"Invalid value photo_calib_provider={bad!r}; config was not validated."
                )
        match self.config.background_provider:
            case "input_summary":
                self.inputs.remove("background_originals")
                self.inputs.remove("background_overrides")
            case "replacement":
                pass
            case bad:
                raise ValueError(
                    f"Invalid value background_provider={bad!r}; config was not validated."
                )


class UpdateVisitSummaryConfig(
    PipelineTaskConfig, pipelineConnections=UpdateVisitSummaryConnections
):
    """Configuration for UpdateVisitSummaryTask.

    Notes
    -----
    The configuration defaults for this task reflect a simple or "least common
    denominator" pipeline, not the more complete, more sophisticated pipeline
    we run on the instruments we support best. The expectation is that the
    various full pipeline definitions will generally import the simpler
    definition, so making the defaults correspond to any full pipeline would
    just lead to the simple pipeline setting them back to the simple-pipeline
    values and the full pipeline still having to then override them to the
    full-pipeline values.
    """

    compute_summary_stats = ConfigurableField(
        doc="Subtask that computes summary statistics from Exposure components.",
        target=ComputeExposureSummaryStatsTask,
    )
    wcs_provider = ChoiceField(
        doc="Which connection and behavior to use when applying WCS overrides.",
        dtype=str,
        allowed={
            "input_summary": (
                "Propagate the WCS from the input visit summary catalog "
                "and do not recompute WCS-based summary statistics."
            ),
            "tract": (
                "Use the 'wcs_overrides_tract' connection to load an "
                "`ExposureCatalog` with {visit, tract} dimensions and per-"
                "detector rows, and recompute WCS-based summary statistics."
            ),
            "global": (
                "Use the 'wcs_overrides_global' connection to load an "
                "`ExposureCatalog` with {visit} dimensions and per-"
                "detector rows, and recompute WCS-based summary statistics."
            ),
            # If needed, we could add options here to propagate the WCS from
            # the input exposures and/or transfer WCS-based summary statistics
            # from them as well. Right now there's no use case for that, since
            # the input visit summary is always produced after the last time we
            # write a new Exposure.
        },
        default="input_summary",
        optional=False,
    )
    photo_calib_provider = ChoiceField(
        doc="Which connection and behavior to use when applying photometric calibration overrides.",
        dtype=str,
        allowed={
            "input_summary": (
                "Propagate the PhotoCalib from the input visit summary catalog "
                "and do not recompute photometric calibration summary "
                "statistics."
            ),
            "tract": (
                "Use the 'photo_calib_overrides_tract' connection to load an "
                "`ExposureCatalog` with {visit, tract} dimensions and per-"
                "detector rows, and recompute photometric calibration summary "
                "statistics."
            ),
            "global": (
                "Use the 'photo_calib_overrides_global' connection to load an "
                "`ExposureCatalog` with {visit} dimensions and per-"
                "detector rows, and recompute photometric calibration summary "
                "statistics."
            ),
            # If needed, we could add options here to propagate the PhotoCalib
            # from the input exposures and/or transfer photometric calibration
            # summary statistics from them as well. Right now there's no use
            # case for that, since the input visit summary is always produced
            # after the last time we write a new Exposure.
        },
        default="input_summary",
        optional=False,
    )
    background_provider = ChoiceField(
        doc="Which connection(s) and behavior to use when applying background overrides.",
        dtype=str,
        allowed={
            "input_summary": (
                "The input visit summary catalog already includes summary "
                "statistics for the final backgrounds that can be used as-is."
            ),
            "replacement": (
                "The 'background_originals' connection refers to a background "
                "model that has been superseded by the model referred to by "
                "the 'background_overrides' connection."
            ),
            # Could also imagine an option in which there is no original
            # background and the new one stands alone; can add later if needed.
        },
        default="input_summary",
        optional=False,
    )
    # Could imagine an option here to say that the original background has not
    # been subtracted from the input exposures, allowing postISRCCD to be used
    # as input exposures. Can add later if needed.

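# Example (illustrative) config override, e.g. in a pipeline definition's
# config block or an obs-package config file, enabling all of the override
# connections instead of the "input_summary" defaults:
#
#     config.wcs_provider = "tract"
#     config.photo_calib_provider = "global"
#     config.background_provider = "replacement"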


class UpdateVisitSummaryTask(PipelineTask):
    """A pipeline task that creates a new visit-summary table after all
    `lsst.afw.image.Exposure` components have been finalized.

    Notes
    -----
    This task is designed to be run just prior to making warps for coaddition,
    as it aggregates all inputs other than the images and backgrounds into a
    single ``ExposureCatalog`` dataset and recomputes summary statistics that
    are useful in selecting which images should go into a coadd. Its output
    can also be used to reconstruct a final processed visit image when
    combined with a post-ISR image, the background model, and the final mask.
    """

    # The `run` method of this task can conditionally apply overrides for PSFs
    # and aperture corrections, but its `PipelineTask` interface always applies
    # them. We can always add the config options to make them optional later,
    # if that turns out to be useful.

    _DefaultName = "updateVisitSummary"
    ConfigClass = UpdateVisitSummaryConfig

    compute_summary_stats: ComputeExposureSummaryStatsTask

    def __init__(self, *, initInputs: dict[str, Any] | None = None, **kwargs: Any):
        super().__init__(initInputs=initInputs, **kwargs)
        self.makeSubtask("compute_summary_stats")
        if initInputs is None or "input_summary_schema" not in initInputs:
            raise RuntimeError("Task requires 'input_summary_schema' in initInputs.")
        input_summary_schema = initInputs["input_summary_schema"].schema
        self.schema_mapper = SchemaMapper(input_summary_schema)
        self.schema_mapper.addMinimalSchema(input_summary_schema)
        self.schema = self.schema_mapper.getOutputSchema()
        if self.config.wcs_provider == "tract":
            self.schema.addField(
                "wcsTractId", type="L", doc="ID of the tract that provided the WCS."
            )
        if self.config.photo_calib_provider == "tract":
            self.schema.addField(
                "photoCalibTractId",
                type="L",
                doc="ID of the tract that provided the PhotoCalib.",
            )
        self.output_summary_schema = ExposureCatalog(self.schema)

    def runQuantum(
        self,
        butlerQC: ButlerQuantumContext,
        inputRefs: InputQuantizedConnection,
        outputRefs: OutputQuantizedConnection,
    ) -> None:
        # Docstring inherited.
        sky_map = butlerQC.get(inputRefs.sky_map)
        del inputRefs.sky_map
        inputs = {}
        # Collapse the wcs_override_ and photo_calib_override_ connection pairs
        # into individual inputs (either ExposureCatalog or PerTractInput
        # objects).
        match self.config.wcs_provider:
            case "tract":
                inputs["wcs_overrides"] = PerTractInput.load(
                    butlerQC, sky_map, inputRefs.wcs_overrides_tract
                )
                del inputRefs.wcs_overrides_tract
            case "global":
                inputs["wcs_overrides"] = GlobalInput(
                    butlerQC.get(inputRefs.wcs_overrides_global)
                )
                del inputRefs.wcs_overrides_global
            case "input_summary":
                inputs["wcs_overrides"] = None
        match self.config.photo_calib_provider:
            case "tract":
                inputs["photo_calib_overrides"] = PerTractInput.load(
                    butlerQC, sky_map, inputRefs.photo_calib_overrides_tract
                )
                del inputRefs.photo_calib_overrides_tract
            case "global":
                inputs["photo_calib_overrides"] = GlobalInput(
                    butlerQC.get(inputRefs.photo_calib_overrides_global)
                )
                del inputRefs.photo_calib_overrides_global
            case "input_summary":
                inputs["photo_calib_overrides"] = None
        # Load or make DeferredDatasetHandles for everything else.
        inputs.update(butlerQC.get(inputRefs))
        deferred_dataset_types = ["input_exposures"]
        # Handle whether to look for background originals and overrides at all.
        match self.config.background_provider:
            case "replacement":
                deferred_dataset_types.append("background_originals")
                deferred_dataset_types.append("background_overrides")
        # Transform the lists of DeferredDatasetHandles for the multiple=True,
        # deferLoad=True connections into mappings keyed by detector ID.
        for name in deferred_dataset_types:
            handles_list = inputs[name]
            inputs[name] = {
                handle.dataId["detector"]: handle for handle in handles_list
            }
            for record in inputs["input_summary_catalog"]:
                detector_id = record.getId()
                if detector_id not in inputs[name]:
                    raise InvalidQuantumError(
                        f"No {name!r} with detector {detector_id} for visit "
                        f"{butlerQC.quantum.dataId['visit']} even though this detector is present "
                        "in the input visit summary catalog. "
                        "This is most likely to occur when the QuantumGraph that includes this task "
                        "was incorrectly generated with an explicit or implicit (from datasets) tract "
                        "constraint."
                    )
        # Convert the psf_star_catalog datasets from DataFrame to Astropy so
        # they can be handled by ComputeExposureSummaryStatsTask (which was
        # actually written to work with afw.table, but Astropy is similar
        # enough that it works, too). Ideally this would be handled by just
        # using ArrowAstropy as the storage class in the connection, but QG
        # generation apparently doesn't fully support those yet, as it leads to
        # problems in ci_hsc.
        inputs["psf_star_catalog"] = astropy.table.Table.from_pandas(inputs["psf_star_catalog"], index=True)
        # Actually run the task and write the results.
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(
        self,
        input_summary_catalog: ExposureCatalog,
        input_exposures: Mapping[int, DeferredDatasetHandle],
        psf_overrides: ExposureCatalog | None = None,
        psf_star_catalog: astropy.table.Table | None = None,
        ap_corr_overrides: ExposureCatalog | None = None,
        photo_calib_overrides: PossiblyMultipleInput | None = None,
        wcs_overrides: PossiblyMultipleInput | None = None,
        background_originals: Mapping[int, DeferredDatasetHandle] | None = None,
        background_overrides: Mapping[int, DeferredDatasetHandle] | None = None,
    ):
        """Build an updated version of a visit summary catalog.

        Parameters
        ----------
        input_summary_catalog : `lsst.afw.table.ExposureCatalog`
            Input catalog. Each row in this catalog will be used to produce
            a row in the output catalog. Any override parameter that is `None`
            will leave the corresponding values unchanged from those in this
            input catalog.
        input_exposures : `collections.abc.Mapping` [`int`,
                `lsst.daf.butler.DeferredDatasetHandle`]
            Deferred-load objects that fetch `lsst.afw.image.Exposure`
            instances. Only the image, mask, and variance are used; all other
            components are assumed to be superseded by at least
            ``input_summary_catalog`` and probably some ``_overrides``
            arguments as well. This usually corresponds to the ``calexp``
            dataset.
        psf_overrides : `lsst.afw.table.ExposureCatalog`, optional
            Catalog with attached `lsst.afw.detection.Psf` objects that
            supersede the input catalog's PSFs.
        psf_star_catalog : `astropy.table.Table`, optional
            Table containing PSF stars for use in computing PSF summary
            statistics. Must be provided if ``psf_overrides`` is.
        ap_corr_overrides : `lsst.afw.table.ExposureCatalog`, optional
            Catalog with attached `lsst.afw.image.ApCorrMap` objects that
            supersede the input catalog's aperture corrections.
        photo_calib_overrides : `PossiblyMultipleInput`, optional
            Catalog wrappers with attached `lsst.afw.image.PhotoCalib`
            objects that supersede the input catalog's photometric
            calibrations.
        wcs_overrides : `PossiblyMultipleInput`, optional
            Catalog wrappers with attached `lsst.afw.geom.SkyWcs` objects
            that supersede the input catalog's astrometric calibrations.
        background_originals : `collections.abc.Mapping` [`int`,
                `lsst.daf.butler.DeferredDatasetHandle`], optional
            Deferred-load objects that fetch `lsst.afw.math.BackgroundList`
            instances. These should correspond to the background already
            subtracted from ``input_exposures``. If not provided and
            ``background_overrides`` is, it is assumed that the background in
            ``input_exposures`` has not been subtracted. If provided, all keys
            in ``background_overrides`` must also be present in
            ``background_originals``.
        background_overrides : `collections.abc.Mapping` [`int`,
                `lsst.daf.butler.DeferredDatasetHandle`], optional
            Deferred-load objects that fetch `lsst.afw.math.BackgroundList`
            instances. These should correspond to the background that should
            now be subtracted from ``input_exposures`` to yield the final
            background-subtracted image.

        Returns
        -------
        output_summary_catalog : `lsst.afw.table.ExposureCatalog`
            Output visit summary catalog.

        Notes
        -----
        If any override parameter is provided but does not have a value for a
        particular detector, that component will be set to `None` in the
        returned catalog for that detector and all summary statistics derived
        from that component will be reset (usually to ``NaN``) as well. Not
        passing an override parameter at all will instead pass through the
        original component and values from the input catalog unchanged.
        """
        output_summary_catalog = ExposureCatalog(self.schema)
        output_summary_catalog.setMetadata(input_summary_catalog.getMetadata())
        for input_record in input_summary_catalog:
            detector_id = input_record.getId()
            output_record = output_summary_catalog.addNew()

            # Make a new ExposureSummaryStats from the input record.
            summary_stats = ExposureSummaryStats.from_record(input_record)

            # Also copy the input record values to output record; this copies
            # many of the same values just copied into `summary_stats` (which
            # will be overridden later by summary_stats.update_record), but it
            # also copies fields that aren't part of summary_stats, including
            # the actual components like Psf, Wcs, etc.
            output_record.assign(input_record, self.schema_mapper)

            exposure = input_exposures[detector_id].get()
            bbox = exposure.getBBox()

            if wcs_overrides:
                wcs_tract, wcs_record = wcs_overrides.best_for_detector(
                    detector_id, bbox=bbox
                )
                if wcs_record is not None:
                    wcs = wcs_record.getWcs()
                else:
                    wcs = None
                if self.config.wcs_provider == "tract":
                    output_record["wcsTractId"] = wcs_tract
                output_record.setWcs(wcs)
                self.compute_summary_stats.update_wcs_stats(
                    summary_stats, wcs, bbox, output_record.getVisitInfo()
                )
            else:
                wcs = input_record.getWcs()

            if psf_overrides:
                if (psf_record := psf_overrides.find(detector_id)) is not None:
                    psf = psf_record.getPsf()
                else:
                    psf = None
                output_record.setPsf(psf)
                sources = psf_star_catalog[psf_star_catalog["detector"] == detector_id]
                self.compute_summary_stats.update_psf_stats(
                    summary_stats,
                    psf,
                    bbox,
                    sources,
                    image_mask=exposure.mask,
                    sources_is_astropy=True,
                )

            if ap_corr_overrides:
                if (ap_corr_record := ap_corr_overrides.find(detector_id)) is not None:
                    ap_corr = ap_corr_record.getApCorrMap()
                else:
                    ap_corr = None
                output_record.setApCorrMap(ap_corr)

            if photo_calib_overrides:
                center = compute_center_for_detector_record(output_record, bbox, wcs)
                (
                    photo_calib_tract,
                    photo_calib_record,
                ) = photo_calib_overrides.best_for_detector(detector_id, center=center)
                if photo_calib_record is not None:
                    photo_calib = photo_calib_record.getPhotoCalib()
                else:
                    photo_calib = None
                if self.config.photo_calib_provider == "tract":
                    output_record["photoCalibTractId"] = photo_calib_tract
                output_record.setPhotoCalib(photo_calib)
                self.compute_summary_stats.update_photo_calib_stats(
                    summary_stats, photo_calib
                )

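            # Note for the block below: any original background passed in has
            # already been subtracted from the input exposure, so only the new
            # (override) background is subtracted from the pixels here, while
            # the summary statistics describe the combined original + override
            # background model.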

            if background_overrides is not None:
                if (handle := background_overrides.get(detector_id)) is not None:
                    new_bkg = handle.get()
                    if background_originals is not None:
                        orig_bkg = background_originals[detector_id].get()
                    else:
                        orig_bkg = BackgroundList()

                    full_bkg = orig_bkg.clone()
                    for layer in new_bkg:
                        full_bkg.append(layer)
                    exposure.image -= new_bkg.getImage()
                    self.compute_summary_stats.update_background_stats(
                        summary_stats, full_bkg
                    )
                    self.compute_summary_stats.update_masked_image_stats(
                        summary_stats, exposure.getMaskedImage()
                    )

            summary_stats.update_record(output_record)
            del exposure

        return Struct(output_summary_catalog=output_summary_catalog)
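
# Illustrative sketch (hypothetical variable names) of driving `run` directly,
# outside the middleware; `runQuantum` normally assembles these inputs from
# the butler:
#
#     task = UpdateVisitSummaryTask(
#         initInputs={"input_summary_schema": input_summary_schema_catalog}
#     )
#     result = task.run(
#         input_summary_catalog=visit_summary,
#         input_exposures=exposure_handles_by_detector,
#     )
#     final_visit_summary = result.output_summary_catalog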