Coverage for python/lsst/pipe/tasks/multiBand.py: 27%

284 statements  

« prev     ^ index     » next       coverage.py v6.5.0, created at 2023-04-04 02:47 -0700

1# This file is part of pipe_tasks. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22__all__ = ["DetectCoaddSourcesConfig", "DetectCoaddSourcesTask"] 

23 

24import warnings 

25 

26from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections) 

27import lsst.pipe.base.connectionTypes as cT 

28from lsst.pex.config import Config, Field, ConfigurableField, ChoiceField 

29from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask 

30from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask 

31from lsst.meas.deblender import SourceDeblendTask 

32from lsst.meas.extensions.scarlet import ScarletDeblendTask 

33from lsst.meas.astrom import DirectMatchTask, denormalizeMatches 

34from lsst.pipe.tasks.fakes import BaseFakeSourcesTask 

35from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask 

36from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask 

37import lsst.afw.table as afwTable 

38import lsst.afw.math as afwMath 

39from lsst.daf.base import PropertyList 

40from lsst.skymap import BaseSkyMap 

41from lsst.obs.base import ExposureIdInfo 

42 

43# NOTE: these imports are a convenience so multiband users only have to import this file. 

44from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401 

45from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401 

46from .multiBandUtils import CullPeaksConfig # noqa: F401 

47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401 

48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401 

49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401 

50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401 

51 

52 

53""" 

54New set types: 

55* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter) 

56* deepCoadd_mergeDet: merged detections (tract, patch) 

57* deepCoadd_meas: measurements of merged detections (tract, patch, filter) 

58* deepCoadd_ref: reference sources (tract, patch) 

59All of these have associated *_schema catalogs that require no data ID and hold no records. 

60 

61In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in 

62the mergeDet, meas, and ref dataset Footprints: 

63* deepCoadd_peak_schema 

64""" 

65 

66 

67############################################################################################################## 

class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler connections for `DetectCoaddSourcesTask`.

    One coadd exposure comes in; the detection catalog, the background model
    used during detection, and the (possibly background-updated) exposure go
    out.  Dataset type names are built from the ``inputCoaddName`` /
    ``outputCoaddName`` templates (both default to ``"deep"``).
    """

    # Init-output: empty catalog carrying the schema of the detection catalog,
    # written once at pipeline initialization rather than per-quantum.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )

100 

101 

class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """

    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    # The two fields below are deprecated no-ops kept only so that existing
    # configuration files do not break before v24.
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task",
                          deprecated=("doInsertFakes is no longer supported. This config will be removed "
                                      "after v24."))
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)",
                                    deprecated=("insertFakes is no longer supported. This config will "
                                                "be removed after v24."))
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        # Tune the detection subtask for coadd (rather than single-visit) data.
        super().setDefaults()
        # Thresholds are expressed in units of the per-pixel noise, which is
        # meaningful here because the variance plane is rescaled first.
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender

135 

136 

class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single filter coadd.

    Warping the individual visits that go into a coadd correlates the noise
    between neighboring pixels.  Rather than propagating the full covariance
    matrix, this task approximately compensates by rescaling the coadd's
    variance plane to match the empirically observed variance before running
    detection.

    Detection itself is delegated to the configured "detection" subtask,
    which produces the per-band parent footprints consumed by the rest of
    the multi-band pipeline (merging, deblending, measurement, and
    eventually forced photometry).

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog; it is modified in place to
        include every field this task adds.  When `None`, the minimal source
        schema is used.
    **kwargs
        Additional keyword arguments forwarded to `PipelineTask`.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig

    def __init__(self, schema=None, **kwargs):
        # Cooperative super() is deliberate: the PipelineTask init chain has
        # been reviewed to make sure this multiple-inheritance call is safe.
        super().__init__(**kwargs)
        self.schema = afwTable.SourceTable.makeMinimalSchema() if schema is None else schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")
        # Init-output: an empty catalog that records the final output schema.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        quantumInputs = butlerQC.get(inputRefs)
        # Derive a packed integer unique to this (tract, patch, band) both as
        # an ID factory for sources and as an RNG seed for detection.
        idInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        quantumInputs["idFactory"] = idInfo.makeSourceIdFactory()
        quantumInputs["expId"] = idInfo.expId
        butlerQC.put(self.run(**quantumInputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        The variance plane is first rescaled to the observed variance (when
        ``doScaleVariance`` is set), then the "detection" subtask is invoked
        to find sources and their footprints.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be background-subtracted and
            scaled, depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory used to assign source identifiers.
        expId : `int`
            Exposure identifier used as an RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Struct with ``outputSources`` (the detection catalog),
            ``outputBackgrounds`` (any backgrounds fit during detection),
            and ``outputExposure`` (the possibly-updated input exposure).
        """
        if self.config.doScaleVariance:
            # Record the applied factor in the exposure metadata for provenance.
            scaleFactor = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", scaleFactor)
        backgroundList = afwMath.BackgroundList()
        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detectionResult = self.detection.run(sourceTable, exposure, expId=expId)
        # The detection subtask only reports backgrounds in some configurations.
        detectedBackgrounds = getattr(detectionResult, "background", None)
        if detectedBackgrounds:
            for backgroundModel in detectedBackgrounds:
                backgroundList.append(backgroundModel)
        return Struct(outputSources=detectionResult.sources,
                      outputBackgrounds=backgroundList,
                      outputExposure=exposure)

227 

228 

229############################################################################################################## 

230 

231 

class DeblendCoaddSourcesConfig(Config):
    """Configuration parameters for the `DeblendCoaddSourcesTask`.

    Selects between the single-band deblender (run independently per band)
    and the multi-band scarlet deblender (run simultaneously across bands)
    via the ``simultaneous`` flag.
    """

    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool,
                         default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses `multibandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        # Use cooperative super() for consistency with the other config
        # classes in this module (the original called Config.setDefaults(self)
        # explicitly; behavior is identical for this single-inheritance class).
        super().setDefaults()
        # Keep every merged peak so the per-band catalogs stay row-aligned.
        self.singleBandDeblend.propagateAllPeaks = True

252 

253 

class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for `MeasureMergedCoaddSourcesTask`.

    The set of active connections is trimmed in ``__init__`` based on the
    configuration: flag-propagation inputs, the deblender catalog inputs,
    and the match outputs are all optional.
    """

    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation at all: none of the per-visit inputs are needed.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Check for types of flags required.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            # Using the scarlet catalog connection; the generic inputCatalog
            # connection is unused.
            self.inputs -= set(("inputCatalog",))
            # The scarlet model data is only needed to reattach footprints.
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # Not consuming scarlet outputs: drop both scarlet connections.
            # BUGFIX: the original removed set(("deblendedCatalog")) -- the
            # missing trailing comma made that a set of single characters, and
            # "deblendedCatalog" is not a connection name in any case; the
            # connection that must be removed is "scarletCatalog".
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))

393 

394 

class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Convenience alias: the reference-object loader configuration lives
        # on the match subtask.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        # Coadd measurement records per-pixel provenance beyond the default
        # single-frame plugin set.
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # Flag sources whose footprints touch coadd-specific suspect mask planes.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']

483 

484 

class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs (including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the is-primary
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler` or `None`, optional
        A butler used to read the input schemas from disk or construct the
        reference catalog loader, if schema or peakSchema or refObjLoader is
        None.
    schema : `lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : `lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged
        detection catalog.  Currently unused; accepted for interface
        compatibility.
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        An instance of LoadReferenceObjectsTasks that supplies an external
        reference catalog. May be None if the loader can be constructed from
        the butler argument or all steps requiring a reference catalog are
        disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        # True when the configured input catalog is a deblender output.
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        # initInputs (from the pipeline framework) takes precedence over an
        # explicitly supplied schema.
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema").schema
        # Map the input schema into the output schema so new measurement
        # fields can be appended without disturbing the input columns.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Init-output: empty catalog carrying the final output schema.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        # refCat is a prerequisite connection and therefore always present in
        # the inputs, but the ``match`` subtask only exists when matching is
        # enabled.  Pop it unconditionally so run(**inputs) does not receive
        # it, and only build/install the loader when it will be used.
        # (The original called self.match.setRefObjLoader unconditionally,
        # which raised AttributeError when doMatchSources was False.)
        refCats = inputs.pop('refCat')
        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 refCats,
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()
        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog: the scarlet catalog when configured,
        # otherwise the generic inputCatalog connection.
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        # Assemble the sky information for this patch.
        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # NOTE: the original bound outputs.outputSources to a dead local under
        # a stale "strip HeavyFootprints" comment; any footprint stripping is
        # handled by the storage layer, so the dead code has been removed.
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the coadd.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `None`, optional
            This was a Gen2 butler used to load visit catalogs.
            No longer used and should not be set. Will be removed in the
            future.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        if butler is not None:
            warnings.warn("The 'butler' parameter is no longer used and can be safely removed.",
                          category=FutureWarning, stacklevel=2)
            butler = None

        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    butler,
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # Assign exactly once for both branches (the original had a
                # redundant duplicated assignment here).
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results