Coverage for python/lsst/pipe/tasks/multiBand.py: 30%

287 statements  

« prev     ^ index     » next       coverage.py v6.4.4, created at 2022-09-13 10:29 +0000

1#!/usr/bin/env python 

2# 

3# LSST Data Management System 

4# Copyright 2008-2015 AURA/LSST. 

5# 

6# This product includes software developed by the 

7# LSST Project (http://www.lsst.org/). 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the LSST License Statement and 

20# the GNU General Public License along with this program. If not, 

21# see <https://www.lsstcorp.org/LegalNotices/>. 

22# 

23import warnings 

24 

25from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections) 

26import lsst.pipe.base.connectionTypes as cT 

27from lsst.pex.config import Config, Field, ConfigurableField, ChoiceField 

28from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask 

29from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask 

30from lsst.meas.deblender import SourceDeblendTask 

31from lsst.meas.extensions.scarlet import ScarletDeblendTask 

32from lsst.meas.astrom import DirectMatchTask, denormalizeMatches 

33from lsst.pipe.tasks.fakes import BaseFakeSourcesTask 

34from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask 

35from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask 

36import lsst.afw.table as afwTable 

37import lsst.afw.math as afwMath 

38from lsst.daf.base import PropertyList 

39from lsst.skymap import BaseSkyMap 

40from lsst.obs.base import ExposureIdInfo 

41 

42# NOTE: these imports are a convenience so multiband users only have to import this file. 

43from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401 

44from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401 

45from .multiBandUtils import CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401 

46from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401 

47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401 

48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401 

49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401 

50 

51 

"""
New set types:
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)
All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:
* deepCoadd_peak_schema
"""

64 

65 

66############################################################################################################## 

class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler connections for DetectCoaddSourcesTask.

    One quantum per (tract, patch, band, skymap): reads a coadd exposure and
    writes the detection catalog, the backgrounds used during detection, and
    the post-detection ("calexp") version of the coadd.
    """
    # Schema of the detection catalog, persisted once per pipeline init.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # The input coadd is modified in place (variance scaling, detection
    # masks) and re-persisted under the "calexp" name.
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )

99 

100 

class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    # Deprecated fake-source injection knobs; kept only so that old configs
    # still parse until v24 (see `deprecated` text).
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task",
                          deprecated=("doInsertFakes is no longer supported. This config will be removed "
                                      "after v24."))
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                    "purposes (must be retargeted)",
                                    deprecated=("insertFakes is no longer supported. This config will "
                                                "be removed after v24."))
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        """Configure the detection subtask for coadd (not single-visit) data."""
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender

136 

137## @addtogroup LSST_task_documentation 

138## @{ 

139## @page page_DetectCoaddSourcesTask DetectCoaddSourcesTask 

140## @ref DetectCoaddSourcesTask_ "DetectCoaddSourcesTask" 

141## @copybrief DetectCoaddSourcesTask 

142## @} 

143 

144 

class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a coadd."""
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")

    def __init__(self, schema=None, **kwargs):
        """Construct the task and its subtasks.

        Parameters
        ----------
        schema : `lsst.afw.table.Schema`, optional
            Initial schema for the output catalog, modified in place to
            include all fields set by this task.  If `None`, the minimal
            source schema is used.
        **kwargs
            Forwarded to `lsst.pipe.base.PipelineTask.__init__`.
        """
        # N.B. super() handles the multiple inheritance of PipelineTasks; the
        # init call tree has been reviewed to be sure this works as intended.
        super().__init__(**kwargs)
        self.schema = afwTable.SourceTable.makeMinimalSchema() if schema is None else schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        # Init-output: persists the final catalog schema for the pipeline.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        # Derive a deterministic source-ID factory and RNG seed from the data ID.
        idInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        inputs["idFactory"] = idInfo.makeSourceIdFactory()
        inputs["expId"] = idInfo.expId
        butlerQC.put(self.run(**inputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        Optionally rescales the variance plane to match the observed noise
        before invoking the ``detection`` subtask.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect; modified in place (variance scaling,
            detection masks).
        idFactory : `lsst.afw.table.IdFactory`
            Factory assigning identifiers to detected sources.
        expId : `int`
            Exposure identifier used as an RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            With ``outputSources`` (detection catalog), ``outputBackgrounds``
            (background list), and ``outputExposure`` (the input exposure).
        """
        if self.config.doScaleVariance:
            scaleFactor = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", scaleFactor)

        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(sourceTable, exposure, expId=expId)

        # Collect any backgrounds the detection step fitted (e.g. the
        # temporary wide background), if present and non-empty.
        backgroundList = afwMath.BackgroundList()
        detectedFpSets = detections.fpSets
        for background in (getattr(detectedFpSets, "background", None) or []):
            backgroundList.append(background)

        return Struct(outputSources=detections.sources,
                      outputBackgrounds=backgroundList,
                      outputExposure=exposure)

210 

211 

212############################################################################################################## 

213 

214 

class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool,
                         default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses `multibandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        # Use super() for consistency with the other config classes in this
        # module (was the legacy explicit-class form `Config.setDefaults(self)`).
        super().setDefaults()
        # Keep every merged-detection peak when deblending a single band so the
        # per-band catalogs stay row-aligned with the merged catalog.
        self.singleBandDeblend.propagateAllPeaks = True

236 

237 

class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for MeasureMergedCoaddSourcesTask.

    The active set of inputs/outputs is pruned in ``__init__`` according to
    the task configuration (flag propagation flavor, input catalog choice,
    and reference-catalog matching).
    """
    # NOTE: this runs once at class-definition (import) time, deliberately
    # warning importers that the deblendedCatalog template is obsolete.
    warnings.warn("MeasureMergedCoaddSourcesConnections.defaultTemplates is deprecated and no longer used. "
                  "Use MeasureMergedCoaddSourcesConfig.inputCatalog.")
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
        "further filtered in the task for the purpose of propagating flags from image calibration "
        "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
        "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation at all: drop every propagation input.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Check for types of flags required.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            self.inputs -= set(("inputCatalog",))

            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # FIX: this previously read ``set(("deblendedCatalog"))`` -- the
            # missing comma made it a set of single characters (a no-op), and
            # "deblendedCatalog" is not a connection name in any case.  The
            # connection to drop when the scarlet catalog is not in use is
            # "scarletCatalog"; runQuantum branches on its absence.
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))

379 

380 

class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    # Which upstream catalog feeds measurement; also controls which butler
    # connections stay active (see MeasureMergedCoaddSourcesConnections.__init__).
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Convenience alias so callers (e.g. runQuantum) can reach the
        # reference-object loader config through this top-level config.
        return self.match.refObjLoader

    def setDefaults(self):
        """Enable the extra measurement plugins and mask propagation used for coadds."""
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # Flag pixels affected by coadd-specific mask planes anywhere in, or
        # at the center of, each footprint.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']

472 

473 

474## @addtogroup LSST_task_documentation 

475## @{ 

476## @page page_MeasureMergedCoaddSourcesTask MeasureMergedCoaddSourcesTask 

477## @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask" 

478## @copybrief MeasureMergedCoaddSourcesTask 

479## @} 

480 

class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure."""
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to PipelineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None
        @param[in] initInputs: init-input datasets from the butler; if provided, its 'inputSchema'
            entry overrides the schema argument.

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        # Whether the input catalog came from a deblender ("deblendedCatalog"
        # or "deblendedFlux") rather than raw merged detections.
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        # Map input records into the (extended) output schema.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Init-output: persists the final measurement schema.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        # refCat is a prerequisite input and is always present, so it must be
        # popped regardless of configuration; only build and attach the
        # reference loader when matching is enabled.
        # FIX: self.match.setRefObjLoader was previously called
        # unconditionally, raising AttributeError when doMatchSources=False
        # because the "match" subtask is only created when matching is on.
        refCats = inputs.pop('refCat')
        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 refCats,
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()
        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog: the scarlet catalog connection is
        # only active when config.inputCatalog == "deblendedCatalog".
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        # Build the skyInfo struct describing this patch within its tract.
        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # Strip HeavyFootprints to save space on disk
        sources = outputs.outputSources
        # NOTE(review): despite the comment above, nothing is stripped here
        # and config.doStripFootprints is never consulted -- this local is
        # dead.  Left unchanged to preserve behavior; confirm intent before
        # implementing the stripping.
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
        exposureId : `int` or `bytes`
            packed unique number or bytes unique to the input exposure
        ccdInputs : `lsst.afw.table.ExposureCatalog`
            Catalog containing information on the individual visits which went into making
            the coadd.
        sourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.
        visitCatalogs : list of `lsst.afw.table.SourceCatalogs`
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : list of `lsst.afw.geom.SkyWcs`
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `None`
            This was a Gen2 butler used to load visit catalogs.
            No longer used and should not be set. Will be removed in the
            future.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        if butler is not None:
            warnings.warn("The 'butler' parameter is no longer used and can be safely removed.",
                          category=FutureWarning, stacklevel=2)
            butler = None

        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    butler,
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # FIX: this assignment previously appeared twice, once
                # redundantly inside the else branch above; assign once here
                # for both branches.
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results