Coverage for python/lsst/pipe/tasks/multiBand.py: 25%

273 statements  

« prev     ^ index     » next       coverage.py v7.5.1, created at 2024-05-14 09:28 +0000

1# This file is part of pipe_tasks. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

# Public API of this module. The MeasureMergedCoaddSources* names were
# previously missing even though they are defined (and used) publicly here.
__all__ = [
    "DetectCoaddSourcesConfig",
    "DetectCoaddSourcesTask",
    "MeasureMergedCoaddSourcesConfig",
    "MeasureMergedCoaddSourcesTask",
]

23 

24from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections) 

25import lsst.pipe.base.connectionTypes as cT 

26from lsst.pex.config import Field, ConfigurableField, ChoiceField 

27from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask, \ 

28 SetPrimaryFlagsTask 

29from lsst.meas.base import ( 

30 SingleFrameMeasurementTask, 

31 ApplyApCorrTask, 

32 CatalogCalculationTask, 

33 SkyMapIdGeneratorConfig, 

34) 

35from lsst.meas.extensions.scarlet.io import updateCatalogFootprints 

36from lsst.meas.astrom import DirectMatchTask, denormalizeMatches 

37from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask 

38import lsst.afw.table as afwTable 

39import lsst.afw.math as afwMath 

40from lsst.daf.base import PropertyList 

41from lsst.skymap import BaseSkyMap 

42 

43# NOTE: these imports are a convenience so multiband users only have to import this file. 

44from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401 

45from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401 

46from .multiBandUtils import CullPeaksConfig # noqa: F401 

47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401 

48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401 

49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401 

50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401 

51 

52 

53""" 

54New set types: 

55* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter) 

56* deepCoadd_mergeDet: merged detections (tract, patch) 

57* deepCoadd_meas: measurements of merged detections (tract, patch, filter) 

58* deepCoadd_ref: reference sources (tract, patch) 

59All of these have associated *_schema catalogs that require no data ID and hold no records. 

60 

61In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in 

62the mergeDet, meas, and ref dataset Footprints: 

63* deepCoadd_peak_schema 

64""" 

65 

66 

67############################################################################################################## 

class DetectCoaddSourcesConnections(
    PipelineTaskConnections,
    dimensions=("tract", "patch", "band", "skymap"),
    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"},
):
    """Butler connections for DetectCoaddSourcesTask: a single coadd in,
    detections plus the detection background and updated coadd out.
    """

    # Init-output: the schema of the detection catalogs written below.
    detectionSchema = cT.InitOutput(
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
        doc="Schema of the detection catalog",
    )
    # The coadd on which detection is run.
    exposure = cT.Input(
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
        doc="Exposure on which detections are to be performed",
    )
    outputBackgrounds = cT.Output(
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap"),
        doc="Output Backgrounds used in detection",
    )
    outputSources = cT.Output(
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
        doc="Detected sources catalog",
    )
    outputExposure = cT.Output(
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
        doc="Exposure post detection",
    )

100 

101 

class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """

    doScaleVariance = Field(
        dtype=bool,
        default=True,
        doc="Scale variance plane using empirical noise?",
    )
    scaleVariance = ConfigurableField(
        target=ScaleVarianceTask,
        doc="Variance rescaling",
    )
    detection = ConfigurableField(
        target=DynamicDetectionTask,
        doc="Source detection",
    )
    coaddName = Field(
        dtype=str,
        default="deep",
        doc="Name of coadd",
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    def setDefaults(self):
        super().setDefaults()
        detection = self.detection
        detection.thresholdType = "pixel_stdev"
        detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background
        # subtraction here should be very basic.
        detection.reEstimateBackground = False
        detection.background.useApprox = False
        detection.background.binSize = 4096
        detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        # Suppress large footprints that overwhelm the deblender.
        detection.doTempWideBackground = True
        # Include band in packed data IDs that go into object IDs (None ->
        # "as many bands as are defined", rather than the default of zero).
        self.idGenerator.packer.n_bands = None

130 

131 

class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single filter coadd.

    Coadding individual visits requires each exposure to be warped. This
    introduces covariance in the noise properties across pixels. Before
    detection, we correct the coadd variance by scaling the variance plane in
    the coadd to match the observed variance. This is an approximate
    approach -- strictly, we should propagate the full covariance matrix --
    but it is simple and works well in practice.

    After scaling the variance plane, sources are detected and footprints
    generated by delegating to the "detection" subtask.

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image
    in a given band. The purpose of the task is to update the background,
    detect all sources in a single band, and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will
    merge sources across bands and, eventually, perform forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified in place to include
        all fields set by this task. If None, the source minimal schema will
        be used.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig

    def __init__(self, schema=None, **kwargs):
        # N.B. super() is used here to handle the multiple inheritance of
        # PipelineTasks; the init call structure has been reviewed carefully
        # to be sure super will work as intended.
        super().__init__(**kwargs)
        self.schema = afwTable.SourceTable.makeMinimalSchema() if schema is None else schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        # Init-output catalog carrying the final schema.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        generator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        # The id factory seeds source IDs; catalog_id seeds the detection RNG.
        inputs["idFactory"] = generator.make_table_id_factory()
        inputs["expId"] = generator.catalog_id
        butlerQC.put(self.run(**inputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        First scale the variance plane to match the observed variance using
        ``ScaleVarianceTask``, then invoke the "detection" subtask to detect
        sources.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be background-subtracted and
            scaled, depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory to set source identifiers.
        expId : `int`
            Exposure identifier (integer) for RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Results as a struct with attributes:

            ``sources``
                Catalog of detections (`lsst.afw.table.SourceCatalog`).
            ``backgrounds``
                List of backgrounds (`list`).
        """
        if self.config.doScaleVariance:
            # Record the applied scaling in the exposure metadata.
            scale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", scale)

        backgrounds = afwMath.BackgroundList()
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)

        # The detection subtask may or may not produce backgrounds.
        detectionBackgrounds = getattr(detections, "background", None)
        if detectionBackgrounds:
            for bkg in detectionBackgrounds:
                backgrounds.append(bkg)

        return Struct(outputSources=detections.sources,
                      outputBackgrounds=backgrounds,
                      outputExposure=exposure)

222 

223 

class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for MeasureMergedCoaddSourcesTask.

    Which inputs are actually used depends on the configuration: the
    deblender that produced the input catalog, the flag-propagation subtask,
    and whether reference matching is enabled. ``__init__`` prunes the unused
    connections accordingly.
    """

    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation at all: none of the visit-level inputs
            # are needed.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Check for types of flags required.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            # Multiband (scarlet) deblender output: the generic inputCatalog
            # connection is unused.
            self.inputs -= set(("inputCatalog",))
            # Scarlet models are only needed when footprints are
            # reconstructed from them (see runQuantum, which pops
            # "scarletModels" only when doAddFootprints is set).
            # Bug fix: this test was previously un-nested, so the else
            # branch below removed "scarletModels" exactly when
            # doAddFootprints=True required it.
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # Single-band deblender or no deblending: the scarlet products
            # are unused. Bug fix: the connection is named "scarletCatalog"
            # (the old code subtracted the characters of "deblendedCatalog"
            # because the one-element tuple lacked its comma, a no-op that
            # left the scarlet catalog as a required input).
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.prerequisiteInputs -= set(("refCat",))
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))

364 

365 

class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """

    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(
        dtype=bool,
        default=True,
        doc="Whether or not to add footprints to the input catalog from scarlet models. "
            "This should be true whenever using the multi-band deblender, "
            "otherwise this should be False.",
    )
    doConserveFlux = Field(
        dtype=bool,
        default=True,
        doc="Whether to use the deblender models as templates to re-distribute the flux "
            "from the 'exposure' (True), or to perform measurements on the deblender "
            "model footprints.",
    )
    doStripFootprints = Field(
        dtype=bool,
        default=True,
        doc="Whether to strip footprints from the output catalog before "
            "saving to disk. "
            "This is usually done when using scarlet models to save disk space.",
    )
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool,
        default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)",
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections",
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections",
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task',
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog",
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    @property
    def refObjLoader(self):
        # Convenience alias: the reference loader config lives on the match
        # subtask config.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # The same set of problematic mask planes is flagged whether it
        # appears anywhere in a footprint or at its center.
        badMaskPlanes = ['CLIPPED', 'SENSOR_EDGE', 'INEXACT_PSF', 'STREAK']
        pixelFlags = self.measurement.plugins['base_PixelFlags']
        pixelFlags.masksFpAnywhere = list(badMaskPlanes)
        pixelFlags.masksFpCenter = list(badMaskPlanes)

    def validate(self):
        super().validate()
        # Denormalized matches can only be written when matching is enabled.
        if self.doWriteMatchesDenormalized and not self.doMatchSources:
            raise ValueError("Cannot set doWriteMatchesDenormalized if doMatchSources is False.")

461 

462 

class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs (including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the is-primary
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : `lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged detection catalog.
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        An instance of ReferenceObjectLoader that supplies an external reference
        catalog. May be None if the loader can be constructed from the butler argument or all steps
        requiring a reference catalog are disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig

    def __init__(self, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        # True when the input catalog comes from a deblender (vs. raw mergeDet).
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        # An initInputs schema (from the butler) overrides the schema argument.
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            raise ValueError("Schema must be defined.")
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        afwTable.CoordKey.addErrorFields(self.schema)
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Init-output catalog carrying the final (post-subtask) schema.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 inputs.pop('refCat'),
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds; only the latter
        # should really be used as the IDs all come from the input catalog.
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        inputs['exposureId'] = idGenerator.catalog_id

        # Transform inputCatalog into a catalog with this task's output schema.
        table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog (scarlet vs. single-band/mergeDet).
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                imageForRedistribution = inputs['exposure']
            else:
                imageForRedistribution = None
            updateCatalogFootprints(
                modelData=modelData,
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                imageForRedistribution=imageForRedistribution,
                removeScarletData=True,
                updateFluxColumns=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        # Build the skyInfo struct expected by run() from the skymap.
        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # NOTE(review): the original code assigned
        # ``sources = outputs.outputSources`` here under a comment about
        # stripping HeavyFootprints to save disk space, but never actually
        # stripped anything; the dead assignment has been removed. If
        # ``config.doStripFootprints`` is meant to be honored here, that
        # still needs to be implemented -- confirm against the deblender
        # outputs.
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the coadd.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # Bug fix: the original assigned this attribute twice in a
                # row; a single assignment after the if/else suffices.
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results