Coverage for python/lsst/pipe/tasks/multiBand.py: 25%

271 statements  

« prev     ^ index     » next       coverage.py v7.2.7, created at 2023-07-19 09:37 +0000

1# This file is part of pipe_tasks. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22__all__ = ["DetectCoaddSourcesConfig", "DetectCoaddSourcesTask"] 

23 

24import warnings 

25 

26from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections) 

27import lsst.pipe.base.connectionTypes as cT 

28from lsst.pex.config import Field, ConfigurableField, ChoiceField 

29from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask 

30from lsst.meas.base import ( 

31 SingleFrameMeasurementTask, 

32 ApplyApCorrTask, 

33 CatalogCalculationTask, 

34 SkyMapIdGeneratorConfig, 

35) 

36from lsst.meas.astrom import DirectMatchTask, denormalizeMatches 

37from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask 

38from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask 

39import lsst.afw.table as afwTable 

40import lsst.afw.math as afwMath 

41from lsst.daf.base import PropertyList 

42from lsst.skymap import BaseSkyMap 

43 

44# NOTE: these imports are a convenience so multiband users only have to import this file. 

45from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401 

46from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401 

47from .multiBandUtils import CullPeaksConfig # noqa: F401 

48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401 

49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401 

50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401 

51from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401 

52 

53 

54""" 

55New set types: 

56* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter) 

57* deepCoadd_mergeDet: merged detections (tract, patch) 

58* deepCoadd_meas: measurements of merged detections (tract, patch, filter) 

59* deepCoadd_ref: reference sources (tract, patch) 

60All of these have associated *_schema catalogs that require no data ID and hold no records. 

61 

62In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in 

63the mergeDet, meas, and ref dataset Footprints: 

64* deepCoadd_peak_schema 

65""" 

66 

67 

68############################################################################################################## 

class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler dataset connections for `DetectCoaddSourcesTask`.

    One quantum is run per (tract, patch, band, skymap): a single-band coadd
    goes in; the detection catalog, the background model used during
    detection, and the (possibly background-updated) exposure come out.
    """

    # Init-output: the detection catalog schema, written once at task
    # construction so downstream tasks can be configured before any data
    # are processed.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    # The single-band coadd on which detection is run.
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # Background model accumulated while detecting (may be empty when the
    # detection subtask is configured not to re-estimate the background).
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # Per-band detection (footprint/peak) catalog.
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # The input exposure after detection-time modifications (e.g. variance
    # scaling recorded in metadata, detection mask planes set).
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )

101 

102 

class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """

    # Whether to rescale the coadd variance plane to match the observed
    # pixel variance before detection (see ScaleVarianceTask).
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )
    # Generates deterministic catalog IDs / RNG seeds from the skymap data ID.
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    def setDefaults(self):
        """Set detection defaults appropriate for coadds rather than single visits."""
        super().setDefaults()
        # Threshold in units of per-pixel standard deviation, which is
        # appropriate once the variance plane has been rescaled.
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender
        # Include band in packed data IDs that go into object IDs (None -> "as
        # many bands as are defined", rather than the default of zero).
        self.idGenerator.packer.n_bands = None

131 

132 

class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single filter coadd.

    Coadding individual visits requires each exposure to be warped. This
    introduces covariance in the noise properties across pixels. Before
    detection, we correct the coadd variance by scaling the variance plane in
    the coadd to match the observed variance. This is an approximate
    approach -- strictly, we should propagate the full covariance matrix --
    but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints
    by delegating to the @ref SourceDetectionTask_ "detection" subtask.

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image
    in a given band. The purpose of the task is to update the background,
    detect all sources in a single band and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will
    merge sources across bands and, eventually, perform forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified-in place to include all
        fields set by this task. If None, the source minimal schema will be used.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig

    def __init__(self, schema=None, **kwargs):
        # N.B. Super is used here to handle the multiple inheritance of
        # PipelineTasks; the init tree call structure has been reviewed
        # carefully to be sure super will work as intended.
        super().__init__(**kwargs)
        # Fall back to the minimal source schema when none was supplied.
        self.schema = schema if schema is not None else afwTable.SourceTable.makeMinimalSchema()
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")
        # Empty catalog published as the detectionSchema init-output.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        # Derive the ID factory and RNG seed from the quantum data ID, then
        # delegate all real work to run().
        inputs = butlerQC.get(inputRefs)
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        inputs["idFactory"] = idGenerator.make_table_id_factory()
        inputs["expId"] = idGenerator.catalog_id
        butlerQC.put(self.run(**inputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using ``ScaleVarianceTask``. Then invoke the ``SourceDetectionTask_``
        "detection" subtask to detect sources.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be backround-subtracted and scaled,
            depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory to set source identifiers.
        expId : `int`
            Exposure identifier (integer) for RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Results as a struct with attributes:

            ``sources``
                Catalog of detections (`lsst.afw.table.SourceCatalog`).
            ``backgrounds``
                List of backgrounds (`list`).
        """
        if self.config.doScaleVariance:
            # Rescale the variance plane and record the factor applied so it
            # can be recovered from the exposure metadata later.
            scaleFactor = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", scaleFactor)

        bgList = afwMath.BackgroundList()
        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detRes = self.detection.run(sourceTable, exposure, expId=expId)

        # Collect any backgrounds the detection subtask estimated (the
        # attribute may be absent or empty depending on configuration).
        for background in (getattr(detRes, "background", None) or []):
            bgList.append(background)

        return Struct(
            outputSources=detRes.sources,
            outputBackgrounds=bgList,
            outputExposure=exposure,
        )

223 

224 

class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler dataset connections for `MeasureMergedCoaddSourcesTask`.

    Connections are pruned in ``__init__`` according to the configuration:
    flag-propagation inputs depend on which propagation task is targeted, and
    the catalog inputs depend on which deblender (if any) produced the input.
    """

    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        """Prune connections that the given configuration does not use.

        Parameters
        ----------
        config : `MeasureMergedCoaddSourcesConfig`
            Configuration used to decide which inputs/outputs are needed.
        """
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation at all: none of the visit-level inputs
            # are needed.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Check for types of flags required.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            # Multiband (scarlet) deblender output: the generic inputCatalog
            # connection is replaced by scarletCatalog (+ scarletModels).
            self.inputs -= set(("inputCatalog",))

            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # Single-band deblender or raw merged detections: the scarlet
            # connections are not needed.
            # BUGFIX: this previously subtracted set(("deblendedCatalog"))
            # -- a missing comma made it a set of single characters, and
            # "deblendedCatalog" is not a connection attribute name anyway
            # (the connection is ``scarletCatalog``), so the scarlet catalog
            # input was never actually removed.
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))

364 

365 

class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    # Which upstream catalog to measure; controls connection pruning in
    # MeasureMergedCoaddSourcesConnections.__init__.
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )
    # Generates deterministic catalog IDs / RNG seeds from the skymap data ID.
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    @property
    def refObjLoader(self):
        # Convenience alias: the reference-object loader config lives on the
        # matching subtask.
        return self.match.refObjLoader

    def setDefaults(self):
        """Add coadd-specific measurement plugins and mask-plane flags."""
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # Flag objects whose footprints touch (anywhere / at center) the
        # coadd-specific mask planes.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']

455 

456 

class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd seperately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs(including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the @c is-primary
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler` or `None`, optional
        A butler used to read the input schemas from disk or construct the reference
        catalog loader, if schema or peakSchema or refObjLoader is None.
    schema : ``lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : ``lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged detection catalog.
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        An instance of ReferenceObjectLoader that supplies an external reference
        catalog. May be None if the loader can be constructed from the butler argument or all steps
        requiring a reference catalog are disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        # True when the input catalog came from a deblender (deblendedFlux or
        # deblendedCatalog), False for raw merged detections.
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        # Prefer a schema supplied via initInputs (pipeline execution) over
        # the constructor argument; fall back to reading it with the butler.
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema").schema
        # Map the input schema into an output schema that subtasks extend.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Init-output: empty catalog advertising the final output schema.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        # Always remove refCat from the run() kwargs; only build the loader
        # when matching is enabled, because the "match" subtask (and hence
        # setRefObjLoader) only exists when config.doMatchSources is True.
        # BUGFIX: previously the loader was constructed and
        # self.match.setRefObjLoader was called unconditionally, raising
        # AttributeError whenever doMatchSources was False.
        refCats = inputs.pop('refCat', None)
        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 refCats,
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds; only the latter
        # should really be used as the IDs all come from the input catalog.
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        inputs['exposureId'] = idGenerator.catalog_id

        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        # Build the skyInfo struct describing where this patch lives.
        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # Strip HeavyFootprints to save space on disk.
        # BUGFIX: previously ``sources`` was assigned here but never used, so
        # config.doStripFootprints had no effect and child footprints were
        # always persisted.
        if self.config.doStripFootprints:
            sources = outputs.outputSources
            for source in sources[sources["parent"] != 0]:
                source.setFootprint(None)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the coadd.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `None`, optional
            This was a Gen2 butler used to load visit catalogs.
            No longer used and should not be set. Will be removed in the
            future.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        if butler is not None:
            warnings.warn("The 'butler' parameter is no longer used and can be safely removed.",
                          category=FutureWarning, stacklevel=2)
            butler = None

        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    butler,
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # Single assignment (a redundant duplicate assignment was
                # removed here).
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results