Coverage for python/lsst/pipe/tasks/multiBand.py: 26%

266 statements  

« prev     ^ index     » next       coverage.py v7.3.1, created at 2023-09-07 10:56 +0000

1# This file is part of pipe_tasks. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22__all__ = ["DetectCoaddSourcesConfig", "DetectCoaddSourcesTask"] 

23 

24from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections) 

25import lsst.pipe.base.connectionTypes as cT 

26from lsst.pex.config import Field, ConfigurableField, ChoiceField 

27from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask 

28from lsst.meas.base import ( 

29 SingleFrameMeasurementTask, 

30 ApplyApCorrTask, 

31 CatalogCalculationTask, 

32 SkyMapIdGeneratorConfig, 

33) 

34from lsst.meas.astrom import DirectMatchTask, denormalizeMatches 

35from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask 

36from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask 

37import lsst.afw.table as afwTable 

38import lsst.afw.math as afwMath 

39from lsst.daf.base import PropertyList 

40from lsst.skymap import BaseSkyMap 

41 

42# NOTE: these imports are a convenience so multiband users only have to import this file. 

43from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401 

44from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401 

45from .multiBandUtils import CullPeaksConfig # noqa: F401 

46from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401 

47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401 

48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401 

49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401 

50 

51 

52""" 

53New set types: 

54* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter) 

55* deepCoadd_mergeDet: merged detections (tract, patch) 

56* deepCoadd_meas: measurements of merged detections (tract, patch, filter) 

57* deepCoadd_ref: reference sources (tract, patch) 

58All of these have associated *_schema catalogs that require no data ID and hold no records. 

59 

60In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in 

61the mergeDet, meas, and ref dataset Footprints: 

62* deepCoadd_peak_schema 

63""" 

64 

65 

##############################################################################
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep",
                                                      "outputCoaddName": "deep"}):
    """Butler connections for DetectCoaddSourcesTask."""

    # Schema catalog written once at task initialization, so downstream tasks
    # can discover the detection columns before any quantum runs.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    # The coadd on which detection is run.
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    # Background list produced while detecting.
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    # Catalog of detected sources (parent footprints).
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    # The exposure as written back out after detection has run on it.
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )

99 

100 

class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """

    doScaleVariance = Field(
        dtype=bool,
        default=True,
        doc="Scale variance plane using empirical noise?",
    )
    scaleVariance = ConfigurableField(
        target=ScaleVarianceTask,
        doc="Variance rescaling",
    )
    detection = ConfigurableField(
        target=DynamicDetectionTask,
        doc="Source detection",
    )
    coaddName = Field(
        dtype=str,
        default="deep",
        doc="Name of coadd",
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    def setDefaults(self):
        super().setDefaults()
        detection = self.detection
        detection.thresholdType = "pixel_stdev"
        detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background
        # subtraction should be very basic.
        detection.reEstimateBackground = False
        detection.background.useApprox = False
        detection.background.binSize = 4096
        detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        # Suppress large footprints that overwhelm the deblender.
        detection.doTempWideBackground = True
        # Include band in packed data IDs that go into object IDs (None ->
        # "as many bands as are defined", rather than the default of zero).
        self.idGenerator.packer.n_bands = None

130 

class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single filter coadd.

    Coadding individual visits requires each exposure to be warped, which
    introduces covariance in the noise properties across pixels.  Before
    detection we therefore rescale the coadd variance plane to match the
    observed variance.  This is an approximation -- strictly, the full
    covariance matrix should be propagated -- but it is simple and works well
    in practice.

    After the variance plane is scaled, sources and their footprints are
    generated by delegating to the "detection" subtask.

    This task is meant to run after a coadd has been assembled in a given
    band: it updates the background, detects all sources in that single band,
    and produces a set of parent footprints.  Subsequent tasks in the
    multi-band processing procedure merge sources across bands and eventually
    perform forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified in place to include
        all fields set by this task.  If None, the source minimal schema is
        used.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig

    def __init__(self, schema=None, **kwargs):
        # N.B. super() is used here to handle the multiple inheritance of
        # PipelineTasks; the init call structure has been reviewed carefully
        # to be sure it works as intended.
        super().__init__(**kwargs)
        self.schema = afwTable.SourceTable.makeMinimalSchema() if schema is None else schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")
        # Empty catalog carrying the final schema, exported through the
        # detectionSchema init-output connection.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        # One generator provides both the table ID factory and the RNG seed.
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        inputs["idFactory"] = idGenerator.make_table_id_factory()
        inputs["expId"] = idGenerator.catalog_id
        butlerQC.put(self.run(**inputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        First scale the variance plane to match the observed variance using
        ``ScaleVarianceTask``, then invoke the "detection" subtask to detect
        sources.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be background-subtracted and
            scaled, depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory to set source identifiers.
        expId : `int`
            Exposure identifier (integer) for RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Results as a struct with attributes:

            ``sources``
                Catalog of detections (`lsst.afw.table.SourceCatalog`).
            ``backgrounds``
                List of backgrounds (`list`).
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            # Record the applied scaling for provenance.
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        # The detection subtask may or may not report backgrounds; collect
        # whatever it provides into a single BackgroundList.
        backgrounds = afwMath.BackgroundList()
        for bg in (getattr(detections, "background", None) or ()):
            backgrounds.append(bg)
        return Struct(outputSources=detections.sources,
                      outputBackgrounds=backgrounds,
                      outputExposure=exposure)

221 

222 

class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for MeasureMergedCoaddSourcesTask.

    ``__init__`` prunes the optional connections according to the config:
    flag-propagation inputs, the deblender catalog/model inputs, and the
    match outputs.
    """

    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation at all: drop every propagation input.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Check for types of flags required.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            # Multiband deblender output: the generic inputCatalog connection
            # is replaced by scarletCatalog (+ scarletModels when footprints
            # are to be reconstructed).
            self.inputs -= set(("inputCatalog",))
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # BUGFIX: the original did ``self.inputs -= set(("deblendedCatalog"))``
            # which (a) is missing the trailing comma, so it subtracted the
            # *characters* of the string, and (b) named a connection that does
            # not exist -- the scarlet connections here are "scarletCatalog"
            # and "scarletModels".  Remove both so the quantum graph does not
            # demand scarlet datasets when a different input catalog is used.
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))

363 

class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """

    # --- Input-catalog selection and deblender-output handling ---
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending.",
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(
        dtype=bool,
        default=True,
        doc="Whether or not to add footprints to the input catalog from scarlet models. "
            "This should be true whenever using the multi-band deblender, "
            "otherwise this should be False.",
    )
    doConserveFlux = Field(
        dtype=bool,
        default=True,
        doc="Whether to use the deblender models as templates to re-distribute the flux "
            "from the 'exposure' (True), or to perform measurements on the deblender "
            "model footprints.",
    )
    doStripFootprints = Field(
        dtype=bool,
        default=True,
        doc="Whether to strip footprints from the output catalog before "
            "saving to disk. "
            "This is usually done when using scarlet models to save disk space.",
    )

    # --- Measurement and flag subtasks ---
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool,
        default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)",
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")

    # --- Reference-catalog matching ---
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )

    # --- Miscellaneous ---
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections",
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections",
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task',
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog",
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    @property
    def refObjLoader(self):
        # Convenience alias: the reference loader config lives on the matcher.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        plugins = self.measurement.plugins
        plugins.names |= ['base_InputCount',
                          'base_Variance',
                          'base_LocalPhotoCalib',
                          'base_LocalWcs']
        plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE', 'INEXACT_PSF']
        plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE', 'INEXACT_PSF']

454 

class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs (including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the is-primary
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : `lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged detection catalog.
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        An instance of ReferenceObjectLoader that supplies an external reference
        catalog. May be None if the loader can be constructed from the butler argument or all steps
        requiring a reference catalog are disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig

    def __init__(self, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            # The schema shipped with the init-input catalog takes precedence
            # over the ``schema`` argument.
            schema = initInputs['inputSchema'].schema
        if schema is None:
            raise ValueError("Schema must be defined.")
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Empty catalog carrying the final schema, exported through the
        # outputSchema init-output connection.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        # run() does not accept the raw reference-catalog handles, so pop them
        # unconditionally.  Only build and attach the loader when matching is
        # enabled: the "match" subtask is constructed in __init__ only when
        # doMatchSources is True, so the original unconditional
        # setRefObjLoader call raised AttributeError with doMatchSources=False.
        refCatHandles = inputs.pop('refCat')
        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 refCatHandles,
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds; only the latter
        # should really be used as the IDs all come from the input catalog.
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        inputs['exposureId'] = idGenerator.catalog_id

        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources.  The connections
        # only provide scarletModels when the scarlet deblender was used, so
        # guard on its presence instead of crashing on the pop.
        if self.config.doAddFootprints and "scarletModels" in inputs:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        # Assemble the sky geometry for this patch.
        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # Strip HeavyFootprints to save space on disk.  The original read
        # outputs.outputSources into an unused local and never honored
        # config.doStripFootprints; strip the deblended children
        # (parent != 0) so the parents' footprints remain available.
        # NOTE(review): child-only stripping follows the documented intent of
        # doStripFootprints -- confirm against downstream catalog readers.
        if self.config.doStripFootprints:
            sources = outputs.outputSources
            for source in sources[sources["parent"] != 0]:
                source.setFootprint(None)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the coadd.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # BUGFIX: this assignment appeared twice in a row in the
                # original; a single assignment inside the conditional is
                # sufficient (and avoids referencing ``denormMatches`` when it
                # was never created).
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results