Coverage for python/lsst/pipe/tasks/multiBand.py: 27%


425 statements  

1#!/usr/bin/env python 

2# 

3# LSST Data Management System 

4# Copyright 2008-2015 AURA/LSST. 

5# 

6# This product includes software developed by the 

7# LSST Project (http://www.lsst.org/). 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the LSST License Statement and 

20# the GNU General Public License along with this program. If not, 

21# see <https://www.lsstcorp.org/LegalNotices/>. 

22# 

23import numpy as np 

24 

25from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer 

26from lsst.coadd.utils.getGen3CoaddExposureId import getGen3CoaddExposureId 

27from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner, 

28 PipelineTask, PipelineTaskConfig, PipelineTaskConnections) 

29import lsst.pipe.base.connectionTypes as cT 

30from lsst.pex.config import Config, Field, ConfigurableField 

31from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader 

32from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask 

33from lsst.meas.deblender import SourceDeblendTask 

34from lsst.meas.extensions.scarlet import ScarletDeblendTask 

35from lsst.pipe.tasks.coaddBase import getSkyInfo 

36from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask 

37from lsst.meas.astrom import DirectMatchTask, denormalizeMatches 

38from lsst.pipe.tasks.fakes import BaseFakeSourcesTask 

39from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask 

40from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask 

41import lsst.afw.image as afwImage 

42import lsst.afw.table as afwTable 

43import lsst.afw.math as afwMath 

44from lsst.daf.base import PropertyList 

45from lsst.skymap import BaseSkyMap 

46from lsst.obs.base import ExposureIdInfo 

47 

48# NOTE: these imports are a convenience so multiband users only have to import this file. 

49from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401 

50from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401 

51from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401 

52from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory # noqa: F401 

53from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401 

54from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401 

55from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401 

56from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401 

57 

58 

59""" 

60New dataset types: 

61* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter) 

62* deepCoadd_mergeDet: merged detections (tract, patch) 

63* deepCoadd_meas: measurements of merged detections (tract, patch, filter) 

64* deepCoadd_ref: reference sources (tract, patch) 

65All of these have associated *_schema catalogs that require no data ID and hold no records. 

66 

67In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in 

68the mergeDet, meas, and ref dataset Footprints: 

69* deepCoadd_peak_schema 
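
For illustration only, these datasets can be retrieved with a Gen3 Butler once the
pipeline has produced them; the repository path, collection, and data ID values in
this sketch are placeholders, not part of the pipeline itself:

    from lsst.daf.butler import Butler

    butler = Butler("/path/to/repo", collections="HSC/runs/example")
    dataId = {"tract": 0, "patch": 42, "band": "i", "skymap": "hsc_rings_v1"}
    detections = butler.get("deepCoadd_det", dataId)     # per-band parent footprints
    measurements = butler.get("deepCoadd_meas", dataId)  # per-band measurements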

70""" 

71 

72 

73############################################################################################################## 

74class DetectCoaddSourcesConnections(PipelineTaskConnections, 

75 dimensions=("tract", "patch", "band", "skymap"), 

76 defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}): 

77 detectionSchema = cT.InitOutput( 

78 doc="Schema of the detection catalog", 

79 name="{outputCoaddName}Coadd_det_schema", 

80 storageClass="SourceCatalog", 

81 ) 

82 exposure = cT.Input( 

83 doc="Exposure on which detections are to be performed", 

84 name="{inputCoaddName}Coadd", 

85 storageClass="ExposureF", 

86 dimensions=("tract", "patch", "band", "skymap") 

87 ) 

88 outputBackgrounds = cT.Output( 

89 doc="Output Backgrounds used in detection", 

90 name="{outputCoaddName}Coadd_calexp_background", 

91 storageClass="Background", 

92 dimensions=("tract", "patch", "band", "skymap") 

93 ) 

94 outputSources = cT.Output( 

95 doc="Detected sources catalog", 

96 name="{outputCoaddName}Coadd_det", 

97 storageClass="SourceCatalog", 

98 dimensions=("tract", "patch", "band", "skymap") 

99 ) 

100 outputExposure = cT.Output( 

101 doc="Exposure post detection", 

102 name="{outputCoaddName}Coadd_calexp", 

103 storageClass="ExposureF", 

104 dimensions=("tract", "patch", "band", "skymap") 

105 ) 

106 

107 

108class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections): 

109 """! 

110 @anchor DetectCoaddSourcesConfig_ 

111 

112 @brief Configuration parameters for the DetectCoaddSourcesTask 

113 """ 

114 doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?") 

115 scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling") 

116 detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection") 

117 coaddName = Field(dtype=str, default="deep", doc="Name of coadd") 

118 doInsertFakes = Field(dtype=bool, default=False, 

119 doc="Run fake sources injection task") 

120 insertFakes = ConfigurableField(target=BaseFakeSourcesTask, 

121 doc="Injection of fake sources for testing " 

122 "purposes (must be retargeted)") 

123 hasFakes = Field( 

124 dtype=bool, 

125 default=False, 

126 doc="Should be set to True if fake sources have been inserted into the input data." 

127 ) 

128 

129 def setDefaults(self): 

130 super().setDefaults() 

131 self.detection.thresholdType = "pixel_stdev" 

132 self.detection.isotropicGrow = True 

133 # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic 

134 self.detection.reEstimateBackground = False 

135 self.detection.background.useApprox = False 

136 self.detection.background.binSize = 4096 

137 self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER' 

138 self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender 

139 
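
# A minimal sketch of how the defaults above might be overridden from a task
# configuration file (e.g. one passed via --configfile); the values are
# illustrative only, and the field names are those defined in
# DetectCoaddSourcesConfig and its "detection" subtask:
#
#     config.doScaleVariance = False
#     config.detection.thresholdValue = 5.0
#     config.detection.doTempWideBackground = False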

140## @addtogroup LSST_task_documentation 

141## @{ 

142## @page page_DetectCoaddSourcesTask DetectCoaddSourcesTask 

143## @ref DetectCoaddSourcesTask_ "DetectCoaddSourcesTask" 

144## @copybrief DetectCoaddSourcesTask 

145## @} 

146 

147 

148class DetectCoaddSourcesTask(PipelineTask, CmdLineTask): 

149 r"""! 

150 @anchor DetectCoaddSourcesTask_ 

151 

152 @brief Detect sources on a coadd 

153 

154 @section pipe_tasks_multiBand_Contents Contents 

155 

156 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose 

157 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize 

158 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run 

159 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config 

160 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug 

161 - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example 

162 

163 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description 

164 

165 Command-line task that detects sources on a coadd of exposures obtained with a single filter. 

166 

167 Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise 

168 properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane 

169 in the coadd to match the observed variance. This is an approximate approach -- strictly, we should 

170 propagate the full covariance matrix -- but it is simple and works well in practice. 

171 

172 After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref 

173 SourceDetectionTask_ "detection" subtask. 

174 

175 @par Inputs: 

176 deepCoadd{tract,patch,filter}: ExposureF 

177 @par Outputs: 

178 deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints) 

179 @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input 

180 exposure (ExposureF) 

181 @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList 

182 @par Data Unit: 

183 tract, patch, filter 

184 

185 DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask. 

186 You can retarget this subtask if you wish. 

187 

188 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization 

189 

190 @copydoc \_\_init\_\_ 

191 

192 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task 

193 

194 @copydoc run 

195 

196 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters 

197 

198 See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig" 

199 

200 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables 

201 

202 The command line task interface supports a 

203 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py 

204 files. 

205 

206 DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to 

207 @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for 

208 @ref SourceDetectionTask_ "SourceDetectionTask" for further information. 

209 

210 @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example 

211 of using DetectCoaddSourcesTask 

212 

213 DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of 

214 the task is to update the background, detect all sources in a single band and generate a set of parent 

215 footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and, 

216 eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data 

217 reference to the coadd to be processed. A list of the available optional arguments can be obtained by 

218 calling detectCoaddSources.py with the `--help` command line argument: 

219 @code 

220 detectCoaddSources.py --help 

221 @endcode 

222 

223 To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we 

224 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed 

225 steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows: 

226 @code 

227 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I 

228 @endcode 

229 This will process the HSC-I band data. The results are written to 

230 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`. 

231 

232 It is also necessary to run: 

233 @code 

234 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R 

235 @endcode 

236 to generate the sources catalogs for the HSC-R band required by the next step in the multi-band 

237 processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask". 
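
The task can also be invoked programmatically on an in-memory coadd. The following
is a minimal sketch, not a complete recipe: `exposure` is assumed to be an
`lsst.afw.image.ExposureF` coadd, `idFactory` an `lsst.afw.table.IdFactory`, and
`expId` an integer used to seed the random number generator.
@code
config = DetectCoaddSourcesConfig()
task = DetectCoaddSourcesTask(config=config)
results = task.run(exposure, idFactory, expId=expId)
sources = results.outputSources
backgrounds = results.outputBackgrounds
@endcode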

238 """ 

239 _DefaultName = "detectCoaddSources" 

240 ConfigClass = DetectCoaddSourcesConfig 

241 getSchemaCatalogs = _makeGetSchemaCatalogs("det") 

242 makeIdFactory = _makeMakeIdFactory("CoaddId") 

243 

244 @classmethod 

245 def _makeArgumentParser(cls): 

246 parser = ArgumentParser(name=cls._DefaultName) 

247 parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r", 

248 ContainerClass=ExistingCoaddDataIdContainer) 

249 return parser 

250 

251 def __init__(self, schema=None, **kwargs): 

252 """! 

253 @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask. 

254 

255 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__): 

256 

257 @param[in] schema: initial schema for the output catalog, modified in place to include all 

258 fields set by this task. If None, the source minimal schema will be used. 

259 @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__ 

260 """ 

261 # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree 

262 # call structure has been reviewed carefully to be sure super will work as intended. 

263 super().__init__(**kwargs) 

264 if schema is None: 

265 schema = afwTable.SourceTable.makeMinimalSchema() 

266 if self.config.doInsertFakes: 

267 self.makeSubtask("insertFakes") 

268 self.schema = schema 

269 self.makeSubtask("detection", schema=self.schema) 

270 if self.config.doScaleVariance: 

271 self.makeSubtask("scaleVariance") 

272 

273 self.detectionSchema = afwTable.SourceCatalog(self.schema) 

274 

275 def runDataRef(self, patchRef): 

276 """! 

277 @brief Run detection on a coadd. 

278 

279 Invokes @ref run and then uses @ref write to output the 

280 results. 

281 

282 @param[in] patchRef: data reference for patch 

283 """ 

284 if self.config.hasFakes: 

285 exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True) 

286 else: 

287 exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True) 

288 expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, log=self.log) 

289 results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId) 

290 self.write(results, patchRef) 

291 return results 

292 

293 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

294 inputs = butlerQC.get(inputRefs) 

295 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band") 

296 inputs["idFactory"] = exposureIdInfo.makeSourceIdFactory() 

297 inputs["expId"] = exposureIdInfo.expId 

298 outputs = self.run(**inputs) 

299 butlerQC.put(outputs, outputRefs) 

300 

301 def run(self, exposure, idFactory, expId): 

302 """! 

303 @brief Run detection on an exposure. 

304 

305 First scale the variance plane to match the observed variance 

306 using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to 

307 detect sources. 

308 

309 @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled, 

310 depending on configuration). 

311 @param[in] idFactory: IdFactory to set source identifiers 

312 @param[in] expId: Exposure identifier (integer) for RNG seed 

313 

314 @return a pipe.base.Struct with fields 

315 - outputSources: catalog of detections 

316 - outputBackgrounds: list of backgrounds 

- outputExposure: the input exposure, modified in place 

317 """ 

318 if self.config.doScaleVariance: 

319 varScale = self.scaleVariance.run(exposure.maskedImage) 

320 exposure.getMetadata().add("VARIANCE_SCALE", varScale) 

321 backgrounds = afwMath.BackgroundList() 

322 if self.config.doInsertFakes: 

323 self.insertFakes.run(exposure, background=backgrounds) 

324 table = afwTable.SourceTable.make(self.schema, idFactory) 

325 detections = self.detection.run(table, exposure, expId=expId) 

326 sources = detections.sources 

327 fpSets = detections.fpSets 

328 if hasattr(fpSets, "background") and fpSets.background: 

329 for bg in fpSets.background: 

330 backgrounds.append(bg) 

331 return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure) 

332 

333 def write(self, results, patchRef): 

334 """! 

335 @brief Write out results from run. 

336 

337 @param[in] results: Struct returned from run 

339 @param[in] patchRef: data reference for patch 

340 """ 

341 coaddName = self.config.coaddName + "Coadd" 

342 patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background") 

343 patchRef.put(results.outputSources, coaddName + "_det") 

344 if self.config.hasFakes: 

345 patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp") 

346 else: 

347 patchRef.put(results.outputExposure, coaddName + "_calexp") 

348 

349############################################################################################################## 

350 

351 

352class DeblendCoaddSourcesConfig(Config): 

353 """DeblendCoaddSourcesConfig 

354 

355 Configuration parameters for the `DeblendCoaddSourcesTask`. 

356 """ 

357 singleBandDeblend = ConfigurableField(target=SourceDeblendTask, 

358 doc="Deblend sources separately in each band") 

359 multiBandDeblend = ConfigurableField(target=ScarletDeblendTask, 

360 doc="Deblend sources simultaneously across bands") 

361 simultaneous = Field(dtype=bool, 

362 default=True, 

363 doc="Simultaneously deblend all bands? " 

364 "True uses `multibandDeblend` while False uses `singleBandDeblend`") 

365 coaddName = Field(dtype=str, default="deep", doc="Name of coadd") 

366 hasFakes = Field(dtype=bool, 

367 default=False, 

368 doc="Should be set to True if fake sources have been inserted into the input data.") 

369 

370 def setDefaults(self): 

371 Config.setDefaults(self) 

372 self.singleBandDeblend.propagateAllPeaks = True 

373 

374 

375class DeblendCoaddSourcesRunner(MergeSourcesRunner): 

376 """Task runner for `DeblendCoaddSourcesTask` 

377 

378 Required because the run method requires a list of 

379 dataRefs rather than a single dataRef. 

380 """ 

381 @staticmethod 

382 def getTargetList(parsedCmd, **kwargs): 

383 """Provide a list of patch references for each patch, tract, filter combo. 

384 

385 Parameters 

386 ---------- 

387 parsedCmd: 

388 The parsed command 

389 kwargs: 

390 Keyword arguments passed to the task 

391 

392 Returns 

393 ------- 

394 targetList: list 

395 List of tuples, where each tuple is a (dataRef, kwargs) pair. 

396 """ 

397 refDict = MergeSourcesRunner.buildRefDict(parsedCmd) 

398 kwargs["psfCache"] = parsedCmd.psfCache 

399 return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()] 

400 

401 

402class DeblendCoaddSourcesTask(CmdLineTask): 

403 """Deblend the sources in a merged catalog 

404 

405 Deblend sources from master catalog in each coadd. 

406 This can either be done separately in each band using the HSC-SDSS deblender 

407 (`DeblendCoaddSourcesTask.config.simultaneous==False`) 

408 or use SCARLET to simultaneously fit the blend in all bands 

409 (`DeblendCoaddSourcesTask.config.simultaneous==True`). 

410 The task will set its own `self.schema` attribute to the `Schema` of the 

411 output deblended catalog. 

412 This will include all fields from the input `Schema`, as well as additional fields 

413 from the deblender. 

414 

415 pipe.tasks.multiband.DeblendCoaddSourcesTask Description 

416 --------------------------------------------------------- 

418 

419 Parameters 

420 ---------- 

421 butler: `Butler` 

422 Butler used to read the input schemas from disk 

423 if `schema` or `peakSchema` is `None`. 

424 schema: `Schema` 

425 The schema of the merged detection catalog as an input to this task. 

426 peakSchema: `Schema` 

427 The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog 
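
For example, a minimal construction sketch (here ``butler`` is assumed to be a
Gen2 `Butler` for an existing coadd repository, supplying the schemas because
`schema` and `peakSchema` are not given):

    config = DeblendCoaddSourcesConfig()
    config.simultaneous = False  # use the single-band deblender instead of scarlet
    task = DeblendCoaddSourcesTask(butler=butler, config=config)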

428 """ 

429 ConfigClass = DeblendCoaddSourcesConfig 

430 RunnerClass = DeblendCoaddSourcesRunner 

431 _DefaultName = "deblendCoaddSources" 

432 makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False) 

433 

434 @classmethod 

435 def _makeArgumentParser(cls): 

436 parser = ArgumentParser(name=cls._DefaultName) 

437 parser.add_id_argument("--id", "deepCoadd_calexp", 

438 help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i", 

439 ContainerClass=ExistingCoaddDataIdContainer) 

440 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache") 

441 return parser 

442 

443 def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs): 

444 CmdLineTask.__init__(self, **kwargs) 

445 if schema is None: 

446 assert butler is not None, "Neither butler nor schema is defined" 

447 schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema 

448 self.schemaMapper = afwTable.SchemaMapper(schema) 

449 self.schemaMapper.addMinimalSchema(schema) 

450 self.schema = self.schemaMapper.getOutputSchema() 

451 if peakSchema is None: 

452 assert butler is not None, "Neither butler nor peakSchema is defined" 

453 peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema 

454 

455 if self.config.simultaneous: 

456 self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema) 

457 else: 

458 self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema) 

459 

460 def getSchemaCatalogs(self): 

461 """Return a dict of empty catalogs for each catalog dataset produced by this task. 

462 

463 Returns 

464 ------- 

465 result: dict 

466 Dictionary of empty catalogs, with catalog names as keys. 

467 """ 

468 catalog = afwTable.SourceCatalog(self.schema) 

469 return {self.config.coaddName + "Coadd_deblendedFlux": catalog, 

470 self.config.coaddName + "Coadd_deblendedModel": catalog} 

471 

472 def runDataRef(self, patchRefList, psfCache=100): 

473 """Deblend the patch 

474 

475 Deblend each source simultaneously or separately 

476 (depending on `DeblendCoaddSourcesTask.config.simultaneous`). 

477 (Setting the `is-primary` flag and propagating visit flags are handled 

478 later, by MeasureMergedCoaddSourcesTask.) 

479 Write the deblended sources out. 

480 

481 Parameters 

482 ---------- 

483 patchRefList: list 

484 List of data references for each filter 

485 """ 

486 

487 if self.config.hasFakes: 

488 coaddType = "fakes_" + self.config.coaddName 

489 else: 

490 coaddType = self.config.coaddName 

491 

492 if self.config.simultaneous: 

493 # Use SCARLET to simultaneously deblend across filters 

494 filters = [] 

495 exposures = [] 

496 for patchRef in patchRefList: 

497 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True) 

498 filter = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True) 

499 filters.append(filter.bandLabel) 

500 exposures.append(exposure) 

501 # Sort inputs by band to match Gen3 order of inputs 

502 exposures = [exposure for _, exposure in sorted(zip(filters, exposures))] 

503 patchRefList = [patchRef for _, patchRef in sorted(zip(filters, patchRefList))] 

504 filters.sort() 

505 # The input sources are the same for all bands, since it is a merged catalog 

506 sources = self.readSources(patchRef) 

507 exposure = afwImage.MultibandExposure.fromExposures(filters, exposures) 

508 templateCatalogs, fluxCatalogs = self.multiBandDeblend.run(exposure, sources) 

509 for n in range(len(patchRefList)): 

510 self.write(patchRefList[n], templateCatalogs[filters[n]], "Model") 

511 if filters[n] in fluxCatalogs: 

512 self.write(patchRefList[n], fluxCatalogs[filters[n]], "Flux") 

513 else: 

514 # Use the single-band deblender to deblend each band separately 

515 for patchRef in patchRefList: 

516 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True) 

517 exposure.getPsf().setCacheCapacity(psfCache) 

518 sources = self.readSources(patchRef) 

519 self.singleBandDeblend.run(exposure, sources) 

520 self.write(patchRef, sources, "Flux") 

521 

522 def readSources(self, dataRef): 

523 """Read merged catalog 

524 

525 Read the catalog of merged detections and create a catalog 

526 in a single band. 

527 

528 Parameters 

529 ---------- 

530 dataRef: data reference 

531 Data reference for catalog of merged detections 

532 

533 Returns 

534 ------- 

535 sources: `SourceCatalog` 

536 List of sources in merged catalog 

537 

538 We also need to add columns to hold the measurements we're about to make 

539 so we can measure in-place. 

540 """ 

541 merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True) 

542 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId) 

543 idFactory = self.makeIdFactory(dataRef) 

544 # There may be gaps in the mergeDet catalog, which will cause the 

545 # source ids to be inconsistent. So we update the id factory 

546 # with the largest id already in the catalog. 

547 maxId = np.max(merged["id"]) 

548 idFactory.notify(maxId) 

549 table = afwTable.SourceTable.make(self.schema, idFactory) 

550 sources = afwTable.SourceCatalog(table) 

551 sources.extend(merged, self.schemaMapper) 

552 return sources 

553 

554 def write(self, dataRef, sources, catalogType): 

555 """Write the source catalog(s) 

556 

557 Parameters 

558 ---------- 

559 dataRef: Data Reference 

560 Reference to the output catalog. 

561 sources: `SourceCatalog` 

562 Catalog of deblended sources to write: either the flux-conserved catalog 

563 or the template-model catalog, depending on `catalogType`. 

564 catalogType: `str` 

565 Suffix of the output dataset name, either "Flux" or "Model". 

568 """ 

569 dataRef.put(sources, self.config.coaddName + f"Coadd_deblended{catalogType}") 

570 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId) 

571 

572 def writeMetadata(self, dataRefList): 

573 """Write the metadata produced from processing the data. 

574 Parameters 

575 ---------- 

576 dataRefList 

577 List of Butler data references used to write the metadata. 

578 The metadata is written to dataset type `CmdLineTask._getMetadataName`. 

579 """ 

580 for dataRef in dataRefList: 

581 try: 

582 metadataName = self._getMetadataName() 

583 if metadataName is not None: 

584 dataRef.put(self.getFullMetadata(), metadataName) 

585 except Exception as e: 

586 self.log.warning("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e) 

587 

588 

589class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections, 

590 dimensions=("tract", "patch", "band", "skymap"), 

591 defaultTemplates={"inputCoaddName": "deep", 

592 "outputCoaddName": "deep", 

593 "deblendedCatalog": "deblendedFlux"}): 

594 inputSchema = cT.InitInput( 

595 doc="Input schema for measure merged task produced by a deblender or detection task", 

596 name="{inputCoaddName}Coadd_deblendedFlux_schema", 

597 storageClass="SourceCatalog" 

598 ) 

599 outputSchema = cT.InitOutput( 

600 doc="Output schema after all new fields are added by task", 

601 name="{inputCoaddName}Coadd_meas_schema", 

602 storageClass="SourceCatalog" 

603 ) 

604 refCat = cT.PrerequisiteInput( 

605 doc="Reference catalog used to match measured sources against known sources", 

606 name="ref_cat", 

607 storageClass="SimpleCatalog", 

608 dimensions=("skypix",), 

609 deferLoad=True, 

610 multiple=True 

611 ) 

612 exposure = cT.Input( 

613 doc="Input coadd image", 

614 name="{inputCoaddName}Coadd_calexp", 

615 storageClass="ExposureF", 

616 dimensions=("tract", "patch", "band", "skymap") 

617 ) 

618 skyMap = cT.Input( 

619 doc="SkyMap to use in processing", 

620 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME, 

621 storageClass="SkyMap", 

622 dimensions=("skymap",), 

623 ) 

624 visitCatalogs = cT.Input( 

625 doc="Source catalogs for visits which overlap input tract, patch, band. Will be " 

626 "further filtered in the task for the purpose of propagating flags from image calibration " 

627 "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.", 

628 name="src", 

629 dimensions=("instrument", "visit", "detector"), 

630 storageClass="SourceCatalog", 

631 multiple=True 

632 ) 

633 sourceTableHandles = cT.Input( 

634 doc=("Source tables that are derived from the ``CalibrateTask`` sources. " 

635 "These tables contain astrometry and photometry flags, and optionally " 

636 "PSF flags."), 

637 name="sourceTable_visit", 

638 storageClass="DataFrame", 

639 dimensions=("instrument", "visit"), 

640 multiple=True, 

641 deferLoad=True, 

642 ) 

643 finalizedSourceTableHandles = cT.Input( 

644 doc=("Finalized source tables from ``FinalizeCalibrationTask``. These " 

645 "tables contain PSF flags from the finalized PSF estimation."), 

646 name="finalized_src_table", 

647 storageClass="DataFrame", 

648 dimensions=("instrument", "visit"), 

649 multiple=True, 

650 deferLoad=True, 

651 ) 

652 inputCatalog = cT.Input( 

653 doc=("Name of the input catalog to use. " 

654 "If the single band deblender was used this should be 'deblendedFlux'. " 

655 "If the multi-band deblender was used this should be 'deblendedModel', " 

656 "or 'deblendedFlux' if the multiband deblender was configured to output " 

657 "deblended flux catalogs. If no deblending was performed this should " 

658 "be 'mergeDet'."), 

659 name="{inputCoaddName}Coadd_{deblendedCatalog}", 

660 storageClass="SourceCatalog", 

661 dimensions=("tract", "patch", "band", "skymap"), 

662 ) 

663 outputSources = cT.Output( 

664 doc="Source catalog containing all the measurement information generated in this task", 

665 name="{outputCoaddName}Coadd_meas", 

666 dimensions=("tract", "patch", "band", "skymap"), 

667 storageClass="SourceCatalog", 

668 ) 

669 matchResult = cT.Output( 

670 doc="Match catalog produced by configured matcher, optional on doMatchSources", 

671 name="{outputCoaddName}Coadd_measMatch", 

672 dimensions=("tract", "patch", "band", "skymap"), 

673 storageClass="Catalog", 

674 ) 

675 denormMatches = cT.Output( 

676 doc="Denormalized Match catalog produced by configured matcher, optional on " 

677 "doWriteMatchesDenormalized", 

678 name="{outputCoaddName}Coadd_measMatchFull", 

679 dimensions=("tract", "patch", "band", "skymap"), 

680 storageClass="Catalog", 

681 ) 

682 

683 def __init__(self, *, config=None): 

684 super().__init__(config=config) 

685 if config.doPropagateFlags is False: 

686 self.inputs -= set(("visitCatalogs",)) 

687 self.inputs -= set(("sourceTableHandles",)) 

688 self.inputs -= set(("finalizedSourceTableHandles",)) 

689 elif config.propagateFlags.target == PropagateSourceFlagsTask: 

690 # New PropagateSourceFlagsTask does not use visitCatalogs. 

691 self.inputs -= set(("visitCatalogs",)) 

692 # Check for types of flags required. 

693 if not config.propagateFlags.source_flags: 

694 self.inputs -= set(("sourceTableHandles",)) 

695 if not config.propagateFlags.finalized_source_flags: 

696 self.inputs -= set(("finalizedSourceTableHandles",)) 

697 else: 

698 # Deprecated PropagateVisitFlagsTask uses visitCatalogs. 

699 self.inputs -= set(("sourceTableHandles",)) 

700 self.inputs -= set(("finalizedSourceTableHandles",)) 

701 

702 if config.doMatchSources is False: 

703 self.outputs -= set(("matchResult",)) 

704 

705 if config.doWriteMatchesDenormalized is False: 

706 self.outputs -= set(("denormMatches",)) 

707 

708 

709class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig, 

710 pipelineConnections=MeasureMergedCoaddSourcesConnections): 

711 """! 

712 @anchor MeasureMergedCoaddSourcesConfig_ 

713 

714 @brief Configuration parameters for the MeasureMergedCoaddSourcesTask 

715 """ 

716 inputCatalog = Field(dtype=str, default="deblendedFlux", 

717 doc=("Name of the input catalog to use. " 

718 "If the single band deblender was used this should be 'deblendedFlux'. " 

719 "If the multi-band deblender was used this should be 'deblendedModel'. " 

720 "If no deblending was performed this should be 'mergeDet'.")) 

721 measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement") 

722 setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch") 

723 doPropagateFlags = Field( 

724 dtype=bool, default=True, 

725 doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)" 

726 ) 

727 propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd") 

728 doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?") 

729 match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog") 

730 doWriteMatchesDenormalized = Field( 

731 dtype=bool, 

732 default=False, 

733 doc=("Write reference matches in denormalized format? " 

734 "This format uses more disk space, but is more convenient to read."), 

735 ) 

736 coaddName = Field(dtype=str, default="deep", doc="Name of coadd") 

737 psfCache = Field(dtype=int, default=100, doc="Size of psfCache") 

738 checkUnitsParseStrict = Field( 

739 doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'", 

740 dtype=str, 

741 default="raise", 

742 ) 

743 doApCorr = Field( 

744 dtype=bool, 

745 default=True, 

746 doc="Apply aperture corrections" 

747 ) 

748 applyApCorr = ConfigurableField( 

749 target=ApplyApCorrTask, 

750 doc="Subtask to apply aperture corrections" 

751 ) 

752 doRunCatalogCalculation = Field( 

753 dtype=bool, 

754 default=True, 

755 doc='Run catalogCalculation task' 

756 ) 

757 catalogCalculation = ConfigurableField( 

758 target=CatalogCalculationTask, 

759 doc="Subtask to run catalogCalculation plugins on catalog" 

760 ) 

761 

762 hasFakes = Field( 

763 dtype=bool, 

764 default=False, 

765 doc="Should be set to True if fake sources have been inserted into the input data." 

766 ) 

767 

768 @property 

769 def refObjLoader(self): 

770 return self.match.refObjLoader 

771 

772 def setDefaults(self): 

773 super().setDefaults() 

774 self.measurement.plugins.names |= ['base_InputCount', 

775 'base_Variance', 

776 'base_LocalPhotoCalib', 

777 'base_LocalWcs'] 

778 self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE', 

779 'INEXACT_PSF'] 

780 self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE', 

781 'INEXACT_PSF'] 

782 

783 def validate(self): 

784 super().validate() 

785 refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None) 

786 if refCatGen2 is not None and refCatGen2 != self.connections.refCat: 

787 raise ValueError( 

788 f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs " 

789 f"are different. These options must be kept in sync until Gen2 is retired." 

790 ) 

791 
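
# A minimal sketch of config overrides for this task (illustrative values only;
# the field names are those defined in MeasureMergedCoaddSourcesConfig above):
#
#     config.doApCorr = False
#     config.psfCache = 200
#     config.doWriteMatchesDenormalized = True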

792 

793## @addtogroup LSST_task_documentation 

794## @{ 

795## @page page_MeasureMergedCoaddSourcesTask MeasureMergedCoaddSourcesTask 

796## @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask" 

797## @copybrief MeasureMergedCoaddSourcesTask 

798## @} 

799 

800 

801class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner): 

802 """Get the psfCache setting into MeasureMergedCoaddSourcesTask""" 

803 @staticmethod 

804 def getTargetList(parsedCmd, **kwargs): 

805 return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache) 

806 

807 

808class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask): 

809 r"""! 

810 @anchor MeasureMergedCoaddSourcesTask_ 

811 

812 @brief Deblend sources from master catalog in each coadd separately and measure. 

813 

814 @section pipe_tasks_multiBand_Contents Contents 

815 

816 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose 

817 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize 

818 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run 

819 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config 

820 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug 

821 - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example 

822 

823 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description 

824 

825 Command-line task that uses peaks and footprints from a master catalog to perform deblending and 

826 measurement in each coadd. 

827 

828 Given a master input catalog of sources (peaks and footprints) or deblender outputs 

829 (including a HeavyFootprint in each band), measure each source on the 

830 coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a 

831 consistent set of child sources. 

832 

833 The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source 

834 properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit 

835 flags are propagated to the coadd sources. 

836 

837 Optionally, we can match the coadd sources to an external reference catalog. 

838 

839 @par Inputs: 

840 deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog 

841 @n deepCoadd_calexp{tract,patch,filter}: ExposureF 

842 @par Outputs: 

843 deepCoadd_meas{tract,patch,filter}: SourceCatalog 

844 @par Data Unit: 

845 tract, patch, filter 

846 

847 MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks: 

848 

849 <DL> 

850 <DT> @ref SingleFrameMeasurementTask_ "measurement" 

851 <DD> Measure source properties of deblended sources.</DD> 

852 <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags" 

853 <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are 

854 not at the edge of the field and that have either not been deblended or are the children of deblended 

855 sources</DD> 

856 <DT> @ref PropagateVisitFlagsTask_ "propagateFlags" 

857 <DD> Propagate flags set in individual visits to the coadd.</DD> 

858 <DT> @ref DirectMatchTask_ "match" 

859 <DD> Match input sources to a reference catalog (optional). 

860 </DD> 

861 </DL> 

862 These subtasks may be retargeted as required. 

863 

864 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization 

865 

866 @copydoc \_\_init\_\_ 

867 

868 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task 

869 

870 @copydoc run 

871 

872 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters 

873 

874 See @ref MeasureMergedCoaddSourcesConfig_ 

875 

876 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables 

877 

878 The command line task interface supports a 

879 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py 

880 files. 

881 

882 MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to 

883 the various sub-tasks. See the documentation for individual sub-tasks for more information. 

884 

885 @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using 

886 MeasureMergedCoaddSourcesTask 

887 

888 After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs. 

889 The next stage in the multi-band processing procedure will merge these measurements into a suitable 

890 catalog for driving forced photometry. 

891 

892 Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds 

893 to be processed. 

894 A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the 

895 `--help` command line argument: 

896 @code 

897 measureCoaddSources.py --help 

898 @endcode 

899 

900 To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing, we 

901 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished 

902 step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band 

903 coadd as follows: 

904 @code 

905 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I 

906 @endcode 

907 This will process the HSC-I band data. The results are written to 

908 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`. 

909 

910 It is also necessary to run 

911 @code 

912 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R 

913 @endcode 

914 to generate the source catalogs for the HSC-R band required by the next step in the multi-band 

915 procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask". 
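
The measurement stage can also be exercised programmatically. This is a minimal
sketch, not a complete recipe: `butler` is assumed to be a Gen2 butler that can
supply the input schema, while `exposure`, `sources`, `skyInfo` and `expId` are
assumed to be the coadd exposure, the catalog built from the deblender outputs,
the patch geometry struct and the packed exposure identifier, respectively.
@code
config = MeasureMergedCoaddSourcesConfig()
config.doMatchSources = False    # skip reference matching in this sketch
config.doPropagateFlags = False  # skip visit-flag propagation in this sketch
task = MeasureMergedCoaddSourcesTask(config=config, butler=butler)
results = task.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=expId)
measCat = results.outputSources
@endcode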

916 """ 

917 _DefaultName = "measureCoaddSources" 

918 ConfigClass = MeasureMergedCoaddSourcesConfig 

919 RunnerClass = MeasureMergedCoaddSourcesRunner 

920 getSchemaCatalogs = _makeGetSchemaCatalogs("meas") 

921 # The IDs we already have are of this type 

922 makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False) 

923 

924 @classmethod 

925 def _makeArgumentParser(cls): 

926 parser = ArgumentParser(name=cls._DefaultName) 

927 parser.add_id_argument("--id", "deepCoadd_calexp", 

928 help="data ID, e.g. --id tract=12345 patch=1,2 filter=r", 

929 ContainerClass=ExistingCoaddDataIdContainer) 

930 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache") 

931 return parser 

932 

933 def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None, 

934 **kwargs): 

935 """! 

936 @brief Initialize the task. 

937 

938 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__): 

939 @param[in] schema: the schema of the merged detection catalog used as input to this one 

940 @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog 

941 @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference 

942 catalog. May be None if the loader can be constructed from the butler argument or all steps 

943 requiring a reference catalog are disabled. 

944 @param[in] butler: a butler used to read the input schemas from disk or construct the reference 

945 catalog loader, if schema or peakSchema or refObjLoader is None 

946 

947 The task will set its own self.schema attribute to the schema of the output measurement catalog. 

948 This will include all fields from the input schema, as well as additional fields for all the 

949 measurements. 

950 """ 

951 super().__init__(**kwargs) 

952 self.deblended = self.config.inputCatalog.startswith("deblended") 

953 self.inputCatalog = "Coadd_" + self.config.inputCatalog 

954 if initInputs is not None: 

955 schema = initInputs['inputSchema'].schema 

956 if schema is None: 

957 assert butler is not None, "Neither butler nor schema is defined" 

958 schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema 

959 self.schemaMapper = afwTable.SchemaMapper(schema) 

960 self.schemaMapper.addMinimalSchema(schema) 

961 self.schema = self.schemaMapper.getOutputSchema() 

962 self.algMetadata = PropertyList() 

963 self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata) 

964 self.makeSubtask("setPrimaryFlags", schema=self.schema) 

965 if self.config.doMatchSources: 

966 self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader) 

967 if self.config.doPropagateFlags: 

968 self.makeSubtask("propagateFlags", schema=self.schema) 

969 self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict) 

970 if self.config.doApCorr: 

971 self.makeSubtask("applyApCorr", schema=self.schema) 

972 if self.config.doRunCatalogCalculation: 

973 self.makeSubtask("catalogCalculation", schema=self.schema) 

974 

975 self.outputSchema = afwTable.SourceCatalog(self.schema) 

976 

977 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

978 inputs = butlerQC.get(inputRefs) 

979 

980 refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat], 

981 inputs.pop('refCat'), config=self.config.refObjLoader, 

982 log=self.log) 

983 self.match.setRefObjLoader(refObjLoader) 

984 

985 # Set psfcache 

986 # move this to run after gen2 deprecation 

987 inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache) 

988 

989 # Get unique integer ID for IdFactory and RNG seeds 

990 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch") 

991 inputs['exposureId'] = exposureIdInfo.expId 

992 idFactory = exposureIdInfo.makeSourceIdFactory() 

993 # Transform inputCatalog 

994 table = afwTable.SourceTable.make(self.schema, idFactory) 

995 sources = afwTable.SourceCatalog(table) 

996 sources.extend(inputs.pop('inputCatalog'), self.schemaMapper) 

997 table = sources.getTable() 

998 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog. 

999 inputs['sources'] = sources 

1000 

1001 skyMap = inputs.pop('skyMap') 

1002 tractNumber = inputRefs.inputCatalog.dataId['tract'] 

1003 tractInfo = skyMap[tractNumber] 

1004 patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch']) 

1005 skyInfo = Struct( 

1006 skyMap=skyMap, 

1007 tractInfo=tractInfo, 

1008 patchInfo=patchInfo, 

1009 wcs=tractInfo.getWcs(), 

1010 bbox=patchInfo.getOuterBBox() 

1011 ) 

1012 inputs['skyInfo'] = skyInfo 

1013 

1014 if self.config.doPropagateFlags: 

1015 if self.config.propagateFlags.target == PropagateSourceFlagsTask: 

1016 # New version 

1017 ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds 

1018 inputs["ccdInputs"] = ccdInputs 

1019 

1020 if "sourceTableHandles" in inputs: 

1021 sourceTableHandles = inputs.pop("sourceTableHandles") 

1022 sourceTableHandleDict = {handle.dataId["visit"]: handle 

1023 for handle in sourceTableHandles} 

1024 inputs["sourceTableHandleDict"] = sourceTableHandleDict 

1025 if "finalizedSourceTableHandles" in inputs: 

1026 finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles") 

1027 finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle 

1028 for handle in finalizedSourceTableHandles} 

1029 inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict 

1030 else: 

1031 # Deprecated legacy version 

1032 # Filter out any visit catalog that is not coadd inputs 

1033 ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds 

1034 visitKey = ccdInputs.schema.find("visit").key 

1035 ccdKey = ccdInputs.schema.find("ccd").key 

1036 inputVisitIds = set() 

1037 ccdRecordsWcs = {} 

1038 for ccdRecord in ccdInputs: 

1039 visit = ccdRecord.get(visitKey) 

1040 ccd = ccdRecord.get(ccdKey) 

1041 inputVisitIds.add((visit, ccd)) 

1042 ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs() 

1043 

1044 inputCatalogsToKeep = [] 

1045 inputCatalogWcsUpdate = [] 

1046 for i, dataRef in enumerate(inputRefs.visitCatalogs): 

1047 key = (dataRef.dataId['visit'], dataRef.dataId['detector']) 

1048 if key in inputVisitIds: 

1049 inputCatalogsToKeep.append(inputs['visitCatalogs'][i]) 

1050 inputCatalogWcsUpdate.append(ccdRecordsWcs[key]) 

1051 inputs['visitCatalogs'] = inputCatalogsToKeep 

1052 inputs['wcsUpdates'] = inputCatalogWcsUpdate 

1053 inputs['ccdInputs'] = ccdInputs 

1054 

1055 outputs = self.run(**inputs) 

1056 butlerQC.put(outputs, outputRefs) 

1057 

1058 def runDataRef(self, patchRef, psfCache=100): 

1059 """! 

1060 @brief Deblend and measure. 

1061 

1062 @param[in] patchRef: Patch reference. 

1063 

1064 Set 'is-primary' and related flags. Propagate flags 

1065 from individual visits. Optionally match the sources to a reference catalog and write the matches. 

1066 Finally, write the deblended sources and measurements out. 

1067 """ 

1068 if self.config.hasFakes: 

1069 coaddType = "fakes_" + self.config.coaddName 

1070 else: 

1071 coaddType = self.config.coaddName 

1072 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True) 

1073 exposure.getPsf().setCacheCapacity(psfCache) 

1074 sources = self.readSources(patchRef) 

1075 table = sources.getTable() 

1076 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog. 

1077 skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef) 

1078 

1079 if self.config.doPropagateFlags: 

1080 ccdInputs = self.propagateFlags.getCcdInputs(exposure) 

1081 else: 

1082 ccdInputs = None 

1083 

1084 expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, includeBand=False, 

1085 log=self.log) 

1086 results = self.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=expId, 

1087 ccdInputs=ccdInputs, butler=patchRef.getButler()) 

1088 

1089 if self.config.doMatchSources: 

1090 self.writeMatches(patchRef, results) 

1091 self.write(patchRef, results.outputSources) 

1092 

1093 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None, 

1094 butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None): 

1095 """Run measurement algorithms on the input exposure, and optionally populate the 

1096 resulting catalog with extra information. 

1097 

1098 Parameters 

1099 ---------- 

1100 exposure : `lsst.afw.exposure.Exposure` 

1101 The input exposure on which measurements are to be performed 

1102 sources : `lsst.afw.table.SourceCatalog` 

1103 A catalog built from the results of merged detections, or 

1104 deblender outputs. 

1105 skyInfo : `lsst.pipe.base.Struct` 

1106 A struct containing information about the position of the input exposure within 

1107 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box 

1108 exposureId : `int` or `bytes` 

1109 packed unique number or bytes unique to the input exposure 

1110 ccdInputs : `lsst.afw.table.ExposureCatalog` 

1111 Catalog containing information on the individual visits which went into making 

1112 the coadd. 

1113 sourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`] 

1114 Dict for sourceTable_visit handles (key is visit) for propagating flags. 

1115 These tables are derived from the ``CalibrateTask`` sources, and contain 

1116 astrometry and photometry flags, and optionally PSF flags. 

1117 finalizedSourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`], optional 

1118 Dict for finalized_src_table handles (key is visit) for propagating flags. 

1119 These tables are derived from ``FinalizeCalibrationTask`` and contain 

1120 PSF flags from the finalized PSF estimation. 

1121 visitCatalogs : list of `lsst.afw.table.SourceCatalogs` 

1122 A list of source catalogs corresponding to measurements made on the individual 

1123 visits which went into the input exposure. If None and butler is `None` then 

1124 the task cannot propagate visit flags to the output catalog. 

1125 Deprecated, to be removed with PropagateVisitFlagsTask. 

1126 wcsUpdates : list of `lsst.afw.geom.SkyWcs` 

1127 If visitCatalogs is not `None` this should be a list of wcs objects which correspond 

1128 to the input visits. Used to put all coordinates to common system. If `None` and 

1129 butler is `None` then the task cannot propagate visit flags to the output catalog. 

1130 Deprecated, to be removed with PropagateVisitFlagsTask. 

1131 butler : `lsst.daf.persistence.Butler` 

1132 A gen2 butler used to load visit catalogs. 

1133 Deprecated, to be removed with Gen2. 

1134 

1135 Returns 

1136 ------- 

1137 results : `lsst.pipe.base.Struct` 

1138 Results of running measurement task. Will contain the catalog in the 

1139 sources attribute. Optionally will have results of matching to a 

1140 reference catalog in the matchResults attribute, and denormalized 

1141 matches in the denormMatches attribute. 
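
For example (sketch), after a successful call the measured catalog can be
persisted directly:

    results = task.run(exposure, sources, skyInfo, exposureId)
    results.outputSources.writeFits("meas.fits")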

1142 """ 

1143 self.measurement.run(sources, exposure, exposureId=exposureId) 

1144 

1145 if self.config.doApCorr: 

1146 self.applyApCorr.run( 

1147 catalog=sources, 

1148 apCorrMap=exposure.getInfo().getApCorrMap() 

1149 ) 

1150 

1151 # TODO DM-11568: this contiguous check-and-copy could go away if we 

1152 # reserve enough space during SourceDetection and/or SourceDeblend. 

1153 # NOTE: sourceSelectors require contiguous catalogs, so ensure 

1154 # contiguity now, so views are preserved from here on. 

1155 if not sources.isContiguous(): 

1156 sources = sources.copy(deep=True) 

1157 

1158 if self.config.doRunCatalogCalculation: 

1159 self.catalogCalculation.run(sources) 

1160 

1161 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo, 

1162 patchInfo=skyInfo.patchInfo) 

1163 if self.config.doPropagateFlags: 

1164 if self.config.propagateFlags.target == PropagateSourceFlagsTask: 

1165 # New version 

1166 self.propagateFlags.run( 

1167 sources, 

1168 ccdInputs, 

1169 sourceTableHandleDict, 

1170 finalizedSourceTableHandleDict 

1171 ) 

1172 else: 

1173 # Legacy deprecated version 

1174 self.propagateFlags.run( 

1175 butler, 

1176 sources, 

1177 ccdInputs, 

1178 exposure.getWcs(), 

1179 visitCatalogs, 

1180 wcsUpdates 

1181 ) 

1182 

1183 results = Struct() 

1184 

1185 if self.config.doMatchSources: 

1186 matchResult = self.match.run(sources, exposure.getInfo().getFilterLabel().bandLabel) 

1187 matches = afwTable.packMatches(matchResult.matches) 

1188 matches.table.setMetadata(matchResult.matchMeta) 

1189 results.matchResult = matches 

1190 if self.config.doWriteMatchesDenormalized: 

1191 if matchResult.matches: 

1192 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta) 

1193 else: 

1194 self.log.warning("No matches, so generating dummy denormalized matches file") 

1195 denormMatches = afwTable.BaseCatalog(afwTable.Schema()) 

1196 denormMatches.setMetadata(PropertyList()) 

1197 denormMatches.getMetadata().add("COMMENT", 

1198 "This catalog is empty because no matches were found.") 

1199 results.denormMatches = denormMatches 

1200 results.denormMatches = denormMatches 

1201 

1202 results.outputSources = sources 

1203 return results 

1204 

1205 def readSources(self, dataRef): 

1206 """! 

1207 @brief Read input sources. 

1208 

1209 @param[in] dataRef: Data reference for catalog of merged detections 

1210 @return List of sources in merged catalog 

1211 

1212 We also need to add columns to hold the measurements we're about to make 

1213 so we can measure in-place. 

1214 """ 

1215 merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True) 

1216 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId) 

1217 idFactory = self.makeIdFactory(dataRef) 

1218 for s in merged: 

1219 idFactory.notify(s.getId()) 

1220 table = afwTable.SourceTable.make(self.schema, idFactory) 

1221 sources = afwTable.SourceCatalog(table) 

1222 sources.extend(merged, self.schemaMapper) 

1223 return sources 

1224 

1225 def writeMatches(self, dataRef, results): 

1226 """! 

1227 @brief Write matches of the sources to the astrometric reference catalog. 

1228 

1229 @param[in] dataRef: data reference 

1230 @param[in] results: results struct from run method 

1231 """ 

1232 if hasattr(results, "matchResult"): 

1233 dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch") 

1234 if hasattr(results, "denormMatches"): 

1235 dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull") 

1236 

1237 def write(self, dataRef, sources): 

1238 """! 

1239 @brief Write the source catalog. 

1240 

1241 @param[in] dataRef: data reference 

1242 @param[in] sources: source catalog 

1243 """ 

1244 dataRef.put(sources, self.config.coaddName + "Coadd_meas") 

1245 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)