Coverage for python/lsst/pipe/tasks/multiBand.py: 27%

396 statements  

« prev     ^ index     » next       coverage.py v6.5.0, created at 2022-12-01 21:12 +0000

1#!/usr/bin/env python 

2# 

3# LSST Data Management System 

4# Copyright 2008-2015 AURA/LSST. 

5# 

6# This product includes software developed by the 

7# LSST Project (http://www.lsst.org/). 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the LSST License Statement and 

20# the GNU General Public License along with this program. If not, 

21# see <https://www.lsstcorp.org/LegalNotices/>. 

22# 

23from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer 

24from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner, 

25 PipelineTask, PipelineTaskConfig, PipelineTaskConnections) 

26import lsst.pipe.base.connectionTypes as cT 

27from lsst.pex.config import Config, Field, ConfigurableField 

28from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader 

29from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask 

30from lsst.meas.deblender import SourceDeblendTask 

31from lsst.meas.extensions.scarlet import ScarletDeblendTask 

32from lsst.pipe.tasks.coaddBase import getSkyInfo 

33from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask 

34from lsst.meas.astrom import DirectMatchTask, denormalizeMatches 

35from lsst.pipe.tasks.fakes import BaseFakeSourcesTask 

36from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask 

37from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask 

38import lsst.afw.image as afwImage 

39import lsst.afw.table as afwTable 

40import lsst.afw.math as afwMath 

41from lsst.daf.base import PropertyList 

42from lsst.skymap import BaseSkyMap 

43from lsst.obs.base import ExposureIdInfo 

44 

45# NOTE: these imports are a convenience so multiband users only have to import this file. 

46from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401 

47from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401 

48from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401 

49from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory # noqa: F401 

50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401 

51from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401 

52from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401 

53from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401 

54 

55 

56""" 

57New set types: 

58* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter) 

59* deepCoadd_mergeDet: merged detections (tract, patch) 

60* deepCoadd_meas: measurements of merged detections (tract, patch, filter) 

61* deepCoadd_ref: reference sources (tract, patch) 

62All of these have associated *_schema catalogs that require no data ID and hold no records. 

63 

64In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in 

65the mergeDet, meas, and ref dataset Footprints: 

66* deepCoadd_peak_schema 

67""" 

68 

69 

70############################################################################################################## 

71class DetectCoaddSourcesConnections(PipelineTaskConnections, 

72 dimensions=("tract", "patch", "band", "skymap"), 

73 defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}): 

74 detectionSchema = cT.InitOutput( 

75 doc="Schema of the detection catalog", 

76 name="{outputCoaddName}Coadd_det_schema", 

77 storageClass="SourceCatalog", 

78 ) 

79 exposure = cT.Input( 

80 doc="Exposure on which detections are to be performed", 

81 name="{inputCoaddName}Coadd", 

82 storageClass="ExposureF", 

83 dimensions=("tract", "patch", "band", "skymap") 

84 ) 

85 outputBackgrounds = cT.Output( 

86 doc="Output Backgrounds used in detection", 

87 name="{outputCoaddName}Coadd_calexp_background", 

88 storageClass="Background", 

89 dimensions=("tract", "patch", "band", "skymap") 

90 ) 

91 outputSources = cT.Output( 

92 doc="Detected sources catalog", 

93 name="{outputCoaddName}Coadd_det", 

94 storageClass="SourceCatalog", 

95 dimensions=("tract", "patch", "band", "skymap") 

96 ) 

97 outputExposure = cT.Output( 

98 doc="Exposure post detection", 

99 name="{outputCoaddName}Coadd_calexp", 

100 storageClass="ExposureF", 

101 dimensions=("tract", "patch", "band", "skymap") 

102 ) 

103 

104 

105class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections): 

106 """! 

107 @anchor DetectCoaddSourcesConfig_ 

108 

109 @brief Configuration parameters for the DetectCoaddSourcesTask 

110 """ 

111 doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?") 

112 scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling") 

113 detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection") 

114 coaddName = Field(dtype=str, default="deep", doc="Name of coadd") 

115 doInsertFakes = Field(dtype=bool, default=False, 

116 doc="Run fake sources injection task") 

117 insertFakes = ConfigurableField(target=BaseFakeSourcesTask, 

118 doc="Injection of fake sources for testing " 

119 "purposes (must be retargeted)") 

120 hasFakes = Field( 

121 dtype=bool, 

122 default=False, 

123 doc="Should be set to True if fake sources have been inserted into the input data." 

124 ) 

125 

126 def setDefaults(self): 

127 super().setDefaults() 

128 self.detection.thresholdType = "pixel_stdev" 

129 self.detection.isotropicGrow = True 

130 # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic 

131 self.detection.reEstimateBackground = False 

132 self.detection.background.useApprox = False 

133 self.detection.background.binSize = 4096 

134 self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER' 

135 self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender 

136 

137## @addtogroup LSST_task_documentation 

138## @{ 

139## @page DetectCoaddSourcesTask 

140## @ref DetectCoaddSourcesTask_ "DetectCoaddSourcesTask" 

141## @copybrief DetectCoaddSourcesTask 

142## @} 

143 

144 

145class DetectCoaddSourcesTask(PipelineTask, CmdLineTask): 

146 r"""! 

147 @anchor DetectCoaddSourcesTask_ 

148 

149 @brief Detect sources on a coadd 

150 

151 @section pipe_tasks_multiBand_Contents Contents 

152 

153 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose 

154 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize 

155 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run 

156 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config 

157 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug 

158 - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example 

159 

160 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description 

161 

162 Command-line task that detects sources on a coadd of exposures obtained with a single filter. 

163 

164 Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise 

165 properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane 

166 in the coadd to match the observed variance. This is an approximate approach -- strictly, we should 

167 propagate the full covariance matrix -- but it is simple and works well in practice. 

168 

169 After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref 

170 SourceDetectionTask_ "detection" subtask. 

171 

172 @par Inputs: 

173 deepCoadd{tract,patch,filter}: ExposureF 

174 @par Outputs: 

175 deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints) 

176 @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input 

177 exposure (ExposureF) 

178 @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList 

179 @par Data Unit: 

180 tract, patch, filter 

181 

182 DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask. 

183 You can retarget this subtask if you wish. 

184 

185 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization 

186 

187 @copydoc \_\_init\_\_ 

188 

189 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task 

190 

191 @copydoc run 

192 

193 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters 

194 

195 See @ref DetectCoaddSourcesConfig_ "DetectSourcesConfig" 

196 

197 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables 

198 

199 The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a 

200 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py 

201 files. 

202 

203 DetectCoaddSourcesTask has no debug variables of its own because it relegates all the work to 

204 @ref SourceDetectionTask_ "SourceDetectionTask"; see the documetation for 

205 @ref SourceDetectionTask_ "SourceDetectionTask" for further information. 

206 

207 @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example 

208 of using DetectCoaddSourcesTask 

209 

210 DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of 

211 the task is to update the background, detect all sources in a single band and generate a set of parent 

212 footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and, 

213 eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data 

214 reference to the coadd to be processed. A list of the available optional arguments can be obtained by 

215 calling detectCoaddSources.py with the `--help` command line argument: 

216 @code 

217 detectCoaddSources.py --help 

218 @endcode 

219 

220 To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we 

221 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed 

222 steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows: 

223 @code 

224 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I 

225 @endcode 

226 that will process the HSC-I band data. The results are written to 

227 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`. 

228 

229 It is also necessary to run: 

230 @code 

231 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R 

232 @endcode 

233 to generate the sources catalogs for the HSC-R band required by the next step in the multi-band 

234 processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask". 

235 """ 

236 _DefaultName = "detectCoaddSources" 

237 ConfigClass = DetectCoaddSourcesConfig 

238 getSchemaCatalogs = _makeGetSchemaCatalogs("det") 

239 makeIdFactory = _makeMakeIdFactory("CoaddId") 

240 

241 @classmethod 

242 def _makeArgumentParser(cls): 

243 parser = ArgumentParser(name=cls._DefaultName) 

244 parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r", 

245 ContainerClass=ExistingCoaddDataIdContainer) 

246 return parser 

247 

248 def __init__(self, schema=None, **kwargs): 

249 """! 

250 @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask. 

251 

252 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__): 

253 

254 @param[in] schema: initial schema for the output catalog, modified-in place to include all 

255 fields set by this task. If None, the source minimal schema will be used. 

256 @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__ 

257 """ 

258 # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree 

259 # call structure has been reviewed carefully to be sure super will work as intended. 

260 super().__init__(**kwargs) 

261 if schema is None: 

262 schema = afwTable.SourceTable.makeMinimalSchema() 

263 if self.config.doInsertFakes: 

264 self.makeSubtask("insertFakes") 

265 self.schema = schema 

266 self.makeSubtask("detection", schema=self.schema) 

267 if self.config.doScaleVariance: 

268 self.makeSubtask("scaleVariance") 

269 

270 self.detectionSchema = afwTable.SourceCatalog(self.schema) 

271 

272 def runDataRef(self, patchRef): 

273 """! 

274 @brief Run detection on a coadd. 

275 

276 Invokes @ref run and then uses @ref write to output the 

277 results. 

278 

279 @param[in] patchRef: data reference for patch 

280 """ 

281 if self.config.hasFakes: 

282 exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True) 

283 else: 

284 exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True) 

285 expId = int(patchRef.get(self.config.coaddName + "CoaddId")) 

286 results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId) 

287 self.write(results, patchRef) 

288 return results 

289 

290 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

291 inputs = butlerQC.get(inputRefs) 

292 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band") 

293 inputs["idFactory"] = exposureIdInfo.makeSourceIdFactory() 

294 inputs["expId"] = exposureIdInfo.expId 

295 outputs = self.run(**inputs) 

296 butlerQC.put(outputs, outputRefs) 

297 

298 def run(self, exposure, idFactory, expId): 

299 """! 

300 @brief Run detection on an exposure. 

301 

302 First scale the variance plane to match the observed variance 

303 using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to 

304 detect sources. 

305 

306 @param[in,out] exposure: Exposure on which to detect (may be backround-subtracted and scaled, 

307 depending on configuration). 

308 @param[in] idFactory: IdFactory to set source identifiers 

309 @param[in] expId: Exposure identifier (integer) for RNG seed 

310 

311 @return a pipe.base.Struct with fields 

312 - sources: catalog of detections 

313 - backgrounds: list of backgrounds 

314 """ 

315 if self.config.doScaleVariance: 

316 varScale = self.scaleVariance.run(exposure.maskedImage) 

317 exposure.getMetadata().add("VARIANCE_SCALE", varScale) 

318 backgrounds = afwMath.BackgroundList() 

319 if self.config.doInsertFakes: 

320 self.insertFakes.run(exposure, background=backgrounds) 

321 table = afwTable.SourceTable.make(self.schema, idFactory) 

322 detections = self.detection.run(table, exposure, expId=expId) 

323 sources = detections.sources 

324 fpSets = detections.fpSets 

325 if hasattr(fpSets, "background") and fpSets.background: 

326 for bg in fpSets.background: 

327 backgrounds.append(bg) 

328 return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure) 

329 

330 def write(self, results, patchRef): 

331 """! 

332 @brief Write out results from runDetection. 

333 

334 @param[in] exposure: Exposure to write out 

335 @param[in] results: Struct returned from runDetection 

336 @param[in] patchRef: data reference for patch 

337 """ 

338 coaddName = self.config.coaddName + "Coadd" 

339 patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background") 

340 patchRef.put(results.outputSources, coaddName + "_det") 

341 if self.config.hasFakes: 

342 patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp") 

343 else: 

344 patchRef.put(results.outputExposure, coaddName + "_calexp") 

345 

346############################################################################################################## 

347 

348 

349class DeblendCoaddSourcesConfig(Config): 

350 """DeblendCoaddSourcesConfig 

351 

352 Configuration parameters for the `DeblendCoaddSourcesTask`. 

353 """ 

354 singleBandDeblend = ConfigurableField(target=SourceDeblendTask, 

355 doc="Deblend sources separately in each band") 

356 multiBandDeblend = ConfigurableField(target=ScarletDeblendTask, 

357 doc="Deblend sources simultaneously across bands") 

358 simultaneous = Field(dtype=bool, 

359 default=True, 

360 doc="Simultaneously deblend all bands? " 

361 "True uses `multibandDeblend` while False uses `singleBandDeblend`") 

362 coaddName = Field(dtype=str, default="deep", doc="Name of coadd") 

363 hasFakes = Field(dtype=bool, 

364 default=False, 

365 doc="Should be set to True if fake sources have been inserted into the input data.") 

366 

367 def setDefaults(self): 

368 Config.setDefaults(self) 

369 self.singleBandDeblend.propagateAllPeaks = True 

370 

371 

372class DeblendCoaddSourcesRunner(MergeSourcesRunner): 

373 """Task runner for the `MergeSourcesTask` 

374 

375 Required because the run method requires a list of 

376 dataRefs rather than a single dataRef. 

377 """ 

378 @staticmethod 

379 def getTargetList(parsedCmd, **kwargs): 

380 """Provide a list of patch references for each patch, tract, filter combo. 

381 

382 Parameters 

383 ---------- 

384 parsedCmd: 

385 The parsed command 

386 kwargs: 

387 Keyword arguments passed to the task 

388 

389 Returns 

390 ------- 

391 targetList: list 

392 List of tuples, where each tuple is a (dataRef, kwargs) pair. 

393 """ 

394 refDict = MergeSourcesRunner.buildRefDict(parsedCmd) 

395 kwargs["psfCache"] = parsedCmd.psfCache 

396 return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()] 

397 

398 

399class DeblendCoaddSourcesTask(CmdLineTask): 

400 """Deblend the sources in a merged catalog 

401 

402 Deblend sources from master catalog in each coadd. 

403 This can either be done separately in each band using the HSC-SDSS deblender 

404 (`DeblendCoaddSourcesTask.config.simultaneous==False`) 

405 or use SCARLET to simultaneously fit the blend in all bands 

406 (`DeblendCoaddSourcesTask.config.simultaneous==True`). 

407 The task will set its own `self.schema` atribute to the `Schema` of the 

408 output deblended catalog. 

409 This will include all fields from the input `Schema`, as well as additional fields 

410 from the deblender. 

411 

412 `pipe.tasks.multiband.DeblendCoaddSourcesTask Description 

413 --------------------------------------------------------- 

414 ` 

415 

416 Parameters 

417 ---------- 

418 butler: `Butler` 

419 Butler used to read the input schemas from disk or 

420 construct the reference catalog loader, if `schema` or `peakSchema` or 

421 schema: `Schema` 

422 The schema of the merged detection catalog as an input to this task. 

423 peakSchema: `Schema` 

424 The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog 

425 """ 

426 ConfigClass = DeblendCoaddSourcesConfig 

427 RunnerClass = DeblendCoaddSourcesRunner 

428 _DefaultName = "deblendCoaddSources" 

429 makeIdFactory = _makeMakeIdFactory("MergedCoaddId") 

430 

431 @classmethod 

432 def _makeArgumentParser(cls): 

433 parser = ArgumentParser(name=cls._DefaultName) 

434 parser.add_id_argument("--id", "deepCoadd_calexp", 

435 help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i", 

436 ContainerClass=ExistingCoaddDataIdContainer) 

437 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache") 

438 return parser 

439 

440 def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs): 

441 CmdLineTask.__init__(self, **kwargs) 

442 if schema is None: 

443 assert butler is not None, "Neither butler nor schema is defined" 

444 schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema 

445 self.schemaMapper = afwTable.SchemaMapper(schema) 

446 self.schemaMapper.addMinimalSchema(schema) 

447 self.schema = self.schemaMapper.getOutputSchema() 

448 if peakSchema is None: 

449 assert butler is not None, "Neither butler nor peakSchema is defined" 

450 peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema 

451 

452 if self.config.simultaneous: 

453 self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema) 

454 else: 

455 self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema) 

456 

457 def getSchemaCatalogs(self): 

458 """Return a dict of empty catalogs for each catalog dataset produced by this task. 

459 

460 Returns 

461 ------- 

462 result: dict 

463 Dictionary of empty catalogs, with catalog names as keys. 

464 """ 

465 catalog = afwTable.SourceCatalog(self.schema) 

466 return {self.config.coaddName + "Coadd_deblendedFlux": catalog, 

467 self.config.coaddName + "Coadd_deblendedModel": catalog} 

468 

469 def runDataRef(self, patchRefList, psfCache=100): 

470 """Deblend the patch 

471 

472 Deblend each source simultaneously or separately 

473 (depending on `DeblendCoaddSourcesTask.config.simultaneous`). 

474 Set `is-primary` and related flags. 

475 Propagate flags from individual visits. 

476 Write the deblended sources out. 

477 

478 Parameters 

479 ---------- 

480 patchRefList: list 

481 List of data references for each filter 

482 """ 

483 

484 if self.config.hasFakes: 

485 coaddType = "fakes_" + self.config.coaddName 

486 else: 

487 coaddType = self.config.coaddName 

488 

489 if self.config.simultaneous: 

490 # Use SCARLET to simultaneously deblend across filters 

491 filters = [] 

492 exposures = [] 

493 for patchRef in patchRefList: 

494 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True) 

495 filter = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True) 

496 filters.append(filter.bandLabel) 

497 exposures.append(exposure) 

498 # The input sources are the same for all bands, since it is a merged catalog 

499 sources = self.readSources(patchRef) 

500 exposure = afwImage.MultibandExposure.fromExposures(filters, exposures) 

501 templateCatalogs = self.multiBandDeblend.run(exposure, sources) 

502 for n in range(len(patchRefList)): 

503 self.write(patchRefList[n], templateCatalogs[filters[n]]) 

504 else: 

505 # Use the singeband deblender to deblend each band separately 

506 for patchRef in patchRefList: 

507 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True) 

508 exposure.getPsf().setCacheCapacity(psfCache) 

509 sources = self.readSources(patchRef) 

510 self.singleBandDeblend.run(exposure, sources) 

511 self.write(patchRef, sources) 

512 

513 def readSources(self, dataRef): 

514 """Read merged catalog 

515 

516 Read the catalog of merged detections and create a catalog 

517 in a single band. 

518 

519 Parameters 

520 ---------- 

521 dataRef: data reference 

522 Data reference for catalog of merged detections 

523 

524 Returns 

525 ------- 

526 sources: `SourceCatalog` 

527 List of sources in merged catalog 

528 

529 We also need to add columns to hold the measurements we're about to make 

530 so we can measure in-place. 

531 """ 

532 merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True) 

533 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId) 

534 idFactory = self.makeIdFactory(dataRef) 

535 for s in merged: 

536 idFactory.notify(s.getId()) 

537 table = afwTable.SourceTable.make(self.schema, idFactory) 

538 sources = afwTable.SourceCatalog(table) 

539 sources.extend(merged, self.schemaMapper) 

540 return sources 

541 

542 def write(self, dataRef, sources): 

543 """Write the source catalog(s) 

544 

545 Parameters 

546 ---------- 

547 dataRef: Data Reference 

548 Reference to the output catalog. 

549 sources: `SourceCatalog` 

550 Flux conserved sources to write to file. 

551 If using the single band deblender, this is the catalog 

552 generated. 

553 template_sources: `SourceCatalog` 

554 Source catalog using the multiband template models 

555 as footprints. 

556 """ 

557 dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux") 

558 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId) 

559 

560 def writeMetadata(self, dataRefList): 

561 """Write the metadata produced from processing the data. 

562 Parameters 

563 ---------- 

564 dataRefList 

565 List of Butler data references used to write the metadata. 

566 The metadata is written to dataset type `CmdLineTask._getMetadataName`. 

567 """ 

568 for dataRef in dataRefList: 

569 try: 

570 metadataName = self._getMetadataName() 

571 if metadataName is not None: 

572 dataRef.put(self.getFullMetadata(), metadataName) 

573 except Exception as e: 

574 self.log.warning("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e) 

575 

576 def getExposureId(self, dataRef): 

577 """Get the ExposureId from a data reference 

578 """ 

579 return int(dataRef.get(self.config.coaddName + "CoaddId")) 

580 

581 

582class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections, 

583 dimensions=("tract", "patch", "band", "skymap"), 

584 defaultTemplates={"inputCoaddName": "deep", 

585 "outputCoaddName": "deep"}): 

586 inputSchema = cT.InitInput( 

587 doc="Input schema for measure merged task produced by a deblender or detection task", 

588 name="{inputCoaddName}Coadd_deblendedFlux_schema", 

589 storageClass="SourceCatalog" 

590 ) 

591 outputSchema = cT.InitOutput( 

592 doc="Output schema after all new fields are added by task", 

593 name="{inputCoaddName}Coadd_meas_schema", 

594 storageClass="SourceCatalog" 

595 ) 

596 refCat = cT.PrerequisiteInput( 

597 doc="Reference catalog used to match measured sources against known sources", 

598 name="ref_cat", 

599 storageClass="SimpleCatalog", 

600 dimensions=("skypix",), 

601 deferLoad=True, 

602 multiple=True 

603 ) 

604 exposure = cT.Input( 

605 doc="Input coadd image", 

606 name="{inputCoaddName}Coadd_calexp", 

607 storageClass="ExposureF", 

608 dimensions=("tract", "patch", "band", "skymap") 

609 ) 

610 skyMap = cT.Input( 

611 doc="SkyMap to use in processing", 

612 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME, 

613 storageClass="SkyMap", 

614 dimensions=("skymap",), 

615 ) 

616 visitCatalogs = cT.Input( 

617 doc="Source catalogs for visits which overlap input tract, patch, band. Will be " 

618 "further filtered in the task for the purpose of propagating flags from image calibration " 

619 "and characterization to codd objects", 

620 name="src", 

621 dimensions=("instrument", "visit", "detector"), 

622 storageClass="SourceCatalog", 

623 multiple=True 

624 ) 

625 inputCatalog = cT.Input( 

626 doc=("Name of the input catalog to use." 

627 "If the single band deblender was used this should be 'deblendedFlux." 

628 "If the multi-band deblender was used this should be 'deblendedModel, " 

629 "or deblendedFlux if the multiband deblender was configured to output " 

630 "deblended flux catalogs. If no deblending was performed this should " 

631 "be 'mergeDet'"), 

632 name="{inputCoaddName}Coadd_deblendedFlux", 

633 storageClass="SourceCatalog", 

634 dimensions=("tract", "patch", "band", "skymap"), 

635 ) 

636 outputSources = cT.Output( 

637 doc="Source catalog containing all the measurement information generated in this task", 

638 name="{outputCoaddName}Coadd_meas", 

639 dimensions=("tract", "patch", "band", "skymap"), 

640 storageClass="SourceCatalog", 

641 ) 

642 matchResult = cT.Output( 

643 doc="Match catalog produced by configured matcher, optional on doMatchSources", 

644 name="{outputCoaddName}Coadd_measMatch", 

645 dimensions=("tract", "patch", "band", "skymap"), 

646 storageClass="Catalog", 

647 ) 

648 denormMatches = cT.Output( 

649 doc="Denormalized Match catalog produced by configured matcher, optional on " 

650 "doWriteMatchesDenormalized", 

651 name="{outputCoaddName}Coadd_measMatchFull", 

652 dimensions=("tract", "patch", "band", "skymap"), 

653 storageClass="Catalog", 

654 ) 

655 

656 def __init__(self, *, config=None): 

657 super().__init__(config=config) 

658 if config.doPropagateFlags is False: 

659 self.inputs -= set(("visitCatalogs",)) 

660 

661 if config.doMatchSources is False: 

662 self.outputs -= set(("matchResult",)) 

663 

664 if config.doWriteMatchesDenormalized is False: 

665 self.outputs -= set(("denormMatches",)) 

666 

667 

668class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig, 

669 pipelineConnections=MeasureMergedCoaddSourcesConnections): 

670 """! 

671 @anchor MeasureMergedCoaddSourcesConfig_ 

672 

673 @brief Configuration parameters for the MeasureMergedCoaddSourcesTask 

674 """ 

675 inputCatalog = Field(dtype=str, default="deblendedFlux", 

676 doc=("Name of the input catalog to use." 

677 "If the single band deblender was used this should be 'deblendedFlux." 

678 "If the multi-band deblender was used this should be 'deblendedModel." 

679 "If no deblending was performed this should be 'mergeDet'")) 

680 measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement") 

681 setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch") 

682 doPropagateFlags = Field( 

683 dtype=bool, default=True, 

684 doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)" 

685 ) 

686 propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd") 

687 doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?") 

688 match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog") 

689 doWriteMatchesDenormalized = Field( 

690 dtype=bool, 

691 default=False, 

692 doc=("Write reference matches in denormalized format? " 

693 "This format uses more disk space, but is more convenient to read."), 

694 ) 

695 coaddName = Field(dtype=str, default="deep", doc="Name of coadd") 

696 psfCache = Field(dtype=int, default=100, doc="Size of psfCache") 

697 checkUnitsParseStrict = Field( 

698 doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'", 

699 dtype=str, 

700 default="raise", 

701 ) 

702 doApCorr = Field( 

703 dtype=bool, 

704 default=True, 

705 doc="Apply aperture corrections" 

706 ) 

707 applyApCorr = ConfigurableField( 

708 target=ApplyApCorrTask, 

709 doc="Subtask to apply aperture corrections" 

710 ) 

711 doRunCatalogCalculation = Field( 

712 dtype=bool, 

713 default=True, 

714 doc='Run catalogCalculation task' 

715 ) 

716 catalogCalculation = ConfigurableField( 

717 target=CatalogCalculationTask, 

718 doc="Subtask to run catalogCalculation plugins on catalog" 

719 ) 

720 

721 hasFakes = Field( 

722 dtype=bool, 

723 default=False, 

724 doc="Should be set to True if fake sources have been inserted into the input data." 

725 ) 

726 

727 @property 

728 def refObjLoader(self): 

729 return self.match.refObjLoader 

730 

731 def setDefaults(self): 

732 super().setDefaults() 

733 self.measurement.plugins.names |= ['base_InputCount', 

734 'base_Variance', 

735 'base_LocalPhotoCalib', 

736 'base_LocalWcs'] 

737 self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE', 

738 'INEXACT_PSF'] 

739 self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE', 

740 'INEXACT_PSF'] 

741 

742 def validate(self): 

743 super().validate() 

744 refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None) 

745 if refCatGen2 is not None and refCatGen2 != self.connections.refCat: 

746 raise ValueError( 

747 f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs " 

748 f"are different. These options must be kept in sync until Gen2 is retired." 

749 ) 

750 

751 

752## @addtogroup LSST_task_documentation 

753## @{ 

754## @page MeasureMergedCoaddSourcesTask 

755## @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask" 

756## @copybrief MeasureMergedCoaddSourcesTask 

757## @} 

758 

759 

760class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner): 

761 """Get the psfCache setting into MeasureMergedCoaddSourcesTask""" 

762 @staticmethod 

763 def getTargetList(parsedCmd, **kwargs): 

764 return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache) 

765 

766 

767class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask): 

768 r"""! 

769 @anchor MeasureMergedCoaddSourcesTask_ 

770 

771 @brief Deblend sources from master catalog in each coadd seperately and measure. 

772 

773 @section pipe_tasks_multiBand_Contents Contents 

774 

775 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose 

776 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize 

777 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run 

778 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config 

779 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug 

780 - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example 

781 

782 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description 

783 

784 Command-line task that uses peaks and footprints from a master catalog to perform deblending and 

785 measurement in each coadd. 

786 

787 Given a master input catalog of sources (peaks and footprints) or deblender outputs 

788 (including a HeavyFootprint in each band), measure each source on the 

789 coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a 

790 consistent set of child sources. 

791 

792 The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source 

793 properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit 

794 flags are propagated to the coadd sources. 

795 

796 Optionally, we can match the coadd sources to an external reference catalog. 

797 

798 @par Inputs: 

799 deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog 

800 @n deepCoadd_calexp{tract,patch,filter}: ExposureF 

801 @par Outputs: 

802 deepCoadd_meas{tract,patch,filter}: SourceCatalog 

803 @par Data Unit: 

804 tract, patch, filter 

805 

806 MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks: 

807 

808 <DL> 

809 <DT> @ref SingleFrameMeasurementTask_ "measurement" 

810 <DD> Measure source properties of deblended sources.</DD> 

811 <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags" 

812 <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are 

813 not at the edge of the field and that have either not been deblended or are the children of deblended 

814 sources</DD> 

815 <DT> @ref PropagateVisitFlagsTask_ "propagateFlags" 

816 <DD> Propagate flags set in individual visits to the coadd.</DD> 

817 <DT> @ref DirectMatchTask_ "match" 

818 <DD> Match input sources to a reference catalog (optional). 

819 </DD> 

820 </DL> 

821 These subtasks may be retargeted as required. 

822 

823 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization 

824 

825 @copydoc \_\_init\_\_ 

826 

827 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task 

828 

829 @copydoc run 

830 

831 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters 

832 

833 See @ref MeasureMergedCoaddSourcesConfig_ 

834 

835 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables 

836 

837 The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a 

838 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py 

839 files. 

840 

841 MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to 

842 the various sub-tasks. See the documetation for individual sub-tasks for more information. 

843 

844 @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using 

845 MeasureMergedCoaddSourcesTask 

846 

847 After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs. 

848 The next stage in the multi-band processing procedure will merge these measurements into a suitable 

849 catalog for driving forced photometry. 

850 

851 Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds 

852 to be processed. 

853 A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the 

854 `--help` command line argument: 

855 @code 

856 measureCoaddSources.py --help 

857 @endcode 

858 

859 To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we 

860 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished 

861 step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band 

862 coadd as follows: 

863 @code 

864 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I 

865 @endcode 

866 This will process the HSC-I band data. The results are written in 

867 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits 

868 

869 It is also necessary to run 

870 @code 

871 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R 

872 @endcode 

873 to generate the sources catalogs for the HSC-R band required by the next step in the multi-band 

874 procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask". 

875 """ 

876 _DefaultName = "measureCoaddSources" 

877 ConfigClass = MeasureMergedCoaddSourcesConfig 

878 RunnerClass = MeasureMergedCoaddSourcesRunner 

879 getSchemaCatalogs = _makeGetSchemaCatalogs("meas") 

880 makeIdFactory = _makeMakeIdFactory("MergedCoaddId") # The IDs we already have are of this type 

881 

882 @classmethod 

883 def _makeArgumentParser(cls): 

884 parser = ArgumentParser(name=cls._DefaultName) 

885 parser.add_id_argument("--id", "deepCoadd_calexp", 

886 help="data ID, e.g. --id tract=12345 patch=1,2 filter=r", 

887 ContainerClass=ExistingCoaddDataIdContainer) 

888 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache") 

889 return parser 

890 

891 def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None, 

892 **kwargs): 

893 """! 

894 @brief Initialize the task. 

895 

896 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__): 

897 @param[in] schema: the schema of the merged detection catalog used as input to this one 

898 @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog 

899 @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference 

900 catalog. May be None if the loader can be constructed from the butler argument or all steps 

901 requiring a reference catalog are disabled. 

902 @param[in] butler: a butler used to read the input schemas from disk or construct the reference 

903 catalog loader, if schema or peakSchema or refObjLoader is None 

904 

905 The task will set its own self.schema attribute to the schema of the output measurement catalog. 

906 This will include all fields from the input schema, as well as additional fields for all the 

907 measurements. 

908 """ 

909 super().__init__(**kwargs) 

910 self.deblended = self.config.inputCatalog.startswith("deblended") 

911 self.inputCatalog = "Coadd_" + self.config.inputCatalog 

912 if initInputs is not None: 

913 schema = initInputs['inputSchema'].schema 

914 if schema is None: 

915 assert butler is not None, "Neither butler nor schema is defined" 

916 schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema 

917 self.schemaMapper = afwTable.SchemaMapper(schema) 

918 self.schemaMapper.addMinimalSchema(schema) 

919 self.schema = self.schemaMapper.getOutputSchema() 

920 self.algMetadata = PropertyList() 

921 self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata) 

922 self.makeSubtask("setPrimaryFlags", schema=self.schema) 

923 if self.config.doMatchSources: 

924 self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader) 

925 if self.config.doPropagateFlags: 

926 self.makeSubtask("propagateFlags", schema=self.schema) 

927 self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict) 

928 if self.config.doApCorr: 

929 self.makeSubtask("applyApCorr", schema=self.schema) 

930 if self.config.doRunCatalogCalculation: 

931 self.makeSubtask("catalogCalculation", schema=self.schema) 

932 

933 self.outputSchema = afwTable.SourceCatalog(self.schema) 

934 

935 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

936 inputs = butlerQC.get(inputRefs) 

937 

938 refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat], 

939 inputs.pop('refCat'), config=self.config.refObjLoader, 

940 log=self.log) 

941 self.match.setRefObjLoader(refObjLoader) 

942 

943 # Set psfcache 

944 # move this to run after gen2 deprecation 

945 inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache) 

946 

947 # Get unique integer ID for IdFactory and RNG seeds 

948 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch") 

949 inputs['exposureId'] = exposureIdInfo.expId 

950 idFactory = exposureIdInfo.makeSourceIdFactory() 

951 # Transform inputCatalog 

952 table = afwTable.SourceTable.make(self.schema, idFactory) 

953 sources = afwTable.SourceCatalog(table) 

954 sources.extend(inputs.pop('inputCatalog'), self.schemaMapper) 

955 table = sources.getTable() 

956 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog. 

957 inputs['sources'] = sources 

958 

959 skyMap = inputs.pop('skyMap') 

960 tractNumber = inputRefs.inputCatalog.dataId['tract'] 

961 tractInfo = skyMap[tractNumber] 

962 patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch']) 

963 skyInfo = Struct( 

964 skyMap=skyMap, 

965 tractInfo=tractInfo, 

966 patchInfo=patchInfo, 

967 wcs=tractInfo.getWcs(), 

968 bbox=patchInfo.getOuterBBox() 

969 ) 

970 inputs['skyInfo'] = skyInfo 

971 

972 if self.config.doPropagateFlags: 

973 # Filter out any visit catalog that is not coadd inputs 

974 ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds 

975 visitKey = ccdInputs.schema.find("visit").key 

976 ccdKey = ccdInputs.schema.find("ccd").key 

977 inputVisitIds = set() 

978 ccdRecordsWcs = {} 

979 for ccdRecord in ccdInputs: 

980 visit = ccdRecord.get(visitKey) 

981 ccd = ccdRecord.get(ccdKey) 

982 inputVisitIds.add((visit, ccd)) 

983 ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs() 

984 

985 inputCatalogsToKeep = [] 

986 inputCatalogWcsUpdate = [] 

987 for i, dataRef in enumerate(inputRefs.visitCatalogs): 

988 key = (dataRef.dataId['visit'], dataRef.dataId['detector']) 

989 if key in inputVisitIds: 

990 inputCatalogsToKeep.append(inputs['visitCatalogs'][i]) 

991 inputCatalogWcsUpdate.append(ccdRecordsWcs[key]) 

992 inputs['visitCatalogs'] = inputCatalogsToKeep 

993 inputs['wcsUpdates'] = inputCatalogWcsUpdate 

994 inputs['ccdInputs'] = ccdInputs 

995 

996 outputs = self.run(**inputs) 

997 butlerQC.put(outputs, outputRefs) 

998 

999 def runDataRef(self, patchRef, psfCache=100): 

1000 """! 

1001 @brief Deblend and measure. 

1002 

1003 @param[in] patchRef: Patch reference. 

1004 

1005 Set 'is-primary' and related flags. Propagate flags 

1006 from individual visits. Optionally match the sources to a reference catalog and write the matches. 

1007 Finally, write the deblended sources and measurements out. 

1008 """ 

1009 if self.config.hasFakes: 

1010 coaddType = "fakes_" + self.config.coaddName 

1011 else: 

1012 coaddType = self.config.coaddName 

1013 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True) 

1014 exposure.getPsf().setCacheCapacity(psfCache) 

1015 sources = self.readSources(patchRef) 

1016 table = sources.getTable() 

1017 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog. 

1018 skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef) 

1019 

1020 if self.config.doPropagateFlags: 

1021 ccdInputs = self.propagateFlags.getCcdInputs(exposure) 

1022 else: 

1023 ccdInputs = None 

1024 

1025 results = self.run(exposure=exposure, sources=sources, 

1026 ccdInputs=ccdInputs, 

1027 skyInfo=skyInfo, butler=patchRef.getButler(), 

1028 exposureId=self.getExposureId(patchRef)) 

1029 

1030 if self.config.doMatchSources: 

1031 self.writeMatches(patchRef, results) 

1032 self.write(patchRef, results.outputSources) 

1033 

1034 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None, 

1035 butler=None): 

1036 """Run measurement algorithms on the input exposure, and optionally populate the 

1037 resulting catalog with extra information. 

1038 

1039 Parameters 

1040 ---------- 

1041 exposure : `lsst.afw.exposure.Exposure` 

1042 The input exposure on which measurements are to be performed 

1043 sources : `lsst.afw.table.SourceCatalog` 

1044 A catalog built from the results of merged detections, or 

1045 deblender outputs. 

1046 skyInfo : `lsst.pipe.base.Struct` 

1047 A struct containing information about the position of the input exposure within 

1048 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box 

1049 exposureId : `int` or `bytes` 

1050 packed unique number or bytes unique to the input exposure 

1051 ccdInputs : `lsst.afw.table.ExposureCatalog` 

1052 Catalog containing information on the individual visits which went into making 

1053 the exposure 

1054 visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None` 

1055 A list of source catalogs corresponding to measurements made on the individual 

1056 visits which went into the input exposure. If None and butler is `None` then 

1057 the task cannot propagate visit flags to the output catalog. 

1058 wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None` 

1059 If visitCatalogs is not `None` this should be a list of wcs objects which correspond 

1060 to the input visits. Used to put all coordinates to common system. If `None` and 

1061 butler is `None` then the task cannot propagate visit flags to the output catalog. 

1062 butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler` 

1063 Either a gen2 or gen3 butler used to load visit catalogs 

1064 

1065 Returns 

1066 ------- 

1067 results : `lsst.pipe.base.Struct` 

1068 Results of running measurement task. Will contain the catalog in the 

1069 sources attribute. Optionally will have results of matching to a 

1070 reference catalog in the matchResults attribute, and denormalized 

1071 matches in the denormMatches attribute. 

1072 """ 

1073 self.measurement.run(sources, exposure, exposureId=exposureId) 

1074 

1075 if self.config.doApCorr: 

1076 self.applyApCorr.run( 

1077 catalog=sources, 

1078 apCorrMap=exposure.getInfo().getApCorrMap() 

1079 ) 

1080 

1081 # TODO DM-11568: this contiguous check-and-copy could go away if we 

1082 # reserve enough space during SourceDetection and/or SourceDeblend. 

1083 # NOTE: sourceSelectors require contiguous catalogs, so ensure 

1084 # contiguity now, so views are preserved from here on. 

1085 if not sources.isContiguous(): 

1086 sources = sources.copy(deep=True) 

1087 

1088 if self.config.doRunCatalogCalculation: 

1089 self.catalogCalculation.run(sources) 

1090 

1091 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo, 

1092 patchInfo=skyInfo.patchInfo) 

1093 if self.config.doPropagateFlags: 

1094 self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates) 

1095 

1096 results = Struct() 

1097 

1098 if self.config.doMatchSources: 

1099 matchResult = self.match.run(sources, exposure.getInfo().getFilterLabel().bandLabel) 

1100 matches = afwTable.packMatches(matchResult.matches) 

1101 matches.table.setMetadata(matchResult.matchMeta) 

1102 results.matchResult = matches 

1103 if self.config.doWriteMatchesDenormalized: 

1104 if matchResult.matches: 

1105 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta) 

1106 else: 

1107 self.log.warning("No matches, so generating dummy denormalized matches file") 

1108 denormMatches = afwTable.BaseCatalog(afwTable.Schema()) 

1109 denormMatches.setMetadata(PropertyList()) 

1110 denormMatches.getMetadata().add("COMMENT", 

1111 "This catalog is empty because no matches were found.") 

1112 results.denormMatches = denormMatches 

1113 results.denormMatches = denormMatches 

1114 

1115 results.outputSources = sources 

1116 return results 

1117 

1118 def readSources(self, dataRef): 

1119 """! 

1120 @brief Read input sources. 

1121 

1122 @param[in] dataRef: Data reference for catalog of merged detections 

1123 @return List of sources in merged catalog 

1124 

1125 We also need to add columns to hold the measurements we're about to make 

1126 so we can measure in-place. 

1127 """ 

1128 merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True) 

1129 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId) 

1130 idFactory = self.makeIdFactory(dataRef) 

1131 for s in merged: 

1132 idFactory.notify(s.getId()) 

1133 table = afwTable.SourceTable.make(self.schema, idFactory) 

1134 sources = afwTable.SourceCatalog(table) 

1135 sources.extend(merged, self.schemaMapper) 

1136 return sources 

1137 

1138 def writeMatches(self, dataRef, results): 

1139 """! 

1140 @brief Write matches of the sources to the astrometric reference catalog. 

1141 

1142 @param[in] dataRef: data reference 

1143 @param[in] results: results struct from run method 

1144 """ 

1145 if hasattr(results, "matchResult"): 

1146 dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch") 

1147 if hasattr(results, "denormMatches"): 

1148 dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull") 

1149 

1150 def write(self, dataRef, sources): 

1151 """! 

1152 @brief Write the source catalog. 

1153 

1154 @param[in] dataRef: data reference 

1155 @param[in] sources: source catalog 

1156 """ 

1157 dataRef.put(sources, self.config.coaddName + "Coadd_meas") 

1158 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId) 

1159 

1160 def getExposureId(self, dataRef): 

1161 return int(dataRef.get(self.config.coaddName + "CoaddId"))