Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1#!/usr/bin/env python 

2# 

3# LSST Data Management System 

4# Copyright 2008-2015 AURA/LSST. 

5# 

6# This product includes software developed by the 

7# LSST Project (http://www.lsst.org/). 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the LSST License Statement and 

20# the GNU General Public License along with this program. If not, 

21# see <https://www.lsstcorp.org/LegalNotices/>. 

22# 

23from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer 

24from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner, 

25 PipelineTask, PipelineTaskConfig, PipelineTaskConnections) 

26import lsst.pipe.base.connectionTypes as cT 

27from lsst.pex.config import Config, Field, ConfigurableField 

28from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader 

29from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask 

30from lsst.meas.deblender import SourceDeblendTask 

31from lsst.meas.extensions.scarlet import ScarletDeblendTask 

32from lsst.pipe.tasks.coaddBase import getSkyInfo 

33from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask 

34from lsst.meas.astrom import DirectMatchTask, denormalizeMatches 

35from lsst.pipe.tasks.fakes import BaseFakeSourcesTask 

36from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask 

37from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask 

38import lsst.afw.image as afwImage 

39import lsst.afw.table as afwTable 

40import lsst.afw.math as afwMath 

41from lsst.daf.base import PropertyList 

42from lsst.skymap import BaseSkyMap 

43 

44# NOTE: these imports are a convenience so multiband users only have to import this file. 

45from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401 

46from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401 

47from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401 

48from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory # noqa: F401 

49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401 

50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401 

51from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401 

52from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401 

53 

54 

55""" 

56New set types: 

57* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter) 

58* deepCoadd_mergeDet: merged detections (tract, patch) 

59* deepCoadd_meas: measurements of merged detections (tract, patch, filter) 

60* deepCoadd_ref: reference sources (tract, patch) 

61All of these have associated *_schema catalogs that require no data ID and hold no records. 

62 

63In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in 

64the mergeDet, meas, and ref dataset Footprints: 

65* deepCoadd_peak_schema 

66""" 

67 

68 

69############################################################################################################## 

class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler (Gen3) connections for DetectCoaddSourcesTask.

    One coadd exposure in; the detection catalog, the variance-scaled /
    background-subtracted exposure, and the background model out.  Dataset
    names are filled in from the ``inputCoaddName``/``outputCoaddName``
    templates above.
    """
    # Init-output: an empty catalog carrying the schema of the detection
    # catalog, persisted once per task initialization.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # The input coadd, re-persisted after variance scaling and detection's
    # (optional) background handling.
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )

102 

103 

class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True,
                            doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    insertFakes = ConfigurableField(
        target=BaseFakeSourcesTask,
        doc="Injection of fake sources for testing purposes (must be retargeted)",
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        super().setDefaults()
        # Threshold in units of per-pixel standard deviation, growing
        # footprints isotropically.
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background
        # subtraction should be very basic.
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        # Suppress large footprints that overwhelm the deblender.
        self.detection.doTempWideBackground = True

135 

136## @addtogroup LSST_task_documentation 

137## @{ 

138## @page DetectCoaddSourcesTask 

139## @ref DetectCoaddSourcesTask_ "DetectCoaddSourcesTask" 

140## @copybrief DetectCoaddSourcesTask 

141## @} 

142 

143 

class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose	Description

    Command-line task that detects sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
    properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
    in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
    propagate the full covariance matrix -- but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
    SourceDetectionTask_ "detection" subtask.

    @par Inputs:
        deepCoadd{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
        @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
           exposure (ExposureF)
        @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
    @par Data Unit:
        tract, patch, filter

    DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
    You can retarget this subtask if you wish.

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize	Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run	Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config	Configuration parameters

    See @ref DetectCoaddSourcesConfig_ "DetectSourcesConfig"

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug	Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
    files.

    DetectCoaddSourcesTask has no debug variables of its own because it relegates all the work to
    @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
    @ref SourceDetectionTask_ "SourceDetectionTask" for further information.

    @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example	A complete example
    of using DetectCoaddSourcesTask

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
    the task is to update the background, detect all sources in a single band and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
    eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
    reference to the coadd to be processed. A list of the available optional arguments can be obtained by
    calling detectCoaddSources.py with the `--help` command line argument:
    @code
    detectCoaddSources.py --help
    @endcode

    To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
    steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
    @code
    detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
    @endcode
    that will process the HSC-I band data. The results are written to
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.

    It is also necessary to run:
    @code
    detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
    @endcode
    to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
    processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        """Build the Gen2 command-line argument parser for this task."""
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser

    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema:   initial schema for the output catalog, modified in place to include all
                             fields set by this task.  If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
        # call structure has been reviewed carefully to be sure super will work as intended.
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        # Empty catalog carrying the final schema; persisted through the
        # "detectionSchema" init-output connection.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd (Gen2 entry point).

        Invokes @ref run and then uses @ref write to output the
        results.

        @param[in] patchRef: data reference for patch
        """
        if self.config.hasFakes:
            exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
        else:
            exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
        expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)
        return results

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        # Gen3 entry point: derive a unique exposure ID (and an IdFactory
        # reserving the remaining bits for source IDs) from the data ID.
        inputs = butlerQC.get(inputRefs)
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_band", returnMaxBits=True)
        inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        inputs["expId"] = packedId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
        detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
                                 depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - sources: catalog of detections
        - backgrounds: list of backgrounds
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            # Record the applied scaling for provenance.
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=backgrounds)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        # Any temporary backgrounds the detector fit are accumulated so they
        # can be persisted alongside the calexp.
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)

    def write(self, results, patchRef):
        """!
        @brief Write out results from runDetection.

        @param[in] results: Struct returned from run (sources, backgrounds, exposure)
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        if self.config.hasFakes:
            patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
        else:
            patchRef.put(results.outputExposure, coaddName + "_calexp")

344 

345############################################################################################################## 

346 

347 

class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    # NOTE: the doc previously had this backwards; the task uses
    # multiBandDeblend when simultaneous is True (see DeblendCoaddSourcesTask.__init__).
    simultaneous = Field(dtype=bool,
                         default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses 'multiBandDeblend' while False uses 'singleBandDeblend'")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        Config.setDefaults(self)
        # Keep every peak so downstream measurement sees a consistent set of
        # children in each band.
        self.singleBandDeblend.propagateAllPeaks = True

369 

370 

class DeblendCoaddSourcesRunner(MergeSourcesRunner):
    """Task runner for the `DeblendCoaddSourcesTask`

    Required because the run method requires a list of
    dataRefs rather than a single dataRef.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Provide a list of patch references for each patch, tract, filter combo.

        Parameters
        ----------
        parsedCmd:
            The parsed command
        kwargs:
            Keyword arguments passed to the task

        Returns
        -------
        targetList: list
            List of tuples, where each tuple is a (dataRef, kwargs) pair.
        """
        refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
        # Forward the command-line PSF cache size to every task invocation.
        kwargs["psfCache"] = parsedCmd.psfCache
        # One target per (tract, patch): the list of per-filter dataRefs.
        return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]

396 

397 

class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog

    Deblend sources from master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional fields
    from the deblender.

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk, if `schema` or
        `peakSchema` are not supplied.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        """Build the Gen2 command-line argument parser for this task."""
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        # Map the merged-detection schema into the output schema so the
        # deblender subtask can append its own fields.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)

    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result: dict
            Dictionary of empty catalogs, with catalog names as keys.
        """
        catalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
                self.config.coaddName + "Coadd_deblendedModel": catalog}

    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend the patch

        Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
        Set `is-primary` and related flags.
        Propagate flags from individual visits.
        Write the deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        psfCache: int
            Capacity of the CoaddPsf cache (single-band deblending only).
        """

        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Use SCARLET to simultaneously deblend across filters
            bands = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                # NOTE: renamed from `filter` to avoid shadowing the builtin.
                filterLabel = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True)
                bands.append(filterLabel.bandLabel)
                exposures.append(exposure)
                # The input sources are the same for all bands, since it is a merged catalog
                sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(bands, exposures)
            templateCatalogs = self.multiBandDeblend.run(exposure, sources)
            for n in range(len(patchRefList)):
                self.write(patchRefList[n], templateCatalogs[bands[n]])
        else:
            # Use the single-band deblender to deblend each band separately
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)

    def readSources(self, dataRef):
        """Read merged catalog

        Read the catalog of merged detections and create a catalog
        in a single band.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
        # Reserve the already-assigned IDs so new sources get fresh ones.
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def write(self, dataRef, sources):
        """Write the source catalog(s)

        Parameters
        ----------
        dataRef: Data Reference
            Reference to the output catalog.
        sources: `SourceCatalog`
            Flux conserved sources to write to file.
            If using the single band deblender, this is the catalog
            generated.
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux")
        self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)

    def writeMetadata(self, dataRefList):
        """Write the metadata produced from processing the data.

        Parameters
        ----------
        dataRefList
            List of Butler data references used to write the metadata.
            The metadata is written to dataset type `CmdLineTask._getMetadataName`.
        """
        for dataRef in dataRefList:
            try:
                metadataName = self._getMetadataName()
                if metadataName is not None:
                    dataRef.put(self.getFullMetadata(), metadataName)
            except Exception as e:
                # Metadata persistence is best-effort; never fail the run for it.
                self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)

    def getExposureId(self, dataRef):
        """Get the ExposureId from a data reference
        """
        return int(dataRef.get(self.config.coaddName + "CoaddId"))

579 

580 

class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep"}):
    """Butler (Gen3) connections for MeasureMergedCoaddSourcesTask."""
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Drop connections whose corresponding processing steps are disabled,
        # so the middleware neither requires nor writes those datasets.
        if config.doPropagateFlags is False:
            self.inputs -= set(("visitCatalogs",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))

665 

666 

class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = Field(
        dtype=str,
        default="deblendedFlux",
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel."
             "If no deblending was performed this should be 'mergeDet'"),
    )
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool,
        default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)",
    )
    propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections",
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections",
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task',
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog",
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    @property
    def refObjLoader(self):
        # The reference-object loader lives on the matching subtask; exposed
        # here for convenience (used by validate below).
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        extraPlugins = ['base_InputCount',
                        'base_Variance',
                        'base_LocalPhotoCalib',
                        'base_LocalWcs']
        self.measurement.plugins.names |= extraPlugins
        # Flag pixels affected by clipping, sensor edges, or inexact PSFs,
        # both anywhere in the footprint and at its center.
        coaddMaskPlanes = ['CLIPPED', 'SENSOR_EDGE', 'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = coaddMaskPlanes
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = coaddMaskPlanes

    def validate(self):
        super().validate()
        # The Gen2 loader names its catalog independently of the Gen3
        # connection; refuse configurations where the two disagree.
        gen2RefCat = getattr(self.refObjLoader, "ref_dataset_name", None)
        if gen2RefCat is not None and gen2RefCat != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({gen2RefCat}) and Gen3 ({self.connections.refCat}) reference catalogs "
                f"are different. These options must be kept in sync until Gen2 is retired."
            )

749 

750 

751## @addtogroup LSST_task_documentation 

752## @{ 

753## @page MeasureMergedCoaddSourcesTask 

754## @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask" 

755## @copybrief MeasureMergedCoaddSourcesTask 

756## @} 

757 

758 

class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Task runner that forwards the ``--psfCache`` command-line setting
    into MeasureMergedCoaddSourcesTask's per-target keyword arguments.
    """

    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Build the target list, attaching ``psfCache`` from the parsed command line."""
        extraKwargs = dict(psfCache=parsedCmd.psfCache)
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, **extraKwargs)

764 

765 

766class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask): 

767 r"""! 

768 @anchor MeasureMergedCoaddSourcesTask_ 

769 

 770 @brief Deblend sources from master catalog in each coadd separately and measure. 

771 

772 @section pipe_tasks_multiBand_Contents Contents 

773 

774 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose 

775 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize 

776 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run 

777 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config 

778 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug 

779 - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example 

780 

781 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description 

782 

783 Command-line task that uses peaks and footprints from a master catalog to perform deblending and 

784 measurement in each coadd. 

785 

786 Given a master input catalog of sources (peaks and footprints) or deblender outputs 

787 (including a HeavyFootprint in each band), measure each source on the 

788 coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a 

789 consistent set of child sources. 

790 

791 The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source 

792 properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit 

793 flags are propagated to the coadd sources. 

794 

795 Optionally, we can match the coadd sources to an external reference catalog. 

796 

797 @par Inputs: 

798 deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog 

799 @n deepCoadd_calexp{tract,patch,filter}: ExposureF 

800 @par Outputs: 

801 deepCoadd_meas{tract,patch,filter}: SourceCatalog 

802 @par Data Unit: 

803 tract, patch, filter 

804 

805 MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks: 

806 

807 <DL> 

808 <DT> @ref SingleFrameMeasurementTask_ "measurement" 

809 <DD> Measure source properties of deblended sources.</DD> 

810 <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags" 

811 <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are 

812 not at the edge of the field and that have either not been deblended or are the children of deblended 

813 sources</DD> 

814 <DT> @ref PropagateVisitFlagsTask_ "propagateFlags" 

815 <DD> Propagate flags set in individual visits to the coadd.</DD> 

816 <DT> @ref DirectMatchTask_ "match" 

817 <DD> Match input sources to a reference catalog (optional). 

818 </DD> 

819 </DL> 

820 These subtasks may be retargeted as required. 

821 

822 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization 

823 

824 @copydoc \_\_init\_\_ 

825 

826 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task 

827 

828 @copydoc run 

829 

830 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters 

831 

832 See @ref MeasureMergedCoaddSourcesConfig_ 

833 

834 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables 

835 

836 The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a 

837 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py 

838 files. 

839 

840 MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to 

 841 the various sub-tasks. See the documentation for individual sub-tasks for more information. 

842 

843 @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using 

844 MeasureMergedCoaddSourcesTask 

845 

846 After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs. 

847 The next stage in the multi-band processing procedure will merge these measurements into a suitable 

848 catalog for driving forced photometry. 

849 

850 Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds 

851 to be processed. 

852 A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the 

853 `--help` command line argument: 

854 @code 

855 measureCoaddSources.py --help 

856 @endcode 

857 

858 To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we 

859 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished 

860 step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band 

861 coadd as follows: 

862 @code 

863 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I 

864 @endcode 

865 This will process the HSC-I band data. The results are written in 

 866 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`. 

867 

868 It is also necessary to run 

869 @code 

870 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R 

871 @endcode 

872 to generate the sources catalogs for the HSC-R band required by the next step in the multi-band 

873 procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask". 

874 """ 

    # Task name used for config/metadata datasets and command-line defaults.
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    # Schema catalogs for the "meas" dataset (helper defined elsewhere in this file).
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")  # The IDs we already have are of this type

880 

881 @classmethod 

882 def _makeArgumentParser(cls): 

883 parser = ArgumentParser(name=cls._DefaultName) 

884 parser.add_id_argument("--id", "deepCoadd_calexp", 

885 help="data ID, e.g. --id tract=12345 patch=1,2 filter=r", 

886 ContainerClass=ExistingCoaddDataIdContainer) 

887 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache") 

888 return parser 

889 

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        # True when the configured input catalog came from a deblender run.
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        # Gen3: the input schema arrives via initInputs and takes precedence.
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        # Gen2 fallback: read the schema dataset through the butler.
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        # Map the input schema into the output schema; measurement subtasks
        # registered below will extend self.schema with their own fields.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        # Validate units after all subtasks have added their fields.
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Empty catalog carrying the final schema, published as the Gen3
        # "outputSchema" init-output.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

933 

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        """Gen3 entry point: fetch inputs, adapt them to `run`'s signature,
        and write the outputs.

        Parameters
        ----------
        butlerQC : quantum-context butler used to get/put datasets
        inputRefs : struct of input dataset references for this quantum
        outputRefs : struct of output dataset references for this quantum
        """
        inputs = butlerQC.get(inputRefs)

        # Build the reference-object loader from the in-memory refCat shards
        # and hand it to the match subtask.
        refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                             inputs.pop('refCat'), config=self.config.refObjLoader,
                                             log=self.log)
        self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
        inputs['exposureId'] = packedId
        idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        # Transform inputCatalog: copy the merged-detection records into a
        # catalog with the output schema so measurements can be made in place.
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        # Assemble the skyInfo struct that `run` expects from the skyMap and
        # the tract/patch of the input catalog reference.
        skyMap = inputs.pop('skyMap')
        tractNumber = inputRefs.inputCatalog.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            # Filter out any visit catalog that is not coadd inputs
            ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
            visitKey = ccdInputs.schema.find("visit").key
            ccdKey = ccdInputs.schema.find("ccd").key
            inputVisitIds = set()
            ccdRecordsWcs = {}
            for ccdRecord in ccdInputs:
                visit = ccdRecord.get(visitKey)
                ccd = ccdRecord.get(ccdKey)
                inputVisitIds.add((visit, ccd))
                ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

            # Keep only the visit catalogs whose (visit, detector) pair
            # contributed to this coadd, with their matching WCS updates.
            inputCatalogsToKeep = []
            inputCatalogWcsUpdate = []
            for i, dataRef in enumerate(inputRefs.visitCatalogs):
                key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                if key in inputVisitIds:
                    inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                    inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
            inputs['visitCatalogs'] = inputCatalogsToKeep
            inputs['wcsUpdates'] = inputCatalogWcsUpdate
            inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

997 

998 def runDataRef(self, patchRef, psfCache=100): 

999 """! 

1000 @brief Deblend and measure. 

1001 

1002 @param[in] patchRef: Patch reference. 

1003 

1004 Set 'is-primary' and related flags. Propagate flags 

1005 from individual visits. Optionally match the sources to a reference catalog and write the matches. 

1006 Finally, write the deblended sources and measurements out. 

1007 """ 

1008 if self.config.hasFakes: 1008 ↛ 1009line 1008 didn't jump to line 1009, because the condition on line 1008 was never true

1009 coaddType = "fakes_" + self.config.coaddName 

1010 else: 

1011 coaddType = self.config.coaddName 

1012 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True) 

1013 exposure.getPsf().setCacheCapacity(psfCache) 

1014 sources = self.readSources(patchRef) 

1015 table = sources.getTable() 

1016 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog. 

1017 skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef) 

1018 

1019 if self.config.doPropagateFlags: 1019 ↛ 1022line 1019 didn't jump to line 1022, because the condition on line 1019 was never false

1020 ccdInputs = self.propagateFlags.getCcdInputs(exposure) 

1021 else: 

1022 ccdInputs = None 

1023 

1024 results = self.run(exposure=exposure, sources=sources, 

1025 ccdInputs=ccdInputs, 

1026 skyInfo=skyInfo, butler=patchRef.getButler(), 

1027 exposureId=self.getExposureId(patchRef)) 

1028 

1029 if self.config.doMatchSources: 1029 ↛ 1030line 1029 didn't jump to line 1030, because the condition on line 1029 was never true

1030 self.writeMatches(patchRef, results) 

1031 self.write(patchRef, results.outputSources) 

1032 

1033 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None, 

1034 butler=None): 

1035 """Run measurement algorithms on the input exposure, and optionally populate the 

1036 resulting catalog with extra information. 

1037 

1038 Parameters 

1039 ---------- 

1040 exposure : `lsst.afw.exposure.Exposure` 

1041 The input exposure on which measurements are to be performed 

1042 sources : `lsst.afw.table.SourceCatalog` 

1043 A catalog built from the results of merged detections, or 

1044 deblender outputs. 

1045 skyInfo : `lsst.pipe.base.Struct` 

1046 A struct containing information about the position of the input exposure within 

1047 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box 

1048 exposureId : `int` or `bytes` 

1049 packed unique number or bytes unique to the input exposure 

1050 ccdInputs : `lsst.afw.table.ExposureCatalog` 

1051 Catalog containing information on the individual visits which went into making 

1052 the exposure 

1053 visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None` 

1054 A list of source catalogs corresponding to measurements made on the individual 

1055 visits which went into the input exposure. If None and butler is `None` then 

1056 the task cannot propagate visit flags to the output catalog. 

1057 wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None` 

1058 If visitCatalogs is not `None` this should be a list of wcs objects which correspond 

1059 to the input visits. Used to put all coordinates to common system. If `None` and 

1060 butler is `None` then the task cannot propagate visit flags to the output catalog. 

1061 butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler` 

1062 Either a gen2 or gen3 butler used to load visit catalogs 

1063 

1064 Returns 

1065 ------- 

1066 results : `lsst.pipe.base.Struct` 

1067 Results of running measurement task. Will contain the catalog in the 

1068 sources attribute. Optionally will have results of matching to a 

1069 reference catalog in the matchResults attribute, and denormalized 

1070 matches in the denormMatches attribute. 

1071 """ 

1072 self.measurement.run(sources, exposure, exposureId=exposureId) 

1073 

1074 if self.config.doApCorr: 1074 ↛ 1084line 1074 didn't jump to line 1084, because the condition on line 1074 was never false

1075 self.applyApCorr.run( 

1076 catalog=sources, 

1077 apCorrMap=exposure.getInfo().getApCorrMap() 

1078 ) 

1079 

1080 # TODO DM-11568: this contiguous check-and-copy could go away if we 

1081 # reserve enough space during SourceDetection and/or SourceDeblend. 

1082 # NOTE: sourceSelectors require contiguous catalogs, so ensure 

1083 # contiguity now, so views are preserved from here on. 

1084 if not sources.isContiguous(): 1084 ↛ 1085line 1084 didn't jump to line 1085, because the condition on line 1084 was never true

1085 sources = sources.copy(deep=True) 

1086 

1087 if self.config.doRunCatalogCalculation: 1087 ↛ 1090line 1087 didn't jump to line 1090, because the condition on line 1087 was never false

1088 self.catalogCalculation.run(sources) 

1089 

1090 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo, 

1091 patchInfo=skyInfo.patchInfo, includeDeblend=self.deblended) 

1092 if self.config.doPropagateFlags: 1092 ↛ 1095line 1092 didn't jump to line 1095, because the condition on line 1092 was never false

1093 self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates) 

1094 

1095 results = Struct() 

1096 

1097 if self.config.doMatchSources: 1097 ↛ 1098line 1097 didn't jump to line 1098, because the condition on line 1097 was never true

1098 matchResult = self.match.run(sources, exposure.getInfo().getFilterLabel().bandLabel) 

1099 matches = afwTable.packMatches(matchResult.matches) 

1100 matches.table.setMetadata(matchResult.matchMeta) 

1101 results.matchResult = matches 

1102 if self.config.doWriteMatchesDenormalized: 

1103 if matchResult.matches: 

1104 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta) 

1105 else: 

1106 self.log.warn("No matches, so generating dummy denormalized matches file") 

1107 denormMatches = afwTable.BaseCatalog(afwTable.Schema()) 

1108 denormMatches.setMetadata(PropertyList()) 

1109 denormMatches.getMetadata().add("COMMENT", 

1110 "This catalog is empty because no matches were found.") 

1111 results.denormMatches = denormMatches 

1112 results.denormMatches = denormMatches 

1113 

1114 results.outputSources = sources 

1115 return results 

1116 

1117 def readSources(self, dataRef): 

1118 """! 

1119 @brief Read input sources. 

1120 

1121 @param[in] dataRef: Data reference for catalog of merged detections 

1122 @return List of sources in merged catalog 

1123 

1124 We also need to add columns to hold the measurements we're about to make 

1125 so we can measure in-place. 

1126 """ 

1127 merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True) 

1128 self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId)) 

1129 idFactory = self.makeIdFactory(dataRef) 

1130 for s in merged: 

1131 idFactory.notify(s.getId()) 

1132 table = afwTable.SourceTable.make(self.schema, idFactory) 

1133 sources = afwTable.SourceCatalog(table) 

1134 sources.extend(merged, self.schemaMapper) 

1135 return sources 

1136 

1137 def writeMatches(self, dataRef, results): 

1138 """! 

1139 @brief Write matches of the sources to the astrometric reference catalog. 

1140 

1141 @param[in] dataRef: data reference 

1142 @param[in] results: results struct from run method 

1143 """ 

1144 if hasattr(results, "matchResult"): 

1145 dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch") 

1146 if hasattr(results, "denormMatches"): 

1147 dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull") 

1148 

1149 def write(self, dataRef, sources): 

1150 """! 

1151 @brief Write the source catalog. 

1152 

1153 @param[in] dataRef: data reference 

1154 @param[in] sources: source catalog 

1155 """ 

1156 dataRef.put(sources, self.config.coaddName + "Coadd_meas") 

1157 self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId)) 

1158 

1159 def getExposureId(self, dataRef): 

1160 return int(dataRef.get(self.config.coaddName + "CoaddId"))