Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1#!/usr/bin/env python 

2# 

3# LSST Data Management System 

4# Copyright 2008-2015 AURA/LSST. 

5# 

6# This product includes software developed by the 

7# LSST Project (http://www.lsst.org/). 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the LSST License Statement and 

20# the GNU General Public License along with this program. If not, 

21# see <https://www.lsstcorp.org/LegalNotices/>. 

22# 

23from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer 

24from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner, 

25 PipelineTask, PipelineTaskConfig, PipelineTaskConnections) 

26import lsst.pipe.base.connectionTypes as cT 

27from lsst.pex.config import Config, Field, ConfigurableField 

28from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader 

29from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask 

30from lsst.meas.deblender import SourceDeblendTask 

31from lsst.meas.extensions.scarlet import ScarletDeblendTask 

32from lsst.pipe.tasks.coaddBase import getSkyInfo 

33from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask 

34from lsst.meas.astrom import DirectMatchTask, denormalizeMatches 

35from lsst.pipe.tasks.fakes import BaseFakeSourcesTask 

36from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask 

37from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask 

38import lsst.afw.image as afwImage 

39import lsst.afw.table as afwTable 

40import lsst.afw.math as afwMath 

41from lsst.daf.base import PropertyList 

42 

43from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401 

44from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401 

45from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401 

46from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory # noqa: F401 

47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401 

48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401 

49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401 

50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401 

51 

52 

53""" 

54New set types: 

55* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter) 

56* deepCoadd_mergeDet: merged detections (tract, patch) 

57* deepCoadd_meas: measurements of merged detections (tract, patch, filter) 

58* deepCoadd_ref: reference sources (tract, patch) 

59All of these have associated *_schema catalogs that require no data ID and hold no records. 

60 

61In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in 

62the mergeDet, meas, and ref dataset Footprints: 

63* deepCoadd_peak_schema 

64""" 

65 

66 

67############################################################################################################## 

class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "abstract_filter", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler connections for `DetectCoaddSourcesTask`.

    One quantum is processed per (tract, patch, abstract_filter, skymap)
    combination; the coadd name on either side is configurable through the
    ``inputCoaddName``/``outputCoaddName`` templates (both default to "deep").
    """
    # Empty catalog carrying the detection output schema, persisted at task
    # initialization (before any data are processed).
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    # Input coadd image; modified in place by the task (variance scaling,
    # background handling) and re-persisted as ``outputExposure``.
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )

100 

101 

class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    # BaseFakeSourcesTask is abstract; this field must be retargeted to a
    # concrete implementation before doInsertFakes can be enabled.
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)")
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    def setDefaults(self):
        """Adjust the detection subtask defaults for coadd imagery."""
        super().setDefaults()
        # Threshold in units of the per-pixel standard deviation of the
        # (rescaled) variance plane rather than a global sigma.
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender

133 

134## @addtogroup LSST_task_documentation 

135## @{ 

136## @page DetectCoaddSourcesTask 

137## @ref DetectCoaddSourcesTask_ "DetectCoaddSourcesTask" 

138## @copybrief DetectCoaddSourcesTask 

139## @} 

140 

141 

class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description

    Command-line task that detects sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
    properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
    in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
    propagate the full covariance matrix -- but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
    SourceDetectionTask_ "detection" subtask.

    @par Inputs:
        deepCoadd{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
        @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
           exposure (ExposureF)
        @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
    @par Data Unit:
        tract, patch, filter

    DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
    You can retarget this subtask if you wish.

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters

    See @ref DetectCoaddSourcesConfig_ "DetectSourcesConfig"

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
    files.

    DetectCoaddSourcesTask has no debug variables of its own because it relegates all the work to
    @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
    @ref SourceDetectionTask_ "SourceDetectionTask" for further information.

    @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
    of using DetectCoaddSourcesTask

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
    the task is to update the background, detect all sources in a single band and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
    eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
    reference to the coadd to be processed. A list of the available optional arguments can be obtained by
    calling detectCoaddSources.py with the `--help` command line argument:
    @code
    detectCoaddSources.py --help
    @endcode

    To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
    steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
    @code
    detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
    @endcode
    that will process the HSC-I band data. The results are written to
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.

    It is also necessary to run:
    @code
    detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
    @endcode
    to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
    processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    # Gen2 CmdLineTask hooks generated from shared multiBandUtils helpers.
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        # Command-line parser for the Gen2 (CmdLineTask) entry point.
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser

    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema: initial schema for the output catalog, modified-in place to include all
                           fields set by this task. If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
        # call structure has been reviewed carefully to be sure super will work as intended.
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        # Empty catalog carrying the final schema; persisted as the
        # "detectionSchema" init-output dataset.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd.

        Invokes @ref run and then uses @ref write to output the
        results.

        @param[in] patchRef: data reference for patch
        """
        # When fakes were injected upstream, read the fake-augmented coadd
        # instead of the plain one.
        if self.config.hasFakes:
            exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
        else:
            exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
        expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)
        return results

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        """Gen3 entry point: fetch inputs, build the source IdFactory from the
        packed data ID, run detection, and persist the outputs."""
        inputs = butlerQC.get(inputRefs)
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_abstract_filter", returnMaxBits=True)
        # Reserve the high bits for the packed data ID; remaining low bits
        # enumerate sources within this quantum.
        inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        inputs["expId"] = packedId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
        detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
                                 depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - sources: catalog of detections
        - backgrounds: list of backgrounds
        """
        if self.config.doScaleVariance:
            # Record the applied scale factor in the exposure metadata for provenance.
            varScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=backgrounds)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        # Collect any backgrounds the detection subtask fit (e.g. the temporary
        # wide background) so they are persisted alongside the catalog.
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)

    def write(self, results, patchRef):
        """!
        @brief Write out results from run.

        @param[in] results: Struct returned from run
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        # Keep the fake-augmented calexp in a separate dataset so downstream
        # tasks can choose which variant to read.
        if self.config.hasFakes:
            patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
        else:
            patchRef.put(results.outputExposure, coaddName + "_calexp")

342 

343############################################################################################################## 

344 

345 

class DeblendCoaddSourcesConfig(Config):
    """Configuration for `DeblendCoaddSourcesTask`.

    Carries the configuration for both deblender flavours — the per-band
    deblender and the simultaneous multi-band (scarlet) deblender — together
    with the switches selecting between them.
    """
    singleBandDeblend = ConfigurableField(
        target=SourceDeblendTask,
        doc="Deblend sources separately in each band",
    )
    multiBandDeblend = ConfigurableField(
        target=ScarletDeblendTask,
        doc="Deblend sources simultaneously across bands",
    )
    simultaneous = Field(
        dtype=bool,
        default=False,
        doc="Simultaneously deblend all bands?",
    )
    coaddName = Field(
        dtype=str,
        default="deep",
        doc="Name of coadd",
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        super().setDefaults()
        # Keep every peak from the merged catalog rather than letting the
        # per-band deblender cull them.
        self.singleBandDeblend.propagateAllPeaks = True

364 

365 

class DeblendCoaddSourcesRunner(MergeSourcesRunner):
    """Task runner for `DeblendCoaddSourcesTask`.

    Required because the run method requires a list of
    dataRefs rather than a single dataRef.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Provide a list of patch references for each patch, tract, filter combo.

        Parameters
        ----------
        parsedCmd:
            The parsed command
        kwargs:
            Keyword arguments passed to the task

        Returns
        -------
        targetList: list
            List of tuples, where each tuple is a (dataRef, kwargs) pair.
        """
        kwargs["psfCache"] = parsedCmd.psfCache
        # buildRefDict groups references as tract -> patch -> filter -> dataRef;
        # flatten to one target (all per-filter refs of a patch) per patch.
        byTract = MergeSourcesRunner.buildRefDict(parsedCmd)
        targets = []
        for byPatch in byTract.values():
            for byFilter in byPatch.values():
                targets.append((list(byFilter.values()), kwargs))
        return targets

391 

392 

class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog

    Deblend sources from master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional fields
    from the deblender.

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk or
        construct the reference catalog loader, if `schema` or `peakSchema`
        are not supplied.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    # Gen2 hook: builds source IDs seeded from the merged-coadd ID of the patch.
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        # Command-line parser; one data ID may expand to several filters (g^r^i).
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        # Map the input (merged detection) schema into the output schema; the
        # deblender subtask will extend the output schema with its own fields.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        # Only one of the two deblender subtasks is ever constructed.
        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)

    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result: dict
            Dictionary of empty catalogs, with catalog names as keys.
        """
        catalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
                self.config.coaddName + "Coadd_deblendedModel": catalog}

    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend the patch

        Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
        Set `is-primary` and related flags.
        Propagate flags from individual visits.
        Write the deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        psfCache: int
            Capacity for each coadd exposure's PSF cache
            (applied in the single-band branch only).
        """

        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Use SCARLET to simultaneously deblend across filters
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filters.append(patchRef.dataId["filter"])
                exposures.append(exposure)
            # The input sources are the same for all bands, since it is a merged catalog
            sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
            for n in range(len(patchRefList)):
                # fluxCatalogs may be None when the deblender is configured not
                # to produce flux-conserved catalogs.
                fluxCat = fluxCatalogs if fluxCatalogs is None else fluxCatalogs[filters[n]]
                self.write(patchRefList[n], fluxCat, templateCatalogs[filters[n]])
        else:
            # Use the single-band deblender to deblend each band separately
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)

    def readSources(self, dataRef):
        """Read merged catalog

        Read the catalog of merged detections and create a catalog
        in a single band.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog

        Notes
        -----
        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
        idFactory = self.makeIdFactory(dataRef)
        # Tell the factory about existing IDs so newly deblended children
        # do not collide with them.
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def write(self, dataRef, flux_sources, template_sources=None):
        """Write the source catalog(s)

        Parameters
        ----------
        dataRef: Data Reference
            Reference to the output catalog.
        flux_sources: `SourceCatalog`
            Flux conserved sources to write to file.
            If using the single band deblender, this is the catalog
            generated.
        template_sources: `SourceCatalog`
            Source catalog using the multiband template models
            as footprints.
        """
        # The multiband deblender does not have to conserve flux,
        # so only write the flux conserved catalog if it exists
        if flux_sources is not None:
            assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
            dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
            self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
        # Only the multiband deblender has the option to output the
        # template model catalog, which can optionally be used
        # in MeasureMergedCoaddSources
        if template_sources is not None:
            assert self.config.multiBandDeblend.saveTemplates
            dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
            self.log.info("Wrote %d sources: %s" % (len(template_sources), dataRef.dataId))

    def writeMetadata(self, dataRefList):
        """Write the metadata produced from processing the data.

        Parameters
        ----------
        dataRefList
            List of Butler data references used to write the metadata.
            The metadata is written to dataset type `CmdLineTask._getMetadataName`.
        """
        for dataRef in dataRefList:
            try:
                metadataName = self._getMetadataName()
                if metadataName is not None:
                    dataRef.put(self.getFullMetadata(), metadataName)
            except Exception as e:
                # Best-effort: a failure to persist metadata should not abort
                # the whole run, so only warn.
                self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)

    def getExposureId(self, dataRef):
        """Get the ExposureId from a data reference
        """
        return int(dataRef.get(self.config.coaddName + "CoaddId"))

585 

586 

class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "abstract_filter", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep"}):
    """Butler connections for MeasureMergedCoaddSourcesTask.

    Optional connections (``visitCatalogs`` and the two match outputs) are
    removed in ``__init__`` when the corresponding configuration flags are
    disabled.
    """
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name="{inputCoaddName}Coadd_skyMap",
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        # Fixed typo: "codd" -> "coadd".
        doc="Source catalogs for visits which overlap input tract, patch, abstract_filter. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    inputCatalog = cT.Input(
        # Fixed the rendered help text: the concatenated segments previously ran
        # together without spaces and left the quoted dataset names unbalanced.
        doc=("Name of the input catalog to use. "
             "If the single band deblender was used this should be 'deblendedFlux'. "
             "If the multi-band deblender was used this should be 'deblendedModel', "
             "or 'deblendedFlux' if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        """Drop the optional connections that the configuration disables.

        Parameters
        ----------
        config : `MeasureMergedCoaddSourcesConfig`
            Configuration whose ``doPropagateFlags``, ``doMatchSources`` and
            ``doWriteMatchesDenormalized`` flags select the active connections.
        """
        super().__init__(config=config)
        # The fields are bool-typed pex_config Fields, so plain negation is
        # equivalent to (and clearer than) comparing "is False".
        if not config.doPropagateFlags:
            self.inputs -= {"visitCatalogs"}

        if not config.doMatchSources:
            self.outputs -= {"matchResult"}

        if not config.doWriteMatchesDenormalized:
            self.outputs -= {"denormMatches"}

671 

672 

class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    # Help text previously ran sentences together and dropped the closing
    # quotes around the dataset names; fixed here.
    inputCatalog = Field(dtype=str, default="deblendedFlux",
                         doc=("Name of the input catalog to use. "
                              "If the single band deblender was used this should be 'deblendedFlux'. "
                              "If the multi-band deblender was used this should be 'deblendedModel'. "
                              "If no deblending was performed this should be 'mergeDet'"))
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Convenience alias: the reference-object loader config actually
        # lives on the DirectMatchTask ("match") subtask.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']

    def validate(self):
        super().validate()
        # The Gen2 (ref_dataset_name) and Gen3 (connections.refCat) reference
        # catalog selections must agree until Gen2 support is retired.
        refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
        if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
                f"are different. These options must be kept in sync until Gen2 is retired."
            )

755 

756 

757## @addtogroup LSST_task_documentation 

758## @{ 

759## @page MeasureMergedCoaddSourcesTask 

760## @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask" 

761## @copybrief MeasureMergedCoaddSourcesTask 

762## @} 

763 

764 

class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Task runner that injects the command-line ``--psfCache`` value into
    MeasureMergedCoaddSourcesTask.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Build the target list with ``psfCache`` taken from the parsed command.

        @param[in] parsedCmd  results of parsing the command line
        @param[in] kwargs     additional keyword arguments, forwarded to the
                              base implementation (they were previously
                              accepted but silently discarded)
        """
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache,
                                                         **kwargs)

770 

771 

772class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask): 

773 r"""! 

774 @anchor MeasureMergedCoaddSourcesTask_ 

775 

776 @brief Deblend sources from master catalog in each coadd seperately and measure. 

777 

778 @section pipe_tasks_multiBand_Contents Contents 

779 

780 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose 

781 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize 

782 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run 

783 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config 

784 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug 

785 - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example 

786 

787 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description 

788 

789 Command-line task that uses peaks and footprints from a master catalog to perform deblending and 

790 measurement in each coadd. 

791 

792 Given a master input catalog of sources (peaks and footprints) or deblender outputs 

793 (including a HeavyFootprint in each band), measure each source on the 

794 coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a 

795 consistent set of child sources. 

796 

797 The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source 

798 properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit 

799 flags are propagated to the coadd sources. 

800 

801 Optionally, we can match the coadd sources to an external reference catalog. 

802 

803 @par Inputs: 

804 deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog 

805 @n deepCoadd_calexp{tract,patch,filter}: ExposureF 

806 @par Outputs: 

807 deepCoadd_meas{tract,patch,filter}: SourceCatalog 

808 @par Data Unit: 

809 tract, patch, filter 

810 

811 MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks: 

812 

813 <DL> 

814 <DT> @ref SingleFrameMeasurementTask_ "measurement" 

815 <DD> Measure source properties of deblended sources.</DD> 

816 <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags" 

817 <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are 

818 not at the edge of the field and that have either not been deblended or are the children of deblended 

819 sources</DD> 

820 <DT> @ref PropagateVisitFlagsTask_ "propagateFlags" 

821 <DD> Propagate flags set in individual visits to the coadd.</DD> 

822 <DT> @ref DirectMatchTask_ "match" 

823 <DD> Match input sources to a reference catalog (optional). 

824 </DD> 

825 </DL> 

826 These subtasks may be retargeted as required. 

827 

828 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization 

829 

830 @copydoc \_\_init\_\_ 

831 

832 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task 

833 

834 @copydoc run 

835 

836 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters 

837 

838 See @ref MeasureMergedCoaddSourcesConfig_ 

839 

840 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables 

841 

842 The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a 

843 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py 

844 files. 

845 

846 MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to 

847 the various sub-tasks. See the documetation for individual sub-tasks for more information. 

848 

849 @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using 

850 MeasureMergedCoaddSourcesTask 

851 

852 After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs. 

853 The next stage in the multi-band processing procedure will merge these measurements into a suitable 

854 catalog for driving forced photometry. 

855 

856 Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds 

857 to be processed. 

858 A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the 

859 `--help` command line argument: 

860 @code 

861 measureCoaddSources.py --help 

862 @endcode 

863 

864 To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we 

865 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished 

866 step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band 

867 coadd as follows: 

868 @code 

869 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I 

870 @endcode 

871 This will process the HSC-I band data. The results are written in 

872 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits 

873 

874 It is also necessary to run 

875 @code 

876 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R 

877 @endcode 

878 to generate the sources catalogs for the HSC-R band required by the next step in the multi-band 

879 procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask". 

880 """ 

    _DefaultName = "measureCoaddSources"  # name under which this task is configured/registered
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    # Gen2 helpers built by module-level factories: schema catalogs for the
    # "meas" dataset, and an IdFactory keyed on the merged-coadd ID dataset.
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")  # The IDs we already have are of this type

886 

887 @classmethod 

888 def _makeArgumentParser(cls): 

889 parser = ArgumentParser(name=cls._DefaultName) 

890 parser.add_id_argument("--id", "deepCoadd_calexp", 

891 help="data ID, e.g. --id tract=12345 patch=1,2 filter=r", 

892 ContainerClass=ExistingCoaddDataIdContainer) 

893 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache") 

894 return parser 

895 

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None
        @param[in] initInputs: a dictionary of Gen3 init inputs; if provided, its 'inputSchema' entry
            overrides the schema argument.

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        # Whether the input catalog came from a deblender; controls the
        # includeDeblend flag passed to setPrimaryFlags in run().
        self.deblended = self.config.inputCatalog.startswith("deblended")
        # Dataset-name suffix for the input catalog (prefixed with coaddName).
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        # Schema resolution order: Gen3 initInputs, explicit argument, butler.
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        # Map the input schema onto the (extended) output measurement schema.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        # Optional subtasks: only constructed when enabled in the config.
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Empty catalog advertising the output schema (Gen3 initOutputs).
        self.outputSchema = afwTable.SourceCatalog(self.schema)

939 

940 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

941 inputs = butlerQC.get(inputRefs) 

942 

943 refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat], 

944 inputs.pop('refCat'), config=self.config.refObjLoader, 

945 log=self.log) 

946 self.match.setRefObjLoader(refObjLoader) 

947 

948 # Set psfcache 

949 # move this to run after gen2 deprecation 

950 inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache) 

951 

952 # Get unique integer ID for IdFactory and RNG seeds 

953 packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True) 

954 inputs['exposureId'] = packedId 

955 idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits) 

956 # Transform inputCatalog 

957 table = afwTable.SourceTable.make(self.schema, idFactory) 

958 sources = afwTable.SourceCatalog(table) 

959 sources.extend(inputs.pop('inputCatalog'), self.schemaMapper) 

960 table = sources.getTable() 

961 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog. 

962 inputs['sources'] = sources 

963 

964 skyMap = inputs.pop('skyMap') 

965 tractNumber = inputRefs.inputCatalog.dataId['tract'] 

966 tractInfo = skyMap[tractNumber] 

967 patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch']) 

968 skyInfo = Struct( 

969 skyMap=skyMap, 

970 tractInfo=tractInfo, 

971 patchInfo=patchInfo, 

972 wcs=tractInfo.getWcs(), 

973 bbox=patchInfo.getOuterBBox() 

974 ) 

975 inputs['skyInfo'] = skyInfo 

976 

977 if self.config.doPropagateFlags: 

978 # Filter out any visit catalog that is not coadd inputs 

979 ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds 

980 visitKey = ccdInputs.schema.find("visit").key 

981 ccdKey = ccdInputs.schema.find("ccd").key 

982 inputVisitIds = set() 

983 ccdRecordsWcs = {} 

984 for ccdRecord in ccdInputs: 

985 visit = ccdRecord.get(visitKey) 

986 ccd = ccdRecord.get(ccdKey) 

987 inputVisitIds.add((visit, ccd)) 

988 ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs() 

989 

990 inputCatalogsToKeep = [] 

991 inputCatalogWcsUpdate = [] 

992 for i, dataRef in enumerate(inputRefs.visitCatalogs): 

993 key = (dataRef.dataId['visit'], dataRef.dataId['detector']) 

994 if key in inputVisitIds: 

995 inputCatalogsToKeep.append(inputs['visitCatalogs'][i]) 

996 inputCatalogWcsUpdate.append(ccdRecordsWcs[key]) 

997 inputs['visitCatalogs'] = inputCatalogsToKeep 

998 inputs['wcsUpdates'] = inputCatalogWcsUpdate 

999 inputs['ccdInputs'] = ccdInputs 

1000 

1001 outputs = self.run(**inputs) 

1002 butlerQC.put(outputs, outputRefs) 

1003 

    def runDataRef(self, patchRef, psfCache=100):
        """!
        @brief Deblend and measure (Gen2 entry point).

        @param[in] patchRef: Patch reference.
        @param[in] psfCache: size of the CoaddPsf cache (default 100).

        Set 'is-primary' and related flags. Propagate flags
        from individual visits. Optionally match the sources to a reference catalog and write the matches.
        Finally, write the deblended sources and measurements out.
        """
        # When fake sources have been inserted, the coadd lives under a
        # "fakes_"-prefixed dataset name.
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName
        exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
        exposure.getPsf().setCacheCapacity(psfCache)
        sources = self.readSources(patchRef)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)

        if self.config.doPropagateFlags:
            ccdInputs = self.propagateFlags.getCcdInputs(exposure)
        else:
            ccdInputs = None

        results = self.run(exposure=exposure, sources=sources,
                           ccdInputs=ccdInputs,
                           skyInfo=skyInfo, butler=patchRef.getButler(),
                           exposureId=self.getExposureId(patchRef))

        if self.config.doMatchSources:
            self.writeMatches(patchRef, results)
        self.write(patchRef, results.outputSources)

1038 

1039 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None, 

1040 butler=None): 

1041 """Run measurement algorithms on the input exposure, and optionally populate the 

1042 resulting catalog with extra information. 

1043 

1044 Parameters 

1045 ---------- 

1046 exposure : `lsst.afw.exposure.Exposure` 

1047 The input exposure on which measurements are to be performed 

1048 sources : `lsst.afw.table.SourceCatalog` 

1049 A catalog built from the results of merged detections, or 

1050 deblender outputs. 

1051 skyInfo : `lsst.pipe.base.Struct` 

1052 A struct containing information about the position of the input exposure within 

1053 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box 

1054 exposureId : `int` or `bytes` 

1055 packed unique number or bytes unique to the input exposure 

1056 ccdInputs : `lsst.afw.table.ExposureCatalog` 

1057 Catalog containing information on the individual visits which went into making 

1058 the exposure 

1059 visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None` 

1060 A list of source catalogs corresponding to measurements made on the individual 

1061 visits which went into the input exposure. If None and butler is `None` then 

1062 the task cannot propagate visit flags to the output catalog. 

1063 wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None` 

1064 If visitCatalogs is not `None` this should be a list of wcs objects which correspond 

1065 to the input visits. Used to put all coordinates to common system. If `None` and 

1066 butler is `None` then the task cannot propagate visit flags to the output catalog. 

1067 butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler` 

1068 Either a gen2 or gen3 butler used to load visit catalogs 

1069 

1070 Returns 

1071 ------- 

1072 results : `lsst.pipe.base.Struct` 

1073 Results of running measurement task. Will contain the catalog in the 

1074 sources attribute. Optionally will have results of matching to a 

1075 reference catalog in the matchResults attribute, and denormalized 

1076 matches in the denormMatches attribute. 

1077 """ 

1078 self.measurement.run(sources, exposure, exposureId=exposureId) 

1079 

1080 if self.config.doApCorr: 1080 ↛ 1090line 1080 didn't jump to line 1090, because the condition on line 1080 was never false

1081 self.applyApCorr.run( 

1082 catalog=sources, 

1083 apCorrMap=exposure.getInfo().getApCorrMap() 

1084 ) 

1085 

1086 # TODO DM-11568: this contiguous check-and-copy could go away if we 

1087 # reserve enough space during SourceDetection and/or SourceDeblend. 

1088 # NOTE: sourceSelectors require contiguous catalogs, so ensure 

1089 # contiguity now, so views are preserved from here on. 

1090 if not sources.isContiguous(): 1090 ↛ 1091line 1090 didn't jump to line 1091, because the condition on line 1090 was never true

1091 sources = sources.copy(deep=True) 

1092 

1093 if self.config.doRunCatalogCalculation: 1093 ↛ 1096line 1093 didn't jump to line 1096, because the condition on line 1093 was never false

1094 self.catalogCalculation.run(sources) 

1095 

1096 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo, 

1097 patchInfo=skyInfo.patchInfo, includeDeblend=self.deblended) 

1098 if self.config.doPropagateFlags: 1098 ↛ 1101line 1098 didn't jump to line 1101, because the condition on line 1098 was never false

1099 self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates) 

1100 

1101 results = Struct() 

1102 

1103 if self.config.doMatchSources: 1103 ↛ 1104line 1103 didn't jump to line 1104, because the condition on line 1103 was never true

1104 matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName()) 

1105 matches = afwTable.packMatches(matchResult.matches) 

1106 matches.table.setMetadata(matchResult.matchMeta) 

1107 results.matchResult = matches 

1108 if self.config.doWriteMatchesDenormalized: 

1109 if matchResult.matches: 

1110 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta) 

1111 else: 

1112 self.log.warn("No matches, so generating dummy denormalized matches file") 

1113 denormMatches = afwTable.BaseCatalog(afwTable.Schema()) 

1114 denormMatches.setMetadata(PropertyList()) 

1115 denormMatches.getMetadata().add("COMMENT", 

1116 "This catalog is empty because no matches were found.") 

1117 results.denormMatches = denormMatches 

1118 results.denormMatches = denormMatches 

1119 

1120 results.outputSources = sources 

1121 return results 

1122 

1123 def readSources(self, dataRef): 

1124 """! 

1125 @brief Read input sources. 

1126 

1127 @param[in] dataRef: Data reference for catalog of merged detections 

1128 @return List of sources in merged catalog 

1129 

1130 We also need to add columns to hold the measurements we're about to make 

1131 so we can measure in-place. 

1132 """ 

1133 merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True) 

1134 self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId)) 

1135 idFactory = self.makeIdFactory(dataRef) 

1136 for s in merged: 

1137 idFactory.notify(s.getId()) 

1138 table = afwTable.SourceTable.make(self.schema, idFactory) 

1139 sources = afwTable.SourceCatalog(table) 

1140 sources.extend(merged, self.schemaMapper) 

1141 return sources 

1142 

1143 def writeMatches(self, dataRef, results): 

1144 """! 

1145 @brief Write matches of the sources to the astrometric reference catalog. 

1146 

1147 @param[in] dataRef: data reference 

1148 @param[in] results: results struct from run method 

1149 """ 

1150 if hasattr(results, "matchResult"): 

1151 dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch") 

1152 if hasattr(results, "denormMatches"): 

1153 dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull") 

1154 

1155 def write(self, dataRef, sources): 

1156 """! 

1157 @brief Write the source catalog. 

1158 

1159 @param[in] dataRef: data reference 

1160 @param[in] sources: source catalog 

1161 """ 

1162 dataRef.put(sources, self.config.coaddName + "Coadd_meas") 

1163 self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId)) 

1164 

1165 def getExposureId(self, dataRef): 

1166 return int(dataRef.get(self.config.coaddName + "CoaddId"))