#!/usr/bin/env python
#
# LSST Data Management System
# Copyright 2008-2015 AURA/LSST.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program.  If not,
# see <https://www.lsstcorp.org/LegalNotices/>.
#
from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
                            PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
import lsst.pipe.base.connectionTypes as cT
from lsst.pex.config import Config, Field, ConfigurableField
from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
from lsst.meas.deblender import SourceDeblendTask
from lsst.meas.extensions.scarlet import ScarletDeblendTask
from lsst.pipe.tasks.coaddBase import getSkyInfo
from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
import lsst.afw.math as afwMath
from lsst.daf.base import PropertyList
from lsst.skymap import BaseSkyMap

from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask  # noqa: F401
from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask  # noqa: F401
from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs  # noqa: F401
from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory  # noqa: F401
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig  # noqa: F401
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask  # noqa: F401
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig  # noqa: F401
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask  # noqa: F401

54""" 

55New set types: 

56* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter) 

57* deepCoadd_mergeDet: merged detections (tract, patch) 

58* deepCoadd_meas: measurements of merged detections (tract, patch, filter) 

59* deepCoadd_ref: reference sources (tract, patch) 

60All of these have associated *_schema catalogs that require no data ID and hold no records. 

61 

62In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in 

63the mergeDet, meas, and ref dataset Footprints: 

64* deepCoadd_peak_schema 

65""" 

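# Illustrative sketch (not executed): how the dataset types listed above might be
# retrieved with a Gen2 butler.  The repository path and data ID values are
# hypothetical; they only show the (tract, patch, filter) granularity of each type.
#
#     import lsst.daf.persistence as dafPersist
#     butler = dafPersist.Butler("/path/to/rerun")
#     det = butler.get("deepCoadd_det", tract=0, patch="5,4", filter="HSC-I")
#     mergeDet = butler.get("deepCoadd_mergeDet", tract=0, patch="5,4")
#     measSchema = butler.get("deepCoadd_meas_schema")  # schema datasets need no data ID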


##############################################################################################################
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )


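# Illustrative note (an assumption spelled out from the default templates above): with
# inputCoaddName="deep" and outputCoaddName="deep", the connection names expand to the
# dataset types "deepCoadd" (input exposure), "deepCoadd_det" (detections), and
# "deepCoadd_calexp" / "deepCoadd_calexp_background" (outputs).  For example:
#
#     templates = {"inputCoaddName": "deep", "outputCoaddName": "deep"}
#     "{outputCoaddName}Coadd_det".format(**templates)   # -> "deepCoadd_det"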

class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)")
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    def setDefaults(self):
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender


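# Minimal sketch of overriding DetectCoaddSourcesConfig (an assumption: a config
# override file passed to detectCoaddSources.py with --configfile; the attribute
# names come from the config class above, the values are only examples):
#
#     config.doScaleVariance = False
#     config.detection.thresholdValue = 4.0
#     config.detection.doTempWideBackground = False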
## @addtogroup LSST_task_documentation
## @{
## @page DetectCoaddSourcesTask
## @ref DetectCoaddSourcesTask_ "DetectCoaddSourcesTask"
## @copybrief DetectCoaddSourcesTask
## @}


class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description

    Command-line task that detects sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
    properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
    in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
    propagate the full covariance matrix -- but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
    SourceDetectionTask_ "detection" subtask.

    @par Inputs:
        deepCoadd{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
        @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
           exposure (ExposureF)
        @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
    @par Data Unit:
        tract, patch, filter

    DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
    You can retarget this subtask if you wish.

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters

    See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
    files.

    DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
    @ref SourceDetectionTask_ "SourceDetectionTask" for further information.

    @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
    of using DetectCoaddSourcesTask

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
    the task is to update the background, detect all sources in a single band and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
    eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
    reference to the coadd to be processed. A list of the available optional arguments can be obtained by
    calling detectCoaddSources.py with the `--help` command line argument:
    @code
    detectCoaddSources.py --help
    @endcode

    To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
    steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
    @code
    detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
    @endcode
    This will process the HSC-I band data. The results are written to
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.

    It is also necessary to run:
    @code
    detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
    @endcode
    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser

    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema: initial schema for the output catalog, modified in place to include all
            fields set by this task. If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
        # call structure has been reviewed carefully to be sure super will work as intended.
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd.

        Invokes @ref run and then uses @ref write to output the
        results.

        @param[in] patchRef: data reference for patch
        """
        if self.config.hasFakes:
            exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
        else:
            exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
        expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)
        return results

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_band", returnMaxBits=True)
        inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        inputs["expId"] = packedId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

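    # Illustrative note (an assumption about the IdFactory semantics used above): the
    # packed (tract, patch, band) data ID occupies the upper bits of each 64-bit
    # source ID and the remaining 64 - maxBits bits form a per-catalog counter,
    # roughly
    #
    #     reserved = 64 - maxBits
    #     sourceId = (packedId << reserved) | runningIndex
    #
    # so source IDs stay unique across patches and bands.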
    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
        detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
            depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - outputSources: catalog of detections
        - outputBackgrounds: list of backgrounds
        - outputExposure: the input exposure, with scaled variance and detection masks set
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=backgrounds)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)

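    # Minimal sketch (not executed; `exposure` is assumed to be supplied by the
    # caller) of driving this task in memory instead of via the command line:
    #
    #     task = DetectCoaddSourcesTask(config=DetectCoaddSourcesConfig())
    #     idFactory = afwTable.IdFactory.makeSimple()
    #     result = task.run(exposure, idFactory, expId=0)
    #     catalog = result.outputSources    # lsst.afw.table.SourceCatalog
    #     calexp = result.outputExposure    # variance-scaled exposure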
    def write(self, results, patchRef):
        """!
        @brief Write out results from run.

        @param[in] results: Struct returned from run, holding the exposure, sources and backgrounds
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        if self.config.hasFakes:
            patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
        else:
            patchRef.put(results.outputExposure, coaddName + "_calexp")

##############################################################################################################


class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        Config.setDefaults(self)
        self.singleBandDeblend.propagateAllPeaks = True


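# Illustrative override for the DeblendCoaddSourcesConfig above (a sketch, assuming a
# --configfile override for deblendCoaddSources.py; not executed here): switch from
# the per-band SDSS-style deblender to the simultaneous multi-band (scarlet)
# deblender.  The maxIter parameter name is hypothetical and only shows the pattern
# for tuning the retargeted subtask.
#
#     config.simultaneous = True
#     config.multiBandDeblend.maxIter = 200
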
class DeblendCoaddSourcesRunner(MergeSourcesRunner):
    """Task runner for `DeblendCoaddSourcesTask`

    Required because the run method requires a list of
    dataRefs rather than a single dataRef.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Provide a list of patch references for each patch, tract, filter combo.

        Parameters
        ----------
        parsedCmd:
            The parsed command
        kwargs:
            Keyword arguments passed to the task

        Returns
        -------
        targetList: list
            List of tuples, where each tuple is a (dataRef, kwargs) pair.
        """
        refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
        kwargs["psfCache"] = parsedCmd.psfCache
        return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]


class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog

    Deblend sources from the master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional fields
    from the deblender.

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk, if `schema` or
        `peakSchema` is not supplied.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)

    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result: dict
            Dictionary of empty catalogs, with catalog names as keys.
        """
        catalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
                self.config.coaddName + "Coadd_deblendedModel": catalog}

    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend the patch

        Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
        Set `is-primary` and related flags.
        Propagate flags from individual visits.
        Write the deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        """

        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Use SCARLET to simultaneously deblend across filters
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filters.append(patchRef.dataId["filter"])
                exposures.append(exposure)
            # The input sources are the same for all bands, since it is a merged catalog
            sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
            for n in range(len(patchRefList)):
                fluxCat = fluxCatalogs if fluxCatalogs is None else fluxCatalogs[filters[n]]
                self.write(patchRefList[n], fluxCat, templateCatalogs[filters[n]])
        else:
            # Use the single-band deblender to deblend each band separately
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)

    def readSources(self, dataRef):
        """Read merged catalog

        Read the catalog of merged detections and create a catalog
        in a single band.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def write(self, dataRef, flux_sources, template_sources=None):
        """Write the source catalog(s)

        Parameters
        ----------
        dataRef: Data Reference
            Reference to the output catalog.
        flux_sources: `SourceCatalog`
            Flux conserved sources to write to file.
            If using the single band deblender, this is the catalog
            generated.
        template_sources: `SourceCatalog`
            Source catalog using the multiband template models
            as footprints.
        """
        # The multiband deblender does not have to conserve flux,
        # so only write the flux conserved catalog if it exists
        if flux_sources is not None:
            assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
            dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
            self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
        # Only the multiband deblender has the option to output the
        # template model catalog, which can optionally be used
        # in MeasureMergedCoaddSources
        if template_sources is not None:
            assert self.config.multiBandDeblend.saveTemplates
            dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
            self.log.info("Wrote %d sources: %s" % (len(template_sources), dataRef.dataId))

    def writeMetadata(self, dataRefList):
        """Write the metadata produced from processing the data.

        Parameters
        ----------
        dataRefList
            List of Butler data references used to write the metadata.
            The metadata is written to dataset type `CmdLineTask._getMetadataName`.
        """
        for dataRef in dataRefList:
            try:
                metadataName = self._getMetadataName()
                if metadataName is not None:
                    dataRef.put(self.getFullMetadata(), metadataName)
            except Exception as e:
                self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)

    def getExposureId(self, dataRef):
        """Get the ExposureId from a data reference
        """
        return int(dataRef.get(self.config.coaddName + "CoaddId"))


class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep"}):
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use. "
             "If the single band deblender was used this should be 'deblendedFlux'. "
             "If the multi-band deblender was used this should be 'deblendedModel', "
             "or 'deblendedFlux' if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            self.inputs -= set(("visitCatalogs",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))


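# Illustrative note on the constructor above (a sketch, not executed): connections
# are dropped whenever the corresponding config switch is off, so a pipeline
# configured with doMatchSources=False never declares the match outputs.  Roughly:
#
#     config = MeasureMergedCoaddSourcesConfig()
#     config.doMatchSources = False
#     connections = MeasureMergedCoaddSourcesConnections(config=config)
#     # "matchResult" is no longer among connections.outputs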
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = Field(dtype=str, default="deblendedFlux",
                         doc=("Name of the input catalog to use. "
                              "If the single band deblender was used this should be 'deblendedFlux'. "
                              "If the multi-band deblender was used this should be 'deblendedModel'. "
                              "If no deblending was performed this should be 'mergeDet'"))
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']

    def validate(self):
        super().validate()
        refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
        if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
                f"are different. These options must be kept in sync until Gen2 is retired."
            )


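# Illustrative override related to the validate() check above (a sketch; the catalog
# name "ps1_pv3_3pi_20170110" is only an example): the Gen2 loader and the Gen3
# connection must name the same reference catalog, e.g. in a config override file:
#
#     config.match.refObjLoader.ref_dataset_name = "ps1_pv3_3pi_20170110"
#     config.connections.refCat = "ps1_pv3_3pi_20170110"
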
## @addtogroup LSST_task_documentation
## @{
## @page MeasureMergedCoaddSourcesTask
## @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask"
## @copybrief MeasureMergedCoaddSourcesTask
## @}


class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)


class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor MeasureMergedCoaddSourcesTask_

    @brief Deblend sources from master catalog in each coadd separately and measure.

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description

    Command-line task that uses peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender outputs
    (including a HeavyFootprint in each band), measure each source on the
    coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
    consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
    properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
    flags are propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference catalog.

    @par Inputs:
        deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
        @n deepCoadd_calexp{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_meas{tract,patch,filter}: SourceCatalog
    @par Data Unit:
        tract, patch, filter

    MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:

    <DL>
      <DT> @ref SingleFrameMeasurementTask_ "measurement"
      <DD> Measure source properties of deblended sources.</DD>
      <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
      <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
      not at the edge of the field and that have either not been deblended or are the children of deblended
      sources</DD>
      <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
      <DD> Propagate flags set in individual visits to the coadd.</DD>
      <DT> @ref DirectMatchTask_ "match"
      <DD> Match input sources to a reference catalog (optional).
      </DD>
    </DL>
    These subtasks may be retargeted as required.

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters

    See @ref MeasureMergedCoaddSourcesConfig_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
    files.

    MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    the various sub-tasks. See the documentation for individual sub-tasks for more information.

    @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
    MeasureMergedCoaddSourcesTask

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
    The next stage in the multi-band processing procedure will merge these measurements into a suitable
    catalog for driving forced photometry.

    Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
    to be processed.
    A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
    `--help` command line argument:
    @code
    measureCoaddSources.py --help
    @endcode

    To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing,
    we will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has
    finished step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I
    band coadd as follows:
    @code
    measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
    @endcode
    This will process the HSC-I band data. The results are written in
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.

    It is also necessary to run
    @code
    measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
    @endcode
    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
    """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")  # The IDs we already have are of this type

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTask that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                             inputs.pop('refCat'), config=self.config.refObjLoader,
                                             log=self.log)
        self.match.setRefObjLoader(refObjLoader)

        # Set psfCache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
        inputs['exposureId'] = packedId
        idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = inputRefs.inputCatalog.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            # Filter out any visit catalog that is not coadd inputs
            ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
            visitKey = ccdInputs.schema.find("visit").key
            ccdKey = ccdInputs.schema.find("ccd").key
            inputVisitIds = set()
            ccdRecordsWcs = {}
            for ccdRecord in ccdInputs:
                visit = ccdRecord.get(visitKey)
                ccd = ccdRecord.get(ccdKey)
                inputVisitIds.add((visit, ccd))
                ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

            inputCatalogsToKeep = []
            inputCatalogWcsUpdate = []
            for i, dataRef in enumerate(inputRefs.visitCatalogs):
                key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                if key in inputVisitIds:
                    inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                    inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
            inputs['visitCatalogs'] = inputCatalogsToKeep
            inputs['wcsUpdates'] = inputCatalogWcsUpdate
            inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def runDataRef(self, patchRef, psfCache=100):
        """!
        @brief Deblend and measure.

        @param[in] patchRef: Patch reference.

        Set 'is-primary' and related flags. Propagate flags
        from individual visits. Optionally match the sources to a reference catalog and write the matches.
        Finally, write the deblended sources and measurements out.
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName
        exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
        exposure.getPsf().setCacheCapacity(psfCache)
        sources = self.readSources(patchRef)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)

        if self.config.doPropagateFlags:
            ccdInputs = self.propagateFlags.getCcdInputs(exposure)
        else:
            ccdInputs = None

        results = self.run(exposure=exposure, sources=sources,
                           ccdInputs=ccdInputs,
                           skyInfo=skyInfo, butler=patchRef.getButler(),
                           exposureId=self.getExposureId(patchRef))

        if self.config.doMatchSources:
            self.writeMatches(patchRef, results)
        self.write(patchRef, results.outputSources)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The input exposure on which measurements are to be performed
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
        exposureId : `int` or `bytes`
            packed unique number or bytes unique to the input exposure
        ccdInputs : `lsst.afw.table.ExposureCatalog`
            Catalog containing information on the individual visits which went into making
            the exposure
        visitCatalogs : list of `lsst.afw.table.SourceCatalog` or `None`
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
        wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to a common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
        butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
            Either a gen2 or gen3 butler used to load visit catalogs

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running the measurement task. Will contain the catalog in the
            outputSources attribute. Optionally will have results of matching to a
            reference catalog in the matchResult attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo, includeDeblend=self.deblended)
        if self.config.doPropagateFlags:
            self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warn("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                    results.denormMatches = denormMatches
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results

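    # Minimal sketch (not executed; `task`, `exposure`, `sources`, `skyInfo` and
    # `exposureId` are assumed to be prepared as in runQuantum/runDataRef above) of
    # consuming the Struct returned by run():
    #
    #     results = task.run(exposure=exposure, sources=sources,
    #                        skyInfo=skyInfo, exposureId=exposureId)
    #     measured = results.outputSources                    # measured SourceCatalog
    #     matches = getattr(results, "matchResult", None)     # only if doMatchSources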
    def readSources(self, dataRef):
        """!
        @brief Read input sources.

        @param[in] dataRef: Data reference for catalog of merged detections
        @return List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
        self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def writeMatches(self, dataRef, results):
        """!
        @brief Write matches of the sources to the astrometric reference catalog.

        @param[in] dataRef: data reference
        @param[in] results: results struct from run method
        """
        if hasattr(results, "matchResult"):
            dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
        if hasattr(results, "denormMatches"):
            dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")

    def write(self, dataRef, sources):
        """!
        @brief Write the source catalog.

        @param[in] dataRef: data reference
        @param[in] sources: source catalog
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_meas")
        self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))

    def getExposureId(self, dataRef):
        return int(dataRef.get(self.config.coaddName + "CoaddId"))