Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1#!/usr/bin/env python 

2# 

3# LSST Data Management System 

4# Copyright 2008-2015 AURA/LSST. 

5# 

6# This product includes software developed by the 

7# LSST Project (http://www.lsst.org/). 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the LSST License Statement and 

20# the GNU General Public License along with this program. If not, 

21# see <https://www.lsstcorp.org/LegalNotices/>. 

22# 

23from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer 

24from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner, 

25 PipelineTask, PipelineTaskConfig, PipelineTaskConnections) 

26import lsst.pipe.base.connectionTypes as cT 

27from lsst.pex.config import Config, Field, ConfigurableField 

28from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader 

29from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask 

30from lsst.meas.deblender import SourceDeblendTask 

31from lsst.meas.extensions.scarlet import ScarletDeblendTask 

32from lsst.pipe.tasks.coaddBase import getSkyInfo 

33from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask 

34from lsst.meas.astrom import DirectMatchTask, denormalizeMatches 

35from lsst.pipe.tasks.fakes import BaseFakeSourcesTask 

36from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask 

37from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask 

38import lsst.afw.image as afwImage 

39import lsst.afw.table as afwTable 

40import lsst.afw.math as afwMath 

41from lsst.daf.base import PropertyList 

42from lsst.skymap import BaseSkyMap 

43 

44from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401 

45from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401 

46from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401 

47from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory # noqa: F401 

48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401 

49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401 

50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401 

51from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401 

52 

53 

54""" 

55New set types: 

56* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter) 

57* deepCoadd_mergeDet: merged detections (tract, patch) 

58* deepCoadd_meas: measurements of merged detections (tract, patch, filter) 

59* deepCoadd_ref: reference sources (tract, patch) 

60All of these have associated *_schema catalogs that require no data ID and hold no records. 

61 

62In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in 

63the mergeDet, meas, and ref dataset Footprints: 

64* deepCoadd_peak_schema 

65""" 

66 

67 

68############################################################################################################## 

class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler connections for `DetectCoaddSourcesTask`.

    One coadd exposure comes in; the detection catalog, the background model
    used during detection, and the post-detection exposure go out.  All
    per-patch datasets share the (tract, patch, band, skymap) dimensions.
    """

    # Empty catalog carrying the schema of the detection output, persisted
    # once at task-initialization time.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    # The coadd to detect on.
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    # Background model fitted (or accumulated) during detection.
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    # Catalog of detected parent footprints.
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    # The input exposure after variance scaling / background handling.
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )

101 

102 

class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    # Whether to rescale the variance plane before detection.
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    insertFakes = ConfigurableField(
        target=BaseFakeSourcesTask,
        doc="Injection of fake sources for testing purposes (must be retargeted)",
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        """Tune the detection subtask defaults for coadd inputs."""
        super().setDefaults()
        detection = self.detection
        detection.thresholdType = "pixel_stdev"
        detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any further
        # background subtraction should be very basic.
        detection.reEstimateBackground = False
        detection.background.useApprox = False
        detection.background.binSize = 4096
        detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        # Suppress large footprints that overwhelm the deblender.
        detection.doTempWideBackground = True

134 

135## @addtogroup LSST_task_documentation 

136## @{ 

137## @page DetectCoaddSourcesTask 

138## @ref DetectCoaddSourcesTask_ "DetectCoaddSourcesTask" 

139## @copybrief DetectCoaddSourcesTask 

140## @} 

141 

142 

class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd

    Command-line / pipeline task that detects sources on a coadd of exposures
    obtained with a single filter.

    Coadding individual visits requires warping each exposure, which
    introduces covariance in the noise between pixels.  Before detection we
    therefore rescale the coadd's variance plane to match the observed
    variance (an approximation to propagating the full covariance matrix,
    but one that works well in practice).  Detection and footprint
    generation are then delegated to the @ref SourceDetectionTask_
    "detection" subtask, which may be retargeted.

    @par Inputs:
        deepCoadd{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
        @n deepCoadd_calexp{tract,patch,filter}: Variance scaled,
        background-subtracted input exposure (ExposureF)
        @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
    @par Data Unit:
        tract, patch, filter

    See @ref DetectCoaddSourcesConfig_ "DetectSourcesConfig" for the
    configuration parameters.  This task has no debug variables of its own;
    see the documentation for @ref SourceDetectionTask_
    "SourceDetectionTask" for detection-level debugging.

    Typical command-line usage, e.g. for the ci_hsc test data:
    @code
    detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
    @endcode
    Results are written under `deepCoadd-results/<filter>`; run once per band
    before proceeding to @ref MergeDetectionsTask_ "MergeDetectionsTask".
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        # Single --id argument selecting existing coadd patches.
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser

    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task, creating the @ref SourceDetectionTask_ "detection" subtask.

        @param[in] schema: initial schema for the output catalog, modified in place to include
            all fields set by this task.  If None, the source minimal schema is used.
        @param[in] **kwargs: keyword arguments forwarded to lsst.pipe.base.task.Task.__init__
        """
        # N.B. super() handles the PipelineTask/CmdLineTask multiple
        # inheritance; the __init__ call tree has been reviewed carefully to
        # be sure super will work as intended.
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")
        # Empty catalog persisted as the init-output schema dataset.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd patch (Gen2 entry point).

        Invokes @ref run and then @ref write to persist the results.

        @param[in] patchRef: data reference for patch
        """
        coaddDataset = self.config.coaddName + "Coadd"
        if self.config.hasFakes:
            coaddDataset = "fakes_" + coaddDataset
        exposure = patchRef.get(coaddDataset, immediate=True)
        expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)
        return results

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        # Gen3 entry point: derive a deterministic source-ID factory and
        # RNG seed from the packed (tract, patch, band) data ID.
        inputs = butlerQC.get(inputRefs)
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_band", returnMaxBits=True)
        inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        inputs["expId"] = packedId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance using
        @ref ScaleVarianceTask, then invoke the @ref SourceDetectionTask_
        "detection" subtask to detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted
            and scaled, depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - sources: catalog of detections
        - backgrounds: list of backgrounds
        """
        if self.config.doScaleVariance:
            varianceScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", varianceScale)
        backgrounds = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=backgrounds)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        # Accumulate any backgrounds the detection step fitted.
        detectionBackgrounds = getattr(detections.fpSets, "background", None)
        if detectionBackgrounds:
            for bg in detectionBackgrounds:
                backgrounds.append(bg)
        return Struct(outputSources=detections.sources, outputBackgrounds=backgrounds,
                      outputExposure=exposure)

    def write(self, results, patchRef):
        """!
        @brief Write out the Struct returned by @ref run.

        @param[in] results: Struct returned from run
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        calexpPrefix = "fakes_" + coaddName if self.config.hasFakes else coaddName
        patchRef.put(results.outputExposure, calexpPrefix + "_calexp")

343 

344############################################################################################################## 

345 

346 

class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    # BUGFIX: the help text previously had the two subtask names swapped.
    # The task runs 'multiBandDeblend' when simultaneous is True and
    # 'singleBandDeblend' when it is False (see DeblendCoaddSourcesTask.__init__).
    simultaneous = Field(dtype=bool,
                         default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses 'multiBandDeblend' while False uses 'singleBandDeblend'")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        # Use super() rather than an explicit Config.setDefaults(self) call
        # so the full MRO is honored.
        super().setDefaults()
        # Keep every merged peak through single-band deblending so that
        # per-band catalogs stay row-aligned with the merged catalog.
        self.singleBandDeblend.propagateAllPeaks = True

368 

369 

class DeblendCoaddSourcesRunner(MergeSourcesRunner):
    """Task runner for `DeblendCoaddSourcesTask`.

    Required because the task's run method expects a list of data
    references (one per band) rather than a single data reference.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Provide a list of patch references for each patch, tract, filter combo.

        Parameters
        ----------
        parsedCmd:
            The parsed command
        kwargs:
            Keyword arguments passed to the task

        Returns
        -------
        targetList: list
            List of tuples, where each tuple is a (dataRef, kwargs) pair.
        """
        refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
        kwargs["psfCache"] = parsedCmd.psfCache
        # refDict is nested tract -> patch -> filter -> dataRef; one target
        # per (tract, patch) holding the references for every filter.
        targets = []
        for tractRefs in refDict.values():
            for patchRefs in tractRefs.values():
                targets.append((list(patchRefs.values()), kwargs))
        return targets

395 

396 

class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog

    Deblend sources from the master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS
    deblender (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task sets its own `self.schema` attribute to the `Schema` of the
    output deblended catalog: all fields from the input `Schema` plus the
    additional fields added by the deblender.

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk when `schema` or
        `peakSchema` is not supplied.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged
        detection catalog.
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        # Fall back to the persisted schemas when none were passed in.
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        # simultaneous=True -> SCARLET across bands; False -> per-band deblender.
        subtaskName = "multiBandDeblend" if self.config.simultaneous else "singleBandDeblend"
        self.makeSubtask(subtaskName, schema=self.schema, peakSchema=peakSchema)

    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result: dict
            Dictionary of empty catalogs, with catalog names as keys.
        """
        emptyCatalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": emptyCatalog,
                self.config.coaddName + "Coadd_deblendedModel": emptyCatalog}

    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend the patch

        Deblend each source simultaneously or separately (depending on
        `DeblendCoaddSourcesTask.config.simultaneous`), then write the
        deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        psfCache: int
            Size of the CoaddPsf cache used in the single-band path.
        """
        prefix = "fakes_" if self.config.hasFakes else ""
        coaddType = prefix + self.config.coaddName

        if self.config.simultaneous:
            # Use SCARLET to simultaneously deblend across filters.
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filters.append(patchRef.dataId["filter"])
                exposures.append(exposure)
            # The input sources are the same for all bands, since it is a
            # merged catalog — any of the patch references will do.
            sources = self.readSources(patchRef)
            mExposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            templateCatalogs = self.multiBandDeblend.run(mExposure, sources)
            for patchRef, band in zip(patchRefList, filters):
                self.write(patchRef, templateCatalogs[band])
        else:
            # Use the single-band deblender to deblend each band separately.
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)

    def readSources(self, dataRef):
        """Read merged catalog

        Read the catalog of merged detections and create a catalog in a
        single band, adding the columns that the downstream measurement
        code will fill in so measurement can be done in place.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
        idFactory = self.makeIdFactory(dataRef)
        # Reserve every already-assigned ID so new records cannot collide.
        for record in merged:
            idFactory.notify(record.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def write(self, dataRef, sources):
        """Write the deblended source catalog.

        Parameters
        ----------
        dataRef: Data Reference
            Reference to the output catalog.
        sources: `SourceCatalog`
            Flux conserved sources to write to file.
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux")
        self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))

    def writeMetadata(self, dataRefList):
        """Write the metadata produced from processing the data.

        Parameters
        ----------
        dataRefList
            List of Butler data references used to write the metadata.
            The metadata is written to dataset type `CmdLineTask._getMetadataName`.
        """
        for dataRef in dataRefList:
            try:
                metadataName = self._getMetadataName()
                if metadataName is not None:
                    dataRef.put(self.getFullMetadata(), metadataName)
            except Exception as e:
                # Best-effort: metadata persistence failures are logged, not fatal.
                self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)

    def getExposureId(self, dataRef):
        """Get the ExposureId from a data reference
        """
        return int(dataRef.get(self.config.coaddName + "CoaddId"))

577 

578 

class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep"}):
    """Butler connections for `MeasureMergedCoaddSourcesTask`."""

    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    # FIX: "codd objects" -> "coadd objects" in the help text below.
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    # FIX: balanced the quoting and spacing of the dataset names in the help text.
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use. "
             "If the single band deblender was used this should be 'deblendedFlux'. "
             "If the multi-band deblender was used this should be 'deblendedModel', "
             "or 'deblendedFlux' if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'."),
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        """Prune optional connections that the configuration disables."""
        super().__init__(config=config)
        # Idiomatic boolean tests and set literals (fields are bool Fields).
        if not config.doPropagateFlags:
            self.inputs -= {"visitCatalogs"}
        if not config.doMatchSources:
            self.outputs -= {"matchResult"}
        if not config.doWriteMatchesDenormalized:
            self.outputs -= {"denormMatches"}

663 

664 

class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    # FIX: the help text previously ran sentences together without spaces and
    # left the quoted dataset names unbalanced ("'deblendedFlux.").
    inputCatalog = Field(dtype=str, default="deblendedFlux",
                         doc=("Name of the input catalog to use. "
                              "If the single band deblender was used this should be 'deblendedFlux'. "
                              "If the multi-band deblender was used this should be 'deblendedModel'. "
                              "If no deblending was performed this should be 'mergeDet'."))
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Convenience alias: the reference-object loader lives on the match subtask.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # Flag pixels affected by clipping, sensor edges, or inexact PSFs
        # anywhere in (and at the center of) each footprint.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']

    def validate(self):
        super().validate()
        # The Gen2 (ref_dataset_name) and Gen3 (connections.refCat) reference
        # catalog settings must agree until Gen2 is retired.
        refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
        if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
                f"are different. These options must be kept in sync until Gen2 is retired."
            )

747 

748 

749## @addtogroup LSST_task_documentation 

750## @{ 

751## @page MeasureMergedCoaddSourcesTask 

752## @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask" 

753## @copybrief MeasureMergedCoaddSourcesTask 

754## @} 

755 

756 

class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Get the psfCache setting into MeasureMergedCoaddSourcesTask."""

    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Forward the parsed ``--psfCache`` value as a per-target keyword argument."""
        psfCache = parsedCmd.psfCache
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=psfCache)

762 

763 

class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor MeasureMergedCoaddSourcesTask_

    @brief Deblend sources from master catalog in each coadd separately and measure.

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description

    Command-line task that uses peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender outputs
    (including a HeavyFootprint in each band), measure each source on the
    coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
    consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
    properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
    flags are propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference catalog.

    @par Inputs:
        deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
        @n deepCoadd_calexp{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_meas{tract,patch,filter}: SourceCatalog
    @par Data Unit:
        tract, patch, filter

    MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:

    <DL>
      <DT> @ref SingleFrameMeasurementTask_ "measurement"
      <DD> Measure source properties of deblended sources.</DD>
      <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
      <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
      not at the edge of the field and that have either not been deblended or are the children of deblended
      sources</DD>
      <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
      <DD> Propagate flags set in individual visits to the coadd.</DD>
      <DT> @ref DirectMatchTask_ "match"
      <DD> Match input sources to a reference catalog (optional).
      </DD>
    </DL>
    These subtasks may be retargeted as required.

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters

    See @ref MeasureMergedCoaddSourcesConfig_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
    files.

    MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    the various sub-tasks. See the documentation for individual sub-tasks for more information.

    @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
    MeasureMergedCoaddSourcesTask

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
    The next stage in the multi-band processing procedure will merge these measurements into a suitable
    catalog for driving forced photometry.

    Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
    to be processed.
    A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
    `--help` command line argument:
    @code
    measureCoaddSources.py --help
    @endcode

    To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
    step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
    coadd as follows:
    @code
    measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
    @endcode
    This will process the HSC-I band data. The results are written in
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`

    It is also necessary to run
    @code
    measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
    @endcode
    to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
    procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
    """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner  # forwards --psfCache to runDataRef
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")  # The IDs we already have are of this type

878 

879 @classmethod 

880 def _makeArgumentParser(cls): 

881 parser = ArgumentParser(name=cls._DefaultName) 

882 parser.add_id_argument("--id", "deepCoadd_calexp", 

883 help="data ID, e.g. --id tract=12345 patch=1,2 filter=r", 

884 ContainerClass=ExistingCoaddDataIdContainer) 

885 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache") 

886 return parser 

887 

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None
        @param[in] initInputs: Gen3 init-input datasets; if not None, the input schema is taken from
            initInputs['inputSchema'] instead of the schema/butler arguments.

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        # True when the input catalog already contains deblender outputs
        # (config.inputCatalog is e.g. "deblendedFlux" rather than "mergeDet").
        self.deblended = self.config.inputCatalog.startswith("deblended")
        # Dataset-type suffix for the input catalog, e.g. "Coadd_mergeDet".
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            # Gen3: the input schema arrives as an init-input catalog.
            schema = initInputs['inputSchema'].schema
        if schema is None:
            # Gen2 fallback: read the schema from the butler repository.
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        # Map the input schema into the (wider) output measurement schema.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        # Subtask construction order matters: each subtask may add fields to self.schema.
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Empty catalog carrying the final output schema, used as the Gen3 init-output.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

931 

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        """Gen3 entry point: fetch the quantum's inputs, adapt them to the
        keyword arguments ``run`` expects, and persist the outputs.

        @param[in] butlerQC: butler quantum context used to get inputs and put outputs
        @param[in] inputRefs: references to this quantum's input datasets
        @param[in] outputRefs: references to this quantum's output datasets
        """
        inputs = butlerQC.get(inputRefs)

        # Build the reference loader from the per-shard refCat datasets and hand
        # it to the match subtask (which was constructed without a loader in Gen3).
        refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                             inputs.pop('refCat'), config=self.config.refObjLoader,
                                             log=self.log)
        self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
        inputs['exposureId'] = packedId
        idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        # Transform inputCatalog: rebuild it against self.schema so there is room
        # for the measurement columns, copying records through the schema mapper.
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        # Assemble the skyInfo struct that run() expects from the skyMap dataset
        # and the tract/patch of the input catalog's data ID.
        skyMap = inputs.pop('skyMap')
        tractNumber = inputRefs.inputCatalog.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            # Filter out any visit catalog that is not coadd inputs
            ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
            visitKey = ccdInputs.schema.find("visit").key
            ccdKey = ccdInputs.schema.find("ccd").key
            inputVisitIds = set()
            ccdRecordsWcs = {}
            for ccdRecord in ccdInputs:
                visit = ccdRecord.get(visitKey)
                ccd = ccdRecord.get(ccdKey)
                inputVisitIds.add((visit, ccd))
                ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

            # Keep only the visit catalogs whose (visit, detector) contributed to
            # the coadd, pairing each with the WCS recorded in the coadd inputs.
            inputCatalogsToKeep = []
            inputCatalogWcsUpdate = []
            for i, dataRef in enumerate(inputRefs.visitCatalogs):
                key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                if key in inputVisitIds:
                    inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                    inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
            inputs['visitCatalogs'] = inputCatalogsToKeep
            inputs['wcsUpdates'] = inputCatalogWcsUpdate
            inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

995 

996 def runDataRef(self, patchRef, psfCache=100): 

997 """! 

998 @brief Deblend and measure. 

999 

1000 @param[in] patchRef: Patch reference. 

1001 

1002 Set 'is-primary' and related flags. Propagate flags 

1003 from individual visits. Optionally match the sources to a reference catalog and write the matches. 

1004 Finally, write the deblended sources and measurements out. 

1005 """ 

1006 if self.config.hasFakes: 

1007 coaddType = "fakes_" + self.config.coaddName 

1008 else: 

1009 coaddType = self.config.coaddName 

1010 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True) 

1011 exposure.getPsf().setCacheCapacity(psfCache) 

1012 sources = self.readSources(patchRef) 

1013 table = sources.getTable() 

1014 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog. 

1015 skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef) 

1016 

1017 if self.config.doPropagateFlags: 

1018 ccdInputs = self.propagateFlags.getCcdInputs(exposure) 

1019 else: 

1020 ccdInputs = None 

1021 

1022 results = self.run(exposure=exposure, sources=sources, 

1023 ccdInputs=ccdInputs, 

1024 skyInfo=skyInfo, butler=patchRef.getButler(), 

1025 exposureId=self.getExposureId(patchRef)) 

1026 

1027 if self.config.doMatchSources: 

1028 self.writeMatches(patchRef, results) 

1029 self.write(patchRef, results.outputSources) 

1030 

1031 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None, 

1032 butler=None): 

1033 """Run measurement algorithms on the input exposure, and optionally populate the 

1034 resulting catalog with extra information. 

1035 

1036 Parameters 

1037 ---------- 

1038 exposure : `lsst.afw.exposure.Exposure` 

1039 The input exposure on which measurements are to be performed 

1040 sources : `lsst.afw.table.SourceCatalog` 

1041 A catalog built from the results of merged detections, or 

1042 deblender outputs. 

1043 skyInfo : `lsst.pipe.base.Struct` 

1044 A struct containing information about the position of the input exposure within 

1045 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box 

1046 exposureId : `int` or `bytes` 

1047 packed unique number or bytes unique to the input exposure 

1048 ccdInputs : `lsst.afw.table.ExposureCatalog` 

1049 Catalog containing information on the individual visits which went into making 

1050 the exposure 

1051 visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None` 

1052 A list of source catalogs corresponding to measurements made on the individual 

1053 visits which went into the input exposure. If None and butler is `None` then 

1054 the task cannot propagate visit flags to the output catalog. 

1055 wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None` 

1056 If visitCatalogs is not `None` this should be a list of wcs objects which correspond 

1057 to the input visits. Used to put all coordinates to common system. If `None` and 

1058 butler is `None` then the task cannot propagate visit flags to the output catalog. 

1059 butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler` 

1060 Either a gen2 or gen3 butler used to load visit catalogs 

1061 

1062 Returns 

1063 ------- 

1064 results : `lsst.pipe.base.Struct` 

1065 Results of running measurement task. Will contain the catalog in the 

1066 sources attribute. Optionally will have results of matching to a 

1067 reference catalog in the matchResults attribute, and denormalized 

1068 matches in the denormMatches attribute. 

1069 """ 

1070 self.measurement.run(sources, exposure, exposureId=exposureId) 

1071 

1072 if self.config.doApCorr: 

1073 self.applyApCorr.run( 

1074 catalog=sources, 

1075 apCorrMap=exposure.getInfo().getApCorrMap() 

1076 ) 

1077 

1078 # TODO DM-11568: this contiguous check-and-copy could go away if we 

1079 # reserve enough space during SourceDetection and/or SourceDeblend. 

1080 # NOTE: sourceSelectors require contiguous catalogs, so ensure 

1081 # contiguity now, so views are preserved from here on. 

1082 if not sources.isContiguous(): 

1083 sources = sources.copy(deep=True) 

1084 

1085 if self.config.doRunCatalogCalculation: 

1086 self.catalogCalculation.run(sources) 

1087 

1088 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo, 

1089 patchInfo=skyInfo.patchInfo, includeDeblend=self.deblended) 

1090 if self.config.doPropagateFlags: 

1091 self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates) 

1092 

1093 results = Struct() 

1094 

1095 if self.config.doMatchSources: 

1096 matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName()) 

1097 matches = afwTable.packMatches(matchResult.matches) 

1098 matches.table.setMetadata(matchResult.matchMeta) 

1099 results.matchResult = matches 

1100 if self.config.doWriteMatchesDenormalized: 

1101 if matchResult.matches: 

1102 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta) 

1103 else: 

1104 self.log.warn("No matches, so generating dummy denormalized matches file") 

1105 denormMatches = afwTable.BaseCatalog(afwTable.Schema()) 

1106 denormMatches.setMetadata(PropertyList()) 

1107 denormMatches.getMetadata().add("COMMENT", 

1108 "This catalog is empty because no matches were found.") 

1109 results.denormMatches = denormMatches 

1110 results.denormMatches = denormMatches 

1111 

1112 results.outputSources = sources 

1113 return results 

1114 

1115 def readSources(self, dataRef): 

1116 """! 

1117 @brief Read input sources. 

1118 

1119 @param[in] dataRef: Data reference for catalog of merged detections 

1120 @return List of sources in merged catalog 

1121 

1122 We also need to add columns to hold the measurements we're about to make 

1123 so we can measure in-place. 

1124 """ 

1125 merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True) 

1126 self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId)) 

1127 idFactory = self.makeIdFactory(dataRef) 

1128 for s in merged: 

1129 idFactory.notify(s.getId()) 

1130 table = afwTable.SourceTable.make(self.schema, idFactory) 

1131 sources = afwTable.SourceCatalog(table) 

1132 sources.extend(merged, self.schemaMapper) 

1133 return sources 

1134 

1135 def writeMatches(self, dataRef, results): 

1136 """! 

1137 @brief Write matches of the sources to the astrometric reference catalog. 

1138 

1139 @param[in] dataRef: data reference 

1140 @param[in] results: results struct from run method 

1141 """ 

1142 if hasattr(results, "matchResult"): 

1143 dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch") 

1144 if hasattr(results, "denormMatches"): 

1145 dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull") 

1146 

1147 def write(self, dataRef, sources): 

1148 """! 

1149 @brief Write the source catalog. 

1150 

1151 @param[in] dataRef: data reference 

1152 @param[in] sources: source catalog 

1153 """ 

1154 dataRef.put(sources, self.config.coaddName + "Coadd_meas") 

1155 self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId)) 

1156 

1157 def getExposureId(self, dataRef): 

1158 return int(dataRef.get(self.config.coaddName + "CoaddId"))