Coverage for python/lsst/ap/association/diaPipe.py: 23%

198 statements  

coverage.py v7.5.0, created at 2024-05-02 04:31 -0700

1# 

2# LSST Data Management System 

3# Copyright 2008-2016 AURA/LSST. 

4# 

5# This product includes software developed by the 

6# LSST Project (http://www.lsst.org/). 

7# 

8# This program is free software: you can redistribute it and/or modify 

9# it under the terms of the GNU General Public License as published by 

10# the Free Software Foundation, either version 3 of the License, or 

11# (at your option) any later version. 

12# 

13# This program is distributed in the hope that it will be useful, 

14# but WITHOUT ANY WARRANTY; without even the implied warranty of 

15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

16# GNU General Public License for more details. 

17# 

18# You should have received a copy of the LSST License Statement and 

19# the GNU General Public License along with this program. If not, 

20# see <https://www.lsstcorp.org/LegalNotices/>. 

21# 

22 

23"""PipelineTask for associating DiaSources with previous DiaObjects. 

24 

25Additionally performs forced photometry on the calibrated and difference 

26images at the updated locations of DiaObjects. 

27 

28Currently loads directly from the Apdb rather than pre-loading. 

29""" 

30 

31__all__ = ("DiaPipelineConfig", 

32 "DiaPipelineTask", 

33 "DiaPipelineConnections") 

34 

35 

36import warnings 

37 

38import numpy as np 

39import pandas as pd 

40 

41from lsst.daf.base import DateTime 

42import lsst.dax.apdb as daxApdb 

43from lsst.meas.base import DetectorVisitIdGeneratorConfig, DiaObjectCalculationTask 

44import lsst.pex.config as pexConfig 

45import lsst.pipe.base as pipeBase 

46import lsst.pipe.base.connectionTypes as connTypes 

47from lsst.utils.timer import timeMethod 

48 

49from lsst.ap.association import ( 

50 AssociationTask, 

51 DiaForcedSourceTask, 

52 LoadDiaCatalogsTask, 

53 PackageAlertsTask) 

54from lsst.ap.association.ssoAssociation import SolarSystemAssociationTask 

55 

56 

57class DiaPipelineConnections( 

58 pipeBase.PipelineTaskConnections, 

59 dimensions=("instrument", "visit", "detector"), 

60 defaultTemplates={"coaddName": "deep", "fakesType": ""}): 

61 """Butler connections for DiaPipelineTask. 

62 """ 

63 diaSourceTable = connTypes.Input( 

64 doc="Catalog of calibrated DiaSources.", 

65 name="{fakesType}{coaddName}Diff_diaSrcTable", 

66 storageClass="DataFrame", 

67 dimensions=("instrument", "visit", "detector"), 

68 ) 

69 solarSystemObjectTable = connTypes.Input( 

70 doc="Catalog of SolarSystem objects expected to be observable in " 

71 "this detectorVisit.", 

72 name="visitSsObjects", 

73 storageClass="DataFrame", 

74 dimensions=("instrument", "visit"), 

75 ) 

76 diffIm = connTypes.Input( 

77 doc="Difference image on which the DiaSources were detected.", 

78 name="{fakesType}{coaddName}Diff_differenceExp", 

79 storageClass="ExposureF", 

80 dimensions=("instrument", "visit", "detector"), 

81 ) 

82 exposure = connTypes.Input( 

83 doc="Calibrated exposure differenced with a template image during " 

84 "image differencing.", 

85 name="{fakesType}calexp", 

86 storageClass="ExposureF", 

87 dimensions=("instrument", "visit", "detector"), 

88 ) 

89 template = connTypes.Input( 

90 doc="Warped template used to create `subtractedExposure`. Not PSF " 

91 "matched.", 

92 dimensions=("instrument", "visit", "detector"), 

93 storageClass="ExposureF", 

94 name="{fakesType}{coaddName}Diff_templateExp", 

95 ) 

96 apdbMarker = connTypes.Output( 

97 doc="Marker dataset storing the configuration of the Apdb for each " 

98 "visit/detector. Used to signal the completion of the pipeline.", 

99 name="apdb_marker", 

100 storageClass="Config", 

101 dimensions=("instrument", "visit", "detector"), 

102 ) 

103 associatedDiaSources = connTypes.Output( 

104 doc="Optional output storing the DiaSource catalog after matching, " 

105 "calibration, and standardization for insertion into the Apdb.", 

106 name="{fakesType}{coaddName}Diff_assocDiaSrc", 

107 storageClass="DataFrame", 

108 dimensions=("instrument", "visit", "detector"), 

109 ) 

110 diaForcedSources = connTypes.Output( 

111 doc="Optional output storing the forced sources computed at the diaObject positions.", 

112 name="{fakesType}{coaddName}Diff_diaForcedSrc", 

113 storageClass="DataFrame", 

114 dimensions=("instrument", "visit", "detector"), 

115 ) 

116 diaObjects = connTypes.Output( 

117 doc="Optional output storing the updated diaObjects associated to these sources.", 

118 name="{fakesType}{coaddName}Diff_diaObject", 

119 storageClass="DataFrame", 

120 dimensions=("instrument", "visit", "detector"), 

121 ) 

122 longTrailedSources = connTypes.Output( 

123 doc="Optional output temporarily storing long trailed diaSources.", 

124 dimensions=("instrument", "visit", "detector"), 

125 storageClass="DataFrame", 

126 name="{fakesType}{coaddName}Diff_longTrailedSrc", 

127 ) 

128 

129 def __init__(self, *, config=None): 

130 super().__init__(config=config) 

131 

132 if not config.doWriteAssociatedSources: 

133 self.outputs.remove("associatedDiaSources") 

134 self.outputs.remove("diaForcedSources") 

135 self.outputs.remove("diaObjects") 

136 elif not config.doRunForcedMeasurement: 

137 self.outputs.remove("diaForcedSources") 

138 if not config.doSolarSystemAssociation: 

139 self.inputs.remove("solarSystemObjectTable") 

140 if not config.associator.doTrailedSourceFilter: 

141 self.outputs.remove("longTrailedSources") 

142 

143 def adjustQuantum(self, inputs, outputs, label, dataId): 

144 """Override to make adjustments to `lsst.daf.butler.DatasetRef` objects 

145 in the `lsst.daf.butler.core.Quantum` during the graph generation stage 

146 of the activator. 

147 

148 This implementation checks to make sure that the filters in the dataset 

149 are compatible with AP processing as set by the Apdb/DPDD schema. 

150 

151 Parameters 

152 ---------- 

153 inputs : `dict` 

154 Dictionary whose keys are an input (regular or prerequisite) 

155 connection name and whose values are a tuple of the connection 

156 instance and a collection of associated `DatasetRef` objects. 

157 The exact type of the nested collections is unspecified; it can be 

158 assumed to be multi-pass iterable and support `len` and ``in``, but 

159 it should not be mutated in place. In contrast, the outer 

160 dictionaries are guaranteed to be temporary copies that are true 

161 `dict` instances, and hence may be modified and even returned; this 

162 is especially useful for delegating to `super` (see notes below). 

163 outputs : `dict` 

164 Dict of output datasets, with the same structure as ``inputs``. 

165 label : `str` 

166 Label for this task in the pipeline (should be used in all 

167 diagnostic messages). 

168 dataId : `lsst.daf.butler.DataCoordinate` 

169 Data ID for this quantum in the pipeline (should be used in all 

170 diagnostic messages). 

171 

172 Returns 

173 ------- 

174 adjusted_inputs : `dict` 

175 Dict of the same form as ``inputs`` with updated containers of 

176 input `DatasetRef` objects. Connections that are not changed 

177 should not be returned at all. Datasets may only be removed, not 

178 added. Nested collections may be of any multi-pass iterable type, 

179 and the order of iteration will set the order of iteration within 

180 `PipelineTask.runQuantum`. 

181 adjusted_outputs : `dict` 

182 Dict of updated output datasets, with the same structure and 

183 interpretation as ``adjusted_inputs``. 

184 

185 Raises 

186 ------ 

187 ScalarError 

188 Raised if any `Input` or `PrerequisiteInput` connection has 

189 ``multiple`` set to `False`, but multiple datasets are present. 

190 NoWorkFound 

191 Raised to indicate that this quantum should not be run; not enough 

192 datasets were found for a regular `Input` connection, and the 

193 quantum should be pruned or skipped. 

194 FileNotFoundError 

195 Raised to cause QuantumGraph generation to fail (with the message 

196 included in this exception); not enough datasets were found for a 

197 `PrerequisiteInput` connection. 

198 """ 

199 _, refs = inputs["diffIm"] 

200 for ref in refs: 

201 if ref.dataId["band"] not in self.config.validBands: 

202 raise ValueError( 

203 f"Requested '{ref.dataId['band']}' not in " 

204 "DiaPipelineConfig.validBands. To process bands not in " 

205 "the standard Rubin set (ugrizy) you must add the band to " 

206 "the validBands list in DiaPipelineConfig and add the " 

207 "appropriate columns to the Apdb schema.") 

208 return super().adjustQuantum(inputs, outputs, label, dataId) 

209 

210 
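The connection pruning in ``__init__`` above is driven entirely by config flags defined on ``DiaPipelineConfig`` below. A minimal sketch of a pipetask config override exercising those flags (assuming the usual injected ``config`` object for this task) might look like:

# Hypothetical override file for DiaPipelineTask; sketch only, not part of this module.
config.doWriteAssociatedSources = False          # drops associatedDiaSources, diaForcedSources, diaObjects
config.doSolarSystemAssociation = False          # drops the solarSystemObjectTable input
config.associator.doTrailedSourceFilter = False  # drops the longTrailedSources output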

211class DiaPipelineConfig(pipeBase.PipelineTaskConfig, 

212 pipelineConnections=DiaPipelineConnections): 

213 """Config for DiaPipelineTask. 

214 """ 

215 coaddName = pexConfig.Field( 

216 doc="coadd name: typically one of deep, goodSeeing, or dcr", 

217 dtype=str, 

218 default="deep", 

219 ) 

220 apdb = pexConfig.ConfigurableField( # TODO: remove on DM-43419 

221 target=daxApdb.ApdbSql, 

222 doc="Database connection for storing associated DiaSources and " 

223 "DiaObjects. Must already be initialized.", 

224 deprecated="This field has been replaced by ``apdb_config_url``; set " 

225 "``doConfigureApdb=False`` to use it. Will be removed after v28.", 

226 ) 

227 apdb_config_url = pexConfig.Field( 

228 dtype=str, 

229 default=None, 

230 optional=False, 

231 doc="A config file specifying the APDB and its connection parameters, " 

232 "typically written by the apdb-cli command-line utility. " 

233 "The database must already be initialized.", 

234 ) 

235 validBands = pexConfig.ListField( 

236 dtype=str, 

237 default=["u", "g", "r", "i", "z", "y"], 

238 doc="List of bands that are valid for AP processing. To process a " 

239 "band not on this list, the appropriate band specific columns " 

240 "must be added to the Apdb schema in dax_apdb.", 

241 ) 

242 diaCatalogLoader = pexConfig.ConfigurableField( 

243 target=LoadDiaCatalogsTask, 

244 doc="Task to load DiaObjects and DiaSources from the Apdb.", 

245 ) 

246 associator = pexConfig.ConfigurableField( 

247 target=AssociationTask, 

248 doc="Task used to associate DiaSources with DiaObjects.", 

249 ) 

250 doSolarSystemAssociation = pexConfig.Field( 

251 dtype=bool, 

252 default=False, 

253 doc="Process SolarSystem objects through the pipeline.", 

254 ) 

255 solarSystemAssociator = pexConfig.ConfigurableField( 

256 target=SolarSystemAssociationTask, 

257 doc="Task used to associate DiaSources with SolarSystemObjects.", 

258 ) 

259 diaCalculation = pexConfig.ConfigurableField( 

260 target=DiaObjectCalculationTask, 

261 doc="Task to compute summary statistics for DiaObjects.", 

262 ) 

263 doLoadForcedSources = pexConfig.Field( 

264 dtype=bool, 

265 default=True, 

266 deprecated="Added to allow disabling forced sources for performance " 

267 "reasons during the ops rehearsal. " 

268 "It is expected to be removed.", 

269 doc="Load forced DiaSource history from the APDB? " 

270 "This should only be turned off for debugging purposes.", 

271 ) 

272 doRunForcedMeasurement = pexConfig.Field( 

273 dtype=bool, 

274 default=True, 

275 deprecated="Added to allow disabling forced sources for performance " 

276 "reasons during the ops rehearsal. " 

277 "It is expected to be removed.", 

278 doc="Run forced measurement on all of the diaObjects? " 

279 "This should only be turned off for debugging purposes.", 

280 ) 

281 diaForcedSource = pexConfig.ConfigurableField( 

282 target=DiaForcedSourceTask, 

283 doc="Task used to force photometer DiaObject locations in direct and " 

284 "difference images.", 

285 ) 

286 alertPackager = pexConfig.ConfigurableField( 

287 target=PackageAlertsTask, 

288 doc="Subtask for packaging Ap data into alerts.", 

289 ) 

290 doPackageAlerts = pexConfig.Field( 

291 dtype=bool, 

292 default=False, 

293 doc="Package Dia-data into serialized alerts for distribution and " 

294 "write them to disk.", 

295 ) 

296 doWriteAssociatedSources = pexConfig.Field( 

297 dtype=bool, 

298 default=True, 

299 doc="Write out associated DiaSources, DiaForcedSources, and DiaObjects, " 

300 "formatted following the Science Data Model.", 

301 ) 

302 imagePixelMargin = pexConfig.RangeField( 

303 dtype=int, 

304 default=10, 

305 min=0, 

306 doc="Pad the image by this many pixels before removing off-image " 

307 "diaObjects for association.", 

308 ) 

309 idGenerator = DetectorVisitIdGeneratorConfig.make_field() 

310 doConfigureApdb = pexConfig.Field( # TODO: remove on DM-43419 

311 dtype=bool, 

312 default=True, 

313 doc="Use the deprecated ``apdb`` sub-config to set up the APDB, " 

314 "instead of the new config (``apdb_config_url``). This field is " 

315 "provided for backward-compatibility ONLY and will be removed " 

316 "without notice after v28.", 

317 ) 

318 

319 def setDefaults(self): 

320 self.apdb.dia_object_index = "baseline" 

321 self.apdb.dia_object_columns = [] 

322 self.diaCalculation.plugins = ["ap_meanPosition", 

323 "ap_nDiaSources", 

324 "ap_diaObjectFlag", 

325 "ap_meanFlux", 

326 "ap_percentileFlux", 

327 "ap_sigmaFlux", 

328 "ap_chi2Flux", 

329 "ap_madFlux", 

330 "ap_skewFlux", 

331 "ap_minMaxFlux", 

332 "ap_maxSlopeFlux", 

333 "ap_meanErrFlux", 

334 "ap_linearFit", 

335 "ap_stetsonJ", 

336 "ap_meanTotFlux", 

337 "ap_sigmaTotFlux"] 

338 

339 # TODO: remove on DM-43419 

340 def validate(self): 

341 # Sidestep Config.validate to avoid validating uninitialized fields we're not using. 

342 skip = {"apdb_config_url"} if self.doConfigureApdb else {"apdb"} 

343 for name, field in self._fields.items(): 

344 if name not in skip: 

345 field.validate(self) 

346 

347 # It's possible to use apdb without setting it, bypassing the deprecation warning. 

348 if self.doConfigureApdb: 

349 warnings.warn("Config field DiaPipelineConfig.apdb is deprecated: " 

350 # Workaround for DM-44051 

351 "This field has been replaced by ``apdb_config_url``; set " 

352 "``doConfigureApdb=False`` to use it. Will be removed after v28.", 

353 FutureWarning) 

354 

355 
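Given the deprecation notes above, a sketch of pointing the task at an already-initialized APDB through the new-style config (the file path is a made-up example) could be:

# Sketch only: prefer apdb_config_url over the deprecated ``apdb`` sub-config.
config.doConfigureApdb = False
config.apdb_config_url = "/path/to/apdb-config.yaml"  # hypothetical file written by apdb-cli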

356class DiaPipelineTask(pipeBase.PipelineTask): 

357 """Task for loading, associating and storing Difference Image Analysis 

358 (DIA) Objects and Sources. 

359 """ 

360 ConfigClass = DiaPipelineConfig 

361 _DefaultName = "diaPipe" 

362 

363 def __init__(self, initInputs=None, **kwargs): 

364 super().__init__(**kwargs) 

365 if self.config.doConfigureApdb: 

366 self.apdb = self.config.apdb.apply() 

367 else: 

368 self.apdb = daxApdb.Apdb.from_uri(self.config.apdb_config_url) 

369 self.makeSubtask("diaCatalogLoader") 

370 self.makeSubtask("associator") 

371 self.makeSubtask("diaCalculation") 

372 if self.config.doRunForcedMeasurement: 

373 self.makeSubtask("diaForcedSource") 

374 if self.config.doPackageAlerts: 

375 self.makeSubtask("alertPackager") 

376 if self.config.doSolarSystemAssociation: 

377 self.makeSubtask("solarSystemAssociator") 

378 

379 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

380 inputs = butlerQC.get(inputRefs) 

381 inputs["idGenerator"] = self.config.idGenerator.apply(butlerQC.quantum.dataId) 

382 inputs["band"] = butlerQC.quantum.dataId["band"] 

383 if not self.config.doSolarSystemAssociation: 

384 inputs["solarSystemObjectTable"] = None 

385 

386 outputs = self.run(**inputs) 

387 

388 butlerQC.put(outputs, outputRefs) 

389 

390 @timeMethod 

391 def run(self, 

392 diaSourceTable, 

393 solarSystemObjectTable, 

394 diffIm, 

395 exposure, 

396 template, 

397 band, 

398 idGenerator): 

399 """Process DiaSources and DiaObjects. 

400 

401 Load previous DiaObjects and their DiaSource history. Calibrate the 

402 values in the diaSourceCat. Associate new DiaSources with previous 

403 DiaObjects. Run forced photometry at the updated DiaObject locations. 

404 Store the results in the Alert Production Database (Apdb). 

405 

406 Parameters 

407 ---------- 

408 diaSourceTable : `pandas.DataFrame` 

409 Newly detected DiaSources. 

410 diffIm : `lsst.afw.image.ExposureF` 

411 Difference image exposure in which the sources in ``diaSourceCat`` 

412 were detected. 

413 exposure : `lsst.afw.image.ExposureF` 

414 Calibrated exposure differenced with a template to create 

415 ``diffIm``. 

416 template : `lsst.afw.image.ExposureF` 

417 Template exposure used to create diffIm. 

418 band : `str` 

419 The band in which the new DiaSources were detected. 

420 idGenerator : `lsst.meas.base.IdGenerator` 

421 Object that generates source IDs and random number generator seeds. 

422 

423 Returns 

424 ------- 

425 results : `lsst.pipe.base.Struct` 

426 Results struct with components. 

427 

428 - ``apdbMarker`` : Marker dataset to store in the Butler indicating 

429 that this ccdVisit has completed successfully. 

430 (`lsst.dax.apdb.ApdbConfig`) 

431 - ``associatedDiaSources`` : Catalog of newly associated 

432 DiaSources. (`pandas.DataFrame`) 

433 """ 

434 # Load the DiaObjects and DiaSource history. 

435 loaderResult = self.diaCatalogLoader.run(diffIm, self.apdb, 

436 doLoadForcedSources=self.config.doLoadForcedSources) 

437 if len(loaderResult.diaObjects) > 0: 

438 diaObjects = self.purgeDiaObjects(diffIm.getBBox(), diffIm.getWcs(), loaderResult.diaObjects, 

439 buffer=self.config.imagePixelMargin) 

440 else: 

441 diaObjects = loaderResult.diaObjects 

442 

443 # Associate new DiaSources with existing DiaObjects. 

444 assocResults = self.associator.run(diaSourceTable, diaObjects, 

445 exposure_time=diffIm.visitInfo.exposureTime) 

446 

447 if self.config.doSolarSystemAssociation: 

448 ssoAssocResult = self.solarSystemAssociator.run( 

449 assocResults.unAssocDiaSources, 

450 solarSystemObjectTable, 

451 diffIm) 

452 createResults = self.createNewDiaObjects( 

453 ssoAssocResult.unAssocDiaSources) 

454 toAssociate = [] 

455 if len(assocResults.matchedDiaSources) > 0: 

456 toAssociate.append(assocResults.matchedDiaSources) 

457 if len(ssoAssocResult.ssoAssocDiaSources) > 0: 

458 toAssociate.append(ssoAssocResult.ssoAssocDiaSources) 

459 toAssociate.append(createResults.diaSources) 

460 associatedDiaSources = pd.concat(toAssociate) 

461 nTotalSsObjects = ssoAssocResult.nTotalSsObjects 

462 nAssociatedSsObjects = ssoAssocResult.nAssociatedSsObjects 

463 else: 

464 createResults = self.createNewDiaObjects( 

465 assocResults.unAssocDiaSources) 

466 toAssociate = [] 

467 if len(assocResults.matchedDiaSources) > 0: 

468 toAssociate.append(assocResults.matchedDiaSources) 

469 toAssociate.append(createResults.diaSources) 

470 associatedDiaSources = pd.concat(toAssociate) 

471 nTotalSsObjects = 0 

472 nAssociatedSsObjects = 0 

473 

474 # Record association statistics in the task metadata. 

475 self._add_association_meta_data(assocResults.nUpdatedDiaObjects, 

476 assocResults.nUnassociatedDiaObjects, 

477 createResults.nNewDiaObjects, 

478 nTotalSsObjects, 

479 nAssociatedSsObjects) 

480 # Index the DiaSource catalog for this visit after all associations 

481 # have been made. 

482 updatedDiaObjectIds = associatedDiaSources["diaObjectId"][ 

483 associatedDiaSources["diaObjectId"] != 0].to_numpy() 

484 associatedDiaSources.set_index(["diaObjectId", 

485 "band", 

486 "diaSourceId"], 

487 drop=False, 

488 inplace=True) 

489 

490 # Append new DiaObjects and DiaSources to their previous history. 

491 diaObjects = pd.concat( 

492 [diaObjects, 

493 createResults.newDiaObjects.set_index("diaObjectId", drop=False)], 

494 sort=True) 

495 if self.testDataFrameIndex(diaObjects): 

496 raise RuntimeError( 

497 "Duplicate DiaObjects created after association. This is " 

498 "likely due to re-running data with an already populated " 

499 "Apdb. If this was not the case then there was an unexpected " 

500 "failure in Association while matching and creating new " 

501 "DiaObjects and should be reported. Exiting.") 

502 

503 if len(loaderResult.diaSources) > 0: 

504 # We need to coerce the types of loaderResult.diaSources 

505 # to be the same as associatedDiaSources, thanks to pandas 

506 # datetime issues (DM-41100). And we may as well coerce 

507 # all the columns to ensure consistency for future compatibility. 

508 for name, dtype in associatedDiaSources.dtypes.items(): 

509 if name in loaderResult.diaSources.columns and loaderResult.diaSources[name].dtype != dtype: 

510 self.log.debug( 

511 "Coercing loaderResult.diaSources column %s from %s to %s", 

512 name, 

513 str(loaderResult.diaSources[name].dtype), 

514 str(dtype), 

515 ) 

516 loaderResult.diaSources[name] = loaderResult.diaSources[name].astype(dtype) 

517 

518 mergedDiaSourceHistory = pd.concat( 

519 [loaderResult.diaSources, associatedDiaSources], 

520 sort=True) 

521 else: 

522 mergedDiaSourceHistory = pd.concat([associatedDiaSources], sort=True) 

523 

524 # Test for DiaSource duplication first. If duplicates are found, 

525 # this likely means this is duplicate data being processed and sent 

526 # to the Apdb. 

527 if self.testDataFrameIndex(mergedDiaSourceHistory): 

528 raise RuntimeError( 

529 "Duplicate DiaSources found after association and merging " 

530 "with history. This is likely due to re-running data with an " 

531 "already populated Apdb. If this was not the case then there " 

532 "was an unexpected failure in Association while matching " 

533 "sources to objects, and should be reported. Exiting.") 

534 

535 # Compute DiaObject Summary statistics from their full DiaSource 

536 # history. 

537 diaCalResult = self.diaCalculation.run( 

538 diaObjects, 

539 mergedDiaSourceHistory, 

540 updatedDiaObjectIds, 

541 [band]) 

542 # Test for duplication in the updated DiaObjects. 

543 if self.testDataFrameIndex(diaCalResult.diaObjectCat): 

544 raise RuntimeError( 

545 "Duplicate DiaObjects (loaded + updated) created after " 

546 "DiaCalculation. This is unexpected behavior and should be " 

547 "reported. Exiting.") 

548 if self.testDataFrameIndex(diaCalResult.updatedDiaObjects): 

549 raise RuntimeError( 

550 "Duplicate DiaObjects (updated) created after " 

551 "DiaCalculation. This is unexpected behavior and should be " 

552 "reported. Exiting.") 

553 

554 if self.config.doRunForcedMeasurement: 

555 # Force photometer on the Difference and Calibrated exposures using 

556 # the new and updated DiaObject locations. 

557 diaForcedSources = self.diaForcedSource.run( 

558 diaCalResult.diaObjectCat, 

559 diaCalResult.updatedDiaObjects.loc[:, "diaObjectId"].to_numpy(), 

560 exposure, 

561 diffIm, 

562 idGenerator=idGenerator) 

563 else: 

564 # alertPackager needs correct columns 

565 diaForcedSources = pd.DataFrame(columns=[ 

566 "diaForcedSourceId", "diaObjectID", "ccdVisitID", "psfFlux", "psfFluxErr", 

567 "x", "y", "flags", "midpointMjdTai", "band", 

568 ]) 

569 

570 # Store DiaSources, updated DiaObjects, and DiaForcedSources in the 

571 # Apdb. 

572 self.apdb.store( 

573 DateTime.now().toAstropy(), 

574 diaCalResult.updatedDiaObjects, 

575 associatedDiaSources, 

576 diaForcedSources) 

577 

578 if self.config.doPackageAlerts: 

579 if len(loaderResult.diaForcedSources) > 1: 

580 # We need to coerce the types of loaderResult.diaForcedSources 

581 # to be the same as diaForcedSources, thanks to pandas 

582 # datetime issues (DM-41100). And we may as well coerce 

583 # all the columns to ensure consistency for future compatibility. 

584 for name, dtype in diaForcedSources.dtypes.items(): 

585 if name in loaderResult.diaForcedSources.columns and \ 

586 loaderResult.diaForcedSources[name].dtype != dtype: 

587 self.log.debug( 

588 "Coercing loaderResult.diaForcedSources column %s from %s to %s", 

589 name, 

590 str(loaderResult.diaForcedSources[name].dtype), 

591 str(dtype), 

592 ) 

593 loaderResult.diaForcedSources[name] = ( 

594 loaderResult.diaForcedSources[name].astype(dtype) 

595 ) 

596 diaForcedSources = pd.concat( 

597 [diaForcedSources, loaderResult.diaForcedSources], 

598 sort=True) 

599 if self.testDataFrameIndex(diaForcedSources): 

600 self.log.warning( 

601 "Duplicate DiaForcedSources created after merge with " 

602 "history and new sources. This may cause downstream " 

603 "problems. Dropping duplicates.") 

604 # Drop duplicates via index and keep the first appearance. 

605 # Reset due to the index shape being slightly different from 

606 # expected. 

607 diaForcedSources = diaForcedSources.groupby( 

608 diaForcedSources.index).first() 

609 diaForcedSources.reset_index(drop=True, inplace=True) 

610 diaForcedSources.set_index( 

611 ["diaObjectId", "diaForcedSourceId"], 

612 drop=False, 

613 inplace=True) 

614 self.alertPackager.run(associatedDiaSources, 

615 diaCalResult.diaObjectCat, 

616 loaderResult.diaSources, 

617 diaForcedSources, 

618 diffIm, 

619 exposure, 

620 template, 

621 doRunForcedMeasurement=self.config.doRunForcedMeasurement, 

622 ) 

623 

624 # For historical reasons, apdbMarker is a Config even if it's not meant to be read. 

625 # A default Config is the cheapest way to satisfy the storage class. 

626 marker = self.config.apdb.value if self.config.doConfigureApdb else pexConfig.Config() 

627 return pipeBase.Struct(apdbMarker=marker, 

628 associatedDiaSources=associatedDiaSources, 

629 diaForcedSources=diaForcedSources, 

630 diaObjects=diaObjects, 

631 longTrailedSources=assocResults.longTrailedSources 

632 ) 

633 
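The dtype-coercion loops in ``run`` above (the DM-41100 workaround) follow a pattern that can be shown with a small self-contained pandas sketch using made-up columns:

import pandas as pd

# Align an older catalog's column dtypes to a newer one's before concatenating,
# mirroring the coercion applied to loaderResult.diaSources and diaForcedSources.
new = pd.DataFrame({"psfFlux": [1.5]})                    # float64 by default
old = pd.DataFrame({"psfFlux": [2.5]}).astype("float32")  # simulate a mismatched history
for name, dtype in new.dtypes.items():
    if name in old.columns and old[name].dtype != dtype:
        old[name] = old[name].astype(dtype)
merged = pd.concat([old, new], sort=True)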

634 def createNewDiaObjects(self, unAssocDiaSources): 

635 """Loop through the set of DiaSources and create new DiaObjects 

636 for unassociated DiaSources. 

637 

638 Parameters 

639 ---------- 

640 unAssocDiaSources : `pandas.DataFrame` 

641 Set of DiaSources to create new DiaObjects from. 

642 

643 Returns 

644 ------- 

645 results : `lsst.pipe.base.Struct` 

646 Results struct containing: 

647 

648 - ``diaSources`` : DiaSource catalog with updated DiaObject ids. 

649 (`pandas.DataFrame`) 

650 - ``newDiaObjects`` : Newly created DiaObjects from the 

651 unassociated DiaSources. (`pandas.DataFrame`) 

652 - ``nNewDiaObjects`` : Number of newly created diaObjects. (`int`) 

653 """ 

654 if len(unAssocDiaSources) == 0: 

655 tmpObj = self._initialize_dia_object(0) 

656 newDiaObjects = pd.DataFrame(data=[], 

657 columns=tmpObj.keys()) 

658 else: 

659 newDiaObjects = unAssocDiaSources["diaSourceId"].apply( 

660 self._initialize_dia_object) 

661 unAssocDiaSources["diaObjectId"] = unAssocDiaSources["diaSourceId"] 

662 return pipeBase.Struct(diaSources=unAssocDiaSources, 

663 newDiaObjects=newDiaObjects, 

664 nNewDiaObjects=len(newDiaObjects)) 

665 

666 def _initialize_dia_object(self, objId): 

667 """Create a new DiaObject with values required to be initialized by the 

668 Apdb. 

669 

670 Parameters 

671 ---------- 

672 objId : `int` 

673 ``diaObjectId`` value for the new DiaObject. 

674 

675 Returns 

676 ------- 

677 diaObject : `dict` 

678 Newly created DiaObject with keys: 

679 

680 ``diaObjectId`` 

681 Unique DiaObjectId (`int`). 

682 ``pmParallaxNdata`` 

683 Number of data points used for parallax calculation (`int`). 

684 ``nearbyObj1`` 

685 Id of a nearbyObject in the Object table (`int`). 

686 ``nearbyObj2`` 

687 Id of a nearbyObject in the Object table (`int`). 

688 ``nearbyObj3`` 

689 Id of a nearbyObject in the Object table (`int`). 

690 ``?_psfFluxNdata`` 

691 Number of data points used to calculate point source flux 

692 summary statistics in each bandpass (`int`). 

693 """ 

694 new_dia_object = {"diaObjectId": objId, 

695 "pmParallaxNdata": 0, 

696 "nearbyObj1": 0, 

697 "nearbyObj2": 0, 

698 "nearbyObj3": 0, 

699 "flags": 0} 

700 for f in ["u", "g", "r", "i", "z", "y"]: 

701 new_dia_object["%s_psfFluxNdata" % f] = 0 

702 return pd.Series(data=new_dia_object) 

703 

704 def testDataFrameIndex(self, df): 

705 """Test the sorted DataFrame index for duplicates. 

706 

707 Wrapped as a separate function to allow for mocking of this task 

708 in unit testing. The default mock return value for this test is True. 

709 

710 Parameters 

711 ---------- 

712 df : `pandas.DataFrame` 

713 DataFrame to test. 

714 

715 Returns 

716 ------- 

717 `bool` 

718 True if DataFrame contains duplicate rows. 

719 """ 

720 return df.index.has_duplicates 

721 

722 def _add_association_meta_data(self, 

723 nUpdatedDiaObjects, 

724 nUnassociatedDiaObjects, 

725 nNewDiaObjects, 

726 nTotalSsObjects, 

727 nAssociatedSsObjects): 

728 """Store summaries of the association step in the task metadata. 

729 

730 Parameters 

731 ---------- 

732 nUpdatedDiaObjects : `int` 

733 Number of previous DiaObjects associated and updated in this 

734 ccdVisit. 

735 nUnassociatedDiaObjects : `int` 

736 Number of previous DiaObjects that were not associated or updated 

737 in this ccdVisit. 

738 nNewDiaObjects : `int` 

739 Number of newly created DiaObjects for this ccdVisit. 

740 nTotalSsObjects : `int` 

741 Number of SolarSystemObjects within the observable detector 

742 area. 

743 nAssociatedSsObjects : `int` 

744 Number of successfully associated SolarSystemObjects. 

745 """ 

746 self.metadata.add('numUpdatedDiaObjects', nUpdatedDiaObjects) 

747 self.metadata.add('numUnassociatedDiaObjects', nUnassociatedDiaObjects) 

748 self.metadata.add('numNewDiaObjects', nNewDiaObjects) 

749 self.metadata.add('numTotalSolarSystemObjects', nTotalSsObjects) 

750 self.metadata.add('numAssociatedSsObjects', nAssociatedSsObjects) 

751 

752 def purgeDiaObjects(self, bbox, wcs, diaObjCat, buffer=0): 

753 """Drop diaObjects that are outside the exposure bounding box. 

754 

755 Parameters 

756 ---------- 

757 bbox : `lsst.geom.Box2I` 

758 Bounding box of the exposure. 

759 wcs : `lsst.afw.geom.SkyWcs` 

760 Coordinate system definition (wcs) for the exposure. 

761 diaObjCat : `pandas.DataFrame` 

762 DiaObjects loaded from the Apdb. 

763 buffer : `int`, optional 

764 Width, in pixels, to pad the exposure bounding box. 

765 

766 Returns 

767 ------- 

768 diaObjCat : `pandas.DataFrame` 

769 DiaObjects loaded from the Apdb, restricted to the exposure 

770 bounding box. 

771 """ 

772 try: 

773 bbox.grow(buffer) 

774 raVals = diaObjCat.ra.to_numpy() 

775 decVals = diaObjCat.dec.to_numpy() 

776 xVals, yVals = wcs.skyToPixelArray(raVals, decVals, degrees=True) 

777 selector = bbox.contains(xVals, yVals) 

778 nPurged = np.sum(~selector) 

779 if nPurged > 0: 

780 diaObjCat = diaObjCat[selector].copy() 

781 self.log.info("Dropped %d diaObjects that were outside the bbox, " 

782 "leaving %d in the catalog", nPurged, len(diaObjCat)) 

783 except Exception as e: 

784 self.log.warning("Error attempting to check diaObject history: %s", e) 

785 return diaObjCat
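As a standalone illustration of two patterns used above (the ``diaObjectId`` assignment in ``createNewDiaObjects`` and the duplicate-index guard wrapped by ``testDataFrameIndex``), a small self-contained pandas example with toy values:

import pandas as pd

# Unassociated sources become new DiaObjects that inherit diaSourceId as diaObjectId.
unassoc = pd.DataFrame({"diaSourceId": [101, 102], "band": ["g", "r"]})
unassoc["diaObjectId"] = unassoc["diaSourceId"]

# Index the catalog the way run() does before storing it, then apply the duplicate test.
unassoc.set_index(["diaObjectId", "band", "diaSourceId"], drop=False, inplace=True)
assert not unassoc.index.has_duplicates  # no duplicates in this toy catalog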