Coverage for python/lsst/ap/association/diaPipe.py: 31%

132 statements  

coverage.py v6.4.1, created at 2022-06-15 02:50 -0700

#
# LSST Data Management System
# Copyright 2008-2016 AURA/LSST.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <https://www.lsstcorp.org/LegalNotices/>.
#

"""PipelineTask for associating DiaSources with previous DiaObjects.

Additionally performs forced photometry on the calibrated and difference
images at the updated locations of DiaObjects.

Currently loads directly from the Apdb rather than pre-loading.
"""

import pandas as pd

import lsst.dax.apdb as daxApdb
from lsst.meas.base import DiaObjectCalculationTask
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.pipe.base.connectionTypes as connTypes
from lsst.utils.timer import timeMethod

from lsst.ap.association import (
    AssociationTask,
    DiaForcedSourceTask,
    LoadDiaCatalogsTask,
    PackageAlertsTask)
from lsst.ap.association.ssoAssociation import SolarSystemAssociationTask

__all__ = ("DiaPipelineConfig",
           "DiaPipelineTask",
           "DiaPipelineConnections")


class DiaPipelineConnections(
        pipeBase.PipelineTaskConnections,
        dimensions=("instrument", "visit", "detector"),
        defaultTemplates={"coaddName": "deep", "fakesType": ""}):
    """Butler connections for DiaPipelineTask.
    """
    diaSourceTable = connTypes.Input(
        doc="Catalog of calibrated DiaSources.",
        name="{fakesType}{coaddName}Diff_diaSrcTable",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector"),
    )
    solarSystemObjectTable = connTypes.Input(
        doc="Catalog of SolarSystem objects expected to be observable in "
            "this detectorVisit.",
        name="visitSsObjects",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
    )
    diffIm = connTypes.Input(
        doc="Difference image on which the DiaSources were detected.",
        name="{fakesType}{coaddName}Diff_differenceExp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    exposure = connTypes.Input(
        doc="Calibrated exposure differenced with a template image during "
            "image differencing.",
        name="{fakesType}calexp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
    )
    warpedExposure = connTypes.Input(
        doc="Warped template used to create `subtractedExposure`. Not PSF "
            "matched.",
        dimensions=("instrument", "visit", "detector"),
        storageClass="ExposureF",
        name="{fakesType}{coaddName}Diff_warpedExp",
    )
    apdbMarker = connTypes.Output(
        doc="Marker dataset storing the configuration of the Apdb for each "
            "visit/detector. Used to signal the completion of the pipeline.",
        name="apdb_marker",
        storageClass="Config",
        dimensions=("instrument", "visit", "detector"),
    )
    associatedDiaSources = connTypes.Output(
        doc="Optional output storing the DiaSource catalog after matching, "
            "calibration, and standardization for insertion into the Apdb.",
        name="{fakesType}{coaddName}Diff_assocDiaSrc",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        if not config.doWriteAssociatedSources:
            self.outputs.remove("associatedDiaSources")
        if not config.doSolarSystemAssociation:
            self.inputs.remove("solarSystemObjectTable")

    def adjustQuantum(self, inputs, outputs, label, dataId):
        """Override to make adjustments to `lsst.daf.butler.DatasetRef` objects
        in the `lsst.daf.butler.core.Quantum` during the graph generation stage
        of the activator.

        This implementation checks to make sure that the filters in the dataset
        are compatible with AP processing as set by the Apdb/DPDD schema.

        Parameters
        ----------
        inputs : `dict`
            Dictionary whose keys are an input (regular or prerequisite)
            connection name and whose values are a tuple of the connection
            instance and a collection of associated `DatasetRef` objects.
            The exact type of the nested collections is unspecified; it can be
            assumed to be multi-pass iterable and support `len` and ``in``, but
            it should not be mutated in place. In contrast, the outer
            dictionaries are guaranteed to be temporary copies that are true
            `dict` instances, and hence may be modified and even returned; this
            is especially useful for delegating to `super` (see notes below).
        outputs : `dict`
            Dict of output datasets, with the same structure as ``inputs``.
        label : `str`
            Label for this task in the pipeline (should be used in all
            diagnostic messages).
        dataId : `lsst.daf.butler.DataCoordinate`
            Data ID for this quantum in the pipeline (should be used in all
            diagnostic messages).

        Returns
        -------
        adjusted_inputs : `dict`
            Dict of the same form as ``inputs`` with updated containers of
            input `DatasetRef` objects. Connections that are not changed
            should not be returned at all. Datasets may only be removed, not
            added. Nested collections may be of any multi-pass iterable type,
            and the order of iteration will set the order of iteration within
            `PipelineTask.runQuantum`.
        adjusted_outputs : `dict`
            Dict of updated output datasets, with the same structure and
            interpretation as ``adjusted_inputs``.

        Raises
        ------
        ScalarError
            Raised if any `Input` or `PrerequisiteInput` connection has
            ``multiple`` set to `False`, but multiple datasets are present.
        NoWorkFound
            Raised to indicate that this quantum should not be run; not enough
            datasets were found for a regular `Input` connection, and the
            quantum should be pruned or skipped.
        FileNotFoundError
            Raised to cause QuantumGraph generation to fail (with the message
            included in this exception); not enough datasets were found for a
            `PrerequisiteInput` connection.
        """
        _, refs = inputs["diffIm"]
        for ref in refs:
            if ref.dataId["band"] not in self.config.validBands:
                raise ValueError(
                    f"Requested '{ref.dataId['band']}' not in "
                    "DiaPipelineConfig.validBands. To process bands not in "
                    "the standard Rubin set (ugrizy) you must add the band to "
                    "the validBands list in DiaPipelineConfig and add the "
                    "appropriate columns to the Apdb schema.")
        return super().adjustQuantum(inputs, outputs, label, dataId)
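
    # For reference: with the default templates above (coaddName="deep",
    # fakesType=""), the connection dataset names resolve to, for example,
    # "deepDiff_diaSrcTable" for ``diaSourceTable``, "deepDiff_differenceExp"
    # for ``diffIm``, "calexp" for ``exposure``, and "deepDiff_warpedExp" for
    # ``warpedExposure``.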


class DiaPipelineConfig(pipeBase.PipelineTaskConfig,
                        pipelineConnections=DiaPipelineConnections):
    """Config for DiaPipelineTask.
    """
    coaddName = pexConfig.Field(
        doc="coadd name: typically one of deep, goodSeeing, or dcr",
        dtype=str,
        default="deep",
    )
    apdb = daxApdb.ApdbSql.makeField(
        doc="Database connection for storing associated DiaSources and "
            "DiaObjects. Must already be initialized.",
    )
    validBands = pexConfig.ListField(
        dtype=str,
        default=["u", "g", "r", "i", "z", "y"],
        doc="List of bands that are valid for AP processing. To process a "
            "band not on this list, the appropriate band specific columns "
            "must be added to the Apdb schema in dax_apdb.",
    )
    diaCatalogLoader = pexConfig.ConfigurableField(
        target=LoadDiaCatalogsTask,
        doc="Task to load DiaObjects and DiaSources from the Apdb.",
    )
    associator = pexConfig.ConfigurableField(
        target=AssociationTask,
        doc="Task used to associate DiaSources with DiaObjects.",
    )
    doSolarSystemAssociation = pexConfig.Field(
        dtype=bool,
        default=False,
        doc="Process SolarSystem objects through the pipeline.",
    )
    solarSystemAssociator = pexConfig.ConfigurableField(
        target=SolarSystemAssociationTask,
        doc="Task used to associate DiaSources with SolarSystemObjects.",
    )
    diaCalculation = pexConfig.ConfigurableField(
        target=DiaObjectCalculationTask,
        doc="Task to compute summary statistics for DiaObjects.",
    )
    diaForcedSource = pexConfig.ConfigurableField(
        target=DiaForcedSourceTask,
        doc="Task used to force photometer DiaObject locations in direct and "
            "difference images.",
    )
    alertPackager = pexConfig.ConfigurableField(
        target=PackageAlertsTask,
        doc="Subtask for packaging Ap data into alerts.",
    )
    doPackageAlerts = pexConfig.Field(
        dtype=bool,
        default=False,
        doc="Package Dia-data into serialized alerts for distribution and "
            "write them to disk.",
    )
    doWriteAssociatedSources = pexConfig.Field(
        dtype=bool,
        default=False,
        doc="Write out associated and SDMed DiaSources.",
    )

    def setDefaults(self):
        self.apdb.dia_object_index = "baseline"
        self.apdb.dia_object_columns = []
        self.diaCalculation.plugins = ["ap_meanPosition",
                                       "ap_nDiaSources",
                                       "ap_diaObjectFlag",
                                       "ap_meanFlux",
                                       "ap_percentileFlux",
                                       "ap_sigmaFlux",
                                       "ap_chi2Flux",
                                       "ap_madFlux",
                                       "ap_skewFlux",
                                       "ap_minMaxFlux",
                                       "ap_maxSlopeFlux",
                                       "ap_meanErrFlux",
                                       "ap_linearFit",
                                       "ap_stetsonJ",
                                       "ap_meanTotFlux",
                                       "ap_sigmaTotFlux"]
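
    # A minimal configuration sketch (illustrative only; the database URL and
    # flag values are placeholders, not defaults set by this module):
    #
    #     config = DiaPipelineConfig()
    #     config.apdb.db_url = "sqlite:///apdb.db"
    #     config.doPackageAlerts = False
    #     config.doWriteAssociatedSources = True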


class DiaPipelineTask(pipeBase.PipelineTask):
    """Task for loading, associating and storing Difference Image Analysis
    (DIA) Objects and Sources.
    """
    ConfigClass = DiaPipelineConfig
    _DefaultName = "diaPipe"
    RunnerClass = pipeBase.ButlerInitializedTaskRunner

    def __init__(self, initInputs=None, **kwargs):
        super().__init__(**kwargs)
        self.apdb = self.config.apdb.apply()
        self.makeSubtask("diaCatalogLoader")
        self.makeSubtask("associator")
        self.makeSubtask("diaCalculation")
        self.makeSubtask("diaForcedSource")
        if self.config.doPackageAlerts:
            self.makeSubtask("alertPackager")
        if self.config.doSolarSystemAssociation:
            self.makeSubtask("solarSystemAssociator")

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        expId, expBits = butlerQC.quantum.dataId.pack("visit_detector",
                                                      returnMaxBits=True)
        inputs["ccdExposureIdBits"] = expBits
        inputs["band"] = butlerQC.quantum.dataId["band"]
        if not self.config.doSolarSystemAssociation:
            inputs["solarSystemObjectTable"] = None

        outputs = self.run(**inputs)

        butlerQC.put(outputs, outputRefs)

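    # Illustrative sketch only: outside of pipeline execution, ``run`` can be
    # called directly once its inputs have been retrieved (normally
    # ``runQuantum`` pulls them from the Butler and packs the visit+detector
    # data ID to obtain ``ccdExposureIdBits``); the values below are
    # placeholders.
    #
    #     task = DiaPipelineTask(config=config)
    #     results = task.run(diaSourceTable=diaSourceTable,
    #                        solarSystemObjectTable=None,
    #                        diffIm=diffIm,
    #                        exposure=exposure,
    #                        warpedExposure=warpedExposure,
    #                        ccdExposureIdBits=32,
    #                        band="g")
    #     results.associatedDiaSources  # `pandas.DataFrame`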

    @timeMethod
    def run(self,
            diaSourceTable,
            solarSystemObjectTable,
            diffIm,
            exposure,
            warpedExposure,
            ccdExposureIdBits,
            band):
        """Process DiaSources and DiaObjects.

        Load previous DiaObjects and their DiaSource history. Calibrate the
        values in ``diaSourceTable``. Associate new DiaSources with previous
        DiaObjects. Run forced photometry at the updated DiaObject locations.
        Store the results in the Alert Production Database (Apdb).

        Parameters
        ----------
        diaSourceTable : `pandas.DataFrame`
            Newly detected DiaSources.
        solarSystemObjectTable : `pandas.DataFrame` or `None`
            Preloaded Solar System objects expected to be observable in this
            detectorVisit; may be `None` when solar system association is
            disabled.
        diffIm : `lsst.afw.image.ExposureF`
            Difference image exposure in which the sources in
            ``diaSourceTable`` were detected.
        exposure : `lsst.afw.image.ExposureF`
            Calibrated exposure differenced with a template to create
            ``diffIm``.
        warpedExposure : `lsst.afw.image.ExposureF`
            Template exposure used to create ``diffIm``.
        ccdExposureIdBits : `int`
            Number of bits used for a unique ``ccdVisitId``.
        band : `str`
            The band in which the new DiaSources were detected.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results struct with components.

            - ``apdbMarker`` : Marker dataset to store in the Butler
              indicating that this ccdVisit has completed successfully.
              (`lsst.dax.apdb.ApdbConfig`)
            - ``associatedDiaSources`` : Catalog of newly associated
              DiaSources. (`pandas.DataFrame`)
        """
        # Load the DiaObjects and DiaSource history.
        loaderResult = self.diaCatalogLoader.run(diffIm, self.apdb)

        # Associate new DiaSources with existing DiaObjects.
        assocResults = self.associator.run(diaSourceTable,
                                           loaderResult.diaObjects)
        if self.config.doSolarSystemAssociation:
            ssoAssocResult = self.solarSystemAssociator.run(
                assocResults.unAssocDiaSources,
                solarSystemObjectTable,
                diffIm)
            createResults = self.createNewDiaObjects(
                ssoAssocResult.unAssocDiaSources)
            associatedDiaSources = pd.concat(
                [assocResults.matchedDiaSources,
                 ssoAssocResult.ssoAssocDiaSources,
                 createResults.diaSources])
            nTotalSsObjects = ssoAssocResult.nTotalSsObjects
            nAssociatedSsObjects = ssoAssocResult.nAssociatedSsObjects
        else:
            createResults = self.createNewDiaObjects(
                assocResults.unAssocDiaSources)
            associatedDiaSources = pd.concat(
                [assocResults.matchedDiaSources,
                 createResults.diaSources])
            nTotalSsObjects = 0
            nAssociatedSsObjects = 0

        # Record the association summary statistics in the task metadata.
        self._add_association_meta_data(assocResults.nUpdatedDiaObjects,
                                        assocResults.nUnassociatedDiaObjects,
                                        createResults.nNewDiaObjects,
                                        nTotalSsObjects,
                                        nAssociatedSsObjects)
        # Index the DiaSource catalog for this visit after all associations
        # have been made.
        updatedDiaObjectIds = associatedDiaSources["diaObjectId"][
            associatedDiaSources["diaObjectId"] != 0].to_numpy()
        associatedDiaSources.set_index(["diaObjectId",
                                        "filterName",
                                        "diaSourceId"],
                                       drop=False,
                                       inplace=True)

        # Append new DiaObjects and DiaSources to their previous history.
        diaObjects = loaderResult.diaObjects.append(
            createResults.newDiaObjects.set_index("diaObjectId", drop=False),
            sort=True)
        if self.testDataFrameIndex(diaObjects):
            raise RuntimeError(
                "Duplicate DiaObjects created after association. This is "
                "likely due to re-running data with an already populated "
                "Apdb. If this was not the case then there was an unexpected "
                "failure in Association while matching and creating new "
                "DiaObjects and should be reported. Exiting.")
        mergedDiaSourceHistory = loaderResult.diaSources.append(
            associatedDiaSources,
            sort=True)
        # Test for DiaSource duplication first. If duplicates are found,
        # this likely means this is duplicate data being processed and sent
        # to the Apdb.
        if self.testDataFrameIndex(mergedDiaSourceHistory):
            raise RuntimeError(
                "Duplicate DiaSources found after association and merging "
                "with history. This is likely due to re-running data with an "
                "already populated Apdb. If this was not the case then there "
                "was an unexpected failure in Association while matching "
                "sources to objects, and should be reported. Exiting.")

        # Compute DiaObject Summary statistics from their full DiaSource
        # history.
        diaCalResult = self.diaCalculation.run(
            diaObjects,
            mergedDiaSourceHistory,
            updatedDiaObjectIds,
            [band])
        # Test for duplication in the updated DiaObjects.
        if self.testDataFrameIndex(diaCalResult.diaObjectCat):
            raise RuntimeError(
                "Duplicate DiaObjects (loaded + updated) created after "
                "DiaCalculation. This is unexpected behavior and should be "
                "reported. Exiting.")
        if self.testDataFrameIndex(diaCalResult.updatedDiaObjects):
            raise RuntimeError(
                "Duplicate DiaObjects (updated) created after "
                "DiaCalculation. This is unexpected behavior and should be "
                "reported. Exiting.")

        # Force photometer on the Difference and Calibrated exposures using
        # the new and updated DiaObject locations.
        diaForcedSources = self.diaForcedSource.run(
            diaCalResult.diaObjectCat,
            diaCalResult.updatedDiaObjects.loc[:, "diaObjectId"].to_numpy(),
            ccdExposureIdBits,
            exposure,
            diffIm)

        # Store DiaSources, updated DiaObjects, and DiaForcedSources in the
        # Apdb.
        self.apdb.store(
            exposure.getInfo().getVisitInfo().getDate(),
            diaCalResult.updatedDiaObjects,
            associatedDiaSources,
            diaForcedSources)

        if self.config.doPackageAlerts:
            if len(loaderResult.diaForcedSources) > 1:
                diaForcedSources = diaForcedSources.append(
                    loaderResult.diaForcedSources,
                    sort=True)
                if self.testDataFrameIndex(diaForcedSources):
                    self.log.warning(
                        "Duplicate DiaForcedSources created after merge with "
                        "history and new sources. This may cause downstream "
                        "problems. Dropping duplicates.")
                    # Drop duplicates via index and keep the first appearance.
                    # Reset due to the index shape being slightly different
                    # than expected.
                    diaForcedSources = diaForcedSources.groupby(
                        diaForcedSources.index).first()
                    diaForcedSources.reset_index(drop=True, inplace=True)
                    diaForcedSources.set_index(
                        ["diaObjectId", "diaForcedSourceId"],
                        drop=False,
                        inplace=True)
            self.alertPackager.run(associatedDiaSources,
                                   diaCalResult.diaObjectCat,
                                   loaderResult.diaSources,
                                   diaForcedSources,
                                   diffIm,
                                   warpedExposure,
                                   ccdExposureIdBits)

        return pipeBase.Struct(apdbMarker=self.config.apdb.value,
                               associatedDiaSources=associatedDiaSources,)

    def createNewDiaObjects(self, unAssocDiaSources):
        """Loop through the set of DiaSources and create new DiaObjects
        for unassociated DiaSources.

        Parameters
        ----------
        unAssocDiaSources : `pandas.DataFrame`
            Set of DiaSources to create new DiaObjects from.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results struct containing:

            - ``diaSources`` : DiaSource catalog with updated DiaObject ids.
              (`pandas.DataFrame`)
            - ``newDiaObjects`` : Newly created DiaObjects from the
              unassociated DiaSources. (`pandas.DataFrame`)
            - ``nNewDiaObjects`` : Number of newly created DiaObjects. (`int`)
        """
        if len(unAssocDiaSources) == 0:
            tmpObj = self._initialize_dia_object(0)
            newDiaObjects = pd.DataFrame(data=[],
                                         columns=tmpObj.keys())
        else:
            newDiaObjects = unAssocDiaSources["diaSourceId"].apply(
                self._initialize_dia_object)
            unAssocDiaSources["diaObjectId"] = unAssocDiaSources["diaSourceId"]
        return pipeBase.Struct(diaSources=unAssocDiaSources,
                               newDiaObjects=newDiaObjects,
                               nNewDiaObjects=len(newDiaObjects))

    def _initialize_dia_object(self, objId):
        """Create a new DiaObject with values required to be initialized by
        the Apdb.

        Parameters
        ----------
        objId : `int`
            ``diaObjectId`` value for the new DiaObject.

        Returns
        -------
        diaObject : `pandas.Series`
            Newly created DiaObject with keys:

            ``diaObjectId``
                Unique DiaObjectId (`int`).
            ``pmParallaxNdata``
                Number of data points used for parallax calculation (`int`).
            ``nearbyObj1``
                Id of a nearby Object in the Object table (`int`).
            ``nearbyObj2``
                Id of a nearby Object in the Object table (`int`).
            ``nearbyObj3``
                Id of a nearby Object in the Object table (`int`).
            ``flags``
                Bit flags for this DiaObject, initialized to 0 (`int`).
            ``?PSFluxNdata``
                Number of data points used to calculate point source flux
                summary statistics in each bandpass (`int`).
        """
        new_dia_object = {"diaObjectId": objId,
                          "pmParallaxNdata": 0,
                          "nearbyObj1": 0,
                          "nearbyObj2": 0,
                          "nearbyObj3": 0,
                          "flags": 0}
        for f in ["u", "g", "r", "i", "z", "y"]:
            new_dia_object["%sPSFluxNdata" % f] = 0
        return pd.Series(data=new_dia_object)
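
    # For illustration (hypothetical value): ``_initialize_dia_object(42)``
    # returns a `pandas.Series` with diaObjectId=42 and the remaining keys
    # above set to 0, including one ``<band>PSFluxNdata`` entry per ugrizy
    # band; applying it over a column of diaSourceIds therefore yields one
    # row per new DiaObject.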

    def testDataFrameIndex(self, df):
        """Test the sorted DataFrame index for duplicates.

        Wrapped as a separate function to allow for mocking of this task in
        unit tests. The default mock return value for this test is `True`.

        Parameters
        ----------
        df : `pandas.DataFrame`
            DataFrame to test.

        Returns
        -------
        `bool`
            True if the DataFrame index contains duplicates.
        """
        return df.index.has_duplicates
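
    # A minimal sketch of the duplicate check (illustrative values only):
    #
    #     import pandas as pd
    #     pd.DataFrame({"x": [1, 2]}, index=[10, 10]).index.has_duplicates
    #     # -> True; rows sharing an index value are treated as duplicates.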

    def _add_association_meta_data(self,
                                   nUpdatedDiaObjects,
                                   nUnassociatedDiaObjects,
                                   nNewDiaObjects,
                                   nTotalSsObjects,
                                   nAssociatedSsObjects):
        """Store summaries of the association step in the task metadata.

        Parameters
        ----------
        nUpdatedDiaObjects : `int`
            Number of previous DiaObjects associated and updated in this
            ccdVisit.
        nUnassociatedDiaObjects : `int`
            Number of previous DiaObjects that were not associated or updated
            in this ccdVisit.
        nNewDiaObjects : `int`
            Number of newly created DiaObjects for this ccdVisit.
        nTotalSsObjects : `int`
            Number of SolarSystemObjects within the observable detector
            area.
        nAssociatedSsObjects : `int`
            Number of successfully associated SolarSystemObjects.
        """
        self.metadata.add('numUpdatedDiaObjects', nUpdatedDiaObjects)
        self.metadata.add('numUnassociatedDiaObjects', nUnassociatedDiaObjects)
        self.metadata.add('numNewDiaObjects', nNewDiaObjects)
        self.metadata.add('numTotalSolarSystemObjects', nTotalSsObjects)
        self.metadata.add('numAssociatedSsObjects', nAssociatedSsObjects)