Coverage for python/lsst/drp/tasks/gbdesAstrometricFit.py: 10%

741 statements  

coverage.py v7.4.4, created at 2024-04-11 11:29 +0000

1# This file is part of drp_tasks. 

2# 

3# LSST Data Management System 

4# This product includes software developed by the 

5# LSST Project (http://www.lsst.org/). 

6# See COPYRIGHT file at the top of the source tree. 

7# 

8# This program is free software: you can redistribute it and/or modify 

9# it under the terms of the GNU General Public License as published by 

10# the Free Software Foundation, either version 3 of the License, or 

11# (at your option) any later version. 

12# 

13# This program is distributed in the hope that it will be useful, 

14# but WITHOUT ANY WARRANTY; without even the implied warranty of 

15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

16# GNU General Public License for more details. 

17# 

18# You should have received a copy of the LSST License Statement and 

19# the GNU General Public License along with this program. If not, 

20# see <https://www.lsstcorp.org/LegalNotices/>. 

21# 

22import astropy.coordinates 

23import astropy.time 

24import astropy.units as u 

25import astshim 

26import lsst.afw.geom as afwgeom 

27import lsst.afw.table 

28import lsst.geom 

29import lsst.pex.config as pexConfig 

30import lsst.pipe.base as pipeBase 

31import lsst.sphgeom 

32import numpy as np 

33import wcsfit 

34import yaml 

35from lsst.meas.algorithms import ( 

36 LoadReferenceObjectsConfig, 

37 ReferenceObjectLoader, 

38 ReferenceSourceSelectorTask, 

39) 

40from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry 

41from sklearn.cluster import AgglomerativeClustering 

42from smatch.matcher import Matcher 

43 

44__all__ = [ 

45 "GbdesAstrometricFitConnections", 

46 "GbdesAstrometricFitConfig", 

47 "GbdesAstrometricFitTask", 

48 "GbdesGlobalAstrometricFitConnections", 

49 "GbdesGlobalAstrometricFitConfig", 

50 "GbdesGlobalAstrometricFitTask", 

51] 

52 

53 

54def _make_ref_covariance_matrix( 

55 refCat, inputUnit=u.radian, outputCoordUnit=u.marcsec, outputPMUnit=u.marcsec, version=1 

56): 

57 """Make a covariance matrix for the reference catalog including proper 

58 motion and parallax. 

59 

60 The output is flattened to one dimension to match the format expected by 

61 `gbdes`. 

62 

63 Parameters 

64 ---------- 

65 refCat : `lsst.afw.table.SimpleCatalog` 

66 Catalog including proper motion and parallax measurements. 

67 inputUnit : `astropy.units.core.Unit` 

68 Units of the input catalog 

69 outputCoordUnit : `astropy.units.core.Unit` 

70 Units required for the coordinates in the covariance matrix. `gbdes` 

71 expects milliarcseconds. 

72 outputPMUnit : `astropy.units.core.Unit` 

73 Units required for the proper motion/parallax in the covariance matrix. 

74 `gbdes` expects milliarcseconds. 

75 version : `int` 

76 Version of the reference catalog. Version 2 includes covariance 

77 measurements. 

78 Returns 

79 ------- 

80 cov : `list` [`float`] 

81 Flattened output covariance matrix. 

82 """ 

83 cov = np.zeros((len(refCat), 25)) 

84 if version == 1: 

85 # Here is the standard ordering of components in the cov matrix, 

86 # to match the PM enumeration in C++ code of gbdes package's Match. 

87 # Each tuple gives: the array holding the 1-d error, the corresponding 

88 # string in the Gaia column names, and that component's position in the 

89 # Gaia catalog ordering. The order of the tuples themselves is the order 

90 # we want in our covariance matrix. 

91 raErr = (refCat["coord_raErr"]).to(outputCoordUnit).to_value() 

92 decErr = (refCat["coord_decErr"]).to(outputCoordUnit).to_value() 

93 raPMErr = (refCat["pm_raErr"]).to(outputPMUnit).to_value() 

94 decPMErr = (refCat["pm_decErr"]).to(outputPMUnit).to_value() 

95 parallaxErr = (refCat["parallaxErr"]).to(outputPMUnit).to_value() 

96 stdOrder = ( 

97 (raErr, "ra", 0), 

98 (decErr, "dec", 1), 

99 (raPMErr, "pmra", 3), 

100 (decPMErr, "pmdec", 4), 

101 (parallaxErr, "parallax", 2), 

102 ) 

103 

104 k = 0 

105 for i, pr1 in enumerate(stdOrder): 

106 for j, pr2 in enumerate(stdOrder): 

107 if pr1[2] < pr2[2]: 

108 cov[:, k] = 0 

109 elif pr1[2] > pr2[2]: 

110 cov[:, k] = 0 

111 else: 

112 # diagonal element 

113 cov[:, k] = pr1[0] * pr2[0] 

114 k = k + 1 

115 

116 elif version == 2: 

117 positionParameters = ["coord_ra", "coord_dec", "pm_ra", "pm_dec", "parallax"] 

118 units = [outputCoordUnit, outputCoordUnit, outputPMUnit, outputPMUnit, outputPMUnit] 

119 k = 0 

120 for i, pi in enumerate(positionParameters): 

121 for j, pj in enumerate(positionParameters): 

122 if i == j: 

123 cov[:, k] = ((refCat[f"{pi}Err"].value) ** 2 * inputUnit**2).to(units[j] * units[j]).value 

124 elif i > j: 

125 cov[:, k] = (refCat[f"{pj}_{pi}_Cov"].value * inputUnit**2).to_value(units[i] * units[j]) 

126 else: 

127 cov[:, k] = (refCat[f"{pi}_{pj}_Cov"].value * inputUnit**2).to_value(units[i] * units[j]) 

128 k += 1 

129 return cov 

130 
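# A minimal illustrative sketch of the flattened covariance layout produced
# above for a version-1 reference catalog. The one-row astropy QTable below
# is hypothetical; its column names mirror the ones read by the function.
def _example_ref_covariance_sketch():
    from astropy.table import QTable

    mockRefCat = QTable(
        {
            "coord_raErr": [1e-9] * u.radian,
            "coord_decErr": [1e-9] * u.radian,
            "pm_raErr": [2e-9] * u.radian,
            "pm_decErr": [2e-9] * u.radian,
            "parallaxErr": [3e-9] * u.radian,
        }
    )
    cov = _make_ref_covariance_matrix(mockRefCat, version=1)
    # One row of 25 values: a flattened 5x5 matrix in which, for version 1,
    # only the diagonal entries (the squared 1-d errors) are non-zero.
    assert cov.shape == (1, 25)
    return cov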

131 

132def _nCoeffsFromDegree(degree): 

133 """Get the number of coefficients for a polynomial of a certain degree with 

134 two variables. 

135 

136 This uses the general formula that the number of coefficients for a 

137 polynomial of degree d with n variables is (n + d) choose d, where in this 

138 case n is fixed to 2. 

139 

140 Parameters 

141 ---------- 

142 degree : `int` 

143 Degree of the polynomial in question. 

144 

145 Returns 

146 ------- 

147 nCoeffs : `int` 

148 Number of coefficients for the polynomial in question. 

149 """ 

150 nCoeffs = int((degree + 2) * (degree + 1) / 2) 

151 return nCoeffs 

152 

153 

154def _degreeFromNCoeffs(nCoeffs): 

155 """Get the degree for a polynomial with two variables and a certain number 

156 of coefficients. 

157 

158 This is done by applying the quadratic formula to the 

159 formula for calculating the number of coefficients of the polynomial. 

160 

161 Parameters 

162 ---------- 

163 nCoeffs : `int` 

164 Number of coefficients for the polynomial in question. 

165 

166 Returns 

167 ------- 

168 degree : `int` 

169 Degree of the polynomial in question. 

170 """ 

171 degree = int(-1.5 + 0.5 * (1 + 8 * nCoeffs) ** 0.5) 

172 return degree 

173 
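# A quick worked example of the two helpers above: a cubic in two variables
# has (3 + 2)(3 + 1)/2 = 10 coefficients, and the conversions round-trip.
def _example_poly_degree_round_trip():
    assert _nCoeffsFromDegree(3) == 10
    assert _degreeFromNCoeffs(10) == 3
    # The inversion is exact for any non-negative integer degree.
    for degree in range(10):
        assert _degreeFromNCoeffs(_nCoeffsFromDegree(degree)) == degree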

174 

175def _convert_to_ast_polymap_coefficients(coefficients): 

176 """Convert vector of polynomial coefficients from the format used in 

177 `gbdes` into AST format (see Poly2d::vectorIndex(i, j) in 

178 gbdes/gbutil/src/Poly2d.cpp). This assumes two input and two output 

179 coordinates. 

180 

181 Parameters 

182 ---------- 

183 coefficients : `list` 

184 Coefficients of the polynomials, given for both output coordinates. 

185 The degree of the polynomial is inferred from the number of 

186 coefficients. 

187 

188 Returns 

189 ------- 

190 astPoly : `astshim.PolyMap` 

191 Coefficients in AST polynomial format. 

192 """ 

193 polyArray = np.zeros((len(coefficients), 4)) 

194 N = len(coefficients) / 2 

195 degree = _degreeFromNCoeffs(N) 

196 

197 for outVar in [1, 2]: 

198 for i in range(degree + 1): 

199 for j in range(degree + 1): 

200 if (i + j) > degree: 

201 continue 

202 vectorIndex = int(((i + j) * (i + j + 1)) / 2 + j + N * (outVar - 1)) 

203 polyArray[vectorIndex, 0] = coefficients[vectorIndex] 

204 polyArray[vectorIndex, 1] = outVar 

205 polyArray[vectorIndex, 2] = i 

206 polyArray[vectorIndex, 3] = j 

207 

208 astPoly = astshim.PolyMap(polyArray, 2, options="IterInverse=1,NIterInverse=10,TolInverse=1e-7") 

209 return astPoly 

210 
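# A brief sketch of the gbdes coefficient ordering handled above: for a
# degree-1 map the per-axis order is [c00, c10, c01], so an identity
# transform corresponds to the (hypothetical, illustrative) vector
# [0, 1, 0] for the x output followed by [0, 0, 1] for the y output.
def _example_identity_polymap_sketch():
    identityCoefficients = [0.0, 1.0, 0.0, 0.0, 0.0, 1.0]
    return _convert_to_ast_polymap_coefficients(identityCoefficients)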

211 

212class GbdesAstrometricFitConnections( 

213 pipeBase.PipelineTaskConnections, dimensions=("skymap", "tract", "instrument", "physical_filter") 

214): 

215 """Middleware input/output connections for task data.""" 

216 

217 inputCatalogRefs = pipeBase.connectionTypes.Input( 

218 doc="Source table in parquet format, per visit.", 

219 name="preSourceTable_visit", 

220 storageClass="DataFrame", 

221 dimensions=("instrument", "visit"), 

222 deferLoad=True, 

223 multiple=True, 

224 ) 

225 inputVisitSummaries = pipeBase.connectionTypes.Input( 

226 doc=( 

227 "Per-visit consolidated exposure metadata built from calexps. " 

228 "These catalogs use detector id for the id and must be sorted for " 

229 "fast lookups of a detector." 

230 ), 

231 name="visitSummary", 

232 storageClass="ExposureCatalog", 

233 dimensions=("instrument", "visit"), 

234 multiple=True, 

235 ) 

236 referenceCatalog = pipeBase.connectionTypes.PrerequisiteInput( 

237 doc="The astrometry reference catalog to match to loaded input catalog sources.", 

238 name="gaia_dr3_20230707", 

239 storageClass="SimpleCatalog", 

240 dimensions=("skypix",), 

241 deferLoad=True, 

242 multiple=True, 

243 ) 

244 outputWcs = pipeBase.connectionTypes.Output( 

245 doc=( 

246 "Per-tract, per-visit world coordinate systems derived from the fitted model." 

247 " These catalogs only contain entries for detectors with an output, and use" 

248 " the detector id for the catalog id, sorted on id for fast lookups of a detector." 

249 ), 

250 name="gbdesAstrometricFitSkyWcsCatalog", 

251 storageClass="ExposureCatalog", 

252 dimensions=("instrument", "visit", "skymap", "tract"), 

253 multiple=True, 

254 ) 

255 outputCatalog = pipeBase.connectionTypes.Output( 

256 doc=( 

257 "Catalog of sources used in fit, along with residuals in pixel coordinates and tangent " 

258 "plane coordinates and chisq values." 

259 ), 

260 name="gbdesAstrometricFit_fitStars", 

261 storageClass="ArrowNumpyDict", 

262 dimensions=("instrument", "skymap", "tract", "physical_filter"), 

263 ) 

264 starCatalog = pipeBase.connectionTypes.Output( 

265 doc=( 

266 "Catalog of best-fit object positions. Also includes the fit proper motion and parallax if " 

267 "fitProperMotion is True." 

268 ), 

269 name="gbdesAstrometricFit_starCatalog", 

270 storageClass="ArrowNumpyDict", 

271 dimensions=("instrument", "skymap", "tract", "physical_filter"), 

272 ) 

273 modelParams = pipeBase.connectionTypes.Output( 

274 doc="WCS parameters and covariance.", 

275 name="gbdesAstrometricFit_modelParams", 

276 storageClass="ArrowNumpyDict", 

277 dimensions=("instrument", "skymap", "tract", "physical_filter"), 

278 ) 

279 

280 def getSpatialBoundsConnections(self): 

281 return ("inputVisitSummaries",) 

282 

283 def __init__(self, *, config=None): 

284 super().__init__(config=config) 

285 

286 if not self.config.saveModelParams: 

287 self.outputs.remove("modelParams") 

288 

289 

290class GbdesAstrometricFitConfig( 

291 pipeBase.PipelineTaskConfig, pipelineConnections=GbdesAstrometricFitConnections 

292): 

293 """Configuration for GbdesAstrometricFitTask""" 

294 

295 sourceSelector = sourceSelectorRegistry.makeField( 

296 doc="How to select sources for cross-matching.", default="science" 

297 ) 

298 referenceSelector = pexConfig.ConfigurableField( 

299 target=ReferenceSourceSelectorTask, 

300 doc="How to down-select the loaded astrometry reference catalog.", 

301 ) 

302 referenceFilter = pexConfig.Field( 

303 dtype=str, 

304 doc="Name of filter to load from reference catalog. This is a required argument, although the values" 

305 "returned are not used.", 

306 default="phot_g_mean", 

307 ) 

308 applyRefCatProperMotion = pexConfig.Field( 

309 dtype=bool, 

310 doc="Apply proper motion to shift reference catalog to epoch of observations.", 

311 default=True, 

312 ) 

313 matchRadius = pexConfig.Field( 

314 doc="Matching tolerance between associated objects (arcseconds).", dtype=float, default=1.0 

315 ) 

316 minMatches = pexConfig.Field( 

317 doc="Number of matches required to keep a source object.", dtype=int, default=2 

318 ) 

319 allowSelfMatches = pexConfig.Field( 

320 doc="Allow multiple sources from the same visit to be associated with the same object.", 

321 dtype=bool, 

322 default=False, 

323 ) 

324 sourceFluxType = pexConfig.Field( 

325 dtype=str, 

326 doc="Source flux field to use in source selection and to get fluxes from the catalog.", 

327 default="apFlux_12_0", 

328 ) 

329 systematicError = pexConfig.Field( 

330 dtype=float, 

331 doc=( 

332 "Systematic error padding added in quadrature for the science catalogs (marcsec). The default" 

333 "value is equivalent to 0.02 pixels for HSC." 

334 ), 

335 default=0.0034, 

336 ) 

337 referenceSystematicError = pexConfig.Field( 

338 dtype=float, 

339 doc="Systematic error padding added in quadrature for the reference catalog (marcsec).", 

340 default=0.0, 

341 ) 

342 modelComponents = pexConfig.ListField( 

343 dtype=str, 

344 doc=( 

345 "List of mappings to apply to transform from pixels to sky, in order of their application." 

346 "Supported options are 'INSTRUMENT/DEVICE' and 'EXPOSURE'." 

347 ), 

348 default=["INSTRUMENT/DEVICE", "EXPOSURE"], 

349 ) 

350 deviceModel = pexConfig.ListField( 

351 dtype=str, 

352 doc=( 

353 "List of mappings to apply to transform from detector pixels to intermediate frame. Map names" 

354 "should match the format 'BAND/DEVICE/<map name>'." 

355 ), 

356 default=["BAND/DEVICE/poly"], 

357 ) 

358 exposureModel = pexConfig.ListField( 

359 dtype=str, 

360 doc=( 

361 "List of mappings to apply to transform from intermediate frame to sky coordinates. Map names" 

362 "should match the format 'EXPOSURE/<map name>'." 

363 ), 

364 default=["EXPOSURE/poly"], 

365 ) 

366 devicePolyOrder = pexConfig.Field(dtype=int, doc="Order of device polynomial model.", default=4) 

367 exposurePolyOrder = pexConfig.Field(dtype=int, doc="Order of exposure polynomial model.", default=6) 

368 fitProperMotion = pexConfig.Field(dtype=bool, doc="Fit the proper motions of the objects.", default=False) 

369 excludeNonPMObjects = pexConfig.Field( 

370 dtype=bool, doc="Exclude reference objects without proper motion/parallax information.", default=True 

371 ) 

372 fitReserveFraction = pexConfig.Field( 

373 dtype=float, default=0.2, doc="Fraction of objects to reserve from fit for validation." 

374 ) 

375 fitReserveRandomSeed = pexConfig.Field( 

376 dtype=int, 

377 doc="Set the random seed for selecting data points to reserve from the fit for validation.", 

378 default=1234, 

379 ) 

380 saveModelParams = pexConfig.Field( 

381 dtype=bool, 

382 doc=( 

383 "Save the parameters and covariance of the WCS model. Default to " 

384 "false because this can be very large." 

385 ), 

386 default=False, 

387 ) 

388 

389 def setDefaults(self): 

390 # Use only stars because aperture fluxes of galaxies are biased and 

391 # depend on seeing. 

392 self.sourceSelector["science"].doUnresolved = True 

393 self.sourceSelector["science"].unresolved.name = "sizeExtendedness" 

394 

395 # Use only isolated sources. 

396 self.sourceSelector["science"].doIsolated = True 

397 self.sourceSelector["science"].isolated.parentName = "parentSourceId" 

398 self.sourceSelector["science"].isolated.nChildName = "deblend_nChild" 

399 # Do not use either flux or centroid measurements with flags, 

400 # chosen from the usual QA flags for stars. 

401 self.sourceSelector["science"].doFlags = True 

402 badFlags = [ 

403 "pixelFlags_edge", 

404 "pixelFlags_saturated", 

405 "pixelFlags_interpolatedCenter", 

406 "pixelFlags_interpolated", 

407 "pixelFlags_crCenter", 

408 "pixelFlags_bad", 

409 "hsmPsfMoments_flag", 

410 f"{self.sourceFluxType}_flag", 

411 ] 

412 self.sourceSelector["science"].flags.bad = badFlags 

413 

414 # Use only primary sources. 

415 self.sourceSelector["science"].doRequirePrimary = True 

416 

417 def validate(self): 

418 super().validate() 

419 

420 # Check if all components of the device and exposure models are 

421 # supported. 

422 for component in self.deviceModel: 

423 if not (("poly" in component.lower()) or ("identity" in component.lower())): 

424 raise pexConfig.FieldValidationError( 

425 GbdesAstrometricFitConfig.deviceModel, 

426 self, 

427 f"deviceModel component {component} is not supported.", 

428 ) 

429 

430 for component in self.exposureModel: 

431 if not (("poly" in component.lower()) or ("identity" in component.lower())): 

432 raise pexConfig.FieldValidationError( 

433 GbdesAstrometricFitConfig.exposureModel, 

434 self, 

435 f"exposureModel component {component} is not supported.", 

436 ) 

437 
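# A sketch of how the fitted model can be reshaped through the config fields
# above (hypothetical overrides, e.g. in a pipeline's python config block):
#
#     config.devicePolyOrder = 3
#     config.exposureModel = ["EXPOSURE/identity"]
#     config.saveModelParams = True
#
# Both model lists are checked by validate() above, which only accepts
# components containing "poly" or "identity".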

438 

439class GbdesAstrometricFitTask(pipeBase.PipelineTask): 

440 """Calibrate the WCS across multiple visits of the same field using the 

441 GBDES package. 

442 """ 

443 

444 ConfigClass = GbdesAstrometricFitConfig 

445 _DefaultName = "gbdesAstrometricFit" 

446 

447 def __init__(self, **kwargs): 

448 super().__init__(**kwargs) 

449 self.makeSubtask("sourceSelector") 

450 self.makeSubtask("referenceSelector") 

451 

452 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

453 # We override runQuantum to set up the refObjLoaders 

454 inputs = butlerQC.get(inputRefs) 

455 

456 instrumentName = butlerQC.quantum.dataId["instrument"] 

457 

458 # Ensure the inputs are in a consistent and deterministic order 

459 inputCatVisits = np.array([inputCat.dataId["visit"] for inputCat in inputs["inputCatalogRefs"]]) 

460 inputs["inputCatalogRefs"] = [inputs["inputCatalogRefs"][v] for v in inputCatVisits.argsort()] 

461 inputSumVisits = np.array([inputSum[0]["visit"] for inputSum in inputs["inputVisitSummaries"]]) 

462 inputs["inputVisitSummaries"] = [inputs["inputVisitSummaries"][v] for v in inputSumVisits.argsort()] 

463 inputRefHtm7s = np.array([inputRefCat.dataId["htm7"] for inputRefCat in inputRefs.referenceCatalog]) 

464 inputRefCatRefs = [inputRefs.referenceCatalog[htm7] for htm7 in inputRefHtm7s.argsort()] 

465 inputRefCats = np.array([inputRefCat.dataId["htm7"] for inputRefCat in inputs["referenceCatalog"]]) 

466 inputs["referenceCatalog"] = [inputs["referenceCatalog"][v] for v in inputRefCats.argsort()] 

467 

468 refConfig = LoadReferenceObjectsConfig() 

469 if self.config.applyRefCatProperMotion: 

470 refConfig.requireProperMotion = True 

471 refObjectLoader = ReferenceObjectLoader( 

472 dataIds=[ref.datasetRef.dataId for ref in inputRefCatRefs], 

473 refCats=inputs.pop("referenceCatalog"), 

474 config=refConfig, 

475 log=self.log, 

476 ) 

477 

478 output = self.run(**inputs, instrumentName=instrumentName, refObjectLoader=refObjectLoader) 

479 

480 wcsOutputRefDict = {outWcsRef.dataId["visit"]: outWcsRef for outWcsRef in outputRefs.outputWcs} 

481 for visit, outputWcs in output.outputWcss.items(): 

482 butlerQC.put(outputWcs, wcsOutputRefDict[visit]) 

483 butlerQC.put(output.outputCatalog, outputRefs.outputCatalog) 

484 butlerQC.put(output.starCatalog, outputRefs.starCatalog) 

485 if self.config.saveModelParams: 

486 butlerQC.put(output.modelParams, outputRefs.modelParams) 

487 

488 def run( 

489 self, inputCatalogRefs, inputVisitSummaries, instrumentName="", refEpoch=None, refObjectLoader=None 

490 ): 

491 """Run the WCS fit for a given set of visits 

492 

493 Parameters 

494 ---------- 

495 inputCatalogRefs : `list` [`DeferredDatasetHandle`] 

496 List of handles pointing to visit-level source 

497 tables. 

498 inputVisitSummaries : `list` [`lsst.afw.table.ExposureCatalog`] 

499 List of catalogs with per-detector summary information. 

500 instrumentName : `str`, optional 

501 Name of the instrument used. This is only used for labelling. 

502 refEpoch : `float` 

503 Epoch of the reference objects in MJD. 

504 refObjectLoader : instance of 

505 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader` 

506 Reference object loader instance. 

507 

508 Returns 

509 ------- 

510 result : `lsst.pipe.base.Struct` 

511 ``outputWcss`` : `list` [`lsst.afw.table.ExposureCatalog`] 

512 List of exposure catalogs (one per visit) with the WCS for each 

513 detector set by the new fitted WCS. 

514 ``fitModel`` : `wcsfit.WCSFit` 

515 Model-fitting object with final model parameters. 

516 ``outputCatalog`` : `pyarrow.Table` 

517 Catalog with fit residuals of all sources used. 

518 ``starCatalog`` : `pyarrow.Table` 

519 Catalog with best-fit positions of the objects fit. 

520 ``modelParams`` : `dict` 

521 Parameters and covariance of the best-fit WCS model. 

522 """ 

523 self.log.info("Gather instrument, exposure, and field info") 

524 # Set up an instrument object 

525 instrument = wcsfit.Instrument(instrumentName) 

526 

527 # Get RA, Dec, MJD, etc., for the input visits 

528 exposureInfo, exposuresHelper, extensionInfo = self._get_exposure_info( 

529 inputVisitSummaries, instrument 

530 ) 

531 

532 # Get information about the extent of the input visits 

533 fields, fieldCenter, fieldRadius = self._prep_sky(inputVisitSummaries, exposureInfo.medianEpoch) 

534 

535 self.log.info("Load catalogs and associate sources") 

536 # Set up class to associate sources into matches using a 

537 # friends-of-friends algorithm 

538 associations = wcsfit.FoFClass( 

539 fields, 

540 [instrument], 

541 exposuresHelper, 

542 [fieldRadius.asDegrees()], 

543 (self.config.matchRadius * u.arcsec).to(u.degree).value, 

544 ) 

545 

546 # Add the reference catalog to the associator 

547 medianEpoch = astropy.time.Time(exposureInfo.medianEpoch, format="decimalyear").mjd 

548 refObjects, refCovariance = self._load_refcat( 

549 refObjectLoader, 

550 extensionInfo, 

551 epoch=medianEpoch, 

552 center=fieldCenter, 

553 radius=fieldRadius, 

554 associations=associations, 

555 ) 

556 

557 # Add the science catalogs and associate new sources as they are added 

558 sourceIndices, usedColumns = self._load_catalogs_and_associate( 

559 associations, inputCatalogRefs, extensionInfo 

560 ) 

561 self._check_degeneracies(associations, extensionInfo) 

562 

563 self.log.info("Fit the WCSs") 

564 # Set up a YAML-type string using the config variables and a sample 

565 # visit 

566 inputYaml, mapTemplate = self.make_yaml(inputVisitSummaries[0]) 

567 

568 # Set the verbosity level for WCSFit from the task log level. 

569 # TODO: DM-36850, Add lsst.log to gbdes so that log messages are 

570 # properly propagated. 

571 loglevel = self.log.getEffectiveLevel() 

572 if loglevel >= self.log.WARNING: 

573 verbose = 0 

574 elif loglevel == self.log.INFO: 

575 verbose = 1 

576 else: 

577 verbose = 2 

578 

579 # Set up the WCS-fitting class using the results of the FOF associator 

580 wcsf = wcsfit.WCSFit( 

581 fields, 

582 [instrument], 

583 exposuresHelper, 

584 extensionInfo.visitIndex, 

585 extensionInfo.detectorIndex, 

586 inputYaml, 

587 extensionInfo.wcs, 

588 associations.sequence, 

589 associations.extn, 

590 associations.obj, 

591 sysErr=self.config.systematicError, 

592 refSysErr=self.config.referenceSystematicError, 

593 usePM=self.config.fitProperMotion, 

594 verbose=verbose, 

595 ) 

596 

597 # Add the science and reference sources 

598 self._add_objects(wcsf, inputCatalogRefs, sourceIndices, extensionInfo, usedColumns) 

599 self._add_ref_objects(wcsf, refObjects, refCovariance, extensionInfo) 

600 

601 # There must be at least as many sources per visit as the number of 

602 # free parameters in the per-visit mapping. Set minFitExposures to be 

603 # the number of free parameters, so that visits with fewer sources are 

604 # dropped. 

605 nCoeffVisitModel = _nCoeffsFromDegree(self.config.exposurePolyOrder) 

606 # Do the WCS fit 

607 wcsf.fit( 

608 reserveFraction=self.config.fitReserveFraction, 

609 randomNumberSeed=self.config.fitReserveRandomSeed, 

610 minFitExposures=nCoeffVisitModel, 

611 ) 

612 self.log.info("WCS fitting done") 

613 

614 outputWcss = self._make_outputs(wcsf, inputVisitSummaries, exposureInfo, mapTemplate=mapTemplate) 

615 outputCatalog = wcsf.getOutputCatalog() 

616 starCatalog = wcsf.getStarCatalog() 

617 modelParams = self._compute_model_params(wcsf) if self.config.saveModelParams else None 

618 

619 return pipeBase.Struct( 

620 outputWcss=outputWcss, 

621 fitModel=wcsf, 

622 outputCatalog=outputCatalog, 

623 starCatalog=starCatalog, 

624 modelParams=modelParams, 

625 ) 

626 
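# A sketch of a direct (non-middleware) call to run(), assuming
# ``catalogHandles``, ``visitSummaries``, and ``loader`` (hypothetical names)
# were already retrieved from a butler as in runQuantum above:
#
#     task = GbdesAstrometricFitTask()
#     result = task.run(
#         catalogHandles,
#         visitSummaries,
#         instrumentName="HSC",
#         refObjectLoader=loader,
#     )
#     perVisitWcsCatalogs = result.outputWcss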

627 def _prep_sky(self, inputVisitSummaries, epoch, fieldName="Field"): 

628 """Get center and radius of the input tract. This assumes that all 

629 visits will be put into the same `wcsfit.Field` and fit together. 

630 

631 Parameters 

632 ---------- 

633 inputVisitSummaries : `list` [`lsst.afw.table.ExposureCatalog`] 

634 List of catalogs with per-detector summary information. 

635 epoch : `float` 

636 Reference epoch. 

637 fieldName : `str` 

638 Name of the field, used internally. 

639 

640 Returns 

641 ------- 

642 fields : `wcsfit.Fields` 

643 Object with field information. 

644 center : `lsst.geom.SpherePoint` 

645 Center of the field. 

646 radius : `lsst.sphgeom._sphgeom.Angle` 

647 Radius of the bounding circle of the tract. 

648 """ 

649 allDetectorCorners = [] 

650 for visSum in inputVisitSummaries: 

651 detectorCorners = [ 

652 lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector() 

653 for (ra, dec) in zip(visSum["raCorners"].ravel(), visSum["decCorners"].ravel()) 

654 if (np.isfinite(ra) and (np.isfinite(dec))) 

655 ] 

656 allDetectorCorners.extend(detectorCorners) 

657 boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(allDetectorCorners).getBoundingCircle() 

658 center = lsst.geom.SpherePoint(boundingCircle.getCenter()) 

659 ra = center.getRa().asDegrees() 

660 dec = center.getDec().asDegrees() 

661 radius = boundingCircle.getOpeningAngle() 

662 

663 # wcsfit.Fields describes a list of fields, but we assume all 

664 # observations will be fit together in one field. 

665 fields = wcsfit.Fields([fieldName], [ra], [dec], [epoch]) 

666 

667 return fields, center, radius 

668 
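# A sketch of the geometry above with hypothetical corner coordinates: the
# field is the bounding circle of the convex hull of all detector corners.
#
#     corners = [
#         lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector()
#         for ra, dec in [(149.9, 2.1), (150.1, 2.1), (150.1, 2.3), (149.9, 2.3)]
#     ]
#     circle = lsst.sphgeom.ConvexPolygon.convexHull(corners).getBoundingCircle()
#     center = lsst.geom.SpherePoint(circle.getCenter())
#     radius = circle.getOpeningAngle()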

669 def _get_exposure_info( 

670 self, 

671 inputVisitSummaries, 

672 instrument, 

673 fieldNumber=0, 

674 instrumentNumber=0, 

675 refEpoch=None, 

676 fieldRegions=None, 

677 ): 

678 """Get various information about the input visits to feed to the 

679 fitting routines. 

680 

681 Parameters 

682 ---------- 

683 inputVisitSummaries : `list` [`lsst.afw.table.ExposureCatalog`] 

684 Tables for each visit with information for detectors. 

685 instrument : `wcsfit.Instrument` 

686 Instrument object to which detector information is added. 

687 fieldNumber : `int`, optional 

688 Index of the field for these visits. Should be zero if all data is 

689 being fit together. This is ignored if `fieldRegions` is not None. 

690 instrumentNumber : `int`, optional 

691 Index of the instrument for these visits. Should be zero if all 

692 data comes from the same instrument. 

693 refEpoch : `float`, optional 

694 Epoch of the reference objects in MJD. 

695 fieldRegions : `dict` [`int`, `lsst.sphgeom.ConvexPolygon`], optional 

696 Dictionary of regions encompassing each group of input visits 

697 keyed by an arbitrary index. 

698 

699 Returns 

700 ------- 

701 exposureInfo : `lsst.pipe.base.Struct` 

702 Struct containing general properties for the visits: 

703 ``visits`` : `list` 

704 List of visit names. 

705 ``detectors`` : `list` 

706 List of all detectors in any visit. 

707 ``ras`` : `list` [`float`] 

708 List of boresight RAs for each visit. 

709 ``decs`` : `list` [`float`] 

710 List of boresight Decs for each visit. 

711 ``medianEpoch`` : `float` 

712 Median epoch of all visits in decimal-year format. 

713 exposuresHelper : `wcsfit.ExposuresHelper` 

714 Object containing information about the input visits. 

715 extensionInfo : `lsst.pipe.base.Struct` 

716 Struct containing properties for each extension (visit/detector): 

717 ``visit`` : `np.ndarray` 

718 Name of the visit for this extension. 

719 ``detector`` : `np.ndarray` 

720 Name of the detector for this extension. 

721 ``visitIndex`` : `np.ndarray` [`int`] 

722 Index of visit for this extension. 

723 ``detectorIndex`` : `np.ndarray` [`int`] 

724 Index of the detector for this extension. 

725 ``wcss`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`] 

726 Initial WCS for this extension. 

727 ``extensionType`` : `np.ndarray` [`str`] 

728 "SCIENCE" or "REFERENCE". 

729 """ 

730 exposureNames = [] 

731 ras = [] 

732 decs = [] 

733 visits = [] 

734 detectors = [] 

735 airmasses = [] 

736 exposureTimes = [] 

737 mjds = [] 

738 observatories = [] 

739 wcss = [] 

740 fieldNumbers = [] 

741 

742 extensionType = [] 

743 extensionVisitIndices = [] 

744 extensionDetectorIndices = [] 

745 extensionVisits = [] 

746 extensionDetectors = [] 

747 # Get information for all the science visits 

748 for v, visitSummary in enumerate(inputVisitSummaries): 

749 visitInfo = visitSummary[0].getVisitInfo() 

750 visit = visitSummary[0]["visit"] 

751 visits.append(visit) 

752 exposureNames.append(str(visit)) 

753 raDec = visitInfo.getBoresightRaDec() 

754 ras.append(raDec.getRa().asRadians()) 

755 decs.append(raDec.getDec().asRadians()) 

756 if fieldRegions is not None: 

757 inField = [r for r, region in fieldRegions.items() if region.contains(raDec.getVector())] 

758 if len(inField) != 1: 

759 raise RuntimeError( 

760 f"Visit should be in one and only one field, but {visit} is contained " 

761 f"in {len(inField)} fields." 

762 ) 

763 fieldNumbers.append(inField[0]) 

764 else: 

765 fieldNumbers.append(fieldNumber) 

766 airmasses.append(visitInfo.getBoresightAirmass()) 

767 exposureTimes.append(visitInfo.getExposureTime()) 

768 obsDate = visitInfo.getDate() 

769 obsMJD = obsDate.get(obsDate.MJD) 

770 mjds.append(obsMJD) 

771 # Get the observatory ICRS position for use in fitting parallax 

772 obsLon = visitInfo.observatory.getLongitude().asDegrees() 

773 obsLat = visitInfo.observatory.getLatitude().asDegrees() 

774 obsElev = visitInfo.observatory.getElevation() 

775 earthLocation = astropy.coordinates.EarthLocation.from_geodetic(obsLon, obsLat, obsElev) 

776 observatory_gcrs = earthLocation.get_gcrs(astropy.time.Time(obsMJD, format="mjd")) 

777 observatory_icrs = observatory_gcrs.transform_to(astropy.coordinates.ICRS()) 

778 # We want the position in AU in Cartesian coordinates 

779 observatories.append(observatory_icrs.cartesian.xyz.to(u.AU).value) 

780 

781 for row in visitSummary: 

782 detector = row["id"] 

783 

784 wcs = row.getWcs() 

785 if wcs is None: 

786 self.log.warning( 

787 "WCS is None for visit %d, detector %d: this extension (visit/detector) will be " 

788 "dropped.", 

789 visit, 

790 detector, 

791 ) 

792 continue 

793 else: 

794 wcsRA = wcs.getSkyOrigin().getRa().asRadians() 

795 wcsDec = wcs.getSkyOrigin().getDec().asRadians() 

796 tangentPoint = wcsfit.Gnomonic(wcsRA, wcsDec) 

797 mapping = wcs.getFrameDict().getMapping("PIXELS", "IWC") 

798 gbdes_wcs = wcsfit.Wcs(wcsfit.ASTMap(mapping), tangentPoint) 

799 wcss.append(gbdes_wcs) 

800 

801 if detector not in detectors: 

802 detectors.append(detector) 

803 detectorBounds = wcsfit.Bounds( 

804 row["bbox_min_x"], row["bbox_max_x"], row["bbox_min_y"], row["bbox_max_y"] 

805 ) 

806 instrument.addDevice(str(detector), detectorBounds) 

807 

808 detectorIndex = np.flatnonzero(detector == np.array(detectors))[0] 

809 extensionVisitIndices.append(v) 

810 extensionDetectorIndices.append(detectorIndex) 

811 extensionVisits.append(visit) 

812 extensionDetectors.append(detector) 

813 extensionType.append("SCIENCE") 

814 

815 instrumentNumbers = list(np.ones(len(exposureNames), dtype=int) * instrumentNumber) 

816 

817 # Set the reference epoch to be the median of the science visits. 

818 # The reference catalog will be shifted to this date. 

819 medianMJD = np.median(mjds) 

820 medianEpoch = astropy.time.Time(medianMJD, format="mjd").decimalyear 

821 

822 # Add information for the reference catalog. Most of the values are 

823 # not used. There needs to be a separate catalog for each field. 

824 if fieldRegions is None: 

825 fieldRegions = {0: None} 

826 for f in fieldRegions: 

827 exposureNames.append("REFERENCE") 

828 # Make the "visit" number the field * -1 to disambiguate it from 

829 # any potential visit number: 

830 visits.append(-1 * f) 

831 fieldNumbers.append(f) 

832 if self.config.fitProperMotion: 

833 instrumentNumbers.append(-2) 

834 else: 

835 instrumentNumbers.append(-1) 

836 ras.append(0.0) 

837 decs.append(0.0) 

838 airmasses.append(0.0) 

839 exposureTimes.append(0) 

840 mjds.append((refEpoch if (refEpoch is not None) else medianMJD)) 

841 observatories.append(np.array([0, 0, 0])) 

842 identity = wcsfit.IdentityMap() 

843 icrs = wcsfit.SphericalICRS() 

844 refWcs = wcsfit.Wcs(identity, icrs, "Identity", np.pi / 180.0) 

845 wcss.append(refWcs) 

846 

847 extensionVisitIndices.append(len(exposureNames) - 1) 

848 extensionDetectorIndices.append(-1) # REFERENCE device must be -1 

849 extensionVisits.append(-1 * f) 

850 extensionDetectors.append(-1) 

851 extensionType.append("REFERENCE") 

852 

853 # Make a table of information to use elsewhere in the class 

854 extensionInfo = pipeBase.Struct( 

855 visit=np.array(extensionVisits), 

856 detector=np.array(extensionDetectors), 

857 visitIndex=np.array(extensionVisitIndices), 

858 detectorIndex=np.array(extensionDetectorIndices), 

859 wcs=np.array(wcss), 

860 extensionType=np.array(extensionType), 

861 ) 

862 

863 # Make the exposureHelper object to use in the fitting routines 

864 exposuresHelper = wcsfit.ExposuresHelper( 

865 exposureNames, 

866 fieldNumbers, 

867 instrumentNumbers, 

868 ras, 

869 decs, 

870 airmasses, 

871 exposureTimes, 

872 mjds, 

873 observatories, 

874 ) 

875 

876 exposureInfo = pipeBase.Struct( 

877 visits=visits, detectors=detectors, ras=ras, decs=decs, medianEpoch=medianEpoch 

878 ) 

879 

880 return exposureInfo, exposuresHelper, extensionInfo 

881 
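# A sketch of the observatory-position step above, with hypothetical values:
# gbdes needs the ICRS (barycentric) Cartesian position in AU to model
# parallax.
#
#     location = astropy.coordinates.EarthLocation.from_geodetic(
#         -155.48, 19.83, 4139  # approx. Mauna Kea lon (deg), lat (deg), elevation (m)
#     )
#     gcrs = location.get_gcrs(astropy.time.Time(60000.0, format="mjd"))
#     icrs = gcrs.transform_to(astropy.coordinates.ICRS())
#     observatoryPosition = icrs.cartesian.xyz.to(u.AU).value  # shape (3,)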

882 def _load_refcat( 

883 self, 

884 refObjectLoader, 

885 extensionInfo, 

886 epoch=None, 

887 fieldIndex=0, 

888 associations=None, 

889 center=None, 

890 radius=None, 

891 region=None, 

892 ): 

893 """Load the reference catalog and add reference objects to the 

894 `wcsfit.FoFClass` object. 

895 

896 Parameters 

897 ---------- 

898 refObjectLoader : 

899 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader` 

900 Object set up to load reference catalog objects. 

901 extensionInfo : `lsst.pipe.base.Struct` 

902 Struct containing properties for each extension (visit/detector). 

903 ``visit`` : `np.ndarray` 

904 Name of the visit for this extension. 

905 ``detector`` : `np.ndarray` 

906 Name of the detector for this extension. 

907 ``visitIndex`` : `np.ndarray` [`int`] 

908 Index of visit for this extension. 

909 ``detectorIndex`` : `np.ndarray` [`int`] 

910 Index of the detector for this extension. 

911 ``wcss`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`] 

912 Initial WCS for this extension. 

913 ``extensionType`` : `np.ndarray` [`str`] 

914 "SCIENCE" or "REFERENCE". 

915 epoch : `float`, optional 

916 MJD to which to correct the object positions. 

917 fieldIndex : `int`, optional 

918 Index of the field. Should be zero if all the data is fit together. 

919 associations : `wcsfit.FoFClass`, optional 

920 Object to which to add the catalog of reference objects. 

921 center : `lsst.geom.SpherePoint`, optional 

922 Center of the circle in which to load reference objects. Ignored if 

923 `region` is set. If used, `radius` must also be set. 

924 radius : `lsst.sphgeom._sphgeom.Angle`, optional 

925 Radius of the circle in which to load reference objects. Ignored if 

926 `region` is set. If used, `center` must also be set. 

927 region : `lsst.sphgeom.ConvexPolygon`, optional 

928 Region in which to load reference objects. 

929 

930 Returns 

931 ------- 

932 refObjects : `dict` 

933 Position and error information of reference objects. 

934 refCovariance : `list` [`float`] 

935 Flattened output covariance matrix. 

936 """ 

937 if self.config.applyRefCatProperMotion: 

938 formattedEpoch = astropy.time.Time(epoch, format="mjd") 

939 else: 

940 formattedEpoch = None 

941 

942 if region is not None: 

943 skyRegion = refObjectLoader.loadRegion(region, self.config.referenceFilter, epoch=formattedEpoch) 

944 elif (center is not None) and (radius is not None): 

945 skyRegion = refObjectLoader.loadSkyCircle( 

946 center, radius, self.config.referenceFilter, epoch=formattedEpoch 

947 ) 

948 else: 

949 raise RuntimeError("Either `region` or `center` and `radius` must be set.") 

950 

951 selected = self.referenceSelector.run(skyRegion.refCat) 

952 # Need memory contiguity to get reference filters as a vector. 

953 if not selected.sourceCat.isContiguous(): 

954 refCat = selected.sourceCat.copy(deep=True) 

955 else: 

956 refCat = selected.sourceCat 

957 refCat = refCat.asAstropy() 

958 

959 # In Gaia DR3, missing values are denoted by NaNs. 

960 finiteInd = np.isfinite(refCat["coord_ra"]) & np.isfinite(refCat["coord_dec"]) 

961 refCat = refCat[finiteInd] 

962 

963 if self.config.excludeNonPMObjects and self.config.applyRefCatProperMotion: 

964 # Gaia DR2 has zeros for missing data, while Gaia DR3 has NaNs: 

965 hasPM = ( 

966 (refCat["pm_raErr"] != 0) & np.isfinite(refCat["pm_raErr"]) & np.isfinite(refCat["pm_decErr"]) 

967 ) 

968 refCat = refCat[hasPM] 

969 

970 ra = (refCat["coord_ra"]).to(u.degree).to_value().tolist() 

971 dec = (refCat["coord_dec"]).to(u.degree).to_value().tolist() 

972 raCov = ((refCat["coord_raErr"]).to(u.degree).to_value() ** 2).tolist() 

973 decCov = ((refCat["coord_decErr"]).to(u.degree).to_value() ** 2).tolist() 

974 

975 # Get refcat version from refcat metadata 

976 refCatMetadata = refObjectLoader.refCats[0].get().getMetadata() 

977 refCatVersion = refCatMetadata["REFCAT_FORMAT_VERSION"] 

978 if refCatVersion == 2: 

979 raDecCov = (refCat["coord_ra_coord_dec_Cov"]).to(u.degree**2).to_value().tolist() 

980 else: 

981 raDecCov = np.zeros(len(ra)) 

982 

983 refObjects = {"ra": ra, "dec": dec, "raCov": raCov, "decCov": decCov, "raDecCov": raDecCov} 

984 refCovariance = [] 

985 

986 if self.config.fitProperMotion: 

987 raPM = (refCat["pm_ra"]).to(u.marcsec).to_value().tolist() 

988 decPM = (refCat["pm_dec"]).to(u.marcsec).to_value().tolist() 

989 parallax = (refCat["parallax"]).to(u.marcsec).to_value().tolist() 

990 cov = _make_ref_covariance_matrix(refCat, version=refCatVersion) 

991 pmDict = {"raPM": raPM, "decPM": decPM, "parallax": parallax} 

992 refObjects.update(pmDict) 

993 refCovariance = cov 

994 

995 if associations is not None: 

996 extensionIndex = np.flatnonzero(extensionInfo.extensionType == "REFERENCE")[0] 

997 visitIndex = extensionInfo.visitIndex[extensionIndex] 

998 detectorIndex = extensionInfo.detectorIndex[extensionIndex] 

999 instrumentIndex = -1 # -1 indicates the reference catalog 

1000 refWcs = extensionInfo.wcs[extensionIndex] 

1001 

1002 associations.addCatalog( 

1003 refWcs, 

1004 "STELLAR", 

1005 visitIndex, 

1006 fieldIndex, 

1007 instrumentIndex, 

1008 detectorIndex, 

1009 extensionIndex, 

1010 np.ones(len(refCat), dtype=bool), 

1011 ra, 

1012 dec, 

1013 np.arange(len(ra)), 

1014 ) 

1015 

1016 return refObjects, refCovariance 

1017 

1018 @staticmethod 

1019 def _find_extension_index(extensionInfo, visit, detector): 

1020 """Find the index for a given extension from its visit and detector 

1021 number. 

1022 

1023 If no match is found, None is returned. 

1024 

1025 Parameters 

1026 ---------- 

1027 extensionInfo : `lsst.pipe.base.Struct` 

1028 Struct containing properties for each extension. 

1029 visit : `int` 

1030 Visit number 

1031 detector : `int` 

1032 Detector number 

1033 

1034 Returns 

1035 ------- 

1036 extensionIndex : `int` or None 

1037 Index of this extension 

1038 """ 

1039 findExtension = np.flatnonzero((extensionInfo.visit == visit) & (extensionInfo.detector == detector)) 

1040 if len(findExtension) == 0: 

1041 extensionIndex = None 

1042 else: 

1043 extensionIndex = findExtension[0] 

1044 return extensionIndex 

1045 

1046 def _load_catalogs_and_associate( 

1047 self, associations, inputCatalogRefs, extensionInfo, fieldIndex=0, instrumentIndex=0 

1048 ): 

1049 """Load the science catalogs and add the sources to the associator 

1050 class `wcsfit.FoFClass`, associating them into matches as you go. 

1051 

1052 Parameters 

1053 ---------- 

1054 associations : `wcsfit.FoFClass` 

1055 Object to which to add the catalog of sources and which performs 

1056 the source association. 

1057 inputCatalogRefs : `list` 

1058 List of DeferredDatasetHandles pointing to visit-level source 

1059 tables. 

1060 extensionInfo : `lsst.pipe.base.Struct` 

1061 Struct containing properties for each extension (visit/detector). 

1062 ``visit`` : `np.ndarray` 

1063 Name of the visit for this extension. 

1064 ``detector`` : `np.ndarray` 

1065 Name of the detector for this extension. 

1066 ``visitIndex`` : `np.ndarray` [`int`] 

1067 Index of visit for this extension. 

1068 ``detectorIndex`` : `np.ndarray` [`int`] 

1069 Index of the detector for this extension. 

1070 ``wcss`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`] 

1071 Initial WCS for this extension. 

1072 ``extensionType`` : `np.ndarray` [`str`] 

1073 "SCIENCE" or "REFERENCE". 

1074 fieldIndex : `int` 

1075 Index of the field for these catalogs. Should be zero assuming all 

1076 data is being fit together. 

1077 instrumentIndex : `int` 

1078 Index of the instrument for these catalogs. Should be zero 

1079 assuming all data comes from the same instrument. 

1080 

1081 Returns 

1082 ------- 

1083 sourceIndices : `list` 

1084 List of boolean arrays used to select sources. 

1085 columns : `list` [`str`] 

1086 List of columns needed from source tables. 

1087 """ 

1088 columns = [ 

1089 "detector", 

1090 "sourceId", 

1091 "x", 

1092 "xErr", 

1093 "y", 

1094 "yErr", 

1095 "ixx", 

1096 "iyy", 

1097 "ixy", 

1098 f"{self.config.sourceFluxType}_instFlux", 

1099 f"{self.config.sourceFluxType}_instFluxErr", 

1100 ] 

1101 if self.sourceSelector.config.doFlags: 

1102 columns.extend(self.sourceSelector.config.flags.bad) 

1103 if self.sourceSelector.config.doUnresolved: 

1104 columns.append(self.sourceSelector.config.unresolved.name) 

1105 if self.sourceSelector.config.doIsolated: 

1106 columns.append(self.sourceSelector.config.isolated.parentName) 

1107 columns.append(self.sourceSelector.config.isolated.nChildName) 

1108 if self.sourceSelector.config.doRequirePrimary: 

1109 columns.append(self.sourceSelector.config.requirePrimary.primaryColName) 

1110 

1111 sourceIndices = [None] * len(extensionInfo.visit) 

1112 for inputCatalogRef in inputCatalogRefs: 

1113 visit = inputCatalogRef.dataId["visit"] 

1114 inputCatalog = inputCatalogRef.get(parameters={"columns": columns}) 

1115 # Get a sorted array of detector names 

1116 detectors = np.unique(inputCatalog["detector"]) 

1117 

1118 for detector in detectors: 

1119 detectorSources = inputCatalog[inputCatalog["detector"] == detector] 

1120 xCov = detectorSources["xErr"] ** 2 

1121 yCov = detectorSources["yErr"] ** 2 

1122 xyCov = ( 

1123 detectorSources["ixy"] * (xCov + yCov) / (detectorSources["ixx"] + detectorSources["iyy"]) 

1124 ) 

1125 # Remove sources with bad shape measurements 

1126 goodShapes = xyCov**2 <= (xCov * yCov) 

1127 selected = self.sourceSelector.run(detectorSources) 

1128 goodInds = selected.selected & goodShapes 

1129 

1130 isStar = np.ones(goodInds.sum()) 

1131 extensionIndex = self._find_extension_index(extensionInfo, visit, detector) 

1132 if extensionIndex is None: 

1133 # This extension does not have the information necessary for the 

1134 # fit. Skip the detections from this detector for this 

1135 # visit. 

1136 continue 

1137 detectorIndex = extensionInfo.detectorIndex[extensionIndex] 

1138 visitIndex = extensionInfo.visitIndex[extensionIndex] 

1139 

1140 sourceIndices[extensionIndex] = goodInds 

1141 

1142 wcs = extensionInfo.wcs[extensionIndex] 

1143 associations.reprojectWCS(wcs, fieldIndex) 

1144 

1145 associations.addCatalog( 

1146 wcs, 

1147 "STELLAR", 

1148 visitIndex, 

1149 fieldIndex, 

1150 instrumentIndex, 

1151 detectorIndex, 

1152 extensionIndex, 

1153 isStar, 

1154 detectorSources[goodInds]["x"].to_list(), 

1155 detectorSources[goodInds]["y"].to_list(), 

1156 np.arange(goodInds.sum()), 

1157 ) 

1158 

1159 associations.sortMatches( 

1160 fieldIndex, minMatches=self.config.minMatches, allowSelfMatches=self.config.allowSelfMatches 

1161 ) 

1162 

1163 return sourceIndices, columns 

1164 
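# A sketch of the shape-based covariance estimate used above: the pixel x-y
# covariance is approximated from the adaptive second moments, and sources
# whose implied 2x2 covariance matrix would not be positive semi-definite
# are rejected.
#
#     xCov = xErr**2
#     yCov = yErr**2
#     xyCov = ixy * (xCov + yCov) / (ixx + iyy)
#     goodShape = xyCov**2 <= xCov * yCov  # i.e. the determinant is >= 0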

1165 def _check_degeneracies(self, associations, extensionInfo): 

1166 """Check that the minimum number of visits and sources needed to 

1167 constrain the model are present. 

1168 

1169 This does not guarantee that the Hessian matrix of the chi-square, 

1170 which is used to fit the model, will be positive-definite, but if the 

1171 checks here do not pass, the matrix is certain to not be 

1172 positive-definite and the model cannot be fit. 

1173 

1174 Parameters 

1175 ---------- 

1176 associations : `wcsfit.FoFClass` 

1177 Object holding the source association information. 

1178 extensionInfo : `lsst.pipe.base.Struct` 

1179 Struct containing properties for each extension (visit/detector): 

1180 ``visit`` : `np.ndarray` 

1181 Name of the visit for this extension. 

1182 ``detector`` : `np.ndarray` 

1183 Name of the detector for this extension. 

1184 ``visitIndex`` : `np.ndarray` [`int`] 

1185 Index of visit for this extension. 

1186 ``detectorIndex`` : `np.ndarray` [`int`] 

1187 Index of the detector for this extension. 

1188 ``wcss`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`] 

1189 Initial WCS for this extension. 

1190 ``extensionType`` : `np.ndarray` [`str`] 

1191 "SCIENCE" or "REFERENCE". 

1192 """ 

1193 # As a baseline, need to have more stars per detector than per-detector 

1194 # parameters, and more stars per visit than per-visit parameters. 

1195 whichExtension = np.array(associations.extn) 

1196 whichDetector = np.zeros(len(whichExtension)) 

1197 whichVisit = np.zeros(len(whichExtension)) 

1198 

1199 for extension, (detector, visit) in enumerate(zip(extensionInfo.detector, extensionInfo.visit)): 

1200 ex_ind = whichExtension == extension 

1201 whichDetector[ex_ind] = detector 

1202 whichVisit[ex_ind] = visit 

1203 

1204 if "BAND/DEVICE/poly" in self.config.deviceModel: 

1205 nCoeffDetectorModel = _nCoeffsFromDegree(self.config.devicePolyOrder) 

1206 unconstrainedDetectors = [] 

1207 for detector in np.unique(extensionInfo.detector): 

1208 numSources = (whichDetector == detector).sum() 

1209 if numSources < nCoeffDetectorModel: 

1210 unconstrainedDetectors.append(str(detector)) 

1211 

1212 if unconstrainedDetectors: 

1213 raise RuntimeError( 

1214 "The model is not constrained. The following detectors do not have enough " 

1215 f"sources ({nCoeffDetectorModel} required): ", 

1216 ", ".join(unconstrainedDetectors), 

1217 ) 

1218 
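# A worked example of the threshold above: with the default devicePolyOrder
# of 4, _nCoeffsFromDegree(4) = (4 + 2)(4 + 1)/2 = 15, so any detector with
# fewer than 15 associated sources cannot constrain its own polynomial map
# and the fit is aborted.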

1219 def make_yaml(self, inputVisitSummary, inputFile=None): 

1220 """Make a YAML-type object that describes the parameters of the fit 

1221 model. 

1222 

1223 Parameters 

1224 ---------- 

1225 inputVisitSummary : `lsst.afw.table.ExposureCatalog` 

1226 Catalog with per-detector summary information. 

1227 inputFile : `str` 

1228 Path to a file that contains a basic model. 

1229 

1230 Returns 

1231 ------- 

1232 inputYaml : `wcsfit.YAMLCollector` 

1233 YAML object containing the model description. 

1234 inputDict : `dict` [`str`, `str`] 

1235 Dictionary containing the model description. 

1236 """ 

1237 if inputFile is not None: 

1238 inputYaml = wcsfit.YAMLCollector(inputFile, "PixelMapCollection") 

1239 else: 

1240 inputYaml = wcsfit.YAMLCollector("", "PixelMapCollection") 

1241 inputDict = {} 

1242 modelComponents = ["INSTRUMENT/DEVICE", "EXPOSURE"] 

1243 baseMap = {"Type": "Composite", "Elements": modelComponents} 

1244 inputDict["EXPOSURE/DEVICE/base"] = baseMap 

1245 

1246 xMin = str(inputVisitSummary["bbox_min_x"].min()) 

1247 xMax = str(inputVisitSummary["bbox_max_x"].max()) 

1248 yMin = str(inputVisitSummary["bbox_min_y"].min()) 

1249 yMax = str(inputVisitSummary["bbox_max_y"].max()) 

1250 

1251 deviceModel = {"Type": "Composite", "Elements": self.config.deviceModel.list()} 

1252 inputDict["INSTRUMENT/DEVICE"] = deviceModel 

1253 for component in self.config.deviceModel: 

1254 if "poly" in component.lower(): 

1255 componentDict = { 

1256 "Type": "Poly", 

1257 "XPoly": {"OrderX": self.config.devicePolyOrder, "SumOrder": True}, 

1258 "YPoly": {"OrderX": self.config.devicePolyOrder, "SumOrder": True}, 

1259 "XMin": xMin, 

1260 "XMax": xMax, 

1261 "YMin": yMin, 

1262 "YMax": yMax, 

1263 } 

1264 elif "identity" in component.lower(): 

1265 componentDict = {"Type": "Identity"} 

1266 

1267 inputDict[component] = componentDict 

1268 

1269 exposureModel = {"Type": "Composite", "Elements": self.config.exposureModel.list()} 

1270 inputDict["EXPOSURE"] = exposureModel 

1271 for component in self.config.exposureModel: 

1272 if "poly" in component.lower(): 

1273 componentDict = { 

1274 "Type": "Poly", 

1275 "XPoly": {"OrderX": self.config.exposurePolyOrder, "SumOrder": "true"}, 

1276 "YPoly": {"OrderX": self.config.exposurePolyOrder, "SumOrder": "true"}, 

1277 } 

1278 elif "identity" in component.lower(): 

1279 componentDict = {"Type": "Identity"} 

1280 

1281 inputDict[component] = componentDict 

1282 

1283 inputYaml.addInput(yaml.dump(inputDict)) 

1284 inputYaml.addInput("Identity:\n Type: Identity\n") 

1285 

1286 return inputYaml, inputDict 

1287 
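# With the default configuration, the dictionary dumped to YAML above has
# roughly the following structure (the device polynomial also carries the
# XMin/XMax/YMin/YMax bounds taken from the visit summary):
#
#     EXPOSURE/DEVICE/base:
#       Type: Composite
#       Elements: [INSTRUMENT/DEVICE, EXPOSURE]
#     INSTRUMENT/DEVICE:
#       Type: Composite
#       Elements: [BAND/DEVICE/poly]
#     BAND/DEVICE/poly:
#       Type: Poly
#       XPoly: {OrderX: 4, SumOrder: true}
#       YPoly: {OrderX: 4, SumOrder: true}
#     EXPOSURE:
#       Type: Composite
#       Elements: [EXPOSURE/poly]
#     EXPOSURE/poly:
#       Type: Poly
#       XPoly: {OrderX: 6, SumOrder: true}
#       YPoly: {OrderX: 6, SumOrder: true}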

1288 def _add_objects(self, wcsf, inputCatalogRefs, sourceIndices, extensionInfo, columns): 

1289 """Add science sources to the wcsfit.WCSFit object. 

1290 

1291 Parameters 

1292 ---------- 

1293 wcsf : `wcsfit.WCSFit` 

1294 WCS-fitting object. 

1295 inputCatalogRefs : `list` 

1296 List of DeferredDatasetHandles pointing to visit-level source 

1297 tables. 

1298 sourceIndices : `list` 

1299 List of boolean arrays used to select sources. 

1300 extensionInfo : `lsst.pipe.base.Struct` 

1301 Struct containing properties for each extension (visit/detector): 

1302 ``visit`` : `np.ndarray` 

1303 Name of the visit for this extension. 

1304 ``detector`` : `np.ndarray` 

1305 Name of the detector for this extension. 

1306 ``visitIndex`` : `np.ndarray` [`int`] 

1307 Index of visit for this extension. 

1308 ``detectorIndex`` : `np.ndarray` [`int`] 

1309 Index of the detector for this extension. 

1310 ``wcss`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`] 

1311 Initial WCS for this extension. 

1312 ``extensionType`` : `np.ndarray` [`str`] 

1313 "SCIENCE" or "REFERENCE". 

1314 columns : `list` [`str`] 

1315 List of columns needed from source tables. 

1316 """ 

1317 for inputCatalogRef in inputCatalogRefs: 

1318 visit = inputCatalogRef.dataId["visit"] 

1319 inputCatalog = inputCatalogRef.get(parameters={"columns": columns}) 

1320 detectors = np.unique(inputCatalog["detector"]) 

1321 

1322 for detector in detectors: 

1323 detectorSources = inputCatalog[inputCatalog["detector"] == detector] 

1324 

1325 extensionIndex = self._find_extension_index(extensionInfo, visit, detector) 

1326 if extensionIndex is None: 

1327 # This extension does not have the information necessary for the 

1328 # fit. Skip the detections from this detector for this 

1329 # visit. 

1330 continue 

1331 

1332 sourceCat = detectorSources[sourceIndices[extensionIndex]] 

1333 

1334 xCov = sourceCat["xErr"] ** 2 

1335 yCov = sourceCat["yErr"] ** 2 

1336 xyCov = sourceCat["ixy"] * (xCov + yCov) / (sourceCat["ixx"] + sourceCat["iyy"]) 

1337 # TODO: add correct xyErr if DM-7101 is ever done. 

1338 

1339 d = { 

1340 "x": sourceCat["x"].to_numpy(), 

1341 "y": sourceCat["y"].to_numpy(), 

1342 "xCov": xCov.to_numpy(), 

1343 "yCov": yCov.to_numpy(), 

1344 "xyCov": xyCov.to_numpy(), 

1345 } 

1346 

1347 wcsf.setObjects(extensionIndex, d, "x", "y", ["xCov", "yCov", "xyCov"]) 

1348 

1349 def _add_ref_objects(self, wcsf, refObjects, refCovariance, extensionInfo, fieldIndex=0): 

1350 """Add reference sources to the wcsfit.WCSFit object. 

1351 

1352 Parameters 

1353 ---------- 

1354 wcsf : `wcsfit.WCSFit` 

1355 WCS-fitting object. 

1356 refObjects : `dict` 

1357 Position and error information of reference objects. 

1358 refCovariance : `list` [`float`] 

1359 Flattened output covariance matrix. 

1360 extensionInfo : `lsst.pipe.base.Struct` 

1361 Struct containing properties for each extension (visit/detector): 

1362 ``visit`` : `np.ndarray` 

1363 Name of the visit for this extension. 

1364 ``detector`` : `np.ndarray` 

1365 Name of the detector for this extension. 

1366 ``visitIndex` : `np.ndarray` [`int`] 

1367 Index of visit for this extension. 

1368 ``detectorIndex`` : `np.ndarray` [`int`] 

1369 Index of the detector for this extension. 

1370 ``wcss`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`] 

1371 Initial WCS for this extension. 

1372 ``extensionType`` : `np.ndarray` [`str`] 

1373 "SCIENCE" or "REFERENCE". 

1374 fieldIndex : `int`, optional 

1375 Index of the field to which these sources correspond. 

1376 """ 

1377 extensionIndex = np.flatnonzero( 

1378 (extensionInfo.extensionType == "REFERENCE") & (extensionInfo.visit == fieldIndex) 

1379 )[0] 

1380 if self.config.fitProperMotion: 

1381 wcsf.setObjects( 

1382 extensionIndex, 

1383 refObjects, 

1384 "ra", 

1385 "dec", 

1386 ["raCov", "decCov", "raDecCov"], 

1387 pmDecKey="decPM", 

1388 pmRaKey="raPM", 

1389 parallaxKey="parallax", 

1390 pmCovKey="fullCov", 

1391 pmCov=refCovariance, 

1392 ) 

1393 else: 

1394 wcsf.setObjects(extensionIndex, refObjects, "ra", "dec", ["raCov", "decCov", "raDecCov"]) 

1395 

1396 def _make_afw_wcs(self, mapDict, centerRA, centerDec, doNormalizePixels=False, xScale=1, yScale=1): 

1397 """Make an `lsst.afw.geom.SkyWcs` from a dictionary of mappings. 

1398 

1399 Parameters 

1400 ---------- 

1401 mapDict : `dict` 

1402 Dictionary of mapping parameters. 

1403 centerRA : `lsst.geom.Angle` 

1404 RA of the tangent point. 

1405 centerDec : `lsst.geom.Angle` 

1406 Declination of the tangent point. 

1407 doNormalizePixels : `bool` 

1408 Whether to normalize pixels so that range is [-1,1]. 

1409 xScale : `float` 

1410 Factor by which to normalize x-dimension. Corresponds to width of 

1411 detector. 

1412 yScale : `float` 

1413 Factor by which to normalize y-dimension. Corresponds to height of 

1414 detector. 

1415 

1416 Returns 

1417 ------- 

1418 outWCS : `lsst.afw.geom.SkyWcs` 

1419 WCS constructed from the input mappings 

1420 """ 

1421 # Set up pixel frames 

1422 pixelFrame = astshim.Frame(2, "Domain=PIXELS") 

1423 normedPixelFrame = astshim.Frame(2, "Domain=NORMEDPIXELS") 

1424 

1425 if doNormalizePixels: 

1426 # Pixels will need to be rescaled before going into the mappings 

1427 normCoefficients = [-1.0, 2.0 / xScale, 0, -1.0, 0, 2.0 / yScale] 

1428 normMap = _convert_to_ast_polymap_coefficients(normCoefficients) 

1429 else: 

1430 normMap = astshim.UnitMap(2) 

1431 

1432 # All of the detectors for one visit map to the same tangent plane 

1433 tangentPoint = lsst.geom.SpherePoint(centerRA, centerDec) 

1434 cdMatrix = afwgeom.makeCdMatrix(1.0 * lsst.geom.degrees, 0 * lsst.geom.degrees, True) 

1435 iwcToSkyWcs = afwgeom.makeSkyWcs(lsst.geom.Point2D(0, 0), tangentPoint, cdMatrix) 

1436 iwcToSkyMap = iwcToSkyWcs.getFrameDict().getMapping("PIXELS", "SKY") 

1437 skyFrame = iwcToSkyWcs.getFrameDict().getFrame("SKY") 

1438 

1439 frameDict = astshim.FrameDict(pixelFrame) 

1440 frameDict.addFrame("PIXELS", normMap, normedPixelFrame) 

1441 

1442 currentFrameName = "NORMEDPIXELS" 

1443 

1444 # Dictionary values are ordered according to the maps' application. 

1445 for m, mapElement in enumerate(mapDict.values()): 

1446 mapType = mapElement["Type"] 

1447 

1448 if mapType == "Poly": 

1449 mapCoefficients = mapElement["Coefficients"] 

1450 astMap = _convert_to_ast_polymap_coefficients(mapCoefficients) 

1451 elif mapType == "Identity": 

1452 astMap = astshim.UnitMap(2) 

1453 else: 

1454 raise ValueError(f"Converting map type {mapType} to WCS is not supported") 

1455 

1456 if m == len(mapDict) - 1: 

1457 newFrameName = "IWC" 

1458 else: 

1459 newFrameName = "INTERMEDIATE" + str(m) 

1460 newFrame = astshim.Frame(2, f"Domain={newFrameName}") 

1461 frameDict.addFrame(currentFrameName, astMap, newFrame) 

1462 currentFrameName = newFrameName 

1463 frameDict.addFrame("IWC", iwcToSkyMap, skyFrame) 

1464 

1465 outWCS = afwgeom.SkyWcs(frameDict) 

1466 return outWCS 

1467 
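# A sketch of the pixel normalization above: the coefficient vector
# [-1, 2/xScale, 0, -1, 0, 2/yScale] encodes
#
#     u = -1 + 2 * x / xScale
#     v = -1 + 2 * y / yScale
#
# which maps [0, xScale] x [0, yScale] onto [-1, 1]^2, the domain on which
# the gbdes polynomial coefficients were fit.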

1468 def _make_outputs(self, wcsf, visitSummaryTables, exposureInfo, mapTemplate=None): 

1469 """Make a WCS object out of the WCS models. 

1470 

1471 Parameters 

1472 ---------- 

1473 wcsf : `wcsfit.WCSFit` 

1474 WCSFit object, assumed to have fit model. 

1475 visitSummaryTables : `list` [`lsst.afw.table.ExposureCatalog`] 

1476 Catalogs with per-detector summary information from which to grab 

1477 detector information. 

1478 exposureInfo : `lsst.pipe.base.Struct` 

1479 Struct containing properties of the input exposures, including: 

1480 ``detectors`` : `list` 

1481 List of all detectors in any visit. 

1482 ``ras`` : `np.ndarray` [`float`] 

1483 Boresight RA of each visit (radians). 

1484 ``decs`` : `np.ndarray` [`float`] 

1485 Boresight declination of each visit (radians). 

1486 ``medianEpoch`` : `float` 

1487 Median epoch of all visits in decimal-year format. 

1488 mapTemplate : `dict`, optional 

1489 Dictionary describing the structure of the astrometric model, 

1490 used to reconstruct the WCS for extensions that were not 

1491 directly fit. 

1492 Returns 

1493 ------- 

1494 catalogs : `dict` [`str`, `lsst.afw.table.ExposureCatalog`] 

1495 Dictionary of `lsst.afw.table.ExposureCatalog` objects with the WCS 

1496 set to the WCS fit in wcsf, keyed by the visit name. 

1497 """ 

1498 # Get the parameters of the fit models 

1499 mapParams = wcsf.mapCollection.getParamDict() 

1500 

1501 # Set up the schema for the output catalogs 

1502 schema = lsst.afw.table.ExposureTable.makeMinimalSchema() 

1503 schema.addField("visit", type="L", doc="Visit number") 

1504 schema.addField( 

1505 "recoveredWcs", 

1506 type="Flag", 

1507 doc="Input WCS missing, output recovered from other input visit/detectors.", 

1508 ) 

1509 

1510 # Pixels will need to be rescaled before going into the mappings 

1511 sampleDetector = visitSummaryTables[0][0] 

1512 xscale = sampleDetector["bbox_max_x"] - sampleDetector["bbox_min_x"] 

1513 yscale = sampleDetector["bbox_max_y"] - sampleDetector["bbox_min_y"] 
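
# This assumes that all detectors share the same bounding-box

# dimensions, since this sample detector's width and height are used

# to normalize the pixel coordinates of every extension.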

1514 

1515 catalogs = {} 

1516 for v, visitSummary in enumerate(visitSummaryTables): 

1517 visit = visitSummary[0]["visit"] 

1518 

1519 visitMap = wcsf.mapCollection.orderAtoms(f"{visit}")[0] 

1520 visitMapType = wcsf.mapCollection.getMapType(visitMap) 

1521 if (visitMap not in mapParams) and (visitMapType != "Identity"): 

1522 self.log.warning("Visit %d was dropped because of an insufficient number of sources.", visit) 

1523 continue 

1524 

1525 catalog = lsst.afw.table.ExposureCatalog(schema) 

1526 catalog.resize(len(exposureInfo.detectors)) 

1527 catalog["visit"] = visit 

1528 

1529 for d, detector in enumerate(exposureInfo.detectors): 

1530 mapName = f"{visit}/{detector}" 

1531 if mapName in wcsf.mapCollection.allMapNames(): 

1532 mapElements = wcsf.mapCollection.orderAtoms(f"{mapName}/base") 

1533 catalog[d]["recoveredWcs"] = False 

1534 else: 

1535 # This extension was not fit, but its WCS can be recovered by 

1536 # combining the maps fit to the same detector on other visits 

1537 # with the maps fit to other detectors on this visit, as 

1538 # described by the generic model template. 

1539 genericElements = mapTemplate["EXPOSURE/DEVICE/base"]["Elements"] 

1540 mapElements = [] 

1541 instrument = visitSummary[0].getVisitInfo().instrumentLabel 

1542 # Go through the generic map components to build the names 

1543 # of the specific maps for this extension. 
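
# For example, a hypothetical generic element "BAND/DEVICE/poly"

# would be specialized to "<instrument>/<detector>/poly" for this

# extension.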

1544 for component in genericElements: 

1545 elements = mapTemplate[component]["Elements"] 

1546 for element in elements: 

1547 # TODO: DM-42519, gbdes sets the "BAND" to the 

1548 # instrument name currently. This will need to be 

1549 # disambiguated if we run on multiple bands at 

1550 # once. 

1551 element = element.replace("BAND", str(instrument)) 

1552 element = element.replace("EXPOSURE", str(visit)) 

1553 element = element.replace("DEVICE", str(detector)) 

1554 mapElements.append(element) 

1555 catalog[d]["recoveredWcs"] = True 

1556 mapDict = {} 

1557 for m, mapElement in enumerate(mapElements): 

1558 mapType = wcsf.mapCollection.getMapType(mapElement) 

1559 mapDict[mapElement] = {"Type": mapType} 

1560 

1561 if mapType == "Poly": 

1562 mapCoefficients = mapParams[mapElement] 

1563 mapDict[mapElement]["Coefficients"] = mapCoefficients 

1564 

1565 # The RA and Dec of the visit are needed for the last step of 

1566 # the mapping from the visit tangent plane to RA and Dec 

1567 outWCS = self._make_afw_wcs( 

1568 mapDict, 

1569 exposureInfo.ras[v] * lsst.geom.radians, 

1570 exposureInfo.decs[v] * lsst.geom.radians, 

1571 doNormalizePixels=True, 

1572 xScale=xscale, 

1573 yScale=yscale, 

1574 ) 

1575 

1576 catalog[d].setId(detector) 

1577 catalog[d].setWcs(outWCS) 

1578 catalog.sort() 

1579 catalogs[visit] = catalog 

1580 

1581 return catalogs 

1582 

1583 def _compute_model_params(self, wcsf): 

1584 """Get the WCS model parameters and covariance and convert to a 

1585 dictionary that will be readable as a pandas dataframe or other table. 

1586 

1587 Parameters 

1588 ---------- 

1589 wcsf : `wcsfit.WCSFit` 

1590 WCSFit object, assumed to have fit model. 

1591 

1592 Returns 

1593 ------- 

1594 modelParams : `dict` 

1595 Parameters and covariance of the best-fit WCS model. 

1596 """ 

1597 modelParamDict = wcsf.mapCollection.getParamDict() 

1598 modelCovariance = wcsf.getModelCovariance() 

1599 

1600 modelParams = {k: [] for k in ["mapName", "coordinate", "parameter", "coefficientNumber"]} 

1601 i = 0 

1602 for mapName, params in modelParamDict.items(): 

1603 nCoeffs = len(params) 

1604 # There are an equal number of x and y coordinate parameters 

1605 nCoordCoeffs = nCoeffs // 2 

1606 modelParams["mapName"].extend([mapName] * nCoeffs) 

1607 modelParams["coordinate"].extend(["x"] * nCoordCoeffs) 

1608 modelParams["coordinate"].extend(["y"] * nCoordCoeffs) 

1609 modelParams["parameter"].extend(params) 

1610 modelParams["coefficientNumber"].extend(np.arange(nCoordCoeffs)) 

1611 modelParams["coefficientNumber"].extend(np.arange(nCoordCoeffs)) 

1612 
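
# Each "<mapName>_<coord>_<p>_cov" entry holds one row of the full

# covariance matrix, so every key in modelParams ends up with one

# value per model coefficient and the dictionary can be written as a

# table with one row per parameter.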

1613 for p in range(nCoeffs): 

1614 if p < nCoordCoeffs: 

1615 coord = "x" 

1616 else: 

1617 coord = "y" 

1618 modelParams[f"{mapName}_{coord}_{p}_cov"] = modelCovariance[i] 

1619 i += 1 

1620 

1621 # Convert the dictionary values from lists to numpy arrays. 

1622 for key, value in modelParams.items(): 

1623 modelParams[key] = np.array(value) 

1624 

1625 return modelParams 

1626 

1627 

1628class GbdesGlobalAstrometricFitConnections( 

1629 pipeBase.PipelineTaskConnections, dimensions=("instrument", "physical_filter") 

1630): 

1631 inputVisitSummaries = pipeBase.connectionTypes.Input( 

1632 doc=( 

1633 "Per-visit consolidated exposure metadata built from calexps. " 

1634 "These catalogs use detector id for the id and must be sorted for " 

1635 "fast lookups of a detector." 

1636 ), 

1637 name="visitSummary", 

1638 storageClass="ExposureCatalog", 

1639 dimensions=("instrument", "visit"), 

1640 multiple=True, 

1641 ) 

1642 referenceCatalog = pipeBase.connectionTypes.PrerequisiteInput( 

1643 doc="The astrometry reference catalog to match to loaded input catalog sources.", 

1644 name="gaia_dr3_20230707", 

1645 storageClass="SimpleCatalog", 

1646 dimensions=("skypix",), 

1647 deferLoad=True, 

1648 multiple=True, 

1649 ) 

1650 isolatedStarSources = pipeBase.connectionTypes.Input( 

1651 doc="Catalog of matched sources.", 

1652 name="isolated_star_sources", 

1653 storageClass="DataFrame", 

1654 dimensions=( 

1655 "instrument", 

1656 "skymap", 

1657 "tract", 

1658 ), 

1659 multiple=True, 

1660 deferLoad=True, 

1661 ) 

1662 isolatedStarCatalogs = pipeBase.connectionTypes.Input( 

1663 doc="Catalog of objects corresponding to the isolatedStarSources.", 

1664 name="isolated_star_cat", 

1665 storageClass="DataFrame", 

1666 dimensions=( 

1667 "instrument", 

1668 "skymap", 

1669 "tract", 

1670 ), 

1671 multiple=True, 

1672 deferLoad=True, 

1673 ) 

1674 outputWcs = pipeBase.connectionTypes.Output( 

1675 doc=( 

1676 "Per-visit world coordinate systems derived from the fitted model. These catalogs only contain " 

1677 "entries for detectors with an output, and use the detector id for the catalog id, sorted on id " 

1678 "for fast lookups of a detector." 

1679 ), 

1680 name="gbdesGlobalAstrometricFitSkyWcsCatalog", 

1681 storageClass="ExposureCatalog", 

1682 dimensions=("instrument", "visit"), 

1683 multiple=True, 

1684 ) 

1685 outputCatalog = pipeBase.connectionTypes.Output( 

1686 doc=( 

1687 "Catalog of sources used in fit, along with residuals in pixel coordinates and tangent " 

1688 "plane coordinates and chisq values." 

1689 ), 

1690 name="gbdesGlobalAstrometricFit_fitStars", 

1691 storageClass="ArrowNumpyDict", 

1692 dimensions=("instrument", "physical_filter"), 

1693 ) 

1694 starCatalog = pipeBase.connectionTypes.Output( 

1695 doc=( 

1696 "Catalog of best-fit object positions. Also includes the fit proper motion and parallax if " 

1697 "fitProperMotion is True." 

1698 ), 

1699 name="gbdesGlobalAstrometricFit_starCatalog", 

1700 storageClass="ArrowNumpyDict", 

1701 dimensions=("instrument", "physical_filter"), 

1702 ) 

1703 modelParams = pipeBase.connectionTypes.Output( 

1704 doc="WCS parameters and covariance.", 

1705 name="gbdesGlobalAstrometricFit_modelParams", 

1706 storageClass="ArrowNumpyDict", 

1707 dimensions=("instrument", "physical_filter"), 

1708 ) 

1709 

1710 def getSpatialBoundsConnections(self): 

1711 return ("inputVisitSummaries",) 

1712 

1713 def __init__(self, *, config=None): 

1714 super().__init__(config=config) 

1715 

1716 if not self.config.saveModelParams: 

1717 self.outputs.remove("modelParams") 

1718 

1719 

1720class GbdesGlobalAstrometricFitConfig( 

1721 GbdesAstrometricFitConfig, pipelineConnections=GbdesGlobalAstrometricFitConnections 

1722): 

1723 visitOverlap = pexConfig.Field( 

1724 dtype=float, 

1725 default=1.0, 

1726 doc=( 

1727 "The linkage distance threshold above which clustered groups of visits will not be merged " 

1728 "together in an agglomerative clustering algorithm. The linkage distance is calculated using the " 

1729 "minimum distance between the field-of-view centers of a given visit and all other visits in a " 

1730 "group, and is in units of the field-of-view radius. The resulting groups of visits define the " 

1731 "fields for the astrometric fit." 

1732 ), 

1733 ) 

1734 

1735 

1736class GbdesGlobalAstrometricFitTask(GbdesAstrometricFitTask): 

1737 """Calibrate the WCS across multiple visits and multiple fields using the 

1738 GBDES package. 

1739 

1740 This class assumes that the input visits can be separated into contiguous 

1741 groups, each of which covers an area of less than a 

1742 hemisphere. 

1743 """ 

1744 

1745 ConfigClass = GbdesGlobalAstrometricFitConfig 

1746 _DefaultName = "gbdesAstrometricFit" 

1747 

1748 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

1749 # We override runQuantum to set up the refObjLoaders 

1750 inputs = butlerQC.get(inputRefs) 

1751 

1752 instrumentName = butlerQC.quantum.dataId["instrument"] 

1753 

1754 # Ensure the inputs are in a consistent and deterministic order 

1755 inputSumVisits = np.array([inputSum[0]["visit"] for inputSum in inputs["inputVisitSummaries"]]) 

1756 inputs["inputVisitSummaries"] = [inputs["inputVisitSummaries"][v] for v in inputSumVisits.argsort()] 

1757 inputRefHtm7s = np.array([inputRefCat.dataId["htm7"] for inputRefCat in inputRefs.referenceCatalog]) 

1758 inputRefCatRefs = [inputRefs.referenceCatalog[htm7] for htm7 in inputRefHtm7s.argsort()] 

1759 inputRefCats = np.array([inputRefCat.dataId["htm7"] for inputRefCat in inputs["referenceCatalog"]]) 

1760 inputs["referenceCatalog"] = [inputs["referenceCatalog"][v] for v in inputRefCats.argsort()] 

1761 inputIsolatedStarSourceTracts = np.array( 

1762 [isolatedStarSource.dataId["tract"] for isolatedStarSource in inputs["isolatedStarSources"]] 

1763 ) 

1764 inputIsolatedStarCatalogTracts = np.array( 

1765 [isolatedStarCatalog.dataId["tract"] for isolatedStarCatalog in inputs["isolatedStarCatalogs"]] 

1766 ) 

1767 for tract in inputIsolatedStarCatalogTracts: 

1768 if tract not in inputIsolatedStarSourceTracts: 

1769 raise RuntimeError(f"tract {tract} in isolated_star_cats but not isolated_star_sources") 

1770 inputs["isolatedStarSources"] = np.array( 

1771 [inputs["isolatedStarSources"][t] for t in inputIsolatedStarSourceTracts.argsort()] 

1772 ) 

1773 inputs["isolatedStarCatalogs"] = np.array( 

1774 [inputs["isolatedStarCatalogs"][t] for t in inputIsolatedStarSourceTracts.argsort()] 

1775 ) 
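
# Sorting both lists by the tract order of the isolated star sources

# assumes that the isolatedStarCatalogs handles arrive in the same

# tract order as the isolatedStarSources handles.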

1776 

1777 refConfig = LoadReferenceObjectsConfig() 

1778 if self.config.applyRefCatProperMotion: 

1779 refConfig.requireProperMotion = True 

1780 refObjectLoader = ReferenceObjectLoader( 

1781 dataIds=[ref.datasetRef.dataId for ref in inputRefCatRefs], 

1782 refCats=inputs.pop("referenceCatalog"), 

1783 config=refConfig, 

1784 log=self.log, 

1785 ) 

1786 

1787 output = self.run(**inputs, instrumentName=instrumentName, refObjectLoader=refObjectLoader) 

1788 

1789 for outputRef in outputRefs.outputWcs: 

1790 visit = outputRef.dataId["visit"] 

1791 butlerQC.put(output.outputWcss[visit], outputRef) 

1792 butlerQC.put(output.outputCatalog, outputRefs.outputCatalog) 

1793 butlerQC.put(output.starCatalog, outputRefs.starCatalog) 

1794 if self.config.saveModelParams: 

1795 butlerQC.put(output.modelParams, outputRefs.modelParams) 

1796 

1797 def run( 

1798 self, 

1799 inputVisitSummaries, 

1800 isolatedStarSources, 

1801 isolatedStarCatalogs, 

1802 instrumentName="", 

1803 refEpoch=None, 

1804 refObjectLoader=None, 

1805 ): 

1806 """Run the WCS fit for a given set of visits 

1807 

1808 Parameters 

1809 ---------- 

1810 inputVisitSummaries : `list` [`lsst.afw.table.ExposureCatalog`] 

1811 List of catalogs with per-detector summary information. 

1812 isolatedStarSources : `list` [`DeferredDatasetHandle`] 

1813 List of handles pointing to isolated star sources. 

1814 isolatedStarCatalogs : `list` [`DeferredDatasetHandle`] 

1815 List of handles pointing to isolated star catalogs. 

1816 instrumentName : `str`, optional 

1817 Name of the instrument used. This is only used for labelling. 

1818 refEpoch : `float`, optional 

1819 Epoch of the reference objects in MJD. 

1820 refObjectLoader : instance of 

1821 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader`, 

1822 optional 

1823 Reference object loader instance. 

1824 

1825 Returns 

1826 ------- 

1827 result : `lsst.pipe.base.Struct` 

1828 ``outputWcss`` : `list` [`lsst.afw.table.ExposureCatalog`] 

1829 List of exposure catalogs (one per visit) with the WCS for each 

1830 detector set by the new fitted WCS. 

1831 ``fitModel`` : `wcsfit.WCSFit` 

1832 Model-fitting object with final model parameters. 

1833 ``outputCatalog`` : `pyarrow.Table` 

1834 Catalog with fit residuals of all sources used. 

1835 ``starCatalog`` : `pyarrow.Table` 

1836 Catalog with best-fit positions of the objects fit. 

1837 ``modelParams`` : `dict` 

1838 Parameters and covariance of the best-fit WCS model. 

1839 """ 

1840 self.log.info("Gather instrument, exposure, and field info") 

1841 # Set up an instrument object 

1842 instrument = wcsfit.Instrument(instrumentName) 

1843 

1844 # Get information about the extent of the input visits 

1845 fields, fieldRegions = self._prep_sky(inputVisitSummaries) 

1846 

1847 # Get RA, Dec, MJD, etc., for the input visits 

1848 exposureInfo, exposuresHelper, extensionInfo = self._get_exposure_info( 

1849 inputVisitSummaries, instrument, fieldRegions=fieldRegions 

1850 ) 

1851 

1852 self.log.info("Load associated sources") 

1853 medianEpoch = astropy.time.Time(exposureInfo.medianEpoch, format="decimalyear").mjd 

1854 allRefObjects, allRefCovariances = {}, {} 

1855 for f, fieldRegion in fieldRegions.items(): 

1856 refObjects, refCovariance = self._load_refcat( 

1857 refObjectLoader, extensionInfo, epoch=medianEpoch, region=fieldRegion 

1858 ) 

1859 allRefObjects[f] = refObjects 

1860 allRefCovariances[f] = refCovariance 

1861 

1862 associations, sourceDict = self._associate_from_isolated_sources( 

1863 isolatedStarSources, isolatedStarCatalogs, extensionInfo, allRefObjects 

1864 ) 

1865 

1866 self.log.info("Fit the WCSs") 

1867 # Set up a YAML-type string using the config variables and a sample 

1868 # visit 

1869 inputYaml, mapTemplate = self.make_yaml(inputVisitSummaries[0]) 

1870 

1871 # Set the verbosity level for WCSFit from the task log level. 

1872 # TODO: DM-36850, Add lsst.log to gbdes so that log messages are 

1873 # properly propagated. 

1874 loglevel = self.log.getEffectiveLevel() 

1875 if loglevel >= self.log.WARNING: 

1876 verbose = 0 

1877 elif loglevel == self.log.INFO: 

1878 verbose = 1 

1879 else: 

1880 verbose = 2 

1881 

1882 # Set up the WCS-fitting class using the source matches from the 

1883 # isolated star sources plus the reference catalog. 

1884 wcsf = wcsfit.WCSFit( 

1885 fields, 

1886 [instrument], 

1887 exposuresHelper, 

1888 extensionInfo.visitIndex, 

1889 extensionInfo.detectorIndex, 

1890 inputYaml, 

1891 extensionInfo.wcs, 

1892 associations.sequence, 

1893 associations.extn, 

1894 associations.obj, 

1895 sysErr=self.config.systematicError, 

1896 refSysErr=self.config.referenceSystematicError, 

1897 usePM=self.config.fitProperMotion, 

1898 verbose=verbose, 

1899 ) 

1900 

1901 # Add the science and reference sources 

1902 self._add_objects(wcsf, sourceDict, extensionInfo) 
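
# Reference extensions are labeled with a "visit" equal to -1 times

# the field index, matching the convention used when associating

# reference objects with the isolated star sources.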

1903 for f in fieldRegions.keys(): 

1904 self._add_ref_objects( 

1905 wcsf, allRefObjects[f], allRefCovariances[f], extensionInfo, fieldIndex=-1 * f 

1906 ) 

1907 

1908 # Do the WCS fit 

1909 wcsf.fit( 

1910 reserveFraction=self.config.fitReserveFraction, randomNumberSeed=self.config.fitReserveRandomSeed 

1911 ) 

1912 self.log.info("WCS fitting done") 

1913 

1914 outputWcss = self._make_outputs(wcsf, inputVisitSummaries, exposureInfo, mapTemplate=mapTemplate) 

1915 outputCatalog = wcsf.getOutputCatalog() 

1916 starCatalog = wcsf.getStarCatalog() 

1917 modelParams = self._compute_model_params(wcsf) if self.config.saveModelParams else None 

1918 

1919 return pipeBase.Struct( 

1920 outputWcss=outputWcss, 

1921 fitModel=wcsf, 

1922 outputCatalog=outputCatalog, 

1923 starCatalog=starCatalog, 

1924 modelParams=modelParams, 

1925 ) 

1926 

1927 def _prep_sky(self, inputVisitSummaries): 

1928 """Cluster the input visits into disjoint groups that will define 

1929 separate fields in the astrometric fit, and, for each group, get the 

1930 convex hull around all of its component visits. 

1931 

1932 The groups are created such that each visit overlaps with at least one 

1933 other visit in the same group by the `visitOverlap` amount, which is 

1934 calculated as a fraction of the field-of-view radius, and no visits 

1935 from separate groups overlap by more than this amount. 

1936 

1937 Parameters 

1938 ---------- 

1939 inputVisitSummaries : `list` [`lsst.afw.table.ExposureCatalog`] 

1940 List of catalogs with per-detector summary information. 

1941 

1942 Returns 

1943 ------- 

1944 fields : `wcsfit.Fields` 

1945 Object with field information. 

1946 fieldRegions : `dict` [`int`, `lsst.sphgeom.ConvexPolygon`] 

1947 Dictionary of regions encompassing each group of input visits, 

1948 keyed by an arbitrary index. 

1949 """ 

1950 allDetectorCorners = [] 

1951 mjds = [] 

1952 radecs = [] 

1953 radii = [] 

1954 for visSum in inputVisitSummaries: 

1955 detectorCorners = [ 

1956 lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector() 

1957 for (ra, dec) in zip(visSum["raCorners"].ravel(), visSum["decCorners"].ravel()) 

1958 if (np.isfinite(ra) and (np.isfinite(dec))) 

1959 ] 

1960 allDetectorCorners.append(detectorCorners) 

1961 

1962 # Get center and approximate radius of field of view 

1963 boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(detectorCorners).getBoundingCircle() 

1964 center = lsst.geom.SpherePoint(boundingCircle.getCenter()) 

1965 ra = center.getRa().asDegrees() 

1966 dec = center.getDec().asDegrees() 

1967 radecs.append([ra, dec]) 

1968 radius = boundingCircle.getOpeningAngle() 

1969 radii.append(radius) 

1970 

1971 obsDate = visSum[0].getVisitInfo().getDate() 

1972 obsMJD = obsDate.get(obsDate.MJD) 

1973 mjds.append(obsMJD) 

1974 

1975 # Find groups of visits where any one of the visits overlaps another by 

1976 # a given fraction of the field-of-view radius. 
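
# For example, with the default visitOverlap of 1.0, two visits whose

# centers lie within one median field-of-view radius of each other

# (single-linkage clustering on the RA/Dec centers) are placed in the

# same field.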

1977 distance = self.config.visitOverlap * np.median(radii) 

1978 clustering = AgglomerativeClustering( 

1979 distance_threshold=distance.asDegrees(), n_clusters=None, linkage="single" 

1980 ) 

1981 clusters = clustering.fit(np.array(radecs)) 

1982 

1983 medianMJD = np.median(mjds) 

1984 medianEpoch = astropy.time.Time(medianMJD, format="mjd").decimalyear 

1985 

1986 fieldNames = [] 

1987 fieldRAs = [] 

1988 fieldDecs = [] 

1989 epochs = [] 

1990 fieldRegions = {} 

1991 

1992 for i in range(clusters.n_clusters_): 

1993 fieldInd = clusters.labels_ == i 

1994 # Concatenate the lists of all detector corners that are in this 

1995 # field 

1996 fieldDetectors = sum([allDetectorCorners[f] for f, fInd in enumerate(fieldInd) if fInd], []) 

1997 hull = lsst.sphgeom.ConvexPolygon.convexHull(fieldDetectors) 

1998 center = lsst.geom.SpherePoint(hull.getCentroid()) 

1999 ra = center.getRa().asDegrees() 

2000 dec = center.getDec().asDegrees() 

2001 

2002 fieldRegions[i] = hull 

2003 fieldNames.append(str(i)) 

2004 fieldRAs.append(ra) 

2005 fieldDecs.append(dec) 

2006 # Use the same median epoch for all fields so that the final object 

2007 # positions are calculated for the same epoch. 

2008 epochs.append(medianEpoch) 

2009 

2010 fields = wcsfit.Fields(fieldNames, fieldRAs, fieldDecs, epochs) 

2011 

2012 return fields, fieldRegions 

2013 

2014 def _associate_from_isolated_sources( 

2015 self, isolatedStarSourceRefs, isolatedStarCatalogRefs, extensionInfo, refObjects 

2016 ): 

2017 """Match the input catalog of isolated stars with the reference catalog 

2018 and transform the combined isolated star sources and reference sources 

2019 into the format needed for gbdes. 

2020 

2021 Parameters 

2022 ---------- 

2023 isolatedStarSourceRefs : `list` [`DeferredDatasetHandle`] 

2024 List of handles pointing to isolated star sources. 

2025 isolatedStarCatalogRefs : `list` [`DeferredDatasetHandle`] 

2026 List of handles pointing to isolated star catalogs. 

2027 extensionInfo : `lsst.pipe.base.Struct` 

2028 Struct containing properties for each extension (visit/detector). 

2029 ``visit`` : `np.ndarray` 

2030 Name of the visit for this extension. 

2031 ``detector`` : `np.ndarray` 

2032 Name of the detector for this extension. 

2033 ``visitIndex`` : `np.ndarray` [`int`] 

2034 Index of visit for this extension. 

2035 ``detectorIndex`` : `np.ndarray` [`int`] 

2036 Index of the detector for this extension. 

2037 ``wcss`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`] 

2038 Initial WCS for this extension. 

2039 ``extensionType`` : `np.ndarray` [`str`] 

2040 "SCIENCE" or "REFERENCE". 

2041 refObjects : `dict` 

2042 Dictionary of dictionaries containing the position and error 

2043 information of reference objects. 

2044 

2045 Returns 

2046 ------- 

2047 associations : `lsst.pipe.base.Struct` 

2048 Struct containing the associations of sources with objects. 

2049 sourceDict : `dict` [`int`, `dict` [`int`, `dict` [`str`, `list` [`float`]]]] 

2050 Dictionary containing the source centroids for each visit. 

2051 """ 

2052 sequences = [] 

2053 extensions = [] 

2054 object_indices = [] 

2055 

2056 sourceColumns = ["x", "y", "xErr", "yErr", "ixx", "ixy", "iyy", "obj_index", "visit", "detector"] 

2057 catalogColumns = ["ra", "dec"] 

2058 

2059 sourceDict = dict([(visit, {}) for visit in np.unique(extensionInfo.visit)]) 

2060 for visit, detector in zip(extensionInfo.visit, extensionInfo.detector): 

2061 sourceDict[visit][detector] = {"x": [], "y": [], "xCov": [], "yCov": [], "xyCov": []} 

2062 

2063 for isolatedStarCatalogRef, isolatedStarSourceRef in zip( 

2064 isolatedStarCatalogRefs, isolatedStarSourceRefs 

2065 ): 

2066 isolatedStarCatalog = isolatedStarCatalogRef.get(parameters={"columns": catalogColumns}) 

2067 isolatedStarSources = isolatedStarSourceRef.get(parameters={"columns": sourceColumns}) 

2068 if len(isolatedStarCatalog) == 0: 

2069 # This is expected when only one visit overlaps with a given 

2070 # tract, meaning that no sources can be associated. 

2071 self.log.debug( 

2072 "Skipping tract %d, which has no associated isolated stars", 

2073 isolatedStarCatalogRef.dataId["tract"], 

2074 ) 

2075 continue 

2076 

2077 # Match the reference stars to the existing isolated stars, then 

2078 # insert the reference stars into the isolated star sources. 

2079 allVisits = np.copy(isolatedStarSources["visit"]) 

2080 allDetectors = np.copy(isolatedStarSources["detector"]) 

2081 allObjectIndices = np.copy(isolatedStarSources["obj_index"]) 

2082 issIndices = np.copy(isolatedStarSources.index) 

2083 for f, regionRefObjects in refObjects.items(): 

2084 # Use the same matching technique that is done in 

2085 # isolatedStarAssociation and fgcmBuildFromIsolatedStars. 

2086 with Matcher( 

2087 isolatedStarCatalog["ra"].to_numpy(), isolatedStarCatalog["dec"].to_numpy() 

2088 ) as matcher: 

2089 idx, i1, i2, d = matcher.query_radius( 

2090 np.array(regionRefObjects["ra"]), 

2091 np.array(regionRefObjects["dec"]), 

2092 self.config.matchRadius / 3600.0, 

2093 return_indices=True, 

2094 ) 

2095 
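
# The isolated star sources are assumed to be sorted by obj_index, so

# searchsorted gives the positions at which the matched reference

# entries can be spliced in next to the science sources of the same

# object.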

2096 refSort = np.searchsorted(isolatedStarSources["obj_index"], i1) 

2097 refDetector = np.ones(len(i1)) * -1 

2098 # The "visit" for the reference catalogs is the field times -1. 

2099 refVisit = np.ones(len(i1)) * f * -1 

2100 

2101 allVisits = np.insert(allVisits, refSort, refVisit) 

2102 allDetectors = np.insert(allDetectors, refSort, refDetector) 

2103 allObjectIndices = np.insert(allObjectIndices, refSort, i1) 

2104 issIndices = np.insert(issIndices, refSort, i2) 

2105 

2106 # Loop through the associated sources to convert them to the gbdes 

2107 # format, which requires the extension index, the source's index in 

2108 # the input table, and a sequence number corresponding to the 

2109 # object with which it is associated. 
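
# For example, an object detected on three extensions contributes

# three entries with sequence numbers 0, 1, and 2; the counter resets

# to 0 when the sources of the next object begin.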

2110 sequence = 0 

2111 obj_index = allObjectIndices[0] 

2112 for visit, detector, row, obj_ind in zip(allVisits, allDetectors, issIndices, allObjectIndices): 

2113 extensionIndex = np.flatnonzero( 

2114 (extensionInfo.visit == visit) & (extensionInfo.detector == detector) 

2115 ) 

2116 if len(extensionIndex) == 0: 

2117 # This happens for runs where you are not using all the 

2118 # visits overlapping a given tract that were included in 

2119 # the isolated star association task. 

2120 continue 

2121 else: 

2122 extensionIndex = extensionIndex[0] 

2123 

2124 extensions.append(extensionIndex) 

2125 if visit <= 0: 

2126 object_indices.append(row) 

2127 else: 

2128 object_indices.append(len(sourceDict[visit][detector]["x"])) 

2129 source = isolatedStarSources.loc[row] 

2130 sourceDict[visit][detector]["x"].append(source["x"]) 

2131 sourceDict[visit][detector]["y"].append(source["y"]) 

2132 xCov = source["xErr"] ** 2 

2133 yCov = source["yErr"] ** 2 

2134 xyCov = source["ixy"] * (xCov + yCov) / (source["ixx"] + source["iyy"]) 

2135 # TODO: add correct xyErr if DM-7101 is ever done. 
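
# Until then, the x-y covariance is approximated by scaling the

# summed centroid variances by the normalized cross moment,

# ixy / (ixx + iyy).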

2136 sourceDict[visit][detector]["xCov"].append(xCov) 

2137 sourceDict[visit][detector]["yCov"].append(yCov) 

2138 sourceDict[visit][detector]["xyCov"].append(xyCov) 

2139 if obj_ind != obj_index: 

2140 sequence = 0 

2141 sequences.append(sequence) 

2142 obj_index = obj_ind 

2143 sequence += 1 

2144 else: 

2145 sequences.append(sequence) 

2146 sequence += 1 

2147 

2148 associations = pipeBase.Struct(extn=extensions, obj=object_indices, sequence=sequences) 

2149 return associations, sourceDict 

2150 

2151 def _add_objects(self, wcsf, sourceDict, extensionInfo): 

2152 """Add science sources to the wcsfit.WCSFit object. 

2153 

2154 Parameters 

2155 ---------- 

2156 wcsf : `wcsfit.WCSFit` 

2157 WCS-fitting object. 

2158 sourceDict : `dict` 

2159 Dictionary containing the source centroids for each visit. 

2160 extensionInfo : `lsst.pipe.base.Struct` 

2161 Struct containing properties for each extension (visit/detector). 

2162 ``visit`` : `np.ndarray` 

2163 Name of the visit for this extension. 

2164 ``detector`` : `np.ndarray` 

2165 Name of the detector for this extension. 

2166 ``visitIndex`` : `np.ndarray` [`int`] 

2167 Index of visit for this extension. 

2168 ``detectorIndex`` : `np.ndarray` [`int`] 

2169 Index of the detector for this extension. 

2170 ``wcss`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`] 

2171 Initial WCS for this extension. 

2172 ``extensionType`` : `np.ndarray` [`str`] 

2173 "SCIENCE" or "REFERENCE". 

2174 """ 

2175 for visit, visitSources in sourceDict.items(): 

2176 # Visit numbers equal or below zero connote the reference catalog. 

2177 if visit <= 0: 

2178 # This "visit" number corresponds to a reference catalog. 

2179 continue 

2180 

2181 for detector, sourceCat in visitSources.items(): 

2182 extensionIndex = np.flatnonzero( 

2183 (extensionInfo.visit == visit) & (extensionInfo.detector == detector) 

2184 )[0] 

2185 

2186 d = { 

2187 "x": np.array(sourceCat["x"]), 

2188 "y": np.array(sourceCat["y"]), 

2189 "xCov": np.array(sourceCat["xCov"]), 

2190 "yCov": np.array(sourceCat["yCov"]), 

2191 "xyCov": np.array(sourceCat["xyCov"]), 

2192 } 

2193 wcsf.setObjects(extensionIndex, d, "x", "y", ["xCov", "yCov", "xyCov"])