Coverage for python/lsst/drp/tasks/gbdesAstrometricFit.py: 11%

451 statements  

coverage.py v7.3.2, created at 2023-11-18 13:24 +0000

1# This file is part of drp_tasks. 

2# 

3# LSST Data Management System 

4# This product includes software developed by the 

5# LSST Project (http://www.lsst.org/). 

6# See COPYRIGHT file at the top of the source tree. 

7# 

8# This program is free software: you can redistribute it and/or modify 

9# it under the terms of the GNU General Public License as published by 

10# the Free Software Foundation, either version 3 of the License, or 

11# (at your option) any later version. 

12# 

13# This program is distributed in the hope that it will be useful, 

14# but WITHOUT ANY WARRANTY; without even the implied warranty of 

15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

16# GNU General Public License for more details. 

17# 

18# You should have received a copy of the LSST License Statement and 

19# the GNU General Public License along with this program. If not, 

20# see <https://www.lsstcorp.org/LegalNotices/>. 

21# 

22import astropy.coordinates 

23import astropy.time 

24import astropy.units as u 

25import astshim 

26import lsst.afw.geom as afwgeom 

27import lsst.afw.table 

28import lsst.geom 

29import lsst.pex.config as pexConfig 

30import lsst.pipe.base as pipeBase 

31import lsst.sphgeom 

32import numpy as np 

33import wcsfit 

34import yaml 

35from lsst.meas.algorithms import ( 

36 LoadReferenceObjectsConfig, 

37 ReferenceObjectLoader, 

38 ReferenceSourceSelectorTask, 

39) 

40from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry 

41 

42__all__ = ["GbdesAstrometricFitConnections", "GbdesAstrometricFitConfig", "GbdesAstrometricFitTask"] 

43 

44 

45def _make_ref_covariance_matrix( 

46 refCat, inputUnit=u.radian, outputCoordUnit=u.marcsec, outputPMUnit=u.marcsec, version=1 

47): 

48 """Make a covariance matrix for the reference catalog including proper 

49 motion and parallax. 

50 

51 The output is flattened to one dimension to match the format expected by 

52 `gbdes`. 

53 

54 Parameters 

55 ---------- 

56 refCat : `lsst.afw.table.SimpleCatalog` 

57 Catalog including proper motion and parallax measurements. 

58 inputUnit : `astropy.units.core.Unit`

59 Units of the input catalog.

60 outputCoordUnit : `astropy.units.core.Unit`

61 Units required for the coordinates in the covariance matrix. `gbdes`

62 expects milliarcseconds.

63 outputPMUnit : `astropy.units.core.Unit`

64 Units required for the proper motion/parallax in the covariance matrix. 

65 `gbdes` expects milliarcseconds. 

66 version : `int` 

67 Version of the reference catalog. Version 2 includes covariance 

68 measurements. 

69 Returns 

70 ------- 

71 cov : `np.ndarray`

72 Array of flattened covariance matrices, one row of 25 values per object.

73 """ 

74 cov = np.zeros((len(refCat), 25)) 

75 if version == 1: 

76 # Here is the standard ordering of components in the covariance matrix,

77 # chosen to match the PM enumeration in the C++ code of the gbdes

78 # package's Match. Each tuple gives: the array holding the 1-d error,

79 # the corresponding Gaia column name, and the position of this

80 # component in the Gaia catalog ordering. The order of the tuples is

81 # the order we want in our covariance matrix.

82 raErr = (refCat["coord_raErr"] * inputUnit).to(outputCoordUnit).to_value() 

83 decErr = (refCat["coord_decErr"] * inputUnit).to(outputCoordUnit).to_value() 

84 raPMErr = (refCat["pm_raErr"] * inputUnit).to(outputPMUnit).to_value() 

85 decPMErr = (refCat["pm_decErr"] * inputUnit).to(outputPMUnit).to_value() 

86 parallaxErr = (refCat["parallaxErr"] * inputUnit).to(outputPMUnit).to_value() 

87 stdOrder = ( 

88 (raErr, "ra", 0), 

89 (decErr, "dec", 1), 

90 (raPMErr, "pmra", 3), 

91 (decPMErr, "pmdec", 4), 

92 (parallaxErr, "parallax", 2), 

93 ) 

94 

95 k = 0 

96 for i, pr1 in enumerate(stdOrder): 

97 for j, pr2 in enumerate(stdOrder): 
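# Version-1 reference catalogs do not provide covariances between
# components, so the off-diagonal elements are left at zero.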

98 if pr1[2] < pr2[2]: 

99 cov[:, k] = 0 

100 elif pr1[2] > pr2[2]: 

101 cov[:, k] = 0 

102 else: 

103 # diagonal element

104 cov[:, k] = pr1[0] * pr2[0] 

105 k = k + 1 

106 

107 elif version == 2: 

108 positionParameters = ["coord_ra", "coord_dec", "pm_ra", "pm_dec", "parallax"] 

109 units = [outputCoordUnit, outputCoordUnit, outputPMUnit, outputPMUnit, outputPMUnit] 

110 k = 0 

111 for i, pi in enumerate(positionParameters): 

112 for j, pj in enumerate(positionParameters): 

113 if i == j: 

114 cov[:, k] = (refCat[f"{pi}Err"] ** 2 * inputUnit**2).to_value(units[j] * units[j]) 

115 elif i > j: 

116 cov[:, k] = (refCat[f"{pj}_{pi}_Cov"] * inputUnit**2).to_value(units[i] * units[j]) 

117 else: 

118 cov[:, k] = (refCat[f"{pi}_{pj}_Cov"] * inputUnit**2).to_value(units[i] * units[j]) 

119 

120 k += 1 

121 return cov 
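# A minimal usage sketch (hypothetical values): for ``version=1`` only the
# five 1-d error columns are read, so any table-like object with those
# columns and a row-count ``len()`` (e.g. an `astropy.table.Table`) works:
#
#     from astropy.table import Table
#     toyRefCat = Table(
#         {
#             "coord_raErr": [1e-9, 2e-9],
#             "coord_decErr": [1e-9, 2e-9],
#             "pm_raErr": [1e-9, 2e-9],
#             "pm_decErr": [1e-9, 2e-9],
#             "parallaxErr": [1e-9, 2e-9],
#         }
#     )
#     cov = _make_ref_covariance_matrix(toyRefCat, version=1)
#     assert cov.shape == (2, 25)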

122 

123 

124def _convert_to_ast_polymap_coefficients(coefficients): 

125 """Convert vector of polynomial coefficients from the format used in 

126 `gbdes` into AST format (see Poly2d::vectorIndex(i, j) in 

127 gbdes/gbutil/src/Poly2d.cpp). This assumes two input and two output 

128 coordinates. 

129 

130 Parameters 

131 ---------- 

132 coefficients : `list`

133 Coefficients of the polynomials for both output coordinates.

134 The polynomial degree is not passed in; it is inferred from

135 the number of coefficients.

136 

137 Returns 

138 ------- 

139 astPoly : `astshim.PolyMap` 

140 Coefficients in AST polynomial format. 

141 """ 

142 polyArray = np.zeros((len(coefficients), 4)) 

143 N = len(coefficients) / 2 

144 # Get the degree of the polynomial by applying the quadratic formula to the 

145 # formula for calculating the number of coefficients of the polynomial. 
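# Each output coordinate of a degree-d polynomial has
# N = (d + 1) * (d + 2) / 2 coefficients, so
# d = (-3 + sqrt(1 + 8 * N)) / 2 = -1.5 + 0.5 * sqrt(1 + 8 * N).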

146 degree = int(-1.5 + 0.5 * (1 + 8 * N) ** 0.5) 

147 
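# gbdes orders the coefficients by total degree and then by the power of y:
# for degree 1 each output coordinate is ordered [constant, x, y].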

148 for outVar in [1, 2]: 

149 for i in range(degree + 1): 

150 for j in range(degree + 1): 

151 if (i + j) > degree: 

152 continue 

153 vectorIndex = int(((i + j) * (i + j + 1)) / 2 + j + N * (outVar - 1)) 

154 polyArray[vectorIndex, 0] = coefficients[vectorIndex] 

155 polyArray[vectorIndex, 1] = outVar 

156 polyArray[vectorIndex, 2] = i 

157 polyArray[vectorIndex, 3] = j 

158 

159 astPoly = astshim.PolyMap(polyArray, 2, options="IterInverse=1,NIterInverse=10,TolInverse=1e-7") 

160 return astPoly 

161 

162 

163def _get_wcs_from_sip(butlerWcs): 

164 """Get wcsfit.Wcs in TPV format from the SIP-formatted input WCS. 

165 

166 Parameters 

167 ---------- 

168 butlerWcs : `lsst.afw.geom.SkyWcs` 

169 Input WCS from the calexp in SIP format. 

170 

171 Returns 

172 ------- 

173 wcs : `wcsfit.Wcs` 

174 WCS object in TPV format. 

175 """ 

176 fits_metadata = butlerWcs.getFitsMetadata() 

177 if not ( 

178 (fits_metadata.get("CTYPE1") == "RA---TAN-SIP") and (fits_metadata.get("CTYPE2") == "DEC--TAN-SIP") 

179 ): 

180 raise ValueError( 

181 f"CTYPES {fits_metadata.get('CTYPE1')} and {fits_metadata.get('CTYPE2')}" 

182 "do not match SIP convention" 

183 ) 

184 

185 # Correct CRPIX values to correspond to source table pixel indexing 

186 # convention 
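# (FITS CRPIX values are 1-based, while the source tables use 0-based
# pixel coordinates, hence the offset of -1 applied below.)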

187 crpix1 = fits_metadata.get("CRPIX1") 

188 crpix2 = fits_metadata.get("CRPIX2") 

189 fits_metadata.set("CRPIX1", crpix1 - 1) 

190 fits_metadata.set("CRPIX2", crpix2 - 1) 

191 

192 floatDict = {k: fits_metadata[k] for k in fits_metadata if isinstance(fits_metadata[k], (int, float))} 

193 

194 wcs = wcsfit.readTPVFromSIP(floatDict, "SIP") 

195 

196 return wcs 

197 

198 

199class GbdesAstrometricFitConnections( 

200 pipeBase.PipelineTaskConnections, dimensions=("skymap", "tract", "instrument", "physical_filter") 

201): 

202 """Middleware input/output connections for task data.""" 

203 

204 inputCatalogRefs = pipeBase.connectionTypes.Input( 

205 doc="Source table in parquet format, per visit.", 

206 name="preSourceTable_visit", 

207 storageClass="DataFrame", 

208 dimensions=("instrument", "visit"), 

209 deferLoad=True, 

210 multiple=True, 

211 ) 

212 inputVisitSummaries = pipeBase.connectionTypes.Input( 

213 doc=( 

214 "Per-visit consolidated exposure metadata built from calexps. " 

215 "These catalogs use detector id for the id and must be sorted for " 

216 "fast lookups of a detector." 

217 ), 

218 name="visitSummary", 

219 storageClass="ExposureCatalog", 

220 dimensions=("instrument", "visit"), 

221 multiple=True, 

222 ) 

223 referenceCatalog = pipeBase.connectionTypes.PrerequisiteInput( 

224 doc="The astrometry reference catalog to match to loaded input catalog sources.", 

225 name="gaia_dr3_20230707", 

226 storageClass="SimpleCatalog", 

227 dimensions=("skypix",), 

228 deferLoad=True, 

229 multiple=True, 

230 ) 

231 outputWcs = pipeBase.connectionTypes.Output( 

232 doc=( 

233 "Per-tract, per-visit world coordinate systems derived from the fitted model." 

234 " These catalogs only contain entries for detectors with an output, and use" 

235 " the detector id for the catalog id, sorted on id for fast lookups of a detector." 

236 ), 

237 name="gbdesAstrometricFitSkyWcsCatalog", 

238 storageClass="ExposureCatalog", 

239 dimensions=("instrument", "visit", "skymap", "tract"), 

240 multiple=True, 

241 ) 

242 outputCatalog = pipeBase.connectionTypes.Output( 

243 doc=( 

244 "Source table with stars used in fit, along with residuals in pixel coordinates and tangent " 

245 "plane coordinates and chisq values." 

246 ), 

247 name="gbdesAstrometricFit_fitStars", 

248 storageClass="ArrowNumpyDict", 

249 dimensions=("instrument", "skymap", "tract", "physical_filter"), 

250 ) 

251 starCatalog = pipeBase.connectionTypes.Output( 

252 doc="Star catalog.", 

253 name="gbdesAstrometricFit_starCatalog", 

254 storageClass="ArrowNumpyDict", 

255 dimensions=("instrument", "skymap", "tract", "physical_filter"), 

256 ) 

257 

258 def getSpatialBoundsConnections(self): 

259 return ("inputVisitSummaries",) 

260 

261 

262class GbdesAstrometricFitConfig( 

263 pipeBase.PipelineTaskConfig, pipelineConnections=GbdesAstrometricFitConnections 

264): 

265 """Configuration for GbdesAstrometricFitTask""" 

266 

267 sourceSelector = sourceSelectorRegistry.makeField( 

268 doc="How to select sources for cross-matching.", default="science" 

269 ) 

270 referenceSelector = pexConfig.ConfigurableField( 

271 target=ReferenceSourceSelectorTask, 

272 doc="How to down-select the loaded astrometry reference catalog.", 

273 ) 

274 matchRadius = pexConfig.Field( 

275 doc="Matching tolerance between associated objects (arcseconds).", dtype=float, default=1.0 

276 ) 

277 minMatches = pexConfig.Field( 

278 doc="Number of matches required to keep a source object.", dtype=int, default=2 

279 ) 

280 allowSelfMatches = pexConfig.Field( 

281 doc="Allow multiple sources from the same visit to be associated with the same object.", 

282 dtype=bool, 

283 default=False, 

284 ) 

285 sourceFluxType = pexConfig.Field( 

286 dtype=str, 

287 doc="Source flux field to use in source selection and to get fluxes from the catalog.", 

288 default="apFlux_12_0", 

289 ) 

290 systematicError = pexConfig.Field( 

291 dtype=float, 

292 doc=( 

293 "Systematic error padding added in quadrature for the science catalogs (marcsec). The default" 

294 "value is equivalent to 0.02 pixels for HSC." 

295 ), 

296 default=0.0034, 

297 ) 

298 referenceSystematicError = pexConfig.Field( 

299 dtype=float, 

300 doc="Systematic error padding added in quadrature for the reference catalog (marcsec).", 

301 default=0.0, 

302 ) 

303 modelComponents = pexConfig.ListField( 

304 dtype=str, 

305 doc=( 

306 "List of mappings to apply to transform from pixels to sky, in order of their application." 

307 "Supported options are 'INSTRUMENT/DEVICE' and 'EXPOSURE'." 

308 ), 

309 default=["INSTRUMENT/DEVICE", "EXPOSURE"], 

310 ) 

311 deviceModel = pexConfig.ListField( 

312 dtype=str, 

313 doc=( 

314 "List of mappings to apply to transform from detector pixels to intermediate frame. Map names" 

315 "should match the format 'BAND/DEVICE/<map name>'." 

316 ), 

317 default=["BAND/DEVICE/poly"], 

318 ) 

319 exposureModel = pexConfig.ListField( 

320 dtype=str, 

321 doc=( 

322 "List of mappings to apply to transform from intermediate frame to sky coordinates. Map names" 

323 "should match the format 'EXPOSURE/<map name>'." 

324 ), 

325 default=["EXPOSURE/poly"], 

326 ) 

327 devicePolyOrder = pexConfig.Field(dtype=int, doc="Order of device polynomial model.", default=4) 

328 exposurePolyOrder = pexConfig.Field(dtype=int, doc="Order of exposure polynomial model.", default=6) 

329 fitProperMotion = pexConfig.Field(dtype=bool, doc="Fit the proper motions of the objects.", default=False) 

330 excludeNonPMObjects = pexConfig.Field( 

331 dtype=bool, doc="Exclude reference objects without proper motion/parallax information.", default=True 

332 ) 

333 fitReserveFraction = pexConfig.Field( 

334 dtype=float, default=0.2, doc="Fraction of objects to reserve from fit for validation." 

335 ) 

336 fitReserveRandomSeed = pexConfig.Field( 

337 dtype=int, 

338 doc="Set the random seed for selecting data points to reserve from the fit for validation.", 

339 default=1234, 

340 ) 

341 

342 def setDefaults(self): 

343 # Use only stars because aperture fluxes of galaxies are biased and 

344 # depend on seeing. 

345 self.sourceSelector["science"].doUnresolved = True 

346 self.sourceSelector["science"].unresolved.name = "extendedness" 

347 

348 # Use only isolated sources. 

349 self.sourceSelector["science"].doIsolated = True 

350 self.sourceSelector["science"].isolated.parentName = "parentSourceId" 

351 self.sourceSelector["science"].isolated.nChildName = "deblend_nChild" 

352 # Do not use either flux or centroid measurements with flags, 

353 # chosen from the usual QA flags for stars. 

354 self.sourceSelector["science"].doFlags = True 

355 badFlags = [ 

356 "pixelFlags_edge", 

357 "pixelFlags_saturated", 

358 "pixelFlags_interpolatedCenter", 

359 "pixelFlags_interpolated", 

360 "pixelFlags_crCenter", 

361 "pixelFlags_bad", 

362 "hsmPsfMoments_flag", 

363 f"{self.sourceFluxType}_flag", 

364 ] 

365 self.sourceSelector["science"].flags.bad = badFlags 

366 

367 # Use only primary sources. 

368 self.sourceSelector["science"].doRequirePrimary = True 

369 

370 def validate(self): 

371 super().validate() 

372 

373 # Check if all components of the device and exposure models are 

374 # supported. 

375 for component in self.deviceModel: 

376 if not (("poly" in component.lower()) or ("identity" in component.lower())): 

377 raise pexConfig.FieldValidationError( 

378 GbdesAstrometricFitConfig.deviceModel, 

379 self, 

380 f"deviceModel component {component} is not supported.", 

381 ) 

382 

383 for component in self.exposureModel: 

384 if not (("poly" in component.lower()) or ("identity" in component.lower())): 

385 raise pexConfig.FieldValidationError( 

386 GbdesAstrometricFitConfig.exposureModel, 

387 self, 

388 f"exposureModel component {component} is not supported.", 

389 ) 
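# A minimal sketch of how these options could be overridden in a pipeline
# definition YAML (the task label "gbdesAstrometricFit" and file layout are
# illustrative, not prescribed by this module):
#
#     tasks:
#       gbdesAstrometricFit:
#         class: lsst.drp.tasks.gbdesAstrometricFit.GbdesAstrometricFitTask
#         config:
#           devicePolyOrder: 5
#           fitProperMotion: true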

390 

391 

392class GbdesAstrometricFitTask(pipeBase.PipelineTask): 

393 """Calibrate the WCS across multiple visits of the same field using the 

394 GBDES package. 

395 """ 

396 

397 ConfigClass = GbdesAstrometricFitConfig 

398 _DefaultName = "gbdesAstrometricFit" 

399 

400 def __init__(self, **kwargs): 

401 super().__init__(**kwargs) 

402 self.makeSubtask("sourceSelector") 

403 self.makeSubtask("referenceSelector") 

404 

405 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

406 # We override runQuantum to set up the refObjLoaders 

407 inputs = butlerQC.get(inputRefs) 

408 

409 instrumentName = butlerQC.quantum.dataId["instrument"] 

410 

411 # Ensure the inputs are in a consistent order 

412 inputCatVisits = np.array([inputCat.dataId["visit"] for inputCat in inputs["inputCatalogRefs"]]) 

413 inputs["inputCatalogRefs"] = [inputs["inputCatalogRefs"][v] for v in inputCatVisits.argsort()] 

414 inputSumVisits = np.array([inputSum[0]["visit"] for inputSum in inputs["inputVisitSummaries"]]) 

415 inputs["inputVisitSummaries"] = [inputs["inputVisitSummaries"][v] for v in inputSumVisits.argsort()] 

416 inputRefHtm7s = np.array([inputRefCat.dataId["htm7"] for inputRefCat in inputRefs.referenceCatalog]) 

417 inputRefCatRefs = [inputRefs.referenceCatalog[htm7] for htm7 in inputRefHtm7s.argsort()] 

418 inputRefCats = np.array([inputRefCat.dataId["htm7"] for inputRefCat in inputs["referenceCatalog"]]) 

419 inputs["referenceCatalog"] = [inputs["referenceCatalog"][v] for v in inputRefCats.argsort()] 

420 

421 sampleRefCat = inputs["referenceCatalog"][0].get() 

422 refEpoch = sampleRefCat[0]["epoch"] 

423 

424 refConfig = LoadReferenceObjectsConfig() 

425 refConfig.anyFilterMapsToThis = "phot_g_mean" 

426 refConfig.requireProperMotion = True 

427 refObjectLoader = ReferenceObjectLoader( 

428 dataIds=[ref.datasetRef.dataId for ref in inputRefCatRefs], 

429 refCats=inputs.pop("referenceCatalog"), 

430 config=refConfig, 

431 log=self.log, 

432 ) 

433 

434 output = self.run( 

435 **inputs, instrumentName=instrumentName, refEpoch=refEpoch, refObjectLoader=refObjectLoader 

436 ) 

437 

438 for outputRef in outputRefs.outputWcs: 

439 visit = outputRef.dataId["visit"] 

440 butlerQC.put(output.outputWCSs[visit], outputRef) 

441 butlerQC.put(output.outputCatalog, outputRefs.outputCatalog) 

442 butlerQC.put(output.starCatalog, outputRefs.starCatalog) 

443 

444 def run( 

445 self, inputCatalogRefs, inputVisitSummaries, instrumentName="", refEpoch=None, refObjectLoader=None 

446 ): 

447 """Run the WCS fit for a given set of visits 

448 

449 Parameters 

450 ---------- 

451 inputCatalogRefs : `list` 

452 List of `DeferredDatasetHandle`s pointing to visit-level source 

453 tables. 

454 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

455 List of catalogs with per-detector summary information. 

456 instrumentName : `str`, optional 

457 Name of the instrument used. This is only used for labelling. 

458 refEpoch : `float` 

459 Epoch of the reference objects in MJD. 

460 refObjectLoader : instance of 

461 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader` 

462 Reference object loader instance.

463 

464 Returns 

465 ------- 

466 result : `lsst.pipe.base.Struct` 

467 ``outputWCSs`` : `list` of `lsst.afw.table.ExposureCatalog` 

468 List of exposure catalogs (one per visit) with the WCS for each 

469 detector set by the new fitted WCS. 

470 ``fitModel`` : `wcsfit.WCSFit` 

471 Model-fitting object with final model parameters. 

472 ``outputCatalog`` : `pyarrow.Table`

473 Catalog with fit residuals of all sources used.

``starCatalog`` : `pyarrow.Table`
Catalog with the fitted positions of the associated objects.

474 """ 

475 self.log.info("Gathering instrument, exposure, and field info") 

476 # Set up an instrument object 

477 instrument = wcsfit.Instrument(instrumentName) 

478 

479 # Get RA, Dec, MJD, etc., for the input visits 

480 exposureInfo, exposuresHelper, extensionInfo = self._get_exposure_info( 

481 inputVisitSummaries, instrument 

482 ) 

483 

484 # Get information about the extent of the input visits 

485 fields, fieldCenter, fieldRadius = self._prep_sky(inputVisitSummaries, exposureInfo.medianEpoch) 

486 

487 self.log.info("Load catalogs and associate sources") 

488 # Set up class to associate sources into matches using a 

489 # friends-of-friends algorithm 

490 associations = wcsfit.FoFClass( 

491 fields, 

492 [instrument], 

493 exposuresHelper, 

494 [fieldRadius.asDegrees()], 

495 (self.config.matchRadius * u.arcsec).to(u.degree).value, 

496 ) 

497 

498 # Add the reference catalog to the associator 

499 medianEpoch = astropy.time.Time(exposureInfo.medianEpoch, format="decimalyear").mjd 

500 refObjects, refCovariance = self._load_refcat( 

501 associations, refObjectLoader, fieldCenter, fieldRadius, extensionInfo, epoch=medianEpoch 

502 ) 

503 

504 # Add the science catalogs and associate new sources as they are added 

505 sourceIndices, usedColumns = self._load_catalogs_and_associate( 

506 associations, inputCatalogRefs, extensionInfo 

507 ) 

508 

509 self.log.info("Fit the WCSs") 

510 # Set up a YAML-type string using the config variables and a sample 

511 # visit 

512 inputYAML = self.make_yaml(inputVisitSummaries[0]) 

513 

514 # Set the verbosity level for WCSFit from the task log level. 

515 # TODO: DM-36850, Add lsst.log to gbdes so that log messages are 

516 # properly propagated. 

517 loglevel = self.log.getEffectiveLevel() 

518 if loglevel >= self.log.WARNING: 

519 verbose = 0 

520 elif loglevel == self.log.INFO: 

521 verbose = 1 

522 else: 

523 verbose = 2 

524 

525 # Set up the WCS-fitting class using the results of the FOF associator 

526 wcsf = wcsfit.WCSFit( 

527 fields, 

528 [instrument], 

529 exposuresHelper, 

530 extensionInfo.visitIndex, 

531 extensionInfo.detectorIndex, 

532 inputYAML, 

533 extensionInfo.wcs, 

534 associations.sequence, 

535 associations.extn, 

536 associations.obj, 

537 sysErr=self.config.systematicError, 

538 refSysErr=self.config.referenceSystematicError, 

539 usePM=self.config.fitProperMotion, 

540 verbose=verbose, 

541 ) 

542 

543 # Add the science and reference sources 

544 self._add_objects(wcsf, inputCatalogRefs, sourceIndices, extensionInfo, usedColumns) 

545 self._add_ref_objects(wcsf, refObjects, refCovariance, extensionInfo) 

546 

547 # Do the WCS fit 

548 wcsf.fit( 

549 reserveFraction=self.config.fitReserveFraction, randomNumberSeed=self.config.fitReserveRandomSeed 

550 ) 

551 self.log.info("WCS fitting done") 

552 

553 outputWCSs = self._make_outputs(wcsf, inputVisitSummaries, exposureInfo) 

554 outputCatalog = wcsf.getOutputCatalog() 

555 starCatalog = wcsf.getStarCatalog() 

556 

557 return pipeBase.Struct( 

558 outputWCSs=outputWCSs, fitModel=wcsf, outputCatalog=outputCatalog, starCatalog=starCatalog 

559 ) 

560 

561 def _prep_sky(self, inputVisitSummaries, epoch, fieldName="Field"): 

562 """Get center and radius of the input tract. This assumes that all 

563 visits will be put into the same `wcsfit.Field` and fit together. 

564 

565 Parameters

566 ---------- 

567 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

568 List of catalogs with per-detector summary information. 

569 epoch : `float`

570 Reference epoch in decimal-year format.

571 fieldName : `str`

572 Name of the field, used internally. 

573 

574 Returns 

575 ------- 

576 fields : `wcsfit.Fields` 

577 Object with field information. 

578 center : `lsst.geom.SpherePoint` 

579 Center of the field. 

580 radius : `lsst.sphgeom._sphgeom.Angle` 

581 Radius of the bounding circle of the tract. 

582 """ 

583 allDetectorCorners = [] 

584 for visSum in inputVisitSummaries: 

585 detectorCorners = [ 

586 lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector() 

587 for (ra, dec) in zip(visSum["raCorners"].ravel(), visSum["decCorners"].ravel()) 

588 ] 

589 allDetectorCorners.extend(detectorCorners) 

590 boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(allDetectorCorners).getBoundingCircle() 

591 center = lsst.geom.SpherePoint(boundingCircle.getCenter()) 

592 ra = center.getRa().asDegrees() 

593 dec = center.getDec().asDegrees() 

594 radius = boundingCircle.getOpeningAngle() 

595 

596 # wcsfit.Fields describes a list of fields, but we assume all 

597 # observations will be fit together in one field. 

598 fields = wcsfit.Fields([fieldName], [ra], [dec], [epoch]) 

599 

600 return fields, center, radius 

601 

602 def _get_exposure_info( 

603 self, inputVisitSummaries, instrument, fieldNumber=0, instrumentNumber=0, refEpoch=None 

604 ): 

605 """Get various information about the input visits to feed to the 

606 fitting routines. 

607 

608 Parameters 

609 ---------- 

610 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

611 Tables for each visit with information for detectors. 

612 instrument : `wcsfit.Instrument` 

613 Instrument object to which detector information is added. 

614 fieldNumber : `int` 

615 Index of the field for these visits. Should be zero if all data is 

616 being fit together. 

617 instrumentNumber : `int` 

618 Index of the instrument for these visits. Should be zero if all 

619 data comes from the same instrument. 

620 refEpoch : `float` 

621 Epoch of the reference objects in MJD. 

622 

623 Returns 

624 ------- 

625 exposureInfo : `lsst.pipe.base.Struct` 

626 Struct containing general properties for the visits: 

627 ``visits`` : `list` 

628 List of visit names. 

629 ``detectors`` : `list` 

630 List of all detectors in any visit. 

631 ``ras`` : `list` of float 

632 List of boresight RAs for each visit. 

633 ``decs`` : `list` of float 

634 List of boresight Decs for each visit.

635 ``medianEpoch`` : float 

636 Median epoch of all visits in decimal-year format. 

637 exposuresHelper : `wcsfit.ExposuresHelper` 

638 Object containing information about the input visits. 

639 extensionInfo : `lsst.pipe.base.Struct` 

640 Struct containing properties for each extension: 

641 ``visit`` : `np.ndarray` 

642 Name of the visit for this extension. 

643 ``detector`` : `np.ndarray` 

644 Name of the detector for this extension. 

645 ``visitIndex`` : `np.ndarray` of `int`

646 Index of visit for this extension. 

647 ``detectorIndex`` : `np.ndarray` of `int` 

648 Index of the detector for this extension. 

649 ``wcs`` : `np.ndarray` of `wcsfit.Wcs`

650 Initial WCS for this extension. 

651 ``extensionType`` : `np.ndarray` of `str` 

652 "SCIENCE" or "REFERENCE". 

653 """ 

654 exposureNames = [] 

655 ras = [] 

656 decs = [] 

657 visits = [] 

658 detectors = [] 

659 airmasses = [] 

660 exposureTimes = [] 

661 mjds = [] 

662 observatories = [] 

663 wcss = [] 

664 

665 extensionType = [] 

666 extensionVisitIndices = [] 

667 extensionDetectorIndices = [] 

668 extensionVisits = [] 

669 extensionDetectors = [] 

670 # Get information for all the science visits 

671 for v, visitSummary in enumerate(inputVisitSummaries): 

672 visitInfo = visitSummary[0].getVisitInfo() 

673 visit = visitSummary[0]["visit"] 

674 visits.append(visit) 

675 exposureNames.append(str(visit)) 

676 raDec = visitInfo.getBoresightRaDec() 

677 ras.append(raDec.getRa().asRadians()) 

678 decs.append(raDec.getDec().asRadians()) 

679 airmasses.append(visitInfo.getBoresightAirmass()) 

680 exposureTimes.append(visitInfo.getExposureTime()) 

681 obsDate = visitInfo.getDate() 

682 obsMJD = obsDate.get(obsDate.MJD) 

683 mjds.append(obsMJD) 

684 # Get the observatory ICRS position for use in fitting parallax 

685 obsLon = visitInfo.observatory.getLongitude().asDegrees() 

686 obsLat = visitInfo.observatory.getLatitude().asDegrees() 

687 obsElev = visitInfo.observatory.getElevation() 

688 earthLocation = astropy.coordinates.EarthLocation.from_geodetic(obsLon, obsLat, obsElev) 

689 observatory_gcrs = earthLocation.get_gcrs(astropy.time.Time(obsMJD, format="mjd")) 

690 observatory_icrs = observatory_gcrs.transform_to(astropy.coordinates.ICRS()) 

691 # We want the position in AU in Cartesian coordinates 

692 observatories.append(observatory_icrs.cartesian.xyz.to(u.AU).value) 

693 

694 for row in visitSummary: 

695 detector = row["id"] 

696 if detector not in detectors: 

697 detectors.append(detector) 

698 detectorBounds = wcsfit.Bounds( 

699 row["bbox_min_x"], row["bbox_max_x"], row["bbox_min_y"], row["bbox_max_y"] 

700 ) 

701 instrument.addDevice(str(detector), detectorBounds) 

702 

703 detectorIndex = np.flatnonzero(detector == np.array(detectors))[0] 

704 extensionVisitIndices.append(v) 

705 extensionDetectorIndices.append(detectorIndex) 

706 extensionVisits.append(visit) 

707 extensionDetectors.append(detector) 

708 extensionType.append("SCIENCE") 

709 

710 wcs = row.getWcs() 

711 wcss.append(_get_wcs_from_sip(wcs)) 

712 

713 fieldNumbers = list(np.ones(len(exposureNames), dtype=int) * fieldNumber) 

714 instrumentNumbers = list(np.ones(len(exposureNames), dtype=int) * instrumentNumber) 

715 

716 # Set the reference epoch to be the median of the science visits. 

717 # The reference catalog will be shifted to this date. 

718 medianMJD = np.median(mjds) 

719 medianEpoch = astropy.time.Time(medianMJD, format="mjd").decimalyear 

720 

721 # Add information for the reference catalog. Most of the values are 

722 # not used. 

723 exposureNames.append("REFERENCE") 

724 visits.append(-1) 

725 fieldNumbers.append(0) 
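# gbdes flags the reference "exposure" with a negative instrument number:
# -1 for a plain reference catalog, -2 for one carrying proper motions
# and parallaxes.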

726 if self.config.fitProperMotion: 

727 instrumentNumbers.append(-2) 

728 else: 

729 instrumentNumbers.append(-1) 

730 ras.append(0.0) 

731 decs.append(0.0) 

732 airmasses.append(0.0) 

733 exposureTimes.append(0) 

734 mjds.append((refEpoch if (refEpoch is not None) else medianMJD)) 

735 observatories.append(np.array([0, 0, 0])) 

736 identity = wcsfit.IdentityMap() 

737 icrs = wcsfit.SphericalICRS() 

738 refWcs = wcsfit.Wcs(identity, icrs, "Identity", np.pi / 180.0) 

739 wcss.append(refWcs) 

740 

741 extensionVisitIndices.append(len(exposureNames) - 1) 

742 extensionDetectorIndices.append(-1) # REFERENCE device must be -1 

743 extensionVisits.append(-1) 

744 extensionDetectors.append(-1) 

745 extensionType.append("REFERENCE") 

746 

747 # Make a table of information to use elsewhere in the class 

748 extensionInfo = pipeBase.Struct( 

749 visit=np.array(extensionVisits), 

750 detector=np.array(extensionDetectors), 

751 visitIndex=np.array(extensionVisitIndices), 

752 detectorIndex=np.array(extensionDetectorIndices), 

753 wcs=np.array(wcss), 

754 extensionType=np.array(extensionType), 

755 ) 

756 

757 # Make the exposureHelper object to use in the fitting routines 

758 exposuresHelper = wcsfit.ExposuresHelper( 

759 exposureNames, 

760 fieldNumbers, 

761 instrumentNumbers, 

762 ras, 

763 decs, 

764 airmasses, 

765 exposureTimes, 

766 mjds, 

767 observatories, 

768 ) 

769 

770 exposureInfo = pipeBase.Struct( 

771 visits=visits, detectors=detectors, ras=ras, decs=decs, medianEpoch=medianEpoch 

772 ) 

773 

774 return exposureInfo, exposuresHelper, extensionInfo 

775 

776 def _load_refcat( 

777 self, associations, refObjectLoader, center, radius, extensionInfo, epoch=None, fieldIndex=0 

778 ): 

779 """Load the reference catalog and add reference objects to the 

780 `wcsfit.FoFClass` object. 

781 

782 Parameters 

783 ---------- 

784 associations : `wcsfit.FoFClass` 

785 Object to which to add the catalog of reference objects. 

786 refObjectLoader : 

787 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader` 

788 Object set up to load reference catalog objects. 

789 center : `lsst.geom.SpherePoint` 

790 Center of the circle in which to load reference objects. 

791 radius : `lsst.sphgeom._sphgeom.Angle` 

792 Radius of the circle in which to load reference objects. 

793 extensionInfo : `lsst.pipe.base.Struct` 

794 Struct containing properties for each extension. 

795 epoch : `float` 

796 MJD to which to correct the object positions. 

797 fieldIndex : `int` 

798 Index of the field. Should be zero if all the data is fit together. 

799 

800 Returns 

801 ------- 

802 refObjects : `dict` 

803 Position and error information of reference objects. 

804 refCovariance : `list` of `float` 

805 Flattened output covariance matrix. 

806 """ 

807 formattedEpoch = astropy.time.Time(epoch, format="mjd") 

808 

809 refFilter = refObjectLoader.config.anyFilterMapsToThis 

810 skyCircle = refObjectLoader.loadSkyCircle(center, radius, refFilter, epoch=formattedEpoch) 

811 

812 selected = self.referenceSelector.run(skyCircle.refCat) 

813 # Need memory contiguity to get reference filters as a vector. 

814 if not selected.sourceCat.isContiguous(): 

815 refCat = selected.sourceCat.copy(deep=True) 

816 else: 

817 refCat = selected.sourceCat 

818 

819 # In Gaia DR3, missing values are denoted by NaNs. 

820 finiteInd = np.isfinite(refCat["coord_ra"]) & np.isfinite(refCat["coord_dec"]) 

821 refCat = refCat[finiteInd] 

822 

823 if self.config.excludeNonPMObjects: 

824 # Gaia DR2 has zeros for missing data, while Gaia DR3 has NaNs: 

825 hasPM = ( 

826 (refCat["pm_raErr"] != 0) & np.isfinite(refCat["pm_raErr"]) & np.isfinite(refCat["pm_decErr"]) 

827 ) 

828 refCat = refCat[hasPM] 

829 

830 ra = (refCat["coord_ra"] * u.radian).to(u.degree).to_value().tolist() 

831 dec = (refCat["coord_dec"] * u.radian).to(u.degree).to_value().tolist() 

832 raCov = ((refCat["coord_raErr"] * u.radian).to(u.degree).to_value() ** 2).tolist() 

833 decCov = ((refCat["coord_decErr"] * u.radian).to(u.degree).to_value() ** 2).tolist() 

834 

835 # Get refcat version from refcat metadata 

836 refCatMetadata = refObjectLoader.refCats[0].get().getMetadata() 

837 refCatVersion = refCatMetadata["REFCAT_FORMAT_VERSION"] 

838 if refCatVersion == 2: 

839 raDecCov = ( 

840 (refCat["coord_ra_coord_dec_Cov"] * u.radian**2).to(u.degree**2).to_value().tolist() 

841 ) 

842 else: 

843 raDecCov = np.zeros(len(ra)) 

844 

845 refObjects = {"ra": ra, "dec": dec, "raCov": raCov, "decCov": decCov, "raDecCov": raDecCov} 

846 refCovariance = [] 

847 

848 if self.config.fitProperMotion: 

849 raPM = (refCat["pm_ra"] * u.radian).to(u.marcsec).to_value().tolist() 

850 decPM = (refCat["pm_dec"] * u.radian).to(u.marcsec).to_value().tolist() 

851 parallax = (refCat["parallax"] * u.radian).to(u.marcsec).to_value().tolist() 

852 cov = _make_ref_covariance_matrix(refCat, version=refCatVersion) 

853 pmDict = {"raPM": raPM, "decPM": decPM, "parallax": parallax} 

854 refObjects.update(pmDict) 

855 refCovariance = cov 

856 

857 extensionIndex = np.flatnonzero(extensionInfo.extensionType == "REFERENCE")[0] 

858 visitIndex = extensionInfo.visitIndex[extensionIndex] 

859 detectorIndex = extensionInfo.detectorIndex[extensionIndex] 

860 instrumentIndex = -1 # -1 indicates the reference catalog 

861 refWcs = extensionInfo.wcs[extensionIndex] 

862 

863 associations.addCatalog( 

864 refWcs, 

865 "STELLAR", 

866 visitIndex, 

867 fieldIndex, 

868 instrumentIndex, 

869 detectorIndex, 

870 extensionIndex, 

871 np.ones(len(refCat), dtype=bool), 

872 ra, 

873 dec, 

874 np.arange(len(ra)), 

875 ) 

876 

877 return refObjects, refCovariance 

878 

879 def _load_catalogs_and_associate( 

880 self, associations, inputCatalogRefs, extensionInfo, fieldIndex=0, instrumentIndex=0 

881 ): 

882 """Load the science catalogs and add the sources to the associator 

883 class `wcsfit.FoFClass`, associating them into matches as you go. 

884 

885 Parameters 

886 ---------- 

887 associations : `wcsfit.FoFClass` 

888 Object to which to add the science source catalogs.

889 inputCatalogRefs : `list` 

890 List of DeferredDatasetHandles pointing to visit-level source 

891 tables. 

892 extensionInfo : `lsst.pipe.base.Struct` 

893 Struct containing properties for each extension. 

894 fieldIndex : `int` 

895 Index of the field for these catalogs. Should be zero assuming all 

896 data is being fit together. 

897 instrumentIndex : `int` 

898 Index of the instrument for these catalogs. Should be zero 

899 assuming all data comes from the same instrument. 

900 

901 Returns 

902 ------- 

903 sourceIndices : `list` 

904 List of boolean arrays used to select sources. 

905 columns : `list` of `str` 

906 List of columns needed from source tables. 

907 """ 

908 columns = [ 

909 "detector", 

910 "sourceId", 

911 "x", 

912 "xErr", 

913 "y", 

914 "yErr", 

915 "ixx", 

916 "iyy", 

917 "ixy", 

918 f"{self.config.sourceFluxType}_instFlux", 

919 f"{self.config.sourceFluxType}_instFluxErr", 

920 ] 

921 if self.sourceSelector.config.doFlags: 

922 columns.extend(self.sourceSelector.config.flags.bad) 

923 if self.sourceSelector.config.doUnresolved: 

924 columns.append(self.sourceSelector.config.unresolved.name) 

925 if self.sourceSelector.config.doIsolated: 

926 columns.append(self.sourceSelector.config.isolated.parentName) 

927 columns.append(self.sourceSelector.config.isolated.nChildName) 

928 if self.sourceSelector.config.doRequirePrimary: 

929 columns.append(self.sourceSelector.config.requirePrimary.primaryColName) 

930 

931 sourceIndices = [None] * len(extensionInfo.visit) 

932 for inputCatalogRef in inputCatalogRefs: 

933 visit = inputCatalogRef.dataId["visit"] 

934 inputCatalog = inputCatalogRef.get(parameters={"columns": columns}) 

935 # Get a sorted array of detector names 

936 detectors = np.unique(inputCatalog["detector"]) 

937 

938 for detector in detectors: 

939 detectorSources = inputCatalog[inputCatalog["detector"] == detector] 

940 xCov = detectorSources["xErr"] ** 2 

941 yCov = detectorSources["yErr"] ** 2 

942 xyCov = ( 

943 detectorSources["ixy"] * (xCov + yCov) / (detectorSources["ixx"] + detectorSources["iyy"]) 

944 ) 
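# The xy covariance of the centroid is not measured directly (see
# DM-7101), so approximate it by scaling the second-moment cross term
# ixy by the ratio of the summed centroid variances to the trace of the
# second moments (ixx + iyy).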

945 # Remove sources with bad shape measurements 

946 goodShapes = xyCov**2 <= (xCov * yCov) 

947 selected = self.sourceSelector.run(detectorSources) 

948 goodInds = selected.selected & goodShapes 

949 

950 isStar = np.ones(goodInds.sum()) 

951 extensionIndex = np.flatnonzero( 

952 (extensionInfo.visit == visit) & (extensionInfo.detector == detector) 

953 )[0] 

954 detectorIndex = extensionInfo.detectorIndex[extensionIndex] 

955 visitIndex = extensionInfo.visitIndex[extensionIndex] 

956 

957 sourceIndices[extensionIndex] = goodInds 

958 

959 wcs = extensionInfo.wcs[extensionIndex] 

960 associations.reprojectWCS(wcs, fieldIndex) 

961 

962 associations.addCatalog( 

963 wcs, 

964 "STELLAR", 

965 visitIndex, 

966 fieldIndex, 

967 instrumentIndex, 

968 detectorIndex, 

969 extensionIndex, 

970 isStar, 

971 detectorSources[goodInds]["x"].to_list(), 

972 detectorSources[goodInds]["y"].to_list(), 

973 np.arange(goodInds.sum()), 

974 ) 

975 

976 associations.sortMatches( 

977 fieldIndex, minMatches=self.config.minMatches, allowSelfMatches=self.config.allowSelfMatches 

978 ) 

979 

980 return sourceIndices, columns 

981 

982 def make_yaml(self, inputVisitSummary, inputFile=None): 

983 """Make a YAML-type object that describes the parameters of the fit 

984 model. 

985 

986 Parameters 

987 ---------- 

988 inputVisitSummary : `lsst.afw.table.ExposureCatalog` 

989 Catalog with per-detector summary information. 

990 inputFile : `str` 

991 Path to a file that contains a basic model. 

992 

993 Returns 

994 ------- 

995 inputYAML : `wcsfit.YAMLCollector` 

996 YAML object containing the model description. 

997 """ 

998 if inputFile is not None: 

999 inputYAML = wcsfit.YAMLCollector(inputFile, "PixelMapCollection") 

1000 else: 

1001 inputYAML = wcsfit.YAMLCollector("", "PixelMapCollection") 

1002 inputDict = {} 

1003 modelComponents = ["INSTRUMENT/DEVICE", "EXPOSURE"] 

1004 baseMap = {"Type": "Composite", "Elements": modelComponents} 

1005 inputDict["EXPOSURE/DEVICE/base"] = baseMap 

1006 

1007 xMin = str(inputVisitSummary["bbox_min_x"].min()) 

1008 xMax = str(inputVisitSummary["bbox_max_x"].max()) 

1009 yMin = str(inputVisitSummary["bbox_min_y"].min()) 

1010 yMax = str(inputVisitSummary["bbox_max_y"].max()) 

1011 

1012 deviceModel = {"Type": "Composite", "Elements": self.config.deviceModel.list()} 

1013 inputDict["INSTRUMENT/DEVICE"] = deviceModel 

1014 for component in self.config.deviceModel: 

1015 if "poly" in component.lower(): 

1016 componentDict = { 

1017 "Type": "Poly", 

1018 "XPoly": {"OrderX": self.config.devicePolyOrder, "SumOrder": True}, 

1019 "YPoly": {"OrderX": self.config.devicePolyOrder, "SumOrder": True}, 

1020 "XMin": xMin, 

1021 "XMax": xMax, 

1022 "YMin": yMin, 

1023 "YMax": yMax, 

1024 } 

1025 elif "identity" in component.lower(): 

1026 componentDict = {"Type": "Identity"} 

1027 

1028 inputDict[component] = componentDict 

1029 

1030 exposureModel = {"Type": "Composite", "Elements": self.config.exposureModel.list()} 

1031 inputDict["EXPOSURE"] = exposureModel 

1032 for component in self.config.exposureModel: 

1033 if "poly" in component.lower(): 

1034 componentDict = { 

1035 "Type": "Poly", 

1036 "XPoly": {"OrderX": self.config.exposurePolyOrder, "SumOrder": "true"}, 

1037 "YPoly": {"OrderX": self.config.exposurePolyOrder, "SumOrder": "true"}, 

1038 } 

1039 elif "identity" in component.lower(): 

1040 componentDict = {"Type": "Identity"} 

1041 

1042 inputDict[component] = componentDict 

1043 
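# With the default configuration the dictionary dumped to YAML below is
# roughly (sketch):
#
#     EXPOSURE/DEVICE/base: {Type: Composite, Elements: [INSTRUMENT/DEVICE, EXPOSURE]}
#     INSTRUMENT/DEVICE: {Type: Composite, Elements: [BAND/DEVICE/poly]}
#     BAND/DEVICE/poly: {Type: Poly, XPoly: {OrderX: 4, SumOrder: true}, ...}
#     EXPOSURE: {Type: Composite, Elements: [EXPOSURE/poly]}
#     EXPOSURE/poly: {Type: Poly, XPoly: {OrderX: 6, SumOrder: true}, ...}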

1044 inputYAML.addInput(yaml.dump(inputDict)) 

1045 inputYAML.addInput("Identity:\n Type: Identity\n") 

1046 

1047 return inputYAML 

1048 

1049 def _add_objects(self, wcsf, inputCatalogRefs, sourceIndices, extensionInfo, columns): 

1050 """Add science sources to the wcsfit.WCSFit object. 

1051 

1052 Parameters 

1053 ---------- 

1054 wcsf : `wcsfit.WCSFit` 

1055 WCS-fitting object. 

1056 inputCatalogRefs : `list` 

1057 List of DeferredDatasetHandles pointing to visit-level source 

1058 tables. 

1059 sourceIndices : `list` 

1060 List of boolean arrays used to select sources. 

1061 extensionInfo : `lsst.pipe.base.Struct` 

1062 Struct containing properties for each extension. 

1063 columns : `list` of `str` 

1064 List of columns needed from source tables. 

1065 """ 

1066 for inputCatalogRef in inputCatalogRefs: 

1067 visit = inputCatalogRef.dataId["visit"] 

1068 inputCatalog = inputCatalogRef.get(parameters={"columns": columns}) 

1069 detectors = np.unique(inputCatalog["detector"]) 

1070 

1071 for detector in detectors: 

1072 detectorSources = inputCatalog[inputCatalog["detector"] == detector] 

1073 

1074 extensionIndex = np.flatnonzero( 

1075 (extensionInfo.visit == visit) & (extensionInfo.detector == detector) 

1076 )[0] 

1077 sourceCat = detectorSources[sourceIndices[extensionIndex]] 

1078 

1079 xCov = sourceCat["xErr"] ** 2 

1080 yCov = sourceCat["yErr"] ** 2 

1081 xyCov = sourceCat["ixy"] * (xCov + yCov) / (sourceCat["ixx"] + sourceCat["iyy"]) 

1082 # TODO: add correct xyErr if DM-7101 is ever done. 

1083 

1084 d = { 

1085 "x": sourceCat["x"].to_numpy(), 

1086 "y": sourceCat["y"].to_numpy(), 

1087 "xCov": xCov.to_numpy(), 

1088 "yCov": yCov.to_numpy(), 

1089 "xyCov": xyCov.to_numpy(), 

1090 } 

1091 

1092 wcsf.setObjects(extensionIndex, d, "x", "y", ["xCov", "yCov", "xyCov"]) 

1093 

1094 def _add_ref_objects(self, wcsf, refObjects, refCovariance, extensionInfo): 

1095 """Add reference sources to the wcsfit.WCSFit object. 

1096 

1097 Parameters 

1098 ---------- 

1099 wcsf : `wcsfit.WCSFit` 

1100 WCS-fitting object. 

1101 refObjects : `dict` 

1102 Position and error information of reference objects. 

1103 refCovariance : `list` of `float` 

1104 Flattened output covariance matrix. 

1105 extensionInfo : `lsst.pipe.base.Struct` 

1106 Struct containing properties for each extension. 

1107 """ 

1108 extensionIndex = np.flatnonzero(extensionInfo.extensionType == "REFERENCE")[0] 

1109 

1110 if self.config.fitProperMotion: 

1111 wcsf.setObjects( 

1112 extensionIndex, 

1113 refObjects, 

1114 "ra", 

1115 "dec", 

1116 ["raCov", "decCov", "raDecCov"], 

1117 pmDecKey="decPM", 

1118 pmRaKey="raPM", 

1119 parallaxKey="parallax", 

1120 pmCovKey="fullCov", 

1121 pmCov=refCovariance, 

1122 ) 

1123 else: 

1124 wcsf.setObjects(extensionIndex, refObjects, "ra", "dec", ["raCov", "decCov", "raDecCov"]) 

1125 

1126 def _make_afw_wcs(self, mapDict, centerRA, centerDec, doNormalizePixels=False, xScale=1, yScale=1): 

1127 """Make an `lsst.afw.geom.SkyWcs` from a dictionary of mappings. 

1128 

1129 Parameters 

1130 ---------- 

1131 mapDict : `dict` 

1132 Dictionary of mapping parameters. 

1133 centerRA : `lsst.geom.Angle` 

1134 RA of the tangent point. 

1135 centerDec : `lsst.geom.Angle` 

1136 Declination of the tangent point. 

1137 doNormalizePixels : `bool` 

1138 Whether to normalize pixels so that range is [-1,1]. 

1139 xScale : `float` 

1140 Factor by which to normalize x-dimension. Corresponds to width of 

1141 detector. 

1142 yScale : `float` 

1143 Factor by which to normalize y-dimension. Corresponds to height of 

1144 detector. 

1145 

1146 Returns 

1147 ------- 

1148 outWCS : `lsst.afw.geom.SkyWcs` 

1149 WCS constructed from the input mappings 

1150 """ 

1151 # Set up pixel frames 

1152 pixelFrame = astshim.Frame(2, "Domain=PIXELS") 

1153 normedPixelFrame = astshim.Frame(2, "Domain=NORMEDPIXELS") 

1154 

1155 if doNormalizePixels: 

1156 # Pixels will need to be rescaled before going into the mappings 

1157 normCoefficients = [-1.0, 2.0 / xScale, 0, -1.0, 0, 2.0 / yScale] 

1158 normMap = _convert_to_ast_polymap_coefficients(normCoefficients) 

1159 else: 

1160 normMap = astshim.UnitMap(2) 

1161 

1162 # All of the detectors for one visit map to the same tangent plane 

1163 tangentPoint = lsst.geom.SpherePoint(centerRA, centerDec) 

1164 cdMatrix = afwgeom.makeCdMatrix(1.0 * lsst.geom.degrees, 0 * lsst.geom.degrees, True) 

1165 iwcToSkyWcs = afwgeom.makeSkyWcs(lsst.geom.Point2D(0, 0), tangentPoint, cdMatrix) 

1166 iwcToSkyMap = iwcToSkyWcs.getFrameDict().getMapping("PIXELS", "SKY") 

1167 skyFrame = iwcToSkyWcs.getFrameDict().getFrame("SKY") 

1168 

1169 frameDict = astshim.FrameDict(pixelFrame) 

1170 frameDict.addFrame("PIXELS", normMap, normedPixelFrame) 

1171 

1172 currentFrameName = "NORMEDPIXELS" 

1173 

1174 # Dictionary values are ordered according to the maps' application. 

1175 for m, mapElement in enumerate(mapDict.values()): 

1176 mapType = mapElement["Type"] 

1177 

1178 if mapType == "Poly": 

1179 mapCoefficients = mapElement["Coefficients"] 

1180 astMap = _convert_to_ast_polymap_coefficients(mapCoefficients) 

1181 elif mapType == "Identity": 

1182 astMap = astshim.UnitMap(2) 

1183 else: 

1184 raise ValueError(f"Converting map type {mapType} to WCS is not supported") 

1185 

1186 if m == len(mapDict) - 1: 

1187 newFrameName = "IWC" 

1188 else: 

1189 newFrameName = "INTERMEDIATE" + str(m) 

1190 newFrame = astshim.Frame(2, f"Domain={newFrameName}") 

1191 frameDict.addFrame(currentFrameName, astMap, newFrame) 

1192 currentFrameName = newFrameName 

1193 frameDict.addFrame("IWC", iwcToSkyMap, skyFrame) 

1194 

1195 outWCS = afwgeom.SkyWcs(frameDict) 

1196 return outWCS 

1197 

1198 def _make_outputs(self, wcsf, visitSummaryTables, exposureInfo): 

1199 """Make a WCS object out of the WCS models. 

1200 

1201 Parameters 

1202 ---------- 

1203 wcsf : `wcsfit.WCSFit` 

1204 WCSFit object, assumed to already contain the fitted model.

1205 visitSummaryTables : `list` of `lsst.afw.table.ExposureCatalog` 

1206 Catalogs with per-detector summary information from which to grab 

1207 detector information. 

1208 exposureInfo : `lsst.pipe.base.Struct`

1209 Struct containing general properties for the input visits.

1210 

1211 Returns 

1212 ------- 

1213 catalogs : `dict` of [`str`, `lsst.afw.table.ExposureCatalog`] 

1214 Dictionary of `lsst.afw.table.ExposureCatalog` objects with the WCS 

1215 set to the WCS fit in wcsf, keyed by the visit name. 

1216 """ 

1217 # Get the parameters of the fit models 

1218 mapParams = wcsf.mapCollection.getParamDict() 

1219 

1220 # Set up the schema for the output catalogs 

1221 schema = lsst.afw.table.ExposureTable.makeMinimalSchema() 

1222 schema.addField("visit", type="L", doc="Visit number") 

1223 

1224 # Pixels will need to be rescaled before going into the mappings 

1225 sampleDetector = visitSummaryTables[0][0] 

1226 xscale = sampleDetector["bbox_max_x"] - sampleDetector["bbox_min_x"] 

1227 yscale = sampleDetector["bbox_max_y"] - sampleDetector["bbox_min_y"] 

1228 

1229 catalogs = {} 

1230 for v, visitSummary in enumerate(visitSummaryTables): 

1231 visit = visitSummary[0]["visit"] 

1232 

1233 catalog = lsst.afw.table.ExposureCatalog(schema) 

1234 catalog.resize(len(exposureInfo.detectors)) 

1235 catalog["visit"] = visit 

1236 

1237 for d, detector in enumerate(visitSummary["id"]): 

1238 mapName = f"{visit}/{detector}" 

1239 

1240 mapElements = wcsf.mapCollection.orderAtoms(f"{mapName}/base") 

1241 mapDict = {} 

1242 for m, mapElement in enumerate(mapElements): 

1243 mapType = wcsf.mapCollection.getMapType(mapElement) 

1244 mapDict[mapElement] = {"Type": mapType} 

1245 

1246 if mapType == "Poly": 

1247 mapCoefficients = mapParams[mapElement] 

1248 mapDict[mapElement]["Coefficients"] = mapCoefficients 

1249 

1250 # The RA and Dec of the visit are needed for the last step of 

1251 # the mapping from the visit tangent plane to RA and Dec 

1252 outWCS = self._make_afw_wcs( 

1253 mapDict, 

1254 exposureInfo.ras[v] * lsst.geom.radians, 

1255 exposureInfo.decs[v] * lsst.geom.radians, 

1256 doNormalizePixels=True, 

1257 xScale=xscale, 

1258 yScale=yscale, 

1259 ) 

1260 

1261 catalog[d].setId(detector) 

1262 catalog[d].setWcs(outWCS) 

1263 catalog.sort() 

1264 catalogs[visit] = catalog 

1265 

1266 return catalogs