# This file is part of drp_tasks.
#
# LSST Data Management System
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
# See COPYRIGHT file at the top of the source tree.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <https://www.lsstcorp.org/LegalNotices/>.
#
import numpy as np
import astropy.time
import astropy.units as u
import astropy.coordinates
import yaml
import wcsfit
import astshim
import pyarrow as pa

import lsst.geom
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.sphgeom
import lsst.afw.table
import lsst.afw.geom as afwgeom
from lsst.meas.algorithms import (LoadReferenceObjectsConfig, ReferenceObjectLoader,
                                  ReferenceSourceSelectorTask)
from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry

__all__ = ['GbdesAstrometricFitConnections', 'GbdesAstrometricFitConfig', 'GbdesAstrometricFitTask']

def _lookup_visit_refcats(datasetType, registry, quantumDataId, collections):
    """Lookup function that finds all refcats for all visits that overlap a
    tract, rather than just the refcats that directly overlap the tract.
    Borrowed from jointcal.

    Parameters
    ----------
    datasetType : `lsst.daf.butler.DatasetType`
        Type of dataset being searched for.
    registry : `lsst.daf.butler.Registry`
        Data repository registry to search.
    quantumDataId : `lsst.daf.butler.DataCoordinate`
        Data ID of the quantum; expected to be something we can use as a
        constraint to query for overlapping visits.
    collections : `Iterable` [ `str` ]
        Collections to search.

    Returns
    -------
    refs : `Iterator` [ `lsst.daf.butler.DatasetRef` ]
        Iterator over refcat references.
    """
    refs = set()
    # Use .expanded() on the query methods below because we need data IDs with
    # regions, both in the outer loop over visits (queryDatasets will expand
    # any data ID we give it, but doing it up-front in bulk is much more
    # efficient) and in the data IDs of the DatasetRefs this function yields
    # (because the RefCatLoader relies on them to do some of its own
    # filtering).
    for visit_data_id in set(registry.queryDataIds('visit', dataId=quantumDataId).expanded()):
        refs.update(
            registry.queryDatasets(
                datasetType,
                collections=collections,
                dataId=visit_data_id,
                findFirst=True,
            ).expanded()
        )
    yield from refs

def _make_ref_covariance_matrix(refCat, inputUnit=u.radian, outputCoordUnit=u.marcsec,
                                outputPMUnit=u.marcsec):
    """Make a covariance matrix for the reference catalog including proper
    motion and parallax.

    The output is flattened to one dimension to match the format expected by
    `gbdes`.

    Parameters
    ----------
    refCat : `lsst.afw.table.SimpleCatalog`
        Catalog including proper motion and parallax measurements.
    inputUnit : `astropy.unit.core.Unit`
        Units of the input catalog.
    outputCoordUnit : `astropy.unit.core.Unit`
        Units required for the coordinates in the covariance matrix. `gbdes`
        expects milliarcseconds.
    outputPMUnit : `astropy.unit.core.Unit`
        Units required for the proper motion/parallax in the covariance
        matrix. `gbdes` expects milliarcseconds.

    Returns
    -------
    cov : `list` of `float`
        Flattened output covariance matrix.
    """
    # Here is the standard ordering of components in the cov matrix, chosen
    # to match the PM enumeration in the C++ code of the gbdes package's
    # Match. Each tuple below gives: the array holding the 1d error, the name
    # of the corresponding Gaia column, and its position in the Gaia ordering;
    # the order of the tuples is the order we want in our cov matrix.
    raErr = (refCat['coord_raErr'] * inputUnit).to(outputCoordUnit).to_value()
    decErr = (refCat['coord_decErr'] * inputUnit).to(outputCoordUnit).to_value()
    raPMErr = (refCat['pm_raErr'] * inputUnit).to(outputPMUnit).to_value()
    decPMErr = (refCat['pm_decErr'] * inputUnit).to(outputPMUnit).to_value()
    parallaxErr = (refCat['parallaxErr'] * inputUnit).to(outputPMUnit).to_value()
    stdOrder = ((raErr, 'ra', 0),
                (decErr, 'dec', 1),
                (raPMErr, 'pmra', 3),
                (decPMErr, 'pmdec', 4),
                (parallaxErr, 'parallax', 2))
    cov = np.zeros((len(refCat), 25))
    k = 0
    # TODO: when DM-35130 is done, we need the full covariance here
    for i, pr1 in enumerate(stdOrder):
        for j, pr2 in enumerate(stdOrder):
            if pr1[2] < pr2[2]:
                # add correlation coefficient (once it is available)
                # cov[:, k] = (pr1[0] * pr2[0] * refCat[pr1[1] + '_' + pr2[1]
                #              + '_corr'])
                cov[:, k] = 0
            elif pr1[2] > pr2[2]:
                # add correlation coefficient (once it is available)
                # cov[:, k] = (pr1[0] * pr2[0] * refCat[pr2[1] + '_' + pr1[1]
                #              + '_corr'])
                cov[:, k] = 0
            else:
                # diagonal element
                cov[:, k] = pr1[0] * pr2[0]
            k = k + 1
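    # NOTE: until DM-35130 adds the correlation terms, each row of cov is the
    # row-major flattening of a purely diagonal 5x5 matrix; e.g.
    # cov[i].reshape(5, 5) is diag([raErr**2, decErr**2, pmRaErr**2,
    # pmDecErr**2, parallaxErr**2]) for object i, in the tuple order above.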

    return cov

def _convert_to_ast_polymap_coefficients(coefficients):
    """Convert vector of polynomial coefficients from the format used in
    `gbdes` into AST format (see Poly2d::vectorIndex(i, j) in
    gbdes/gbutil/src/Poly2d.cpp). This assumes two input and two output
    coordinates; the degree of the polynomial is inferred from the number of
    coefficients.

    Parameters
    ----------
    coefficients : `list`
        Coefficients of the polynomials.

    Returns
    -------
    astPoly : `astshim.PolyMap`
        Coefficients in AST polynomial format.
    """
    polyArray = np.zeros((len(coefficients), 4))
    N = len(coefficients) / 2
    # Get the degree of the polynomial by applying the quadratic formula to
    # the formula for calculating the number of coefficients of the
    # polynomial.
    degree = int(-1.5 + 0.5 * (1 + 8 * N)**0.5)
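    # A 2-d polynomial of degree d has N = (d + 1) * (d + 2) / 2 coefficients
    # per output axis, so d = (-3 + sqrt(1 + 8 * N)) / 2; e.g. N = 15 gives
    # degree = 4.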

    for outVar in [1, 2]:
        for i in range(degree + 1):
            for j in range(degree + 1):
                if (i + j) > degree:
                    continue
                vectorIndex = int(((i+j)*(i+j+1))/2+j + N * (outVar - 1))
                polyArray[vectorIndex, 0] = coefficients[vectorIndex]
                polyArray[vectorIndex, 1] = outVar
                polyArray[vectorIndex, 2] = i
                polyArray[vectorIndex, 3] = j

    astPoly = astshim.PolyMap(polyArray, 2, options="IterInverse=1,NIterInverse=10,TolInverse=1e-7")
    return astPoly

def _get_wcs_from_sip(butlerWcs):
    """Get wcsfit.Wcs in TPV format from the SIP-formatted input WCS.

    Parameters
    ----------
    butlerWcs : `lsst.afw.geom.SkyWcs`
        Input WCS from the calexp in SIP format.

    Returns
    -------
    wcs : `wcsfit.Wcs`
        WCS object in TPV format.
    """
    fits_metadata = butlerWcs.getFitsMetadata()
    if not ((fits_metadata.get('CTYPE1') == 'RA---TAN-SIP')
            and (fits_metadata.get('CTYPE2') == 'DEC--TAN-SIP')):
        raise ValueError(f"CTYPES {fits_metadata.get('CTYPE1')} and {fits_metadata.get('CTYPE2')} "
                         "do not match SIP convention")

    # Correct CRPIX values to correspond to source table pixel indexing
    # convention
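    # (FITS CRPIX values are 1-based, while the source tables use 0-based
    # pixel coordinates.)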

    crpix1 = fits_metadata.get('CRPIX1')
    crpix2 = fits_metadata.get('CRPIX2')
    fits_metadata.set('CRPIX1', crpix1 - 1)
    fits_metadata.set('CRPIX2', crpix2 - 1)

    floatDict = {k: fits_metadata[k] for k in fits_metadata if isinstance(fits_metadata[k], (int, float))}

    wcs = wcsfit.readTPVFromSIP(floatDict, 'SIP')

    return wcs

class GbdesAstrometricFitConnections(pipeBase.PipelineTaskConnections,
                                     dimensions=('skymap', 'tract', 'instrument', 'physical_filter')):
    """Middleware input/output connections for task data."""
    inputCatalogRefs = pipeBase.connectionTypes.Input(
        doc="Source table in parquet format, per visit.",
        name='preSourceTable_visit',
        storageClass='DataFrame',
        dimensions=('instrument', 'visit'),
        deferLoad=True,
        multiple=True,
    )
    inputVisitSummary = pipeBase.connectionTypes.Input(
        doc=("Per-visit consolidated exposure metadata built from calexps. "
             "These catalogs use detector id for the id and must be sorted for "
             "fast lookups of a detector."),
        name='visitSummary',
        storageClass='ExposureCatalog',
        dimensions=('instrument', 'visit'),
        multiple=True,
    )
    referenceCatalog = pipeBase.connectionTypes.PrerequisiteInput(
        doc="The astrometry reference catalog to match to loaded input catalog sources.",
        name='gaia_dr2_20200414',
        storageClass='SimpleCatalog',
        dimensions=('skypix',),
        deferLoad=True,
        multiple=True,
        lookupFunction=_lookup_visit_refcats,
    )
    outputWcs = pipeBase.connectionTypes.Output(
        doc=("Per-tract, per-visit world coordinate systems derived from the fitted model."
             " These catalogs only contain entries for detectors with an output, and use"
             " the detector id for the catalog id, sorted on id for fast lookups of a detector."),
        name='GbdesAstrometricFitSkyWcsCatalog',
        storageClass='ExposureCatalog',
        dimensions=('instrument', 'visit', 'skymap', 'tract'),
        multiple=True
    )
    outputCatalog = pipeBase.connectionTypes.Output(
        doc=("Source table with stars used in fit, along with residuals in pixel coordinates and tangent "
             "plane coordinates and chisq values."),
        name='GbdesAstrometricFit_fitStars',
        storageClass='ArrowTable',
        dimensions=('instrument', 'skymap', 'tract', 'physical_filter'),
    )

class GbdesAstrometricFitConfig(pipeBase.PipelineTaskConfig,
                                pipelineConnections=GbdesAstrometricFitConnections):
    """Configuration for GbdesAstrometricFitTask"""
    sourceSelector = sourceSelectorRegistry.makeField(
        doc="How to select sources for cross-matching.",
        default='science'
    )
    referenceSelector = pexConfig.ConfigurableField(
        target=ReferenceSourceSelectorTask,
        doc="How to down-select the loaded astrometry reference catalog.",
    )
    matchRadius = pexConfig.Field(
        doc="Matching tolerance between associated objects (arcseconds).",
        dtype=float,
        default=1.0
    )
    minMatches = pexConfig.Field(
        doc="Number of matches required to keep a source object.",
        dtype=int,
        default=2
    )
    allowSelfMatches = pexConfig.Field(
        doc="Allow multiple sources from the same visit to be associated with the same object.",
        dtype=bool,
        default=False
    )
    sourceFluxType = pexConfig.Field(
        dtype=str,
        doc="Source flux field to use in source selection and to get fluxes from the catalog.",
        default='apFlux_12_0'
    )
    systematicError = pexConfig.Field(
        dtype=float,
        doc=("Systematic error padding added in quadrature for the science catalogs (marcsec). The default "
             "value is equivalent to 0.02 pixels for HSC."),
        default=0.0034
    )
    referenceSystematicError = pexConfig.Field(
        dtype=float,
        doc="Systematic error padding added in quadrature for the reference catalog (marcsec).",
        default=0.0
    )
    modelComponents = pexConfig.ListField(
        dtype=str,
        doc=("List of mappings to apply to transform from pixels to sky, in order of their application. "
             "Supported options are 'INSTRUMENT/DEVICE' and 'EXPOSURE'."),
        default=['INSTRUMENT/DEVICE', 'EXPOSURE']
    )
    deviceModel = pexConfig.ListField(
        dtype=str,
        doc=("List of mappings to apply to transform from detector pixels to intermediate frame. Map names "
             "should match the format 'BAND/DEVICE/<map name>'."),
        default=['BAND/DEVICE/poly']
    )
    exposureModel = pexConfig.ListField(
        dtype=str,
        doc=("List of mappings to apply to transform from intermediate frame to sky coordinates. Map names "
             "should match the format 'EXPOSURE/<map name>'."),
        default=['EXPOSURE/poly']
    )
    devicePolyOrder = pexConfig.Field(
        dtype=int,
        doc="Order of device polynomial model.",
        default=4
    )
    exposurePolyOrder = pexConfig.Field(
        dtype=int,
        doc="Order of exposure polynomial model.",
        default=6
    )
    fitProperMotion = pexConfig.Field(
        dtype=bool,
        doc="Fit the proper motions of the objects.",
        default=False
    )
    excludeNonPMObjects = pexConfig.Field(
        dtype=bool,
        doc="Exclude reference objects without proper motion/parallax information.",
        default=True
    )

    def setDefaults(self):
        # Use only stars because aperture fluxes of galaxies are biased and
        # depend on seeing.
        self.sourceSelector['science'].doUnresolved = True
        self.sourceSelector['science'].unresolved.name = 'extendedness'

        # Use only isolated sources.
        self.sourceSelector['science'].doIsolated = True
        self.sourceSelector['science'].isolated.parentName = 'parentSourceId'
        self.sourceSelector['science'].isolated.nChildName = 'deblend_nChild'
        # Do not use either flux or centroid measurements with flags,
        # chosen from the usual QA flags for stars.
        self.sourceSelector['science'].doFlags = True
        badFlags = ['pixelFlags_edge',
                    'pixelFlags_saturated',
                    'pixelFlags_interpolatedCenter',
                    'pixelFlags_interpolated',
                    'pixelFlags_crCenter',
                    'pixelFlags_bad',
                    'hsmPsfMoments_flag',
                    f'{self.sourceFluxType}_flag',
                    ]
        self.sourceSelector['science'].flags.bad = badFlags

    def validate(self):
        super().validate()

        # Check if all components of the device and exposure models are
        # supported.
        for component in self.deviceModel:
            if not (('poly' in component.lower()) or ('identity' in component.lower())):
                raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.deviceModel, self,
                                                     f'deviceModel component {component} is not supported.')

        for component in self.exposureModel:
            if not (('poly' in component.lower()) or ('identity' in component.lower())):
                raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.exposureModel, self,
                                                     f'exposureModel component {component} is not supported.')

class GbdesAstrometricFitTask(pipeBase.PipelineTask):
    """Calibrate the WCS across multiple visits of the same field using the
    GBDES package.
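
    This task associates sources from all of the input visits (together with
    an external reference catalog) into matches using a friends-of-friends
    algorithm, fits the per-detector and per-visit mappings described by the
    config with `wcsfit.WCSFit`, and converts the result into per-visit
    `lsst.afw.geom.SkyWcs` objects plus a catalog of fit residuals.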

    """

    ConfigClass = GbdesAstrometricFitConfig
    _DefaultName = 'gbdesAstrometricFit'

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.makeSubtask('sourceSelector')
        self.makeSubtask('referenceSelector')

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        # We override runQuantum to set up the refObjLoaders
        inputs = butlerQC.get(inputRefs)

        instrumentName = butlerQC.quantum.dataId['instrument']

        sampleRefCat = inputs['referenceCatalog'][0].get()
        refEpoch = sampleRefCat[0]['epoch']

        refConfig = LoadReferenceObjectsConfig()
        refConfig.anyFilterMapsToThis = 'phot_g_mean'
        refConfig.requireProperMotion = True
        refObjectLoader = ReferenceObjectLoader(dataIds=[ref.datasetRef.dataId
                                                         for ref in inputRefs.referenceCatalog],
                                                refCats=inputs.pop('referenceCatalog'),
                                                config=refConfig,
                                                log=self.log)

        output = self.run(**inputs, instrumentName=instrumentName, refEpoch=refEpoch,
                          refObjectLoader=refObjectLoader)

        for outputRef in outputRefs.outputWcs:
            visit = outputRef.dataId['visit']
            butlerQC.put(output.outputWCSs[visit], outputRef)
        butlerQC.put(output.outputCatalog, outputRefs.outputCatalog)

    def run(self, inputCatalogRefs, inputVisitSummary, instrumentName="", refEpoch=None,
            refObjectLoader=None):
        """Run the WCS fit for a given set of visits.

        Parameters
        ----------
        inputCatalogRefs : `list`
            List of `DeferredDatasetHandle`s pointing to visit-level source
            tables.
        inputVisitSummary : `list` of `lsst.afw.table.ExposureCatalog`
            List of catalogs with per-detector summary information.
        instrumentName : `str`, optional
            Name of the instrument used. This is only used for labelling.
        refEpoch : `float`
            Epoch of the reference objects in MJD.
        refObjectLoader : instance of
            `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader`
            Reference object loader instance.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            ``outputWCSs`` : `dict` of [`int`, `lsst.afw.table.ExposureCatalog`]
                Dictionary of exposure catalogs (one per visit, keyed by visit
                number) with the WCS for each detector set by the new fitted
                WCS.
            ``fitModel`` : `wcsfit.WCSFit`
                Model-fitting object with final model parameters.
            ``outputCatalog`` : `pyarrow.Table`
                Catalog with fit residuals of all sources used.
        """
        self.log.info("Gathering instrument, exposure, and field info")
        # Set up an instrument object
        instrument = wcsfit.Instrument(instrumentName)

        # Get RA, Dec, MJD, etc., for the input visits
        exposureInfo, exposuresHelper, extensionInfo = self._get_exposure_info(inputVisitSummary, instrument,
                                                                               refEpoch=refEpoch)

        # Get information about the extent of the input visits
        fields, fieldCenter, fieldRadius = self._prep_sky(inputVisitSummary, exposureInfo.medianEpoch)

        self.log.info("Load catalogs and associate sources")
        # Set up class to associate sources into matches using a
        # friends-of-friends algorithm
        associations = wcsfit.FoFClass(fields, [instrument], exposuresHelper,
                                       [fieldRadius.asDegrees()],
                                       (self.config.matchRadius * u.arcsec).to(u.degree).value)

        # Add the reference catalog to the associator
        refObjects, refCovariance = self._load_refcat(associations, refObjectLoader, fieldCenter, fieldRadius,
                                                      extensionInfo, epoch=refEpoch)

        # Add the science catalogs and associate new sources as they are added
        sourceIndices, usedColumns = self._load_catalogs_and_associate(associations, inputCatalogRefs,
                                                                       extensionInfo)

        self.log.info("Fit the WCSs")
        # Set up a YAML-type string using the config variables and a sample
        # visit
        inputYAML = self.make_yaml(inputVisitSummary[0])

        # Set the verbosity level for WCSFit from the task log level.
        # TODO: DM-36850, Add lsst.log to gbdes so that log messages are
        # properly propagated.
        loglevel = self.log.getEffectiveLevel()
        if loglevel >= self.log.WARNING:
            verbose = 0
        elif loglevel == self.log.INFO:
            verbose = 1
        else:
            verbose = 2

        # Set up the WCS-fitting class using the results of the FOF associator
        wcsf = wcsfit.WCSFit(fields, [instrument], exposuresHelper,
                             extensionInfo.visitIndex, extensionInfo.detectorIndex,
                             inputYAML, extensionInfo.wcs, associations.sequence, associations.extn,
                             associations.obj, sysErr=self.config.systematicError,
                             refSysErr=self.config.referenceSystematicError,
                             usePM=self.config.fitProperMotion,
                             verbose=verbose)

        # Add the science and reference sources
        self._add_objects(wcsf, inputCatalogRefs, sourceIndices, extensionInfo, usedColumns)
        self._add_ref_objects(wcsf, refObjects, refCovariance, extensionInfo)

        # Do the WCS fit
        wcsf.fit()
        self.log.info("WCS fitting done")

        outputWCSs = self._make_outputs(wcsf, inputVisitSummary, exposureInfo)
        outputCatalog = pa.Table.from_pydict(wcsf.getOutputCatalog())

        return pipeBase.Struct(outputWCSs=outputWCSs,
                               fitModel=wcsf,
                               outputCatalog=outputCatalog)

    def _prep_sky(self, inputVisitSummaries, epoch, fieldName='Field'):
        """Get center and radius of the input tract. This assumes that all
        visits will be put into the same `wcsfit.Field` and fit together.

        Parameters
        ----------
        inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
            List of catalogs with per-detector summary information.
        epoch : `float`
            Reference epoch.
        fieldName : `str`
            Name of the field, used internally.

        Returns
        -------
        fields : `wcsfit.Fields`
            Object with field information.
        center : `lsst.geom.SpherePoint`
            Center of the field.
        radius : `lsst.sphgeom._sphgeom.Angle`
            Radius of the bounding circle of the tract.
        """
        allDetectorCorners = []
        for visSum in inputVisitSummaries:
            detectorCorners = [lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector() for (ra, dec)
                               in zip(visSum['raCorners'].ravel(), visSum['decCorners'].ravel())]
            allDetectorCorners.extend(detectorCorners)
        boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(allDetectorCorners).getBoundingCircle()
        center = lsst.geom.SpherePoint(boundingCircle.getCenter())
        ra = center.getRa().asDegrees()
        dec = center.getDec().asDegrees()
        radius = boundingCircle.getOpeningAngle()

        # wcsfit.Fields describes a list of fields, but we assume all
        # observations will be fit together in one field.
        fields = wcsfit.Fields([fieldName], [ra], [dec], [epoch])

        return fields, center, radius

    def _get_exposure_info(self, visitSummaryTables, instrument, fieldNumber=0, instrumentNumber=0,
                           refEpoch=None):
        """Get various information about the input visits to feed to the
        fitting routines.

        Parameters
        ----------
        visitSummaryTables : `list` of `lsst.afw.table.ExposureCatalog`
            Tables for each visit with information for detectors.
        instrument : `wcsfit.Instrument`
            Instrument object to which detector information is added.
        fieldNumber : `int`
            Index of the field for these visits. Should be zero if all data is
            being fit together.
        instrumentNumber : `int`
            Index of the instrument for these visits. Should be zero if all
            data comes from the same instrument.
        refEpoch : `float`
            Epoch of the reference objects in MJD.

        Returns
        -------
        exposureInfo : `lsst.pipe.base.Struct`
            Struct containing general properties for the visits:
            ``visits`` : `list`
                List of visit names.
            ``detectors`` : `list`
                List of all detectors in any visit.
            ``ras`` : `list` of `float`
                List of boresight RAs for each visit.
            ``decs`` : `list` of `float`
                List of boresight Decs for each visit.
            ``medianEpoch`` : `float`
                Median epoch of all visits in decimal-year format.
        exposuresHelper : `wcsfit.ExposuresHelper`
            Object containing information about the input visits.
        extensionInfo : `lsst.pipe.base.Struct`
            Struct containing properties for each extension:
            ``visit`` : `np.ndarray`
                Name of the visit for this extension.
            ``detector`` : `np.ndarray`
                Name of the detector for this extension.
            ``visitIndex`` : `np.ndarray` of `int`
                Index of visit for this extension.
            ``detectorIndex`` : `np.ndarray` of `int`
                Index of the detector for this extension.
            ``wcs`` : `np.ndarray` of `wcsfit.Wcs`
                Initial WCS for this extension.
            ``extensionType`` : `np.ndarray` of `str`
                "SCIENCE" or "REFERENCE".
        """
        exposureNames = []
        ras = []
        decs = []
        visits = []
        detectors = []
        airmasses = []
        exposureTimes = []
        mjds = []
        observatories = []
        wcss = []

        extensionType = []
        extensionVisitIndices = []
        extensionDetectorIndices = []
        extensionVisits = []
        extensionDetectors = []
        # Get information for all the science visits
        for v, visitSummary in enumerate(visitSummaryTables):
            visitInfo = visitSummary[0].getVisitInfo()
            visit = visitSummary[0]['visit']
            visits.append(visit)
            exposureNames.append(str(visit))
            raDec = visitInfo.getBoresightRaDec()
            ras.append(raDec.getRa().asRadians())
            decs.append(raDec.getDec().asRadians())
            airmasses.append(visitInfo.getBoresightAirmass())
            exposureTimes.append(visitInfo.getExposureTime())
            obsDate = visitInfo.getDate()
            obsMJD = obsDate.get(obsDate.MJD)
            mjds.append(obsMJD)
            # Get the observatory ICRS position for use in fitting parallax
            obsLon = visitInfo.observatory.getLongitude().asDegrees()
            obsLat = visitInfo.observatory.getLatitude().asDegrees()
            obsElev = visitInfo.observatory.getElevation()
            earthLocation = astropy.coordinates.EarthLocation.from_geodetic(obsLon, obsLat, obsElev)
            observatory_gcrs = earthLocation.get_gcrs(astropy.time.Time(obsMJD, format='mjd'))
            observatory_icrs = observatory_gcrs.transform_to(astropy.coordinates.ICRS())
            # We want the position in AU in Cartesian coordinates
            observatories.append(observatory_icrs.cartesian.xyz.to(u.AU).value)

            for row in visitSummary:
                detector = row['id']
                if detector not in detectors:
                    detectors.append(detector)
                    detectorBounds = wcsfit.Bounds(row['bbox_min_x'], row['bbox_max_x'],
                                                   row['bbox_min_y'], row['bbox_max_y'])
                    instrument.addDevice(str(detector), detectorBounds)

                detectorIndex = np.flatnonzero(detector == np.array(detectors))[0]
                extensionVisitIndices.append(v)
                extensionDetectorIndices.append(detectorIndex)
                extensionVisits.append(visit)
                extensionDetectors.append(detector)
                extensionType.append('SCIENCE')

                wcs = row.getWcs()
                wcss.append(_get_wcs_from_sip(wcs))

        fieldNumbers = list(np.ones(len(exposureNames), dtype=int) * fieldNumber)
        instrumentNumbers = list(np.ones(len(exposureNames), dtype=int) * instrumentNumber)

        # Set the reference epoch to be the median of the science visits.
        # The reference catalog will be shifted to this date.
        medianEpoch = astropy.time.Time(np.median(mjds), format='mjd').decimalyear

        # Add information for the reference catalog. Most of the values are
        # not used.
        exposureNames.append('REFERENCE')
        visits.append(-1)
        fieldNumbers.append(0)
        if self.config.fitProperMotion:
            instrumentNumbers.append(-2)
        else:
            instrumentNumbers.append(-1)
        ras.append(0.0)
        decs.append(0.0)
        airmasses.append(0.0)
        exposureTimes.append(0)
        mjds.append((refEpoch if (refEpoch is not None) else medianEpoch))
        observatories.append(np.array([0, 0, 0]))
        identity = wcsfit.IdentityMap()
        icrs = wcsfit.SphericalICRS()
        refWcs = wcsfit.Wcs(identity, icrs, 'Identity', np.pi / 180.)
        wcss.append(refWcs)

        extensionVisitIndices.append(len(exposureNames) - 1)
        extensionDetectorIndices.append(-1)  # REFERENCE device must be -1
        extensionVisits.append(-1)
        extensionDetectors.append(-1)
        extensionType.append('REFERENCE')

        # Make a table of information to use elsewhere in the class
        extensionInfo = pipeBase.Struct(visit=np.array(extensionVisits),
                                        detector=np.array(extensionDetectors),
                                        visitIndex=np.array(extensionVisitIndices),
                                        detectorIndex=np.array(extensionDetectorIndices),
                                        wcs=np.array(wcss),
                                        extensionType=np.array(extensionType))

        # Make the exposuresHelper object to use in the fitting routines
        exposuresHelper = wcsfit.ExposuresHelper(exposureNames,
                                                 fieldNumbers,
                                                 instrumentNumbers,
                                                 ras,
                                                 decs,
                                                 airmasses,
                                                 exposureTimes,
                                                 mjds,
                                                 observatories)

        exposureInfo = pipeBase.Struct(visits=visits,
                                       detectors=detectors,
                                       ras=ras,
                                       decs=decs,
                                       medianEpoch=medianEpoch)

        return exposureInfo, exposuresHelper, extensionInfo

    def _load_refcat(self, associations, refObjectLoader, center, radius, extensionInfo, epoch=None,
                     fieldIndex=0):
        """Load the reference catalog and add reference objects to the
        `wcsfit.FoFClass` object.

        Parameters
        ----------
        associations : `wcsfit.FoFClass`
            Object to which to add the catalog of reference objects.
        refObjectLoader :
            `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader`
            Object set up to load reference catalog objects.
        center : `lsst.geom.SpherePoint`
            Center of the circle in which to load reference objects.
        radius : `lsst.sphgeom._sphgeom.Angle`
            Radius of the circle in which to load reference objects.
        extensionInfo : `lsst.pipe.base.Struct`
            Struct containing properties for each extension.
        epoch : `float`
            MJD to which to correct the object positions.
        fieldIndex : `int`
            Index of the field. Should be zero if all the data is fit
            together.

        Returns
        -------
        refObjects : `dict`
            Position and error information of reference objects.
        refCovariance : `list` of `float`
            Flattened output covariance matrix.
        """
        formattedEpoch = astropy.time.Time(epoch, format='mjd')

        refFilter = refObjectLoader.config.anyFilterMapsToThis
        skyCircle = refObjectLoader.loadSkyCircle(center, radius, refFilter, epoch=formattedEpoch)

        selected = self.referenceSelector.run(skyCircle.refCat)
        # Need memory contiguity to get reference filters as a vector.
        if not selected.sourceCat.isContiguous():
            refCat = selected.sourceCat.copy(deep=True)
        else:
            refCat = selected.sourceCat
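        # Reference objects with pm_raErr == 0 are taken to have no
        # proper-motion solution and are dropped here.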

        if self.config.excludeNonPMObjects:
            hasPM = refCat['pm_raErr'] != 0
            refCat = refCat[hasPM]

        ra = (refCat['coord_ra'] * u.radian).to(u.degree).to_value().tolist()
        dec = (refCat['coord_dec'] * u.radian).to(u.degree).to_value().tolist()
        raCov = ((refCat['coord_raErr'] * u.radian).to(u.degree).to_value()**2).tolist()
        decCov = ((refCat['coord_decErr'] * u.radian).to(u.degree).to_value()**2).tolist()

        # TODO: DM-37316 we need the full gaia covariance here
        refObjects = {'ra': ra, 'dec': dec, 'raCov': raCov, 'decCov': decCov,
                      'raDecCov': np.zeros(len(ra))}
        refCovariance = []

        if self.config.fitProperMotion:
            raPM = (refCat['pm_ra'] * u.radian).to(u.marcsec).to_value().tolist()
            decPM = (refCat['pm_dec'] * u.radian).to(u.marcsec).to_value().tolist()
            parallax = (refCat['parallax'] * u.radian).to(u.marcsec).to_value().tolist()
            cov = _make_ref_covariance_matrix(refCat)
            pmDict = {'raPM': raPM, 'decPM': decPM, 'parallax': parallax}
            refObjects.update(pmDict)
            refCovariance = cov

        extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0]
        visitIndex = extensionInfo.visitIndex[extensionIndex]
        detectorIndex = extensionInfo.detectorIndex[extensionIndex]
        instrumentIndex = -1  # -1 indicates the reference catalog
        refWcs = extensionInfo.wcs[extensionIndex]

        associations.addCatalog(refWcs, 'STELLAR', visitIndex, fieldIndex, instrumentIndex, detectorIndex,
                                extensionIndex, np.ones(len(refCat), dtype=bool),
                                ra, dec, np.arange(len(ra)))

        return refObjects, refCovariance

    def _load_catalogs_and_associate(self, associations, inputCatalogRefs, extensionInfo,
                                     fieldIndex=0, instrumentIndex=0):
        """Load the science catalogs and add the sources to the associator
        class `wcsfit.FoFClass`, associating them into matches as you go.

        Parameters
        ----------
        associations : `wcsfit.FoFClass`
            Object to which to add the catalog of reference objects.
        inputCatalogRefs : `list`
            List of DeferredDatasetHandles pointing to visit-level source
            tables.
        extensionInfo : `lsst.pipe.base.Struct`
            Struct containing properties for each extension.
        fieldIndex : `int`
            Index of the field for these catalogs. Should be zero assuming all
            data is being fit together.
        instrumentIndex : `int`
            Index of the instrument for these catalogs. Should be zero
            assuming all data comes from the same instrument.

        Returns
        -------
        sourceIndices : `list`
            List of boolean arrays used to select sources.
        columns : `list` of `str`
            List of columns needed from source tables.
        """
        columns = ['detector', 'sourceId', 'x', 'xErr', 'y', 'yErr', 'ixx', 'iyy', 'ixy',
                   f'{self.config.sourceFluxType}_instFlux', f'{self.config.sourceFluxType}_instFluxErr']
        if self.sourceSelector.config.doFlags:
            columns.extend(self.sourceSelector.config.flags.bad)
        if self.sourceSelector.config.doUnresolved:
            columns.append(self.sourceSelector.config.unresolved.name)
        if self.sourceSelector.config.doIsolated:
            columns.append(self.sourceSelector.config.isolated.parentName)
            columns.append(self.sourceSelector.config.isolated.nChildName)

        sourceIndices = [None] * len(extensionInfo.visit)
        for inputCatalogRef in inputCatalogRefs:
            visit = inputCatalogRef.dataId['visit']
            inputCatalog = inputCatalogRef.get(parameters={'columns': columns})
            detectors = set(inputCatalog['detector'])

            for detector in detectors:
                detectorSources = inputCatalog[inputCatalog['detector'] == detector]
                xCov = detectorSources['xErr']**2
                yCov = detectorSources['yErr']**2
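                # Approximate the centroid x-y covariance by scaling the total
                # centroid variance by ixy / (ixx + iyy).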

                xyCov = (detectorSources['ixy'] * (xCov + yCov)
                         / (detectorSources['ixx'] + detectorSources['iyy']))
                # Remove sources with bad shape measurements
                goodShapes = xyCov**2 <= (xCov * yCov)
                selected = self.sourceSelector.run(detectorSources)
                goodInds = selected.selected & goodShapes

                isStar = np.ones(goodInds.sum())
                extensionIndex = np.flatnonzero((extensionInfo.visit == visit)
                                                & (extensionInfo.detector == detector))[0]
                detectorIndex = extensionInfo.detectorIndex[extensionIndex]
                visitIndex = extensionInfo.visitIndex[extensionIndex]

                sourceIndices[extensionIndex] = goodInds

                wcs = extensionInfo.wcs[extensionIndex]
                associations.reprojectWCS(wcs, fieldIndex)

                associations.addCatalog(wcs, 'STELLAR', visitIndex, fieldIndex,
                                        instrumentIndex, detectorIndex, extensionIndex, isStar,
                                        detectorSources[goodInds]['x'].to_list(),
                                        detectorSources[goodInds]['y'].to_list(),
                                        np.arange(goodInds.sum()))

        associations.sortMatches(fieldIndex, minMatches=self.config.minMatches,
                                 allowSelfMatches=self.config.allowSelfMatches)

        return sourceIndices, columns

    def make_yaml(self, inputVisitSummary, inputFile=None):
        """Make a YAML-type object that describes the parameters of the fit
        model.

        Parameters
        ----------
        inputVisitSummary : `lsst.afw.table.ExposureCatalog`
            Catalog with per-detector summary information for a sample visit.
        inputFile : `str`
            Path to a file that contains a basic model.

        Returns
        -------
        inputYAML : `wcsfit.YAMLCollector`
            YAML object containing the model description.
        """
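        # With the default configuration, the dictionary assembled below is
        # equivalent to the following YAML (illustrative sketch only; the
        # bounding-box limits are taken from the input visit summary):
        #
        #   EXPOSURE/DEVICE/base: {Type: Composite, Elements: [INSTRUMENT/DEVICE, EXPOSURE]}
        #   INSTRUMENT/DEVICE: {Type: Composite, Elements: [BAND/DEVICE/poly]}
        #   BAND/DEVICE/poly: {Type: Poly, XPoly: {OrderX: 4, SumOrder: true}, ...}
        #   EXPOSURE: {Type: Composite, Elements: [EXPOSURE/poly]}
        #   EXPOSURE/poly: {Type: Poly, XPoly: {OrderX: 6, SumOrder: true}, ...}
        #   Identity: {Type: Identity}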

        if inputFile is not None:
            inputYAML = wcsfit.YAMLCollector(inputFile, 'PixelMapCollection')
        else:
            inputYAML = wcsfit.YAMLCollector('', 'PixelMapCollection')
        inputDict = {}
        modelComponents = ['INSTRUMENT/DEVICE', 'EXPOSURE']
        baseMap = {'Type': 'Composite', 'Elements': modelComponents}
        inputDict['EXPOSURE/DEVICE/base'] = baseMap

        xMin = str(inputVisitSummary['bbox_min_x'].min())
        xMax = str(inputVisitSummary['bbox_max_x'].max())
        yMin = str(inputVisitSummary['bbox_min_y'].min())
        yMax = str(inputVisitSummary['bbox_max_y'].max())

        deviceModel = {'Type': 'Composite', 'Elements': self.config.deviceModel.list()}
        inputDict['INSTRUMENT/DEVICE'] = deviceModel
        for component in self.config.deviceModel:
            if 'poly' in component.lower():
                componentDict = {'Type': 'Poly',
                                 'XPoly': {'OrderX': self.config.devicePolyOrder,
                                           'SumOrder': True},
                                 'YPoly': {'OrderX': self.config.devicePolyOrder,
                                           'SumOrder': True},
                                 'XMin': xMin, 'XMax': xMax, 'YMin': yMin, 'YMax': yMax}
            elif 'identity' in component.lower():
                componentDict = {'Type': 'Identity'}

            inputDict[component] = componentDict

        exposureModel = {'Type': 'Composite', 'Elements': self.config.exposureModel.list()}
        inputDict['EXPOSURE'] = exposureModel
        for component in self.config.exposureModel:
            if 'poly' in component.lower():
                componentDict = {'Type': 'Poly',
                                 'XPoly': {'OrderX': self.config.exposurePolyOrder,
                                           'SumOrder': 'true'},
                                 'YPoly': {'OrderX': self.config.exposurePolyOrder,
                                           'SumOrder': 'true'}}
            elif 'identity' in component.lower():
                componentDict = {'Type': 'Identity'}

            inputDict[component] = componentDict

        inputYAML.addInput(yaml.dump(inputDict))
        inputYAML.addInput('Identity:\n Type: Identity\n')

        return inputYAML

    def _add_objects(self, wcsf, inputCatalogRefs, sourceIndices, extensionInfo, columns):
        """Add science sources to the wcsfit.WCSFit object.

        Parameters
        ----------
        wcsf : `wcsfit.WCSFit`
            WCS-fitting object.
        inputCatalogRefs : `list`
            List of DeferredDatasetHandles pointing to visit-level source
            tables.
        sourceIndices : `list`
            List of boolean arrays used to select sources.
        extensionInfo : `lsst.pipe.base.Struct`
            Struct containing properties for each extension.
        columns : `list` of `str`
            List of columns needed from source tables.
        """
        for inputCatalogRef in inputCatalogRefs:
            visit = inputCatalogRef.dataId['visit']
            inputCatalog = inputCatalogRef.get(parameters={'columns': columns})
            detectors = set(inputCatalog['detector'])

            for detector in detectors:
                detectorSources = inputCatalog[inputCatalog['detector'] == detector]

                extensionIndex = np.flatnonzero((extensionInfo.visit == visit)
                                                & (extensionInfo.detector == detector))[0]
                sourceCat = detectorSources[sourceIndices[extensionIndex]]

                xCov = sourceCat['xErr']**2
                yCov = sourceCat['yErr']**2
                xyCov = (sourceCat['ixy'] * (xCov + yCov)
                         / (sourceCat['ixx'] + sourceCat['iyy']))
                # TODO: add correct xyErr if DM-7101 is ever done.

                d = {'x': sourceCat['x'].to_numpy(), 'y': sourceCat['y'].to_numpy(),
                     'xCov': xCov.to_numpy(), 'yCov': yCov.to_numpy(), 'xyCov': xyCov.to_numpy()}

                wcsf.setObjects(extensionIndex, d, 'x', 'y', ['xCov', 'yCov', 'xyCov'])

    def _add_ref_objects(self, wcsf, refObjects, refCovariance, extensionInfo):
        """Add reference sources to the wcsfit.WCSFit object.

        Parameters
        ----------
        wcsf : `wcsfit.WCSFit`
            WCS-fitting object.
        refObjects : `dict`
            Position and error information of reference objects.
        refCovariance : `list` of `float`
            Flattened output covariance matrix.
        extensionInfo : `lsst.pipe.base.Struct`
            Struct containing properties for each extension.
        """
        extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0]

        if self.config.fitProperMotion:
            wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov'],
                            pmDecKey='decPM', pmRaKey='raPM', parallaxKey='parallax', pmCovKey='fullCov',
                            pmCov=refCovariance)
        else:
            wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov'])

    def _make_afw_wcs(self, mapDict, centerRA, centerDec, doNormalizePixels=False, xScale=1, yScale=1):
        """Make an `lsst.afw.geom.SkyWcs` from a dictionary of mappings.

        Parameters
        ----------
        mapDict : `dict`
            Dictionary of mapping parameters.
        centerRA : `lsst.geom.Angle`
            RA of the tangent point.
        centerDec : `lsst.geom.Angle`
            Declination of the tangent point.
        doNormalizePixels : `bool`
            Whether to normalize pixels so that range is [-1, 1].
        xScale : `float`
            Factor by which to normalize x-dimension. Corresponds to width of
            detector.
        yScale : `float`
            Factor by which to normalize y-dimension. Corresponds to height of
            detector.

        Returns
        -------
        outWCS : `lsst.afw.geom.SkyWcs`
            WCS constructed from the input mappings.
        """
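        # The frame chain assembled below is
        # PIXELS -> NORMEDPIXELS -> (one frame per mapping in mapDict) -> IWC -> SKY,
        # where the last mapping in mapDict lands on the intermediate world
        # coordinate (IWC) frame and the final step applies the tangent-plane
        # projection centered at (centerRA, centerDec).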

        # Set up pixel frames
        pixelFrame = astshim.Frame(2, 'Domain=PIXELS')
        normedPixelFrame = astshim.Frame(2, 'Domain=NORMEDPIXELS')

        if doNormalizePixels:
            # Pixels will need to be rescaled before going into the mappings
            normCoefficients = [-1.0, 2.0/xScale, 0,
                                -1.0, 0, 2.0/yScale]
            normMap = _convert_to_ast_polymap_coefficients(normCoefficients)
        else:
            normMap = astshim.UnitMap(2)

        # All of the detectors for one visit map to the same tangent plane
        tangentPoint = lsst.geom.SpherePoint(centerRA, centerDec)
        cdMatrix = afwgeom.makeCdMatrix(1.0 * lsst.geom.degrees, 0 * lsst.geom.degrees, True)
        iwcToSkyWcs = afwgeom.makeSkyWcs(lsst.geom.Point2D(0, 0), tangentPoint, cdMatrix)
        iwcToSkyMap = iwcToSkyWcs.getFrameDict().getMapping('PIXELS', 'SKY')
        skyFrame = iwcToSkyWcs.getFrameDict().getFrame('SKY')

        frameDict = astshim.FrameDict(pixelFrame)
        frameDict.addFrame('PIXELS', normMap, normedPixelFrame)

        currentFrameName = 'NORMEDPIXELS'

        # Dictionary values are ordered according to the maps' application.
        for m, mapElement in enumerate(mapDict.values()):
            mapType = mapElement['Type']

            if mapType == 'Poly':
                mapCoefficients = mapElement['Coefficients']
                astMap = _convert_to_ast_polymap_coefficients(mapCoefficients)
            elif mapType == 'Identity':
                astMap = astshim.UnitMap(2)
            else:
                raise ValueError(f"Converting map type {mapType} to WCS is not supported")

            if m == len(mapDict) - 1:
                newFrameName = 'IWC'
            else:
                newFrameName = 'INTERMEDIATE' + str(m)
            newFrame = astshim.Frame(2, f'Domain={newFrameName}')
            frameDict.addFrame(currentFrameName, astMap, newFrame)
            currentFrameName = newFrameName
        frameDict.addFrame('IWC', iwcToSkyMap, skyFrame)

        outWCS = afwgeom.SkyWcs(frameDict)
        return outWCS

    def _make_outputs(self, wcsf, visitSummaryTables, exposureInfo):
        """Make WCS objects out of the fitted WCS models.

        Parameters
        ----------
        wcsf : `wcsfit.WCSFit`
            WCSFit object, assumed to have fit model.
        visitSummaryTables : `list` of `lsst.afw.table.ExposureCatalog`
            Catalogs with per-detector summary information from which to grab
            detector information.
        exposureInfo : `lsst.pipe.base.Struct`
            Struct containing general properties for the visits.

        Returns
        -------
        catalogs : `dict` of [`int`, `lsst.afw.table.ExposureCatalog`]
            Dictionary of `lsst.afw.table.ExposureCatalog` objects with the
            WCS set to the WCS fit in wcsf, keyed by the visit number.
        """
        # Get the parameters of the fit models
        mapParams = wcsf.mapCollection.getParamDict()

        # Set up the schema for the output catalogs
        schema = lsst.afw.table.ExposureTable.makeMinimalSchema()
        schema.addField('visit', type='L', doc='Visit number')

        # Pixels will need to be rescaled before going into the mappings
        sampleDetector = visitSummaryTables[0][0]
        xscale = sampleDetector['bbox_max_x'] - sampleDetector['bbox_min_x']
        yscale = sampleDetector['bbox_max_y'] - sampleDetector['bbox_min_y']

        catalogs = {}
        for v, visitSummary in enumerate(visitSummaryTables):
            visit = visitSummary[0]['visit']

            catalog = lsst.afw.table.ExposureCatalog(schema)
            catalog.resize(len(exposureInfo.detectors))
            catalog['visit'] = visit

            for d, detector in enumerate(visitSummary['id']):
                mapName = f'{visit}/{detector}'

                mapElements = wcsf.mapCollection.orderAtoms(f'{mapName}/base')
                mapDict = {}
                for m, mapElement in enumerate(mapElements):
                    mapType = wcsf.mapCollection.getMapType(mapElement)
                    mapDict[mapElement] = {'Type': mapType}

                    if mapType == 'Poly':
                        mapCoefficients = mapParams[mapElement]
                        mapDict[mapElement]['Coefficients'] = mapCoefficients

                # The RA and Dec of the visit are needed for the last step of
                # the mapping from the visit tangent plane to RA and Dec
                outWCS = self._make_afw_wcs(mapDict, exposureInfo.ras[v] * lsst.geom.radians,
                                            exposureInfo.decs[v] * lsst.geom.radians,
                                            doNormalizePixels=True,
                                            xScale=xscale, yScale=yscale)

                catalog[d].setId(detector)
                catalog[d].setWcs(outWCS)
            catalog.sort()
            catalogs[visit] = catalog

        return catalogs