Coverage for python/lsst/drp/tasks/gbdesAstrometricFit.py: 11%

428 statements  


1# This file is part of drp_tasks. 

2# 

3# LSST Data Management System 

4# This product includes software developed by the 

5# LSST Project (http://www.lsst.org/). 

6# See COPYRIGHT file at the top of the source tree. 

7# 

8# This program is free software: you can redistribute it and/or modify 

9# it under the terms of the GNU General Public License as published by 

10# the Free Software Foundation, either version 3 of the License, or 

11# (at your option) any later version. 

12# 

13# This program is distributed in the hope that it will be useful, 

14# but WITHOUT ANY WARRANTY; without even the implied warranty of 

15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

16# GNU General Public License for more details. 

17# 

18# You should have received a copy of the LSST License Statement and 

19# the GNU General Public License along with this program. If not, 

20# see <https://www.lsstcorp.org/LegalNotices/>. 

21# 

22import numpy as np 

23import astropy.time 

24import astropy.units as u 

25import astropy.coordinates 

26import yaml 

27import wcsfit 

28import astshim 

29 

30import lsst.geom 

31import lsst.pex.config as pexConfig 

32import lsst.pipe.base as pipeBase 

33import lsst.sphgeom 

34import lsst.afw.table 

35import lsst.afw.geom as afwgeom 

36from lsst.meas.algorithms import (LoadReferenceObjectsConfig, ReferenceObjectLoader, 

37 ReferenceSourceSelectorTask) 

38from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry 

39 

40__all__ = ['GbdesAstrometricFitConnections', 'GbdesAstrometricFitConfig', 'GbdesAstrometricFitTask'] 

41 

42 

43def _lookup_visit_refcats(datasetType, registry, quantumDataId, collections): 

44 """Lookup function that finds all refcats for all visits that overlap a 

45 tract, rather than just the refcats that directly overlap the tract. 

46 Borrowed from jointcal. 

47 

48 Parameters 

49 ---------- 

50 datasetType : `lsst.daf.butler.DatasetType` 

51 Type of dataset being searched for. 

52 registry : `lsst.daf.butler.Registry` 

53 Data repository registry to search. 

54 quantumDataId : `lsst.daf.butler.DataCoordinate` 

55 Data ID of the quantum; expected to be something we can use as a 

56 constraint to query for overlapping visits. 

57 collections : `Iterable` [ `str` ] 

58 Collections to search. 

59 Returns 

60 ------- 

61 refs : `Iterator` [ `lsst.daf.butler.DatasetRef` ] 

62 Iterator over refcat references. 

63 """ 

64 refs = set() 

65 # Use .expanded() on the query methods below because we need data IDs with 

66 # regions, both in the outer loop over visits (queryDatasets will expand 

67 # any data ID we give it, but doing it up-front in bulk is much more 

68 # efficient) and in the data IDs of the DatasetRefs this function yields 

69 # (because the RefCatLoader relies on them to do some of its own 

70 # filtering). 

71 for visit_data_id in set(registry.queryDataIds('visit', dataId=quantumDataId).expanded()): 

72 refs.update( 

73 registry.queryDatasets( 

74 datasetType, 

75 collections=collections, 

76 dataId=visit_data_id, 

77 findFirst=True, 

78 ).expanded() 

79 ) 

80 refs = sorted(refs)

81 yield from refs

82 

83 

84def _make_ref_covariance_matrix(refCat, inputUnit=u.radian, outputCoordUnit=u.marcsec, 

85 outputPMUnit=u.marcsec): 

86 """Make a covariance matrix for the reference catalog including proper 

87 motion and parallax. 

88 

89 The output is flattened to one dimension to match the format expected by 

90 `gbdes`. 

91 

92 Parameters 

93 ---------- 

94 refCat : `lsst.afw.table.SimpleCatalog` 

95 Catalog including proper motion and parallax measurements. 

96 inputUnit : `astropy.unit.core.Unit` 

97 Units of the input catalog 

98 outputCoordUnit : `astropy.unit.core.Unit` 

99 Units required for the coordinates in the covariance matrix. `gbdes` 

100 expects milliarcseconds. 

101 outputPMUnit : `astropy.unit.core.Unit` 

102 Units required for the proper motion/parallax in the covariance matrix. 

103 `gbdes` expects milliarcseconds. 

104 

105 Returns 

106 ------- 

107 cov : `list` of `float` 

108 Flattened output covariance matrix. 

109 """ 

110 # Here is the standard ordering of components in the covariance matrix,

111 # chosen to match the PM enumeration in the C++ code of the gbdes

112 # package's Match. Each tuple gives: the array holding the 1-d error,

113 # the corresponding string used in the Gaia column names, and the index

114 # of that component in the Gaia catalog ordering. The order of the

115 # tuples themselves is the order we want in our covariance matrix.

116 raErr = (refCat['coord_raErr'] * inputUnit).to(outputCoordUnit).to_value() 

117 decErr = (refCat['coord_decErr'] * inputUnit).to(outputCoordUnit).to_value() 

118 raPMErr = (refCat['pm_raErr'] * inputUnit).to(outputPMUnit).to_value() 

119 decPMErr = (refCat['pm_decErr'] * inputUnit).to(outputPMUnit).to_value() 

120 parallaxErr = (refCat['parallaxErr'] * inputUnit).to(outputPMUnit).to_value() 

121 stdOrder = ((raErr, 'ra', 0), 

122 (decErr, 'dec', 1), 

123 (raPMErr, 'pmra', 3), 

124 (decPMErr, 'pmdec', 4), 

125 (parallaxErr, 'parallax', 2)) 

126 cov = np.zeros((len(refCat), 25)) 

127 k = 0 

128 # TODO: when DM-35130 is done, we need the full covariance here

129 for i, pr1 in enumerate(stdOrder): 

130 for j, pr2 in enumerate(stdOrder): 

131 if pr1[2] < pr2[2]: 

132 # add correlation coefficient (once it is available) 

133 # cov[:, k] = (pr1[0] * pr2[0] * refCat[pr1[1] + '_' + pr2[1] 

134 # + '_corr']) 

135 cov[:, k] = 0 

136 elif pr1[2] > pr2[2]: 

137 # add correlation coefficient (once it is available) 

138 # cov[:, k] = (pr1[0] * pr2[0] * refCat[pr2[1] + '_' + pr1[1] 

139 # + '_corr']) 

140 cov[:, k] = 0 

141 else: 

142 # diagonal element

143 cov[:, k] = pr1[0] * pr2[0] 

144 k = k+1 

145 

146 return cov 

147 
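A minimal sketch of the flattened ordering (illustrative only; the one-row catalog and its error values below are made up): the diagonal of the 5x5 block lands at flattened indices 0, 6, 12, 18, and 24, while the correlation terms are currently zeroed.

from astropy.table import Table

fakeRefCat = Table({'coord_raErr': [1.0e-9], 'coord_decErr': [2.0e-9],
                    'pm_raErr': [3.0e-9], 'pm_decErr': [4.0e-9],
                    'parallaxErr': [5.0e-9]})
cov = _make_ref_covariance_matrix(fakeRefCat)
# cov has shape (1, 25); reshaping one row to 5x5 gives a diagonal matrix in
# the tuple order (ra, dec, pmra, pmdec, parallax) until the correlation
# terms are filled in.
cov5x5 = cov[0].reshape(5, 5)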

148 

149def _convert_to_ast_polymap_coefficients(coefficients): 

150 """Convert vector of polynomial coefficients from the format used in 

151 `gbdes` into AST format (see Poly2d::vectorIndex(i, j) in 

152 gbdes/gbutil/src/Poly2d.cpp). This assumes two input and two output 

153 coordinates. 

154 

155 Parameters 

156 ---------- 

157 coefficients : `list`

158 Coefficients of the polynomials. The degree is inferred from the

159 number of coefficients, assuming two input and two output

160 coordinates.

161 

162 Returns 

163 ------- 

164 astPoly : `astshim.PolyMap` 

165 Coefficients in AST polynomial format. 

166 """ 

167 polyArray = np.zeros((len(coefficients), 4)) 

168 N = len(coefficients) / 2 

169 # Get the degree of the polynomial by applying the quadratic formula to the 

170 # formula for calculating the number of coefficients of the polynomial. 

171 degree = int(-1.5 + 0.5 * (1 + 8 * N)**0.5) 

172 

173 for outVar in [1, 2]: 

174 for i in range(degree + 1): 

175 for j in range(degree + 1): 

176 if (i + j) > degree: 

177 continue 

178 vectorIndex = int(((i+j)*(i+j+1))/2+j + N * (outVar - 1)) 

179 polyArray[vectorIndex, 0] = coefficients[vectorIndex] 

180 polyArray[vectorIndex, 1] = outVar 

181 polyArray[vectorIndex, 2] = i 

182 polyArray[vectorIndex, 3] = j 

183 

184 astPoly = astshim.PolyMap(polyArray, 2, options="IterInverse=1,NIterInverse=10,TolInverse=1e-7") 

185 return astPoly 

186 
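For example (an illustrative sketch; the coefficient values are made up), a degree-1 polynomial has three coefficients per output axis, ordered constant, x, y following Poly2d::vectorIndex, so the identity transform is:

identityCoefficients = [0.0, 1.0, 0.0,   # output 1 = x
                        0.0, 0.0, 1.0]   # output 2 = y
identityPoly = _convert_to_ast_polymap_coefficients(identityCoefficients)
# The resulting PolyMap acts on an array of shape (2, nPoints) and here
# should return the input unchanged.
print(identityPoly.applyForward(np.array([[100.0], [200.0]])))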

187 

188def _get_wcs_from_sip(butlerWcs): 

189 """Get wcsfit.Wcs in TPV format from the SIP-formatted input WCS. 

190 

191 Parameters 

192 ---------- 

193 butlerWcs : `lsst.afw.geom.SkyWcs` 

194 Input WCS from the calexp in SIP format. 

195 

196 Returns 

197 ------- 

198 wcs : `wcsfit.Wcs` 

199 WCS object in TPV format. 

200 """ 

201 fits_metadata = butlerWcs.getFitsMetadata() 

202 if not ((fits_metadata.get('CTYPE1') == 'RA---TAN-SIP') 

203 and (fits_metadata.get('CTYPE2') == 'DEC--TAN-SIP')): 

204 raise ValueError(f"CTYPES {fits_metadata.get('CTYPE1')} and {fits_metadata.get('CTYPE2')}" 

205 "do not match SIP convention") 

206 

207 # Correct CRPIX values to correspond to source table pixel indexing 

208 # convention 

209 crpix1 = fits_metadata.get('CRPIX1') 

210 crpix2 = fits_metadata.get('CRPIX2') 

211 fits_metadata.set('CRPIX1', crpix1 - 1) 

212 fits_metadata.set('CRPIX2', crpix2 - 1) 

213 

214 floatDict = {k: fits_metadata[k] for k in fits_metadata if isinstance(fits_metadata[k], (int, float))} 

215 

216 wcs = wcsfit.readTPVFromSIP(floatDict, 'SIP') 

217 

218 return wcs 

219 
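A usage sketch, assuming an existing Butler repository; the repository path, collection, and data ID values below are placeholders, not defaults of this task:

import lsst.daf.butler

butler = lsst.daf.butler.Butler('/path/to/repo', collections=['HSC/runs/example'])
calexpWcs = butler.get('calexp.wcs', instrument='HSC', visit=1228, detector=42)
# Convert the SIP-formatted calexp WCS to the TPV form used by gbdes.
tpvWcs = _get_wcs_from_sip(calexpWcs)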

220 

221class GbdesAstrometricFitConnections(pipeBase.PipelineTaskConnections, 

222 dimensions=('skymap', 'tract', 'instrument', 'physical_filter')): 

223 """Middleware input/output connections for task data.""" 

224 inputCatalogRefs = pipeBase.connectionTypes.Input( 

225 doc="Source table in parquet format, per visit.", 

226 name='preSourceTable_visit', 

227 storageClass='DataFrame', 

228 dimensions=('instrument', 'visit'), 

229 deferLoad=True, 

230 multiple=True, 

231 ) 

232 inputVisitSummaries = pipeBase.connectionTypes.Input( 

233 doc=("Per-visit consolidated exposure metadata built from calexps. " 

234 "These catalogs use detector id for the id and must be sorted for " 

235 "fast lookups of a detector."), 

236 name='visitSummary', 

237 storageClass='ExposureCatalog', 

238 dimensions=('instrument', 'visit'), 

239 multiple=True, 

240 ) 

241 referenceCatalog = pipeBase.connectionTypes.PrerequisiteInput( 

242 doc="The astrometry reference catalog to match to loaded input catalog sources.", 

243 name='gaia_dr2_20200414', 

244 storageClass='SimpleCatalog', 

245 dimensions=('skypix',), 

246 deferLoad=True, 

247 multiple=True, 

248 lookupFunction=_lookup_visit_refcats, 

249 ) 

250 outputWcs = pipeBase.connectionTypes.Output( 

251 doc=("Per-tract, per-visit world coordinate systems derived from the fitted model." 

252 " These catalogs only contain entries for detectors with an output, and use" 

253 " the detector id for the catalog id, sorted on id for fast lookups of a detector."), 

254 name='gbdesAstrometricFitSkyWcsCatalog', 

255 storageClass='ExposureCatalog', 

256 dimensions=('instrument', 'visit', 'skymap', 'tract'), 

257 multiple=True 

258 ) 

259 outputCatalog = pipeBase.connectionTypes.Output( 

260 doc=("Source table with stars used in fit, along with residuals in pixel coordinates and tangent " 

261 "plane coordinates and chisq values."), 

262 name='gbdesAstrometricFit_fitStars', 

263 storageClass='ArrowNumpyDict', 

264 dimensions=('instrument', 'skymap', 'tract', 'physical_filter'), 

265 ) 

266 starCatalog = pipeBase.connectionTypes.Output( 

267 doc="", 

268 name='gbdesAstrometricFit_starCatalog', 

269 storageClass='ArrowNumpyDict', 

270 dimensions=('instrument', 'skymap', 'tract', 'physical_filter') 

271 ) 

272 

273 

274class GbdesAstrometricFitConfig(pipeBase.PipelineTaskConfig, 

275 pipelineConnections=GbdesAstrometricFitConnections): 

276 """Configuration for GbdesAstrometricFitTask""" 

277 sourceSelector = sourceSelectorRegistry.makeField( 

278 doc="How to select sources for cross-matching.", 

279 default='science' 

280 ) 

281 referenceSelector = pexConfig.ConfigurableField( 

282 target=ReferenceSourceSelectorTask, 

283 doc="How to down-select the loaded astrometry reference catalog.", 

284 ) 

285 matchRadius = pexConfig.Field( 

286 doc="Matching tolerance between associated objects (arcseconds).", 

287 dtype=float, 

288 default=1.0 

289 ) 

290 minMatches = pexConfig.Field( 

291 doc="Number of matches required to keep a source object.", 

292 dtype=int, 

293 default=2 

294 ) 

295 allowSelfMatches = pexConfig.Field( 

296 doc="Allow multiple sources from the same visit to be associated with the same object.", 

297 dtype=bool, 

298 default=False 

299 ) 

300 sourceFluxType = pexConfig.Field( 

301 dtype=str, 

302 doc="Source flux field to use in source selection and to get fluxes from the catalog.", 

303 default='apFlux_12_0' 

304 ) 

305 systematicError = pexConfig.Field( 

306 dtype=float, 

307 doc=("Systematic error padding added in quadrature for the science catalogs (marcsec). The default" 

308 "value is equivalent to 0.02 pixels for HSC."), 

309 default=0.0034 

310 ) 

311 referenceSystematicError = pexConfig.Field( 

312 dtype=float, 

313 doc="Systematic error padding added in quadrature for the reference catalog (marcsec).", 

314 default=0.0 

315 ) 

316 modelComponents = pexConfig.ListField( 

317 dtype=str, 

318 doc=("List of mappings to apply to transform from pixels to sky, in order of their application." 

319 "Supported options are 'INSTRUMENT/DEVICE' and 'EXPOSURE'."), 

320 default=['INSTRUMENT/DEVICE', 'EXPOSURE'] 

321 ) 

322 deviceModel = pexConfig.ListField( 

323 dtype=str, 

324 doc=("List of mappings to apply to transform from detector pixels to intermediate frame. Map names" 

325 "should match the format 'BAND/DEVICE/<map name>'."), 

326 default=['BAND/DEVICE/poly'] 

327 ) 

328 exposureModel = pexConfig.ListField( 

329 dtype=str, 

330 doc=("List of mappings to apply to transform from intermediate frame to sky coordinates. Map names" 

331 "should match the format 'EXPOSURE/<map name>'."), 

332 default=['EXPOSURE/poly'] 

333 ) 

334 devicePolyOrder = pexConfig.Field( 

335 dtype=int, 

336 doc="Order of device polynomial model.", 

337 default=4 

338 ) 

339 exposurePolyOrder = pexConfig.Field( 

340 dtype=int, 

341 doc="Order of exposure polynomial model.", 

342 default=6 

343 ) 

344 fitProperMotion = pexConfig.Field( 

345 dtype=bool, 

346 doc="Fit the proper motions of the objects.", 

347 default=False 

348 ) 

349 excludeNonPMObjects = pexConfig.Field( 

350 dtype=bool, 

351 doc="Exclude reference objects without proper motion/parallax information.", 

352 default=True 

353 ) 

354 fitReserveFraction = pexConfig.Field( 

355 dtype=float, 

356 default=0.2, 

357 doc="Fraction of objects to reserve from fit for validation." 

358 ) 

359 fitReserveRandomSeed = pexConfig.Field( 

360 dtype=int, 

361 doc="Set the random seed for selecting data points to reserve from the fit for validation.", 

362 default=1234 

363 ) 

364 

365 def setDefaults(self): 

366 # Use only stars because aperture fluxes of galaxies are biased and 

367 # depend on seeing. 

368 self.sourceSelector['science'].doUnresolved = True 

369 self.sourceSelector['science'].unresolved.name = 'extendedness' 

370 

371 # Use only isolated sources. 

372 self.sourceSelector['science'].doIsolated = True 

373 self.sourceSelector['science'].isolated.parentName = 'parentSourceId' 

374 self.sourceSelector['science'].isolated.nChildName = 'deblend_nChild' 

375 # Do not use either flux or centroid measurements with flags, 

376 # chosen from the usual QA flags for stars. 

377 self.sourceSelector['science'].doFlags = True 

378 badFlags = ['pixelFlags_edge', 

379 'pixelFlags_saturated', 

380 'pixelFlags_interpolatedCenter', 

381 'pixelFlags_interpolated', 

382 'pixelFlags_crCenter', 

383 'pixelFlags_bad', 

384 'hsmPsfMoments_flag', 

385 f'{self.sourceFluxType}_flag', 

386 ] 

387 self.sourceSelector['science'].flags.bad = badFlags 

388 

389 def validate(self): 

390 super().validate() 

391 

392 # Check if all components of the device and exposure models are 

393 # supported. 

394 for component in self.deviceModel: 

395 if not (('poly' in component.lower()) or ('identity' in component.lower())): 

396 raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.deviceModel, self, 

397 f'deviceModel component {component} is not supported.') 

398 

399 for component in self.exposureModel: 

400 if not (('poly' in component.lower()) or ('identity' in component.lower())): 

401 raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.exposureModel, self, 

402 f'exposureModel component {component} is not supported.') 

403 

404 
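These fields are overridden in the usual pex_config way; an illustrative sketch (the values are examples, not recommended settings):

config = GbdesAstrometricFitConfig()
config.devicePolyOrder = 5
config.exposurePolyOrder = 7
config.fitProperMotion = True
config.deviceModel = ['BAND/DEVICE/poly']
config.exposureModel = ['EXPOSURE/poly']
# validate() raises if an unsupported model component is requested.
config.validate()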

405class GbdesAstrometricFitTask(pipeBase.PipelineTask): 

406 """Calibrate the WCS across multiple visits of the same field using the 

407 GBDES package. 

408 """ 

409 

410 ConfigClass = GbdesAstrometricFitConfig 

411 _DefaultName = 'gbdesAstrometricFit' 

412 

413 def __init__(self, **kwargs): 

414 super().__init__(**kwargs) 

415 self.makeSubtask('sourceSelector') 

416 self.makeSubtask('referenceSelector') 

417 

418 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

419 # We override runQuantum to set up the refObjLoaders 

420 inputs = butlerQC.get(inputRefs) 

421 

422 instrumentName = butlerQC.quantum.dataId['instrument'] 

423 

424 sampleRefCat = inputs['referenceCatalog'][0].get() 

425 refEpoch = sampleRefCat[0]['epoch'] 

426 

427 refConfig = LoadReferenceObjectsConfig() 

428 refConfig.anyFilterMapsToThis = 'phot_g_mean' 

429 refConfig.requireProperMotion = True 

430 refObjectLoader = ReferenceObjectLoader(dataIds=[ref.datasetRef.dataId 

431 for ref in inputRefs.referenceCatalog], 

432 refCats=inputs.pop('referenceCatalog'), 

433 config=refConfig, 

434 log=self.log) 

435 

436 # Ensure the inputs are in a consistent order 

437 inputCatVisits = np.array([inputCat.dataId['visit'] for inputCat in inputs['inputCatalogRefs']]) 

438 inputs['inputCatalogRefs'] = [inputs['inputCatalogRefs'][v] for v in inputCatVisits.argsort()] 

439 inputSumVisits = np.array([inputSum[0]['visit'] for inputSum in inputs['inputVisitSummaries']]) 

440 inputs['inputVisitSummaries'] = [inputs['inputVisitSummaries'][v] for v in inputSumVisits.argsort()] 

441 

442 output = self.run(**inputs, instrumentName=instrumentName, refEpoch=refEpoch, 

443 refObjectLoader=refObjectLoader) 

444 

445 for outputRef in outputRefs.outputWcs: 

446 visit = outputRef.dataId['visit'] 

447 butlerQC.put(output.outputWCSs[visit], outputRef) 

448 butlerQC.put(output.outputCatalog, outputRefs.outputCatalog) 

449 butlerQC.put(output.starCatalog, outputRefs.starCatalog) 

450 

451 def run(self, inputCatalogRefs, inputVisitSummaries, instrumentName="", refEpoch=None, 

452 refObjectLoader=None): 

453 """Run the WCS fit for a given set of visits 

454 

455 Parameters 

456 ---------- 

457 inputCatalogRefs : `list` 

458 List of `DeferredDatasetHandle`s pointing to visit-level source 

459 tables. 

460 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

461 List of catalogs with per-detector summary information. 

462 instrumentName : `str`, optional 

463 Name of the instrument used. This is only used for labelling. 

464 refEpoch : `float` 

465 Epoch of the reference objects in MJD. 

466 refObjectLoader : instance of 

467 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader` 

468 Reference object loader instance.

469 

470 Returns 

471 ------- 

472 result : `lsst.pipe.base.Struct` 

473 ``outputWCSs`` : `list` of `lsst.afw.table.ExposureCatalog` 

474 List of exposure catalogs (one per visit) with the WCS for each 

475 detector set by the new fitted WCS. 

476 ``fitModel`` : `wcsfit.WCSFit` 

477 Model-fitting object with final model parameters. 

478 ``outputCatalog`` : `pyarrow.Table` 

479 Catalog with fit residuals of all sources used. 

480 """ 

481 self.log.info("Gathering instrument, exposure, and field info") 

482 # Set up an instrument object 

483 instrument = wcsfit.Instrument(instrumentName) 

484 

485 # Get RA, Dec, MJD, etc., for the input visits 

486 exposureInfo, exposuresHelper, extensionInfo = self._get_exposure_info(inputVisitSummaries, 

487 instrument) 

488 

489 # Get information about the extent of the input visits 

490 fields, fieldCenter, fieldRadius = self._prep_sky(inputVisitSummaries, exposureInfo.medianEpoch) 

491 

492 self.log.info("Load catalogs and associate sources") 

493 # Set up class to associate sources into matches using a 

494 # friends-of-friends algorithm 

495 associations = wcsfit.FoFClass(fields, [instrument], exposuresHelper, 

496 [fieldRadius.asDegrees()], 

497 (self.config.matchRadius * u.arcsec).to(u.degree).value) 

498 

499 # Add the reference catalog to the associator 

500 medianEpoch = astropy.time.Time(exposureInfo.medianEpoch, format='decimalyear').mjd 

501 refObjects, refCovariance = self._load_refcat(associations, refObjectLoader, fieldCenter, fieldRadius, 

502 extensionInfo, epoch=medianEpoch) 

503 

504 # Add the science catalogs and associate new sources as they are added 

505 sourceIndices, usedColumns = self._load_catalogs_and_associate(associations, inputCatalogRefs, 

506 extensionInfo) 

507 

508 self.log.info("Fit the WCSs") 

509 # Set up a YAML-type string using the config variables and a sample 

510 # visit 

511 inputYAML = self.make_yaml(inputVisitSummaries[0]) 

512 

513 # Set the verbosity level for WCSFit from the task log level. 

514 # TODO: DM-36850, Add lsst.log to gbdes so that log messages are 

515 # properly propagated. 

516 loglevel = self.log.getEffectiveLevel() 

517 if loglevel >= self.log.WARNING: 

518 verbose = 0 

519 elif loglevel == self.log.INFO: 

520 verbose = 1 

521 else: 

522 verbose = 2 

523 

524 # Set up the WCS-fitting class using the results of the FOF associator 

525 wcsf = wcsfit.WCSFit(fields, [instrument], exposuresHelper, 

526 extensionInfo.visitIndex, extensionInfo.detectorIndex, 

527 inputYAML, extensionInfo.wcs, associations.sequence, associations.extn, 

528 associations.obj, sysErr=self.config.systematicError, 

529 refSysErr=self.config.referenceSystematicError, 

530 usePM=self.config.fitProperMotion, 

531 verbose=verbose) 

532 

533 # Add the science and reference sources 

534 self._add_objects(wcsf, inputCatalogRefs, sourceIndices, extensionInfo, usedColumns) 

535 self._add_ref_objects(wcsf, refObjects, refCovariance, extensionInfo) 

536 

537 # Do the WCS fit 

538 wcsf.fit(reserveFraction=self.config.fitReserveFraction, 

539 randomNumberSeed=self.config.fitReserveRandomSeed) 

540 self.log.info("WCS fitting done") 

541 

542 outputWCSs = self._make_outputs(wcsf, inputVisitSummaries, exposureInfo) 

543 outputCatalog = wcsf.getOutputCatalog() 

544 starCatalog = wcsf.getStarCatalog() 

545 

546 return pipeBase.Struct(outputWCSs=outputWCSs, 

547 fitModel=wcsf, 

548 outputCatalog=outputCatalog, 

549 starCatalog=starCatalog) 

550 
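Downstream code can read a per-visit WCS catalog back and look up a detector by id; a sketch, assuming `butler` is a Butler pointing at the output collection, with placeholder data ID and detector values:

wcsCatalog = butler.get('gbdesAstrometricFitSkyWcsCatalog', instrument='HSC',
                        visit=1228, skymap='hsc_rings_v1', tract=9813)
detectorRow = wcsCatalog.find(42)   # the output catalogs are sorted on detector id
fittedWcs = detectorRow.getWcs()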

551 def _prep_sky(self, inputVisitSummaries, epoch, fieldName='Field'): 

552 """Get center and radius of the input tract. This assumes that all 

553 visits will be put into the same `wcsfit.Field` and fit together. 

554 

555 Parameters

556 ---------- 

557 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

558 List of catalogs with per-detector summary information. 

559 epoch : `float`

560 Reference epoch.

561 fieldName : `str`

562 Name of the field, used internally.

563 

564 Returns 

565 ------- 

566 fields : `wcsfit.Fields` 

567 Object with field information. 

568 center : `lsst.geom.SpherePoint` 

569 Center of the field. 

570 radius : `lsst.sphgeom._sphgeom.Angle` 

571 Radius of the bounding circle of the tract. 

572 """ 

573 allDetectorCorners = [] 

574 for visSum in inputVisitSummaries: 

575 detectorCorners = [lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector() for (ra, dec) 

576 in zip(visSum['raCorners'].ravel(), visSum['decCorners'].ravel())] 

577 allDetectorCorners.extend(detectorCorners) 

578 boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(allDetectorCorners).getBoundingCircle() 

579 center = lsst.geom.SpherePoint(boundingCircle.getCenter()) 

580 ra = center.getRa().asDegrees() 

581 dec = center.getDec().asDegrees() 

582 radius = boundingCircle.getOpeningAngle() 

583 

584 # wcsfit.Fields describes a list of fields, but we assume all 

585 # observations will be fit together in one field. 

586 fields = wcsfit.Fields([fieldName], [ra], [dec], [epoch]) 

587 

588 return fields, center, radius 

589 

590 def _get_exposure_info(self, inputVisitSummaries, instrument, fieldNumber=0, instrumentNumber=0, 

591 refEpoch=None): 

592 """Get various information about the input visits to feed to the 

593 fitting routines. 

594 

595 Parameters 

596 ---------- 

597 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

598 Tables for each visit with information for detectors. 

599 instrument : `wcsfit.Instrument` 

600 Instrument object to which detector information is added. 

601 fieldNumber : `int` 

602 Index of the field for these visits. Should be zero if all data is 

603 being fit together. 

604 instrumentNumber : `int` 

605 Index of the instrument for these visits. Should be zero if all 

606 data comes from the same instrument. 

607 refEpoch : `float` 

608 Epoch of the reference objects in MJD. 

609 

610 Returns 

611 ------- 

612 exposureInfo : `lsst.pipe.base.Struct` 

613 Struct containing general properties for the visits: 

614 ``visits`` : `list` 

615 List of visit names. 

616 ``detectors`` : `list` 

617 List of all detectors in any visit. 

618 ``ras`` : `list` of float 

619 List of boresight RAs for each visit. 

620 ``decs`` : `list` of float 

621 List of boresight Decs for each visit.

622 ``medianEpoch`` : float 

623 Median epoch of all visits in decimal-year format. 

624 exposuresHelper : `wcsfit.ExposuresHelper` 

625 Object containing information about the input visits. 

626 extensionInfo : `lsst.pipe.base.Struct` 

627 Struct containing properties for each extension: 

628 ``visit`` : `np.ndarray` 

629 Name of the visit for this extension. 

630 ``detector`` : `np.ndarray` 

631 Name of the detector for this extension. 

632 ``visitIndex`` : `np.ndarray` of `int`

633 Index of visit for this extension. 

634 ``detectorIndex`` : `np.ndarray` of `int` 

635 Index of the detector for this extension. 

636 ``wcs`` : `np.ndarray` of `wcsfit.Wcs`

637 Initial WCS for this extension. 

638 ``extensionType`` : `np.ndarray` of `str` 

639 "SCIENCE" or "REFERENCE". 

640 """ 

641 exposureNames = [] 

642 ras = [] 

643 decs = [] 

644 visits = [] 

645 detectors = [] 

646 airmasses = [] 

647 exposureTimes = [] 

648 mjds = [] 

649 observatories = [] 

650 wcss = [] 

651 

652 extensionType = [] 

653 extensionVisitIndices = [] 

654 extensionDetectorIndices = [] 

655 extensionVisits = [] 

656 extensionDetectors = [] 

657 # Get information for all the science visits 

658 for v, visitSummary in enumerate(inputVisitSummaries): 

659 visitInfo = visitSummary[0].getVisitInfo() 

660 visit = visitSummary[0]['visit'] 

661 visits.append(visit) 

662 exposureNames.append(str(visit)) 

663 raDec = visitInfo.getBoresightRaDec() 

664 ras.append(raDec.getRa().asRadians()) 

665 decs.append(raDec.getDec().asRadians()) 

666 airmasses.append(visitInfo.getBoresightAirmass()) 

667 exposureTimes.append(visitInfo.getExposureTime()) 

668 obsDate = visitInfo.getDate() 

669 obsMJD = obsDate.get(obsDate.MJD) 

670 mjds.append(obsMJD) 

671 # Get the observatory ICRS position for use in fitting parallax 

672 obsLon = visitInfo.observatory.getLongitude().asDegrees() 

673 obsLat = visitInfo.observatory.getLatitude().asDegrees() 

674 obsElev = visitInfo.observatory.getElevation() 

675 earthLocation = astropy.coordinates.EarthLocation.from_geodetic(obsLon, obsLat, obsElev) 

676 observatory_gcrs = earthLocation.get_gcrs(astropy.time.Time(obsMJD, format='mjd')) 

677 observatory_icrs = observatory_gcrs.transform_to(astropy.coordinates.ICRS()) 

678 # We want the position in AU in Cartesian coordinates 

679 observatories.append(observatory_icrs.cartesian.xyz.to(u.AU).value) 

680 

681 for row in visitSummary: 

682 detector = row['id'] 

683 if detector not in detectors: 

684 detectors.append(detector) 

685 detectorBounds = wcsfit.Bounds(row['bbox_min_x'], row['bbox_max_x'], 

686 row['bbox_min_y'], row['bbox_max_y']) 

687 instrument.addDevice(str(detector), detectorBounds) 

688 

689 detectorIndex = np.flatnonzero(detector == np.array(detectors))[0] 

690 extensionVisitIndices.append(v) 

691 extensionDetectorIndices.append(detectorIndex) 

692 extensionVisits.append(visit) 

693 extensionDetectors.append(detector) 

694 extensionType.append('SCIENCE') 

695 

696 wcs = row.getWcs() 

697 wcss.append(_get_wcs_from_sip(wcs)) 

698 

699 fieldNumbers = list(np.ones(len(exposureNames), dtype=int) * fieldNumber) 

700 instrumentNumbers = list(np.ones(len(exposureNames), dtype=int) * instrumentNumber) 

701 

702 # Set the reference epoch to be the median of the science visits. 

703 # The reference catalog will be shifted to this date. 

704 medianMJD = np.median(mjds) 

705 medianEpoch = astropy.time.Time(medianMJD, format='mjd').decimalyear 

706 

707 # Add information for the reference catalog. Most of the values are 

708 # not used. 

709 exposureNames.append('REFERENCE') 

710 visits.append(-1) 

711 fieldNumbers.append(0) 

712 if self.config.fitProperMotion: 

713 instrumentNumbers.append(-2) 

714 else: 

715 instrumentNumbers.append(-1) 

716 ras.append(0.0) 

717 decs.append(0.0) 

718 airmasses.append(0.0) 

719 exposureTimes.append(0) 

720 mjds.append((refEpoch if (refEpoch is not None) else medianMJD)) 

721 observatories.append(np.array([0, 0, 0])) 

722 identity = wcsfit.IdentityMap() 

723 icrs = wcsfit.SphericalICRS() 

724 refWcs = wcsfit.Wcs(identity, icrs, 'Identity', np.pi / 180.) 

725 wcss.append(refWcs) 

726 

727 extensionVisitIndices.append(len(exposureNames) - 1) 

728 extensionDetectorIndices.append(-1) # REFERENCE device must be -1 

729 extensionVisits.append(-1) 

730 extensionDetectors.append(-1) 

731 extensionType.append('REFERENCE') 

732 

733 # Make a table of information to use elsewhere in the class 

734 extensionInfo = pipeBase.Struct(visit=np.array(extensionVisits), 

735 detector=np.array(extensionDetectors), 

736 visitIndex=np.array(extensionVisitIndices), 

737 detectorIndex=np.array(extensionDetectorIndices), 

738 wcs=np.array(wcss), 

739 extensionType=np.array(extensionType)) 

740 

741 # Make the exposureHelper object to use in the fitting routines 

742 exposuresHelper = wcsfit.ExposuresHelper(exposureNames, 

743 fieldNumbers, 

744 instrumentNumbers, 

745 ras, 

746 decs, 

747 airmasses, 

748 exposureTimes, 

749 mjds, 

750 observatories) 

751 

752 exposureInfo = pipeBase.Struct(visits=visits, 

753 detectors=detectors, 

754 ras=ras, 

755 decs=decs, 

756 medianEpoch=medianEpoch) 

757 

758 return exposureInfo, exposuresHelper, extensionInfo 

759 
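The observatory-position calculation above can be exercised in isolation; the site coordinates and date below are illustrative (roughly the Subaru site), not values used by the task:

location = astropy.coordinates.EarthLocation.from_geodetic(-155.476, 19.826, 4139)
obstime = astropy.time.Time(57754.0, format='mjd')
gcrs = location.get_gcrs(obstime)
icrs = gcrs.transform_to(astropy.coordinates.ICRS())
# Barycentric Cartesian position in AU, as appended to `observatories` above.
observatoryPosition = icrs.cartesian.xyz.to(u.AU).value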

760 def _load_refcat(self, associations, refObjectLoader, center, radius, extensionInfo, epoch=None, 

761 fieldIndex=0): 

762 """Load the reference catalog and add reference objects to the 

763 `wcsfit.FoFClass` object. 

764 

765 Parameters 

766 ---------- 

767 associations : `wcsfit.FoFClass` 

768 Object to which to add the catalog of reference objects. 

769 refObjectLoader : 

770 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader` 

771 Object set up to load reference catalog objects. 

772 center : `lsst.geom.SpherePoint` 

773 Center of the circle in which to load reference objects. 

774 radius : `lsst.sphgeom._sphgeom.Angle` 

775 Radius of the circle in which to load reference objects. 

776 extensionInfo : `lsst.pipe.base.Struct` 

777 Struct containing properties for each extension. 

778 epoch : `float` 

779 MJD to which to correct the object positions. 

780 fieldIndex : `int` 

781 Index of the field. Should be zero if all the data is fit together. 

782 

783 Returns 

784 ------- 

785 refObjects : `dict` 

786 Position and error information of reference objects. 

787 refCovariance : `list` of `float` 

788 Flattened output covariance matrix. 

789 """ 

790 formattedEpoch = astropy.time.Time(epoch, format='mjd') 

791 

792 refFilter = refObjectLoader.config.anyFilterMapsToThis 

793 skyCircle = refObjectLoader.loadSkyCircle(center, radius, refFilter, epoch=formattedEpoch) 

794 

795 selected = self.referenceSelector.run(skyCircle.refCat) 

796 # Need memory contiguity to access the catalog columns as arrays.

797 if not selected.sourceCat.isContiguous(): 

798 refCat = selected.sourceCat.copy(deep=True) 

799 else: 

800 refCat = selected.sourceCat 

801 

802 if self.config.excludeNonPMObjects: 

803 hasPM = refCat['pm_raErr'] != 0 

804 refCat = refCat[hasPM] 

805 

806 ra = (refCat['coord_ra'] * u.radian).to(u.degree).to_value().tolist() 

807 dec = (refCat['coord_dec'] * u.radian).to(u.degree).to_value().tolist() 

808 raCov = ((refCat['coord_raErr'] * u.radian).to(u.degree).to_value()**2).tolist() 

809 decCov = ((refCat['coord_decErr'] * u.radian).to(u.degree).to_value()**2).tolist() 

810 

811 # TODO: DM-37316 we need the full gaia covariance here 

812 refObjects = {'ra': ra, 'dec': dec, 'raCov': raCov, 'decCov': decCov, 

813 'raDecCov': np.zeros(len(ra))} 

814 refCovariance = [] 

815 

816 if self.config.fitProperMotion: 

817 raPM = (refCat['pm_ra'] * u.radian).to(u.marcsec).to_value().tolist() 

818 decPM = (refCat['pm_dec'] * u.radian).to(u.marcsec).to_value().tolist() 

819 parallax = (refCat['parallax'] * u.radian).to(u.marcsec).to_value().tolist() 

820 cov = _make_ref_covariance_matrix(refCat) 

821 pmDict = {'raPM': raPM, 'decPM': decPM, 'parallax': parallax} 

822 refObjects.update(pmDict) 

823 refCovariance = cov 

824 

825 extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0] 

826 visitIndex = extensionInfo.visitIndex[extensionIndex] 

827 detectorIndex = extensionInfo.detectorIndex[extensionIndex] 

828 instrumentIndex = -1 # -1 indicates the reference catalog 

829 refWcs = extensionInfo.wcs[extensionIndex] 

830 

831 associations.addCatalog(refWcs, 'STELLAR', visitIndex, fieldIndex, instrumentIndex, detectorIndex, 

832 extensionIndex, np.ones(len(refCat), dtype=bool), 

833 ra, dec, np.arange(len(ra))) 

834 

835 return refObjects, refCovariance 

836 

837 def _load_catalogs_and_associate(self, associations, inputCatalogRefs, extensionInfo, 

838 fieldIndex=0, instrumentIndex=0): 

839 """Load the science catalogs and add the sources to the associator 

840 class `wcsfit.FoFClass`, associating them into matches as you go. 

841 

842 Parameters 

843 ---------- 

844 associations : `wcsfit.FoFClass` 

845 Object to which to add the catalog of reference objects. 

846 inputCatalogRefs : `list` 

847 List of DeferredDatasetHandles pointing to visit-level source 

848 tables. 

849 extensionInfo : `lsst.pipe.base.Struct` 

850 Struct containing properties for each extension. 

851 fieldIndex : `int` 

852 Index of the field for these catalogs. Should be zero assuming all 

853 data is being fit together. 

854 instrumentIndex : `int` 

855 Index of the instrument for these catalogs. Should be zero 

856 assuming all data comes from the same instrument. 

857 

858 Returns 

859 ------- 

860 sourceIndices : `list` 

861 List of boolean arrays used to select sources. 

862 columns : `list` of `str` 

863 List of columns needed from source tables. 

864 """ 

865 columns = ['detector', 'sourceId', 'x', 'xErr', 'y', 'yErr', 'ixx', 'iyy', 'ixy', 

866 f'{self.config.sourceFluxType}_instFlux', f'{self.config.sourceFluxType}_instFluxErr'] 

867 if self.sourceSelector.config.doFlags: 

868 columns.extend(self.sourceSelector.config.flags.bad) 

869 if self.sourceSelector.config.doUnresolved: 

870 columns.append(self.sourceSelector.config.unresolved.name) 

871 if self.sourceSelector.config.doIsolated: 

872 columns.append(self.sourceSelector.config.isolated.parentName) 

873 columns.append(self.sourceSelector.config.isolated.nChildName) 

874 

875 sourceIndices = [None] * len(extensionInfo.visit) 

876 for inputCatalogRef in inputCatalogRefs: 

877 visit = inputCatalogRef.dataId['visit'] 

878 inputCatalog = inputCatalogRef.get(parameters={'columns': columns}) 

879 # Get a sorted array of detector ids

880 detectors = np.unique(inputCatalog['detector']) 

881 

882 for detector in detectors: 

883 detectorSources = inputCatalog[inputCatalog['detector'] == detector] 

884 xCov = detectorSources['xErr']**2 

885 yCov = detectorSources['yErr']**2 

886 xyCov = (detectorSources['ixy'] * (xCov + yCov) 

887 / (detectorSources['ixx'] + detectorSources['iyy'])) 

888 # Remove sources with bad shape measurements 

889 goodShapes = xyCov**2 <= (xCov * yCov) 

890 selected = self.sourceSelector.run(detectorSources) 

891 goodInds = selected.selected & goodShapes 

892 

893 isStar = np.ones(goodInds.sum()) 

894 extensionIndex = np.flatnonzero((extensionInfo.visit == visit) 

895 & (extensionInfo.detector == detector))[0] 

896 detectorIndex = extensionInfo.detectorIndex[extensionIndex] 

897 visitIndex = extensionInfo.visitIndex[extensionIndex] 

898 

899 sourceIndices[extensionIndex] = goodInds 

900 

901 wcs = extensionInfo.wcs[extensionIndex] 

902 associations.reprojectWCS(wcs, fieldIndex) 

903 

904 associations.addCatalog(wcs, 'STELLAR', visitIndex, fieldIndex, 

905 instrumentIndex, detectorIndex, extensionIndex, isStar, 

906 detectorSources[goodInds]['x'].to_list(), 

907 detectorSources[goodInds]['y'].to_list(), 

908 np.arange(goodInds.sum())) 

909 

910 associations.sortMatches(fieldIndex, minMatches=self.config.minMatches, 

911 allowSelfMatches=self.config.allowSelfMatches) 

912 

913 return sourceIndices, columns 

914 
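The shape-based covariance cut above amounts to requiring a positive semi-definite 2x2 centroid covariance matrix; a numerical illustration with arbitrary values:

xCov, yCov = 0.04, 0.09            # xErr**2, yErr**2, in pixels**2
ixx, iyy, ixy = 4.0, 5.0, 1.5      # second moments
xyCov = ixy * (xCov + yCov) / (ixx + iyy)   # ~0.0217
goodShape = xyCov**2 <= xCov * yCov         # ~4.7e-4 <= 3.6e-3 -> True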

915 def make_yaml(self, inputVisitSummary, inputFile=None): 

916 """Make a YAML-type object that describes the parameters of the fit 

917 model. 

918 

919 Parameters 

920 ---------- 

921 inputVisitSummary : `lsst.afw.table.ExposureCatalog` 

922 Catalog with per-detector summary information. 

923 inputFile : `str` 

924 Path to a file that contains a basic model. 

925 

926 Returns 

927 ------- 

928 inputYAML : `wcsfit.YAMLCollector` 

929 YAML object containing the model description. 

930 """ 

931 if inputFile is not None: 

932 inputYAML = wcsfit.YAMLCollector(inputFile, 'PixelMapCollection') 

933 else: 

934 inputYAML = wcsfit.YAMLCollector('', 'PixelMapCollection') 

935 inputDict = {} 

936 modelComponents = ['INSTRUMENT/DEVICE', 'EXPOSURE'] 

937 baseMap = {'Type': 'Composite', 'Elements': modelComponents} 

938 inputDict['EXPOSURE/DEVICE/base'] = baseMap 

939 

940 xMin = str(inputVisitSummary['bbox_min_x'].min()) 

941 xMax = str(inputVisitSummary['bbox_max_x'].max()) 

942 yMin = str(inputVisitSummary['bbox_min_y'].min()) 

943 yMax = str(inputVisitSummary['bbox_max_y'].max()) 

944 

945 deviceModel = {'Type': 'Composite', 'Elements': self.config.deviceModel.list()} 

946 inputDict['INSTRUMENT/DEVICE'] = deviceModel 

947 for component in self.config.deviceModel: 

948 if 'poly' in component.lower(): 

949 componentDict = {'Type': 'Poly', 

950 'XPoly': {'OrderX': self.config.devicePolyOrder, 

951 'SumOrder': True}, 

952 'YPoly': {'OrderX': self.config.devicePolyOrder, 

953 'SumOrder': True}, 

954 'XMin': xMin, 'XMax': xMax, 'YMin': yMin, 'YMax': yMax} 

955 elif 'identity' in component.lower(): 

956 componentDict = {'Type': 'Identity'} 

957 

958 inputDict[component] = componentDict 

959 

960 exposureModel = {'Type': 'Composite', 'Elements': self.config.exposureModel.list()} 

961 inputDict['EXPOSURE'] = exposureModel 

962 for component in self.config.exposureModel: 

963 if 'poly' in component.lower(): 

964 componentDict = {'Type': 'Poly', 

965 'XPoly': {'OrderX': self.config.exposurePolyOrder, 

966 'SumOrder': True},

967 'YPoly': {'OrderX': self.config.exposurePolyOrder,

968 'SumOrder': True}}

969 elif 'identity' in component.lower(): 

970 componentDict = {'Type': 'Identity'} 

971 

972 inputDict[component] = componentDict 

973 

974 inputYAML.addInput(yaml.dump(inputDict)) 

975 inputYAML.addInput('Identity:\n Type: Identity\n') 

976 

977 return inputYAML 

978 
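For the default configuration, the dictionary handed to `yaml.dump` above has the structure sketched here; the bounding-box limits are placeholders standing in for the visit-summary values:

defaultModelSketch = {
    'EXPOSURE/DEVICE/base': {'Type': 'Composite',
                             'Elements': ['INSTRUMENT/DEVICE', 'EXPOSURE']},
    'INSTRUMENT/DEVICE': {'Type': 'Composite', 'Elements': ['BAND/DEVICE/poly']},
    'BAND/DEVICE/poly': {'Type': 'Poly',
                         'XPoly': {'OrderX': 4, 'SumOrder': True},
                         'YPoly': {'OrderX': 4, 'SumOrder': True},
                         'XMin': '0', 'XMax': '2047', 'YMin': '0', 'YMax': '4175'},
    'EXPOSURE': {'Type': 'Composite', 'Elements': ['EXPOSURE/poly']},
    'EXPOSURE/poly': {'Type': 'Poly',
                      'XPoly': {'OrderX': 6, 'SumOrder': True},
                      'YPoly': {'OrderX': 6, 'SumOrder': True}},
}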

979 def _add_objects(self, wcsf, inputCatalogRefs, sourceIndices, extensionInfo, columns): 

980 """Add science sources to the wcsfit.WCSFit object. 

981 

982 Parameters 

983 ---------- 

984 wcsf : `wcsfit.WCSFit` 

985 WCS-fitting object. 

986 inputCatalogRefs : `list` 

987 List of DeferredDatasetHandles pointing to visit-level source 

988 tables. 

989 sourceIndices : `list` 

990 List of boolean arrays used to select sources. 

991 extensionInfo : `lsst.pipe.base.Struct` 

992 Struct containing properties for each extension. 

993 columns : `list` of `str` 

994 List of columns needed from source tables. 

995 """ 

996 for inputCatalogRef in inputCatalogRefs: 

997 visit = inputCatalogRef.dataId['visit'] 

998 inputCatalog = inputCatalogRef.get(parameters={'columns': columns}) 

999 detectors = np.unique(inputCatalog['detector']) 

1000 

1001 for detector in detectors: 

1002 detectorSources = inputCatalog[inputCatalog['detector'] == detector] 

1003 

1004 extensionIndex = np.flatnonzero((extensionInfo.visit == visit) 

1005 & (extensionInfo.detector == detector))[0] 

1006 sourceCat = detectorSources[sourceIndices[extensionIndex]] 

1007 

1008 xCov = sourceCat['xErr']**2 

1009 yCov = sourceCat['yErr']**2 

1010 xyCov = (sourceCat['ixy'] * (xCov + yCov) 

1011 / (sourceCat['ixx'] + sourceCat['iyy'])) 

1012 # TODO: add correct xyErr if DM-7101 is ever done. 

1013 

1014 d = {'x': sourceCat['x'].to_numpy(), 'y': sourceCat['y'].to_numpy(), 

1015 'xCov': xCov.to_numpy(), 'yCov': yCov.to_numpy(), 'xyCov': xyCov.to_numpy()} 

1016 

1017 wcsf.setObjects(extensionIndex, d, 'x', 'y', ['xCov', 'yCov', 'xyCov']) 

1018 

1019 def _add_ref_objects(self, wcsf, refObjects, refCovariance, extensionInfo): 

1020 """Add reference sources to the wcsfit.WCSFit object. 

1021 

1022 Parameters 

1023 ---------- 

1024 wcsf : `wcsfit.WCSFit` 

1025 WCS-fitting object. 

1026 refObjects : `dict` 

1027 Position and error information of reference objects. 

1028 refCovariance : `list` of `float` 

1029 Flattened output covariance matrix. 

1030 extensionInfo : `lsst.pipe.base.Struct` 

1031 Struct containing properties for each extension. 

1032 """ 

1033 extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0] 

1034 

1035 if self.config.fitProperMotion: 

1036 wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov'], 

1037 pmDecKey='decPM', pmRaKey='raPM', parallaxKey='parallax', pmCovKey='fullCov', 

1038 pmCov=refCovariance) 

1039 else: 

1040 wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov']) 

1041 

1042 def _make_afw_wcs(self, mapDict, centerRA, centerDec, doNormalizePixels=False, xScale=1, yScale=1): 

1043 """Make an `lsst.afw.geom.SkyWcs` from a dictionary of mappings. 

1044 

1045 Parameters 

1046 ---------- 

1047 mapDict : `dict` 

1048 Dictionary of mapping parameters. 

1049 centerRA : `lsst.geom.Angle` 

1050 RA of the tangent point. 

1051 centerDec : `lsst.geom.Angle` 

1052 Declination of the tangent point. 

1053 doNormalizePixels : `bool` 

1054 Whether to normalize pixels so that range is [-1,1]. 

1055 xScale : `float` 

1056 Factor by which to normalize x-dimension. Corresponds to width of 

1057 detector. 

1058 yScale : `float` 

1059 Factor by which to normalize y-dimension. Corresponds to height of 

1060 detector. 

1061 

1062 Returns 

1063 ------- 

1064 outWCS : `lsst.afw.geom.SkyWcs` 

1065 WCS constructed from the input mappings 

1066 """ 

1067 # Set up pixel frames 

1068 pixelFrame = astshim.Frame(2, 'Domain=PIXELS') 

1069 normedPixelFrame = astshim.Frame(2, 'Domain=NORMEDPIXELS') 

1070 

1071 if doNormalizePixels: 

1072 # Pixels will need to be rescaled before going into the mappings 

1073 normCoefficients = [-1.0, 2.0/xScale, 0, 

1074 -1.0, 0, 2.0/yScale] 

1075 normMap = _convert_to_ast_polymap_coefficients(normCoefficients) 

1076 else: 

1077 normMap = astshim.UnitMap(2) 

1078 

1079 # All of the detectors for one visit map to the same tangent plane 

1080 tangentPoint = lsst.geom.SpherePoint(centerRA, centerDec) 

1081 cdMatrix = afwgeom.makeCdMatrix(1.0 * lsst.geom.degrees, 0 * lsst.geom.degrees, True) 

1082 iwcToSkyWcs = afwgeom.makeSkyWcs(lsst.geom.Point2D(0, 0), tangentPoint, cdMatrix) 

1083 iwcToSkyMap = iwcToSkyWcs.getFrameDict().getMapping('PIXELS', 'SKY') 

1084 skyFrame = iwcToSkyWcs.getFrameDict().getFrame('SKY') 

1085 

1086 frameDict = astshim.FrameDict(pixelFrame) 

1087 frameDict.addFrame('PIXELS', normMap, normedPixelFrame) 

1088 

1089 currentFrameName = 'NORMEDPIXELS' 

1090 

1091 # Dictionary values are ordered according to the maps' application. 

1092 for m, mapElement in enumerate(mapDict.values()): 

1093 mapType = mapElement['Type'] 

1094 

1095 if mapType == 'Poly': 

1096 mapCoefficients = mapElement['Coefficients'] 

1097 astMap = _convert_to_ast_polymap_coefficients(mapCoefficients) 

1098 elif mapType == 'Identity': 

1099 astMap = astshim.UnitMap(2) 

1100 else: 

1101 raise ValueError(f"Converting map type {mapType} to WCS is not supported") 

1102 

1103 if m == len(mapDict) - 1: 

1104 newFrameName = 'IWC' 

1105 else: 

1106 newFrameName = 'INTERMEDIATE' + str(m) 

1107 newFrame = astshim.Frame(2, f'Domain={newFrameName}') 

1108 frameDict.addFrame(currentFrameName, astMap, newFrame) 

1109 currentFrameName = newFrameName 

1110 frameDict.addFrame('IWC', iwcToSkyMap, skyFrame) 

1111 

1112 outWCS = afwgeom.SkyWcs(frameDict) 

1113 return outWCS 

1114 
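A minimal sketch of this conversion, assuming `task` is a constructed GbdesAstrometricFitTask and using an arbitrary tangent point and map name: a single identity map produces a plain gnomonic WCS whose pixel origin falls on the tangent point.

mapDict = {'example/identity': {'Type': 'Identity'}}
wcs = task._make_afw_wcs(mapDict,
                         150.0 * lsst.geom.degrees,
                         2.5 * lsst.geom.degrees)
# Pixel (0, 0) maps through NORMEDPIXELS and IWC straight to the tangent point.
print(wcs.pixelToSky(lsst.geom.Point2D(0.0, 0.0)))   # ~ (150.0, 2.5) deg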

1115 def _make_outputs(self, wcsf, visitSummaryTables, exposureInfo): 

1116 """Make a WCS object out of the WCS models. 

1117 

1118 Parameters 

1119 ---------- 

1120 wcsf : `wcsfit.WCSFit` 

1121 WCSFit object, assumed to have fit model. 

1122 visitSummaryTables : `list` of `lsst.afw.table.ExposureCatalog` 

1123 Catalogs with per-detector summary information from which to grab 

1124 detector information. 

1125 exposureInfo : `lsst.pipe.base.Struct`

1126 Struct containing properties of the input visits.

1127 

1128 Returns 

1129 ------- 

1130 catalogs : `dict` of [`str`, `lsst.afw.table.ExposureCatalog`] 

1131 Dictionary of `lsst.afw.table.ExposureCatalog` objects with the WCS 

1132 set to the WCS fit in wcsf, keyed by the visit name. 

1133 """ 

1134 # Get the parameters of the fit models 

1135 mapParams = wcsf.mapCollection.getParamDict() 

1136 

1137 # Set up the schema for the output catalogs 

1138 schema = lsst.afw.table.ExposureTable.makeMinimalSchema() 

1139 schema.addField('visit', type='L', doc='Visit number') 

1140 

1141 # Pixels will need to be rescaled before going into the mappings 

1142 sampleDetector = visitSummaryTables[0][0] 

1143 xscale = sampleDetector['bbox_max_x'] - sampleDetector['bbox_min_x'] 

1144 yscale = sampleDetector['bbox_max_y'] - sampleDetector['bbox_min_y'] 

1145 

1146 catalogs = {} 

1147 for v, visitSummary in enumerate(visitSummaryTables): 

1148 visit = visitSummary[0]['visit'] 

1149 

1150 catalog = lsst.afw.table.ExposureCatalog(schema) 

1151 catalog.resize(len(exposureInfo.detectors)) 

1152 catalog['visit'] = visit 

1153 

1154 for d, detector in enumerate(visitSummary['id']): 

1155 mapName = f'{visit}/{detector}' 

1156 

1157 mapElements = wcsf.mapCollection.orderAtoms(f'{mapName}/base') 

1158 mapDict = {} 

1159 for m, mapElement in enumerate(mapElements): 

1160 mapType = wcsf.mapCollection.getMapType(mapElement) 

1161 mapDict[mapElement] = {'Type': mapType} 

1162 

1163 if mapType == 'Poly': 

1164 mapCoefficients = mapParams[mapElement] 

1165 mapDict[mapElement]['Coefficients'] = mapCoefficients 

1166 

1167 # The RA and Dec of the visit are needed for the last step of 

1168 # the mapping from the visit tangent plane to RA and Dec 

1169 outWCS = self._make_afw_wcs(mapDict, exposureInfo.ras[v] * lsst.geom.radians, 

1170 exposureInfo.decs[v] * lsst.geom.radians, 

1171 doNormalizePixels=True, 

1172 xScale=xscale, yScale=yscale) 

1173 

1174 catalog[d].setId(detector) 

1175 catalog[d].setWcs(outWCS) 

1176 catalog.sort() 

1177 catalogs[visit] = catalog 

1178 

1179 return catalogs