Coverage for python/lsst/drp/tasks/gbdesAstrometricFit.py: 12% (424 statements)
coverage.py v6.5.0, created at 2023-02-16 03:43 -0800

# This file is part of drp_tasks.
#
# LSST Data Management System
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
# See COPYRIGHT file at the top of the source tree.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <https://www.lsstcorp.org/LegalNotices/>.
#
import numpy as np
import astropy.time
import astropy.units as u
import astropy.coordinates
import yaml
import wcsfit
import astshim
import pyarrow as pa

import lsst.geom
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.sphgeom
import lsst.afw.table
import lsst.afw.geom as afwgeom
from lsst.meas.algorithms import (LoadReferenceObjectsConfig, ReferenceObjectLoader,
                                  ReferenceSourceSelectorTask)
from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry

__all__ = ['GbdesAstrometricFitConnections', 'GbdesAstrometricFitConfig', 'GbdesAstrometricFitTask']


def _lookup_visit_refcats(datasetType, registry, quantumDataId, collections):
    """Lookup function that finds all refcats for all visits that overlap a
    tract, rather than just the refcats that directly overlap the tract.
    Borrowed from jointcal.

    Parameters
    ----------
    datasetType : `lsst.daf.butler.DatasetType`
        Type of dataset being searched for.
    registry : `lsst.daf.butler.Registry`
        Data repository registry to search.
    quantumDataId : `lsst.daf.butler.DataCoordinate`
        Data ID of the quantum; expected to be something we can use as a
        constraint to query for overlapping visits.
    collections : `Iterable` [ `str` ]
        Collections to search.

    Returns
    -------
    refs : `Iterator` [ `lsst.daf.butler.DatasetRef` ]
        Iterator over refcat references.
    """
    refs = set()
    # Use .expanded() on the query methods below because we need data IDs with
    # regions, both in the outer loop over visits (queryDatasets will expand
    # any data ID we give it, but doing it up-front in bulk is much more
    # efficient) and in the data IDs of the DatasetRefs this function yields
    # (because the RefCatLoader relies on them to do some of its own
    # filtering).
    for visit_data_id in set(registry.queryDataIds('visit', dataId=quantumDataId).expanded()):
        refs.update(
            registry.queryDatasets(
                datasetType,
                collections=collections,
                dataId=visit_data_id,
                findFirst=True,
            ).expanded()
        )
    # Yield the refs in a deterministic (sorted) order.
    yield from sorted(refs)


def _make_ref_covariance_matrix(refCat, inputUnit=u.radian, outputCoordUnit=u.marcsec,
                                outputPMUnit=u.marcsec):
    """Make a covariance matrix for the reference catalog including proper
    motion and parallax.

    The output is flattened to one dimension to match the format expected by
    `gbdes`.

    Parameters
    ----------
    refCat : `lsst.afw.table.SimpleCatalog`
        Catalog including proper motion and parallax measurements.
    inputUnit : `astropy.units.core.Unit`
        Units of the input catalog.
    outputCoordUnit : `astropy.units.core.Unit`
        Units required for the coordinates in the covariance matrix. `gbdes`
        expects milliarcseconds.
    outputPMUnit : `astropy.units.core.Unit`
        Units required for the proper motion/parallax in the covariance matrix.
        `gbdes` expects milliarcseconds.

    Returns
    -------
    cov : `list` of `float`
        Flattened output covariance matrix.
    """
    # This is the standard ordering of components in the covariance matrix,
    # chosen to match the proper-motion enumeration in the C++ code of the
    # gbdes package's Match. Each tuple gives the array holding the 1-d error,
    # the string used for this component in Gaia column names, and the
    # ordering of the component in the Gaia catalog. The ordering of the
    # tuples themselves is the order we want in our covariance matrix.
    raErr = (refCat['coord_raErr'] * inputUnit).to(outputCoordUnit).to_value()
    decErr = (refCat['coord_decErr'] * inputUnit).to(outputCoordUnit).to_value()
    raPMErr = (refCat['pm_raErr'] * inputUnit).to(outputPMUnit).to_value()
    decPMErr = (refCat['pm_decErr'] * inputUnit).to(outputPMUnit).to_value()
    parallaxErr = (refCat['parallaxErr'] * inputUnit).to(outputPMUnit).to_value()
    stdOrder = ((raErr, 'ra', 0),
                (decErr, 'dec', 1),
                (raPMErr, 'pmra', 3),
                (decPMErr, 'pmdec', 4),
                (parallaxErr, 'parallax', 2))
    cov = np.zeros((len(refCat), 25))
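    # The 25 elements per object are the 5x5 covariance matrix for
    # (ra, dec, pmra, pmdec, parallax), flattened row by row in the order of
    # the stdOrder tuples above.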

    k = 0
    # TODO: when DM-35130 is done, we need the full covariance here.
    for i, pr1 in enumerate(stdOrder):
        for j, pr2 in enumerate(stdOrder):
            if pr1[2] < pr2[2]:
                # Add the correlation coefficient (once it is available):
                # cov[:, k] = (pr1[0] * pr2[0] * refCat[pr1[1] + '_' + pr2[1]
                #              + '_corr'])
                cov[:, k] = 0
            elif pr1[2] > pr2[2]:
                # Add the correlation coefficient (once it is available):
                # cov[:, k] = (pr1[0] * pr2[0] * refCat[pr2[1] + '_' + pr1[1]
                #              + '_corr'])
                cov[:, k] = 0
            else:
                # Diagonal element.
                cov[:, k] = pr1[0] * pr2[0]
            k = k + 1

    return cov


def _convert_to_ast_polymap_coefficients(coefficients):
    """Convert a vector of polynomial coefficients from the format used in
    `gbdes` into AST format (see Poly2d::vectorIndex(i, j) in
    gbdes/gbutil/src/Poly2d.cpp). This assumes two input and two output
    coordinates.

    Parameters
    ----------
    coefficients : `list`
        Coefficients of the polynomials.

    Returns
    -------
    astPoly : `astshim.PolyMap`
        Coefficients in AST polynomial format.
    """
    polyArray = np.zeros((len(coefficients), 4))
    N = len(coefficients) / 2
    # Get the degree of the polynomial by applying the quadratic formula to
    # the expression for the number of coefficients of a two-dimensional
    # polynomial, N = (degree + 1) * (degree + 2) / 2.
    degree = int(-1.5 + 0.5 * (1 + 8 * N)**0.5)
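    # For example, a degree-3 polynomial in two variables has
    # N = (3 + 1) * (3 + 2) / 2 = 10 coefficients per output coordinate, and
    # the formula above recovers int(-1.5 + 0.5 * sqrt(1 + 80)) = 3.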

    for outVar in [1, 2]:
        for i in range(degree + 1):
            for j in range(degree + 1):
                if (i + j) > degree:
                    continue
                vectorIndex = int(((i + j) * (i + j + 1)) / 2 + j + N * (outVar - 1))
                polyArray[vectorIndex, 0] = coefficients[vectorIndex]
                polyArray[vectorIndex, 1] = outVar
                polyArray[vectorIndex, 2] = i
                polyArray[vectorIndex, 3] = j

    astPoly = astshim.PolyMap(polyArray, 2, options="IterInverse=1,NIterInverse=10,TolInverse=1e-7")
    return astPoly


def _get_wcs_from_sip(butlerWcs):
    """Get a wcsfit.Wcs in TPV format from the SIP-formatted input WCS.

    Parameters
    ----------
    butlerWcs : `lsst.afw.geom.SkyWcs`
        Input WCS from the calexp in SIP format.

    Returns
    -------
    wcs : `wcsfit.Wcs`
        WCS object in TPV format.
    """
    fits_metadata = butlerWcs.getFitsMetadata()
    if not ((fits_metadata.get('CTYPE1') == 'RA---TAN-SIP')
            and (fits_metadata.get('CTYPE2') == 'DEC--TAN-SIP')):
        raise ValueError(f"CTYPES {fits_metadata.get('CTYPE1')} and {fits_metadata.get('CTYPE2')} "
                         "do not match the SIP convention")

    # Correct the CRPIX values to correspond to the source table pixel
    # indexing convention.
    crpix1 = fits_metadata.get('CRPIX1')
    crpix2 = fits_metadata.get('CRPIX2')
    fits_metadata.set('CRPIX1', crpix1 - 1)
    fits_metadata.set('CRPIX2', crpix2 - 1)
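    # (FITS CRPIX values are 1-indexed, while LSST source tables use
    # 0-indexed pixel coordinates, hence the shift by one above.)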

    floatDict = {k: fits_metadata[k] for k in fits_metadata if isinstance(fits_metadata[k], (int, float))}

    wcs = wcsfit.readTPVFromSIP(floatDict, 'SIP')

    return wcs


class GbdesAstrometricFitConnections(pipeBase.PipelineTaskConnections,
                                     dimensions=('skymap', 'tract', 'instrument', 'physical_filter')):
    """Middleware input/output connections for task data."""
    inputCatalogRefs = pipeBase.connectionTypes.Input(
        doc="Source table in parquet format, per visit.",
        name='preSourceTable_visit',
        storageClass='DataFrame',
        dimensions=('instrument', 'visit'),
        deferLoad=True,
        multiple=True,
    )
    inputVisitSummaries = pipeBase.connectionTypes.Input(
        doc=("Per-visit consolidated exposure metadata built from calexps. "
             "These catalogs use detector id for the id and must be sorted for "
             "fast lookups of a detector."),
        name='visitSummary',
        storageClass='ExposureCatalog',
        dimensions=('instrument', 'visit'),
        multiple=True,
    )
    referenceCatalog = pipeBase.connectionTypes.PrerequisiteInput(
        doc="The astrometry reference catalog to match to loaded input catalog sources.",
        name='gaia_dr2_20200414',
        storageClass='SimpleCatalog',
        dimensions=('skypix',),
        deferLoad=True,
        multiple=True,
        lookupFunction=_lookup_visit_refcats,
    )
    outputWcs = pipeBase.connectionTypes.Output(
        doc=("Per-tract, per-visit world coordinate systems derived from the fitted model."
             " These catalogs only contain entries for detectors with an output, and use"
             " the detector id for the catalog id, sorted on id for fast lookups of a detector."),
        name='GbdesAstrometricFitSkyWcsCatalog',
        storageClass='ExposureCatalog',
        dimensions=('instrument', 'visit', 'skymap', 'tract'),
        multiple=True
    )
    outputCatalog = pipeBase.connectionTypes.Output(
        doc=("Source table with the stars used in the fit, along with their residuals in pixel "
             "coordinates and tangent-plane coordinates and their chisq values."),
        name='GbdesAstrometricFit_fitStars',
        storageClass='ArrowTable',
        dimensions=('instrument', 'skymap', 'tract', 'physical_filter'),
    )


class GbdesAstrometricFitConfig(pipeBase.PipelineTaskConfig,
                                pipelineConnections=GbdesAstrometricFitConnections):
    """Configuration for GbdesAstrometricFitTask."""
    sourceSelector = sourceSelectorRegistry.makeField(
        doc="How to select sources for cross-matching.",
        default='science'
    )
    referenceSelector = pexConfig.ConfigurableField(
        target=ReferenceSourceSelectorTask,
        doc="How to down-select the loaded astrometry reference catalog.",
    )
    matchRadius = pexConfig.Field(
        doc="Matching tolerance between associated objects (arcseconds).",
        dtype=float,
        default=1.0
    )
    minMatches = pexConfig.Field(
        doc="Number of matches required to keep a source object.",
        dtype=int,
        default=2
    )
    allowSelfMatches = pexConfig.Field(
        doc="Allow multiple sources from the same visit to be associated with the same object.",
        dtype=bool,
        default=False
    )
    sourceFluxType = pexConfig.Field(
        dtype=str,
        doc="Source flux field to use in source selection and to get fluxes from the catalog.",
        default='apFlux_12_0'
    )
    systematicError = pexConfig.Field(
        dtype=float,
        doc=("Systematic error padding added in quadrature for the science catalogs (marcsec). The default "
             "value is equivalent to 0.02 pixels for HSC."),
        default=0.0034
    )
    referenceSystematicError = pexConfig.Field(
        dtype=float,
        doc="Systematic error padding added in quadrature for the reference catalog (marcsec).",
        default=0.0
    )
    modelComponents = pexConfig.ListField(
        dtype=str,
        doc=("List of mappings to apply to transform from pixels to sky, in order of their application. "
             "Supported options are 'INSTRUMENT/DEVICE' and 'EXPOSURE'."),
        default=['INSTRUMENT/DEVICE', 'EXPOSURE']
    )
    deviceModel = pexConfig.ListField(
        dtype=str,
        doc=("List of mappings to apply to transform from detector pixels to intermediate frame. Map names "
             "should match the format 'BAND/DEVICE/<map name>'."),
        default=['BAND/DEVICE/poly']
    )
    exposureModel = pexConfig.ListField(
        dtype=str,
        doc=("List of mappings to apply to transform from intermediate frame to sky coordinates. Map names "
             "should match the format 'EXPOSURE/<map name>'."),
        default=['EXPOSURE/poly']
    )
    devicePolyOrder = pexConfig.Field(
        dtype=int,
        doc="Order of the device polynomial model.",
        default=4
    )
    exposurePolyOrder = pexConfig.Field(
        dtype=int,
        doc="Order of the exposure polynomial model.",
        default=6
    )
    fitProperMotion = pexConfig.Field(
        dtype=bool,
        doc="Fit the proper motions of the objects.",
        default=False
    )
    excludeNonPMObjects = pexConfig.Field(
        dtype=bool,
        doc="Exclude reference objects without proper motion/parallax information.",
        default=True
    )
    fitReserveFraction = pexConfig.Field(
        dtype=float,
        default=0.2,
        doc="Fraction of objects to reserve from the fit for validation."
    )
    fitReserveRandomSeed = pexConfig.Field(
        dtype=int,
        doc="Set the random seed for selecting data points to reserve from the fit for validation.",
        default=1234
    )

    def setDefaults(self):
        # Use only stars because aperture fluxes of galaxies are biased and
        # depend on seeing.
        self.sourceSelector['science'].doUnresolved = True
        self.sourceSelector['science'].unresolved.name = 'extendedness'

        # Use only isolated sources.
        self.sourceSelector['science'].doIsolated = True
        self.sourceSelector['science'].isolated.parentName = 'parentSourceId'
        self.sourceSelector['science'].isolated.nChildName = 'deblend_nChild'
        # Do not use either flux or centroid measurements with flags,
        # chosen from the usual QA flags for stars.
        self.sourceSelector['science'].doFlags = True
        badFlags = ['pixelFlags_edge',
                    'pixelFlags_saturated',
                    'pixelFlags_interpolatedCenter',
                    'pixelFlags_interpolated',
                    'pixelFlags_crCenter',
                    'pixelFlags_bad',
                    'hsmPsfMoments_flag',
                    f'{self.sourceFluxType}_flag',
                    ]
        self.sourceSelector['science'].flags.bad = badFlags

    def validate(self):
        super().validate()

        # Check that all components of the device and exposure models are
        # supported.
        for component in self.deviceModel:
            if not (('poly' in component.lower()) or ('identity' in component.lower())):
                raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.deviceModel, self,
                                                     f'deviceModel component {component} is not supported.')

        for component in self.exposureModel:
            if not (('poly' in component.lower()) or ('identity' in component.lower())):
                raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.exposureModel, self,
                                                     f'exposureModel component {component} is not supported.')
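
    # A minimal, illustrative sketch (not an authoritative recipe) of how the
    # fields above might be overridden in a task configuration file; the
    # values shown are hypothetical:
    #
    #     config.devicePolyOrder = 5
    #     config.exposureModel = ["EXPOSURE/poly"]
    #     config.sourceSelector["science"].doUnresolved = True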


class GbdesAstrometricFitTask(pipeBase.PipelineTask):
    """Calibrate the WCS across multiple visits of the same field using the
    GBDES package.
    """

    ConfigClass = GbdesAstrometricFitConfig
    _DefaultName = 'gbdesAstrometricFit'

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.makeSubtask('sourceSelector')
        self.makeSubtask('referenceSelector')

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        # We override runQuantum to set up the refObjLoaders.
        inputs = butlerQC.get(inputRefs)

        instrumentName = butlerQC.quantum.dataId['instrument']

        sampleRefCat = inputs['referenceCatalog'][0].get()
        refEpoch = sampleRefCat[0]['epoch']

        refConfig = LoadReferenceObjectsConfig()
        refConfig.anyFilterMapsToThis = 'phot_g_mean'
        refConfig.requireProperMotion = True
        refObjectLoader = ReferenceObjectLoader(dataIds=[ref.datasetRef.dataId
                                                         for ref in inputRefs.referenceCatalog],
                                                refCats=inputs.pop('referenceCatalog'),
                                                config=refConfig,
                                                log=self.log)

        # Ensure the inputs are in a consistent order.
        inputCatVisits = np.array([inputCat.dataId['visit'] for inputCat in inputs['inputCatalogRefs']])
        inputs['inputCatalogRefs'] = [inputs['inputCatalogRefs'][v] for v in inputCatVisits.argsort()]
        inputSumVisits = np.array([inputSum[0]['visit'] for inputSum in inputs['inputVisitSummaries']])
        inputs['inputVisitSummaries'] = [inputs['inputVisitSummaries'][v] for v in inputSumVisits.argsort()]

        output = self.run(**inputs, instrumentName=instrumentName, refEpoch=refEpoch,
                          refObjectLoader=refObjectLoader)

        for outputRef in outputRefs.outputWcs:
            visit = outputRef.dataId['visit']
            butlerQC.put(output.outputWCSs[visit], outputRef)
        butlerQC.put(output.outputCatalog, outputRefs.outputCatalog)

    def run(self, inputCatalogRefs, inputVisitSummaries, instrumentName="", refEpoch=None,
            refObjectLoader=None):
        """Run the WCS fit for a given set of visits.

        Parameters
        ----------
        inputCatalogRefs : `list`
            List of `DeferredDatasetHandle`s pointing to visit-level source
            tables.
        inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
            List of catalogs with per-detector summary information.
        instrumentName : `str`, optional
            Name of the instrument used. This is only used for labelling.
        refEpoch : `float`
            Epoch of the reference objects in MJD.
        refObjectLoader : instance of
            `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader`
            Reference object loader instance.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            ``outputWCSs`` : `dict` of [`int`, `lsst.afw.table.ExposureCatalog`]
                Dictionary of exposure catalogs (one per visit, keyed by visit
                number) with the WCS for each detector set to the new fitted
                WCS.
            ``fitModel`` : `wcsfit.WCSFit`
                Model-fitting object with the final model parameters.
            ``outputCatalog`` : `pyarrow.Table`
                Catalog with the fit residuals of all sources used.
        """

        self.log.info("Gathering instrument, exposure, and field info")
        # Set up an instrument object
        instrument = wcsfit.Instrument(instrumentName)

        # Get RA, Dec, MJD, etc., for the input visits
        exposureInfo, exposuresHelper, extensionInfo = self._get_exposure_info(inputVisitSummaries,
                                                                               instrument, refEpoch=refEpoch)

        # Get information about the extent of the input visits
        fields, fieldCenter, fieldRadius = self._prep_sky(inputVisitSummaries, exposureInfo.medianEpoch)

        self.log.info("Load catalogs and associate sources")
        # Set up class to associate sources into matches using a
        # friends-of-friends algorithm
        associations = wcsfit.FoFClass(fields, [instrument], exposuresHelper,
                                       [fieldRadius.asDegrees()],
                                       (self.config.matchRadius * u.arcsec).to(u.degree).value)

        # Add the reference catalog to the associator
        refObjects, refCovariance = self._load_refcat(associations, refObjectLoader, fieldCenter, fieldRadius,
                                                      extensionInfo, epoch=refEpoch)

        # Add the science catalogs and associate new sources as they are added
        sourceIndices, usedColumns = self._load_catalogs_and_associate(associations, inputCatalogRefs,
                                                                       extensionInfo)

        self.log.info("Fit the WCSs")
        # Set up a YAML-type string using the config variables and a sample
        # visit
        inputYAML = self.make_yaml(inputVisitSummaries[0])

        # Set the verbosity level for WCSFit from the task log level.
        # TODO: DM-36850, Add lsst.log to gbdes so that log messages are
        # properly propagated.
        loglevel = self.log.getEffectiveLevel()
        if loglevel >= self.log.WARNING:
            verbose = 0
        elif loglevel == self.log.INFO:
            verbose = 1
        else:
            verbose = 2

        # Set up the WCS-fitting class using the results of the FOF associator
        wcsf = wcsfit.WCSFit(fields, [instrument], exposuresHelper,
                             extensionInfo.visitIndex, extensionInfo.detectorIndex,
                             inputYAML, extensionInfo.wcs, associations.sequence, associations.extn,
                             associations.obj, sysErr=self.config.systematicError,
                             refSysErr=self.config.referenceSystematicError,
                             usePM=self.config.fitProperMotion,
                             verbose=verbose)

        # Add the science and reference sources
        self._add_objects(wcsf, inputCatalogRefs, sourceIndices, extensionInfo, usedColumns)
        self._add_ref_objects(wcsf, refObjects, refCovariance, extensionInfo)

        # Do the WCS fit
        wcsf.fit(reserveFraction=self.config.fitReserveFraction,
                 randomNumberSeed=self.config.fitReserveRandomSeed)
        self.log.info("WCS fitting done")

        outputWCSs = self._make_outputs(wcsf, inputVisitSummaries, exposureInfo)
        outputCatalog = pa.Table.from_pydict(wcsf.getOutputCatalog())

        return pipeBase.Struct(outputWCSs=outputWCSs,
                               fitModel=wcsf,
                               outputCatalog=outputCatalog)

    def _prep_sky(self, inputVisitSummaries, epoch, fieldName='Field'):
        """Get the center and radius of the input tract. This assumes that all
        visits will be put into the same `wcsfit.Field` and fit together.

        Parameters
        ----------
        inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
            List of catalogs with per-detector summary information.
        epoch : `float`
            Reference epoch.
        fieldName : `str`
            Name of the field, used internally.

        Returns
        -------
        fields : `wcsfit.Fields`
            Object with field information.
        center : `lsst.geom.SpherePoint`
            Center of the field.
        radius : `lsst.sphgeom._sphgeom.Angle`
            Radius of the bounding circle of the tract.
        """
        allDetectorCorners = []
        for visSum in inputVisitSummaries:
            detectorCorners = [lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector() for (ra, dec)
                               in zip(visSum['raCorners'].ravel(), visSum['decCorners'].ravel())]
            allDetectorCorners.extend(detectorCorners)
        boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(allDetectorCorners).getBoundingCircle()
        center = lsst.geom.SpherePoint(boundingCircle.getCenter())
        ra = center.getRa().asDegrees()
        dec = center.getDec().asDegrees()
        radius = boundingCircle.getOpeningAngle()

        # wcsfit.Fields describes a list of fields, but we assume all
        # observations will be fit together in one field.
        fields = wcsfit.Fields([fieldName], [ra], [dec], [epoch])

        return fields, center, radius

    def _get_exposure_info(self, inputVisitSummaries, instrument, fieldNumber=0, instrumentNumber=0,
                           refEpoch=None):
        """Get various information about the input visits to feed to the
        fitting routines.

        Parameters
        ----------
        inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
            Tables for each visit with information for detectors.
        instrument : `wcsfit.Instrument`
            Instrument object to which detector information is added.
        fieldNumber : `int`
            Index of the field for these visits. Should be zero if all data is
            being fit together.
        instrumentNumber : `int`
            Index of the instrument for these visits. Should be zero if all
            data comes from the same instrument.
        refEpoch : `float`
            Epoch of the reference objects in MJD.

        Returns
        -------
        exposureInfo : `lsst.pipe.base.Struct`
            Struct containing general properties for the visits:
            ``visits`` : `list`
                List of visit names.
            ``detectors`` : `list`
                List of all detectors in any visit.
            ``ras`` : `list` of `float`
                List of boresight RAs for each visit.
            ``decs`` : `list` of `float`
                List of boresight Decs for each visit.
            ``medianEpoch`` : `float`
                Median epoch of all visits in decimal-year format.
        exposuresHelper : `wcsfit.ExposuresHelper`
            Object containing information about the input visits.
        extensionInfo : `lsst.pipe.base.Struct`
            Struct containing properties for each extension:
            ``visit`` : `np.ndarray`
                Name of the visit for this extension.
            ``detector`` : `np.ndarray`
                Name of the detector for this extension.
            ``visitIndex`` : `np.ndarray` of `int`
                Index of the visit for this extension.
            ``detectorIndex`` : `np.ndarray` of `int`
                Index of the detector for this extension.
            ``wcss`` : `np.ndarray` of `lsst.afw.geom.SkyWcs`
                Initial WCS for this extension.
            ``extensionType`` : `np.ndarray` of `str`
                "SCIENCE" or "REFERENCE".
        """

        exposureNames = []
        ras = []
        decs = []
        visits = []
        detectors = []
        airmasses = []
        exposureTimes = []
        mjds = []
        observatories = []
        wcss = []

        extensionType = []
        extensionVisitIndices = []
        extensionDetectorIndices = []
        extensionVisits = []
        extensionDetectors = []
        # Get information for all the science visits
        for v, visitSummary in enumerate(inputVisitSummaries):
            visitInfo = visitSummary[0].getVisitInfo()
            visit = visitSummary[0]['visit']
            visits.append(visit)
            exposureNames.append(str(visit))
            raDec = visitInfo.getBoresightRaDec()
            ras.append(raDec.getRa().asRadians())
            decs.append(raDec.getDec().asRadians())
            airmasses.append(visitInfo.getBoresightAirmass())
            exposureTimes.append(visitInfo.getExposureTime())
            obsDate = visitInfo.getDate()
            obsMJD = obsDate.get(obsDate.MJD)
            mjds.append(obsMJD)
            # Get the observatory ICRS position for use in fitting parallax
            obsLon = visitInfo.observatory.getLongitude().asDegrees()
            obsLat = visitInfo.observatory.getLatitude().asDegrees()
            obsElev = visitInfo.observatory.getElevation()
            earthLocation = astropy.coordinates.EarthLocation.from_geodetic(obsLon, obsLat, obsElev)
            observatory_gcrs = earthLocation.get_gcrs(astropy.time.Time(obsMJD, format='mjd'))
            observatory_icrs = observatory_gcrs.transform_to(astropy.coordinates.ICRS())
            # We want the position in AU in Cartesian coordinates
            observatories.append(observatory_icrs.cartesian.xyz.to(u.AU).value)

            for row in visitSummary:
                detector = row['id']
                if detector not in detectors:
                    detectors.append(detector)
                    detectorBounds = wcsfit.Bounds(row['bbox_min_x'], row['bbox_max_x'],
                                                   row['bbox_min_y'], row['bbox_max_y'])
                    instrument.addDevice(str(detector), detectorBounds)

                detectorIndex = np.flatnonzero(detector == np.array(detectors))[0]
                extensionVisitIndices.append(v)
                extensionDetectorIndices.append(detectorIndex)
                extensionVisits.append(visit)
                extensionDetectors.append(detector)
                extensionType.append('SCIENCE')

                wcs = row.getWcs()
                wcss.append(_get_wcs_from_sip(wcs))

        fieldNumbers = list(np.ones(len(exposureNames), dtype=int) * fieldNumber)
        instrumentNumbers = list(np.ones(len(exposureNames), dtype=int) * instrumentNumber)

        # Set the reference epoch to be the median of the science visits.
        # The reference catalog will be shifted to this date.
        medianEpoch = astropy.time.Time(np.median(mjds), format='mjd').decimalyear

        # Add information for the reference catalog. Most of the values are
        # not used.
        exposureNames.append('REFERENCE')
        visits.append(-1)
        fieldNumbers.append(0)
        if self.config.fitProperMotion:
            instrumentNumbers.append(-2)
        else:
            instrumentNumbers.append(-1)
        ras.append(0.0)
        decs.append(0.0)
        airmasses.append(0.0)
        exposureTimes.append(0)
        mjds.append((refEpoch if (refEpoch is not None) else medianEpoch))
        observatories.append(np.array([0, 0, 0]))
        identity = wcsfit.IdentityMap()
        icrs = wcsfit.SphericalICRS()
        refWcs = wcsfit.Wcs(identity, icrs, 'Identity', np.pi / 180.)
        wcss.append(refWcs)

        extensionVisitIndices.append(len(exposureNames) - 1)
        extensionDetectorIndices.append(-1)  # REFERENCE device must be -1
        extensionVisits.append(-1)
        extensionDetectors.append(-1)
        extensionType.append('REFERENCE')

        # Make a table of information to use elsewhere in the class
        extensionInfo = pipeBase.Struct(visit=np.array(extensionVisits),
                                        detector=np.array(extensionDetectors),
                                        visitIndex=np.array(extensionVisitIndices),
                                        detectorIndex=np.array(extensionDetectorIndices),
                                        wcs=np.array(wcss),
                                        extensionType=np.array(extensionType))

        # Make the exposuresHelper object to use in the fitting routines
        exposuresHelper = wcsfit.ExposuresHelper(exposureNames,
                                                 fieldNumbers,
                                                 instrumentNumbers,
                                                 ras,
                                                 decs,
                                                 airmasses,
                                                 exposureTimes,
                                                 mjds,
                                                 observatories)

        exposureInfo = pipeBase.Struct(visits=visits,
                                       detectors=detectors,
                                       ras=ras,
                                       decs=decs,
                                       medianEpoch=medianEpoch)

        return exposureInfo, exposuresHelper, extensionInfo

    def _load_refcat(self, associations, refObjectLoader, center, radius, extensionInfo, epoch=None,
                     fieldIndex=0):
        """Load the reference catalog and add reference objects to the
        `wcsfit.FoFClass` object.

        Parameters
        ----------
        associations : `wcsfit.FoFClass`
            Object to which to add the catalog of reference objects.
        refObjectLoader :
            `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader`
            Object set up to load reference catalog objects.
        center : `lsst.geom.SpherePoint`
            Center of the circle in which to load reference objects.
        radius : `lsst.sphgeom._sphgeom.Angle`
            Radius of the circle in which to load reference objects.
        extensionInfo : `lsst.pipe.base.Struct`
            Struct containing properties for each extension.
        epoch : `float`
            MJD to which to correct the object positions.
        fieldIndex : `int`
            Index of the field. Should be zero if all the data is fit together.

        Returns
        -------
        refObjects : `dict`
            Position and error information of reference objects.
        refCovariance : `list` of `float`
            Flattened output covariance matrix.
        """
        formattedEpoch = astropy.time.Time(epoch, format='mjd')

        refFilter = refObjectLoader.config.anyFilterMapsToThis
        skyCircle = refObjectLoader.loadSkyCircle(center, radius, refFilter, epoch=formattedEpoch)

        selected = self.referenceSelector.run(skyCircle.refCat)
        # Need memory contiguity to get reference filters as a vector.
        if not selected.sourceCat.isContiguous():
            refCat = selected.sourceCat.copy(deep=True)
        else:
            refCat = selected.sourceCat

        if self.config.excludeNonPMObjects:
            hasPM = refCat['pm_raErr'] != 0
            refCat = refCat[hasPM]

        ra = (refCat['coord_ra'] * u.radian).to(u.degree).to_value().tolist()
        dec = (refCat['coord_dec'] * u.radian).to(u.degree).to_value().tolist()
        raCov = ((refCat['coord_raErr'] * u.radian).to(u.degree).to_value()**2).tolist()
        decCov = ((refCat['coord_decErr'] * u.radian).to(u.degree).to_value()**2).tolist()

        # TODO: DM-37316 we need the full gaia covariance here
        refObjects = {'ra': ra, 'dec': dec, 'raCov': raCov, 'decCov': decCov,
                      'raDecCov': np.zeros(len(ra))}
        refCovariance = []

        if self.config.fitProperMotion:
            raPM = (refCat['pm_ra'] * u.radian).to(u.marcsec).to_value().tolist()
            decPM = (refCat['pm_dec'] * u.radian).to(u.marcsec).to_value().tolist()
            parallax = (refCat['parallax'] * u.radian).to(u.marcsec).to_value().tolist()
            cov = _make_ref_covariance_matrix(refCat)
            pmDict = {'raPM': raPM, 'decPM': decPM, 'parallax': parallax}
            refObjects.update(pmDict)
            refCovariance = cov

        extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0]
        visitIndex = extensionInfo.visitIndex[extensionIndex]
        detectorIndex = extensionInfo.detectorIndex[extensionIndex]
        instrumentIndex = -1  # -1 indicates the reference catalog
        refWcs = extensionInfo.wcs[extensionIndex]

        associations.addCatalog(refWcs, 'STELLAR', visitIndex, fieldIndex, instrumentIndex, detectorIndex,
                                extensionIndex, np.ones(len(refCat), dtype=bool),
                                ra, dec, np.arange(len(ra)))

        return refObjects, refCovariance

    def _load_catalogs_and_associate(self, associations, inputCatalogRefs, extensionInfo,
                                     fieldIndex=0, instrumentIndex=0):
        """Load the science catalogs and add the sources to the associator
        class `wcsfit.FoFClass`, associating them into matches as the catalogs
        are added.

        Parameters
        ----------
        associations : `wcsfit.FoFClass`
            Object to which to add the catalogs of science sources.
        inputCatalogRefs : `list`
            List of DeferredDatasetHandles pointing to visit-level source
            tables.
        extensionInfo : `lsst.pipe.base.Struct`
            Struct containing properties for each extension.
        fieldIndex : `int`
            Index of the field for these catalogs. Should be zero assuming all
            data is being fit together.
        instrumentIndex : `int`
            Index of the instrument for these catalogs. Should be zero
            assuming all data comes from the same instrument.

        Returns
        -------
        sourceIndices : `list`
            List of boolean arrays used to select sources.
        columns : `list` of `str`
            List of columns needed from source tables.
        """
        columns = ['detector', 'sourceId', 'x', 'xErr', 'y', 'yErr', 'ixx', 'iyy', 'ixy',
                   f'{self.config.sourceFluxType}_instFlux', f'{self.config.sourceFluxType}_instFluxErr']
        if self.sourceSelector.config.doFlags:
            columns.extend(self.sourceSelector.config.flags.bad)
        if self.sourceSelector.config.doUnresolved:
            columns.append(self.sourceSelector.config.unresolved.name)
        if self.sourceSelector.config.doIsolated:
            columns.append(self.sourceSelector.config.isolated.parentName)
            columns.append(self.sourceSelector.config.isolated.nChildName)

        sourceIndices = [None] * len(extensionInfo.visit)
        for inputCatalogRef in inputCatalogRefs:
            visit = inputCatalogRef.dataId['visit']
            inputCatalog = inputCatalogRef.get(parameters={'columns': columns})
            # Get a sorted array of detector names
            detectors = np.unique(inputCatalog['detector'])

            for detector in detectors:
                detectorSources = inputCatalog[inputCatalog['detector'] == detector]
                xCov = detectorSources['xErr']**2
                yCov = detectorSources['yErr']**2
                xyCov = (detectorSources['ixy'] * (xCov + yCov)
                         / (detectorSources['ixx'] + detectorSources['iyy']))
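                # The x-y covariance of the centroid is not measured directly;
                # as an approximation, scale the moment correlation
                # ixy / (ixx + iyy) by the summed centroid variances (see the
                # DM-7101 TODO in _add_objects below).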

                # Remove sources with bad shape measurements
                goodShapes = xyCov**2 <= (xCov * yCov)
                selected = self.sourceSelector.run(detectorSources)
                goodInds = selected.selected & goodShapes

                isStar = np.ones(goodInds.sum())
                extensionIndex = np.flatnonzero((extensionInfo.visit == visit)
                                                & (extensionInfo.detector == detector))[0]
                detectorIndex = extensionInfo.detectorIndex[extensionIndex]
                visitIndex = extensionInfo.visitIndex[extensionIndex]

                sourceIndices[extensionIndex] = goodInds

                wcs = extensionInfo.wcs[extensionIndex]
                associations.reprojectWCS(wcs, fieldIndex)

                associations.addCatalog(wcs, 'STELLAR', visitIndex, fieldIndex,
                                        instrumentIndex, detectorIndex, extensionIndex, isStar,
                                        detectorSources[goodInds]['x'].to_list(),
                                        detectorSources[goodInds]['y'].to_list(),
                                        np.arange(goodInds.sum()))

        associations.sortMatches(fieldIndex, minMatches=self.config.minMatches,
                                 allowSelfMatches=self.config.allowSelfMatches)

        return sourceIndices, columns

    def make_yaml(self, inputVisitSummary, inputFile=None):
        """Make a YAML-type object that describes the parameters of the fit
        model.

        Parameters
        ----------
        inputVisitSummary : `lsst.afw.table.ExposureCatalog`
            Catalog with per-detector summary information.
        inputFile : `str`
            Path to a file that contains a basic model.

        Returns
        -------
        inputYAML : `wcsfit.YAMLCollector`
            YAML object containing the model description.
        """
        if inputFile is not None:
            inputYAML = wcsfit.YAMLCollector(inputFile, 'PixelMapCollection')
        else:
            inputYAML = wcsfit.YAMLCollector('', 'PixelMapCollection')
        inputDict = {}
        modelComponents = ['INSTRUMENT/DEVICE', 'EXPOSURE']
        baseMap = {'Type': 'Composite', 'Elements': modelComponents}
        inputDict['EXPOSURE/DEVICE/base'] = baseMap

        xMin = str(inputVisitSummary['bbox_min_x'].min())
        xMax = str(inputVisitSummary['bbox_max_x'].max())
        yMin = str(inputVisitSummary['bbox_min_y'].min())
        yMax = str(inputVisitSummary['bbox_max_y'].max())

        deviceModel = {'Type': 'Composite', 'Elements': self.config.deviceModel.list()}
        inputDict['INSTRUMENT/DEVICE'] = deviceModel
        for component in self.config.deviceModel:
            if 'poly' in component.lower():
                componentDict = {'Type': 'Poly',
                                 'XPoly': {'OrderX': self.config.devicePolyOrder,
                                           'SumOrder': True},
                                 'YPoly': {'OrderX': self.config.devicePolyOrder,
                                           'SumOrder': True},
                                 'XMin': xMin, 'XMax': xMax, 'YMin': yMin, 'YMax': yMax}
            elif 'identity' in component.lower():
                componentDict = {'Type': 'Identity'}

            inputDict[component] = componentDict

        exposureModel = {'Type': 'Composite', 'Elements': self.config.exposureModel.list()}
        inputDict['EXPOSURE'] = exposureModel
        for component in self.config.exposureModel:
            if 'poly' in component.lower():
                componentDict = {'Type': 'Poly',
                                 'XPoly': {'OrderX': self.config.exposurePolyOrder,
                                           'SumOrder': 'true'},
                                 'YPoly': {'OrderX': self.config.exposurePolyOrder,
                                           'SumOrder': 'true'}},
            elif 'identity' in component.lower():
                componentDict = {'Type': 'Identity'}

            inputDict[component] = componentDict

        inputYAML.addInput(yaml.dump(inputDict))
        inputYAML.addInput('Identity:\n Type: Identity\n')

        return inputYAML
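    # For reference, with the default configuration the inputDict built above
    # has (schematically, with some values abridged) the structure
    #   EXPOSURE/DEVICE/base: {Type: Composite, Elements: [INSTRUMENT/DEVICE, EXPOSURE]}
    #   INSTRUMENT/DEVICE:    {Type: Composite, Elements: [BAND/DEVICE/poly]}
    #   BAND/DEVICE/poly:     {Type: Poly, XPoly: {OrderX: 4, SumOrder: True}, ...}
    #   EXPOSURE:             {Type: Composite, Elements: [EXPOSURE/poly]}
    #   EXPOSURE/poly:        {Type: Poly, XPoly: {OrderX: 6, SumOrder: 'true'}, ...}
    # before being serialized with yaml.dump.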

    def _add_objects(self, wcsf, inputCatalogRefs, sourceIndices, extensionInfo, columns):
        """Add science sources to the wcsfit.WCSFit object.

        Parameters
        ----------
        wcsf : `wcsfit.WCSFit`
            WCS-fitting object.
        inputCatalogRefs : `list`
            List of DeferredDatasetHandles pointing to visit-level source
            tables.
        sourceIndices : `list`
            List of boolean arrays used to select sources.
        extensionInfo : `lsst.pipe.base.Struct`
            Struct containing properties for each extension.
        columns : `list` of `str`
            List of columns needed from source tables.
        """
        for inputCatalogRef in inputCatalogRefs:
            visit = inputCatalogRef.dataId['visit']
            inputCatalog = inputCatalogRef.get(parameters={'columns': columns})
            detectors = np.unique(inputCatalog['detector'])

            for detector in detectors:
                detectorSources = inputCatalog[inputCatalog['detector'] == detector]

                extensionIndex = np.flatnonzero((extensionInfo.visit == visit)
                                                & (extensionInfo.detector == detector))[0]
                sourceCat = detectorSources[sourceIndices[extensionIndex]]

                xCov = sourceCat['xErr']**2
                yCov = sourceCat['yErr']**2
                xyCov = (sourceCat['ixy'] * (xCov + yCov)
                         / (sourceCat['ixx'] + sourceCat['iyy']))
                # TODO: add correct xyErr if DM-7101 is ever done.

                d = {'x': sourceCat['x'].to_numpy(), 'y': sourceCat['y'].to_numpy(),
                     'xCov': xCov.to_numpy(), 'yCov': yCov.to_numpy(), 'xyCov': xyCov.to_numpy()}

                wcsf.setObjects(extensionIndex, d, 'x', 'y', ['xCov', 'yCov', 'xyCov'])

    def _add_ref_objects(self, wcsf, refObjects, refCovariance, extensionInfo):
        """Add reference sources to the wcsfit.WCSFit object.

        Parameters
        ----------
        wcsf : `wcsfit.WCSFit`
            WCS-fitting object.
        refObjects : `dict`
            Position and error information of reference objects.
        refCovariance : `list` of `float`
            Flattened output covariance matrix.
        extensionInfo : `lsst.pipe.base.Struct`
            Struct containing properties for each extension.
        """
        extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0]

        if self.config.fitProperMotion:
            wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov'],
                            pmDecKey='decPM', pmRaKey='raPM', parallaxKey='parallax', pmCovKey='fullCov',
                            pmCov=refCovariance)
        else:
            wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov'])

    def _make_afw_wcs(self, mapDict, centerRA, centerDec, doNormalizePixels=False, xScale=1, yScale=1):
        """Make an `lsst.afw.geom.SkyWcs` from a dictionary of mappings.

        Parameters
        ----------
        mapDict : `dict`
            Dictionary of mapping parameters.
        centerRA : `lsst.geom.Angle`
            RA of the tangent point.
        centerDec : `lsst.geom.Angle`
            Declination of the tangent point.
        doNormalizePixels : `bool`
            Whether to normalize pixels so that the range is [-1, 1].
        xScale : `float`
            Factor by which to normalize the x-dimension. Corresponds to the
            width of the detector.
        yScale : `float`
            Factor by which to normalize the y-dimension. Corresponds to the
            height of the detector.

        Returns
        -------
        outWCS : `lsst.afw.geom.SkyWcs`
            WCS constructed from the input mappings.
        """
        # Set up pixel frames
        pixelFrame = astshim.Frame(2, 'Domain=PIXELS')
        normedPixelFrame = astshim.Frame(2, 'Domain=NORMEDPIXELS')

        if doNormalizePixels:
            # Pixels will need to be rescaled before going into the mappings
            normCoefficients = [-1.0, 2.0/xScale, 0,
                                -1.0, 0, 2.0/yScale]
            normMap = _convert_to_ast_polymap_coefficients(normCoefficients)
        else:
            normMap = astshim.UnitMap(2)

        # All of the detectors for one visit map to the same tangent plane
        tangentPoint = lsst.geom.SpherePoint(centerRA, centerDec)
        cdMatrix = afwgeom.makeCdMatrix(1.0 * lsst.geom.degrees, 0 * lsst.geom.degrees, True)
        iwcToSkyWcs = afwgeom.makeSkyWcs(lsst.geom.Point2D(0, 0), tangentPoint, cdMatrix)
        iwcToSkyMap = iwcToSkyWcs.getFrameDict().getMapping('PIXELS', 'SKY')
        skyFrame = iwcToSkyWcs.getFrameDict().getFrame('SKY')

        frameDict = astshim.FrameDict(pixelFrame)
        frameDict.addFrame('PIXELS', normMap, normedPixelFrame)

        currentFrameName = 'NORMEDPIXELS'

        # Dictionary values are ordered according to the maps' application.
        for m, mapElement in enumerate(mapDict.values()):
            mapType = mapElement['Type']

            if mapType == 'Poly':
                mapCoefficients = mapElement['Coefficients']
                astMap = _convert_to_ast_polymap_coefficients(mapCoefficients)
            elif mapType == 'Identity':
                astMap = astshim.UnitMap(2)
            else:
                raise ValueError(f"Converting map type {mapType} to WCS is not supported")

            if m == len(mapDict) - 1:
                newFrameName = 'IWC'
            else:
                newFrameName = 'INTERMEDIATE' + str(m)
            newFrame = astshim.Frame(2, f'Domain={newFrameName}')
            frameDict.addFrame(currentFrameName, astMap, newFrame)
            currentFrameName = newFrameName
        frameDict.addFrame('IWC', iwcToSkyMap, skyFrame)
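        # The resulting frame chain is PIXELS -> NORMEDPIXELS ->
        # (one INTERMEDIATE frame per map element) -> IWC -> SKY.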

        outWCS = afwgeom.SkyWcs(frameDict)
        return outWCS

    def _make_outputs(self, wcsf, visitSummaryTables, exposureInfo):
        """Make per-detector WCS objects out of the fitted WCS models.

        Parameters
        ----------
        wcsf : `wcsfit.WCSFit`
            WCSFit object, assumed to have a fit model.
        visitSummaryTables : `list` of `lsst.afw.table.ExposureCatalog`
            Catalogs with per-detector summary information from which to grab
            detector information.
        exposureInfo : `lsst.pipe.base.Struct`
            Struct containing general properties for the visits.

        Returns
        -------
        catalogs : `dict` of [`int`, `lsst.afw.table.ExposureCatalog`]
            Dictionary of `lsst.afw.table.ExposureCatalog` objects with the WCS
            set to the WCS fit in wcsf, keyed by the visit number.
        """
        # Get the parameters of the fit models
        mapParams = wcsf.mapCollection.getParamDict()

        # Set up the schema for the output catalogs
        schema = lsst.afw.table.ExposureTable.makeMinimalSchema()
        schema.addField('visit', type='L', doc='Visit number')

        # Pixels will need to be rescaled before going into the mappings
        sampleDetector = visitSummaryTables[0][0]
        xscale = sampleDetector['bbox_max_x'] - sampleDetector['bbox_min_x']
        yscale = sampleDetector['bbox_max_y'] - sampleDetector['bbox_min_y']

        catalogs = {}
        for v, visitSummary in enumerate(visitSummaryTables):
            visit = visitSummary[0]['visit']

            catalog = lsst.afw.table.ExposureCatalog(schema)
            catalog.resize(len(exposureInfo.detectors))
            catalog['visit'] = visit

            for d, detector in enumerate(visitSummary['id']):
                mapName = f'{visit}/{detector}'

                mapElements = wcsf.mapCollection.orderAtoms(f'{mapName}/base')
                mapDict = {}
                for m, mapElement in enumerate(mapElements):
                    mapType = wcsf.mapCollection.getMapType(mapElement)
                    mapDict[mapElement] = {'Type': mapType}

                    if mapType == 'Poly':
                        mapCoefficients = mapParams[mapElement]
                        mapDict[mapElement]['Coefficients'] = mapCoefficients

                # The RA and Dec of the visit are needed for the last step of
                # the mapping from the visit tangent plane to RA and Dec
                outWCS = self._make_afw_wcs(mapDict, exposureInfo.ras[v] * lsst.geom.radians,
                                            exposureInfo.decs[v] * lsst.geom.radians,
                                            doNormalizePixels=True,
                                            xScale=xscale, yScale=yscale)

                catalog[d].setId(detector)
                catalog[d].setWcs(outWCS)
            catalog.sort()
            catalogs[visit] = catalog

        return catalogs
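
# For illustration only: this task is normally run as one step of a pipeline
# via the Gen3 middleware, along the lines of (hypothetical pipeline file,
# label, collections, and data query):
#
#   pipetask run -b REPO -p pipeline.yaml#gbdesAstrometricFit \
#       -i INPUT_COLLECTIONS -o OUTPUT_COLLECTION \
#       -d "instrument='HSC' AND skymap='hsc_rings_v1' AND tract=9813"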