Coverage for python/lsst/drp/tasks/gbdesAstrometricFit.py: 11%

431 statements  

coverage.py v7.2.7, created at 2023-06-17 10:26 +0000

1# This file is part of drp_tasks. 

2# 

3# LSST Data Management System 

4# This product includes software developed by the 

5# LSST Project (http://www.lsst.org/). 

6# See COPYRIGHT file at the top of the source tree. 

7# 

8# This program is free software: you can redistribute it and/or modify 

9# it under the terms of the GNU General Public License as published by 

10# the Free Software Foundation, either version 3 of the License, or 

11# (at your option) any later version. 

12# 

13# This program is distributed in the hope that it will be useful, 

14# but WITHOUT ANY WARRANTY; without even the implied warranty of 

15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

16# GNU General Public License for more details. 

17# 

18# You should have received a copy of the LSST License Statement and 

19# the GNU General Public License along with this program. If not, 

20# see <https://www.lsstcorp.org/LegalNotices/>. 

21# 

22import numpy as np 

23import astropy.time 

24import astropy.units as u 

25import astropy.coordinates 

26import yaml 

27import wcsfit 

28import astshim 

29 

30import lsst.geom 

31import lsst.pex.config as pexConfig 

32import lsst.pipe.base as pipeBase 

33import lsst.sphgeom 

34import lsst.afw.table 

35import lsst.afw.geom as afwgeom 

36from lsst.meas.algorithms import (LoadReferenceObjectsConfig, ReferenceObjectLoader, 

37 ReferenceSourceSelectorTask) 

38from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry 

39 

40__all__ = ['GbdesAstrometricFitConnections', 'GbdesAstrometricFitConfig', 'GbdesAstrometricFitTask'] 

41 

42 

43def _lookup_visit_refcats(datasetType, registry, quantumDataId, collections): 

44 """Lookup function that finds all refcats for all visits that overlap a 

45 tract, rather than just the refcats that directly overlap the tract. 

46 Borrowed from jointcal. 

47 

48 Parameters 

49 ---------- 

50 datasetType : `lsst.daf.butler.DatasetType` 

51 Type of dataset being searched for. 

52 registry : `lsst.daf.butler.Registry` 

53 Data repository registry to search. 

54 quantumDataId : `lsst.daf.butler.DataCoordinate` 

55 Data ID of the quantum; expected to be something we can use as a 

56 constraint to query for overlapping visits. 

57 collections : `Iterable` [ `str` ] 

58 Collections to search. 

59 Returns 

60 ------- 

61 refs : `Iterator` [ `lsst.daf.butler.DatasetRef` ] 

62 Iterator over refcat references. 

63 """ 

64 refs = set() 

65 # Use .expanded() on the query methods below because we need data IDs with 

66 # regions, both in the outer loop over visits (queryDatasets will expand 

67 # any data ID we give it, but doing it up-front in bulk is much more 

68 # efficient) and in the data IDs of the DatasetRefs this function yields 

69 # (because the RefCatLoader relies on them to do some of its own 

70 # filtering). 

71 for visit_data_id in set(registry.queryDataIds('visit', dataId=quantumDataId).expanded()): 

72 refs.update( 

73 registry.queryDatasets( 

74 datasetType, 

75 collections=collections, 

76 dataId=visit_data_id, 

77 findFirst=True, 

78 ).expanded() 

79 ) 

80 refs = sorted(refs) 

81 yield from refs 

82 

83 

84def _make_ref_covariance_matrix(refCat, inputUnit=u.radian, outputCoordUnit=u.marcsec, 

85 outputPMUnit=u.marcsec): 

86 """Make a covariance matrix for the reference catalog including proper 

87 motion and parallax. 

88 

89 The output is flattened to one dimension to match the format expected by 

90 `gbdes`. 

91 

92 Parameters 

93 ---------- 

94 refCat : `lsst.afw.table.SimpleCatalog` 

95 Catalog including proper motion and parallax measurements. 

96 inputUnit : `astropy.units.core.Unit` 

97 Units of the input catalog. 

98 outputCoordUnit : `astropy.units.core.Unit` 

99 Units required for the coordinates in the covariance matrix. `gbdes` 

100 expects milliarcseconds. 

101 outputPMUnit : `astropy.units.core.Unit` 

102 Units required for the proper motion/parallax in the covariance matrix. 

103 `gbdes` expects milliarcseconds. 

104 

105 Returns 

106 ------- 

107 cov : `list` of `float` 

108 Flattened output covariance matrix. 

109 """ 

110 # Here is the standard ordering of components in the cov matrix, 

111 # to match the PM enumeration in C++ code of gbdes package's Match. 

112 # Each tuple gives: the array holding the 1d error, 

113 # the corresponding string used in the Gaia column names, 

114 # and the position of that component in the Gaia catalog ordering. 

115 # The ordering of the tuples is the order we want in our cov matrix. 

116 raErr = (refCat['coord_raErr'] * inputUnit).to(outputCoordUnit).to_value() 

117 decErr = (refCat['coord_decErr'] * inputUnit).to(outputCoordUnit).to_value() 

118 raPMErr = (refCat['pm_raErr'] * inputUnit).to(outputPMUnit).to_value() 

119 decPMErr = (refCat['pm_decErr'] * inputUnit).to(outputPMUnit).to_value() 

120 parallaxErr = (refCat['parallaxErr'] * inputUnit).to(outputPMUnit).to_value() 

121 stdOrder = ((raErr, 'ra', 0), 

122 (decErr, 'dec', 1), 

123 (raPMErr, 'pmra', 3), 

124 (decPMErr, 'pmdec', 4), 

125 (parallaxErr, 'parallax', 2)) 

126 cov = np.zeros((len(refCat), 25)) 

127 k = 0 

128 # TODO: when DM-35130 is done, we need the full covariance here 

129 for i, pr1 in enumerate(stdOrder): 

130 for j, pr2 in enumerate(stdOrder): 

131 if pr1[2] < pr2[2]: 

132 # add correlation coefficient (once it is available) 

133 # cov[:, k] = (pr1[0] * pr2[0] * refCat[pr1[1] + '_' + pr2[1] 

134 # + '_corr']) 

135 cov[:, k] = 0 

136 elif pr1[2] > pr2[2]: 

137 # add correlation coefficient (once it is available) 

138 # cov[:, k] = (pr1[0] * pr2[0] * refCat[pr2[1] + '_' + pr1[1] 

139 # + '_corr']) 

140 cov[:, k] = 0 

141 else: 

142 # diagonal element 

143 cov[:, k] = pr1[0] * pr2[0] 

144 k = k+1 

145 

146 return cov 

147 

148 
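
# The sketch below is illustrative only (not part of the pipeline): it builds
# a tiny reference catalog with the error columns `_make_ref_covariance_matrix`
# expects and reshapes one flattened 25-element row back into its 5x5
# covariance matrix. The column values are made-up placeholders.
def _demo_ref_covariance_shape():
    from astropy.table import Table

    demoCat = Table({'coord_raErr': [1e-9, 2e-9],
                     'coord_decErr': [1e-9, 1e-9],
                     'pm_raErr': [5e-10, 4e-10],
                     'pm_decErr': [5e-10, 6e-10],
                     'parallaxErr': [3e-10, 3e-10]})
    cov = _make_ref_covariance_matrix(demoCat)
    # Each row is the 5x5 (ra, dec, pm_ra, pm_dec, parallax) covariance
    # flattened in the gbdes ordering; only the variances on the diagonal are
    # filled until the full Gaia correlations are available (DM-35130).
    return cov[0].reshape(5, 5)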

149def _convert_to_ast_polymap_coefficients(coefficients): 

150 """Convert vector of polynomial coefficients from the format used in 

151 `gbdes` into AST format (see Poly2d::vectorIndex(i, j) in 

152 gbdes/gbutil/src/Poly2d.cpp). This assumes two input and two output 

153 coordinates. 

154 

155 Parameters 

156 ---------- 

157 coefficients : `list` 

158 Flattened list of the polynomial coefficients for the two output 

159 coordinates, in the `gbdes` vector ordering (the polynomial degree 

160 is inferred from the length of this list). 

161 

162 Returns 

163 ------- 

164 astPoly : `astshim.PolyMap` 

165 Coefficients in AST polynomial format. 

166 """ 

167 polyArray = np.zeros((len(coefficients), 4)) 

168 N = len(coefficients) / 2 

169 # Get the degree of the polynomial by applying the quadratic formula to the 

170 # formula for calculating the number of coefficients of the polynomial. 

171 degree = int(-1.5 + 0.5 * (1 + 8 * N)**0.5) 

172 

173 for outVar in [1, 2]: 

174 for i in range(degree + 1): 

175 for j in range(degree + 1): 

176 if (i + j) > degree: 

177 continue 

178 vectorIndex = int(((i+j)*(i+j+1))/2+j + N * (outVar - 1)) 

179 polyArray[vectorIndex, 0] = coefficients[vectorIndex] 

180 polyArray[vectorIndex, 1] = outVar 

181 polyArray[vectorIndex, 2] = i 

182 polyArray[vectorIndex, 3] = j 

183 

184 astPoly = astshim.PolyMap(polyArray, 2, options="IterInverse=1,NIterInverse=10,TolInverse=1e-7") 

185 return astPoly 

186 

187 
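
# Illustrative sketch (not used by the task): a degree-d 2D polynomial has
# (d + 1)(d + 2)/2 coefficients per output coordinate, so a flattened vector
# of length 2N corresponds to degree d = (-3 + sqrt(1 + 8N)) / 2, which is the
# quadratic-formula step used above. The degree-1 "identity" coefficient
# vector below ([0, 1, 0] for x and [0, 0, 1] for y) is a hypothetical input.
def _demo_polymap_from_identity_coefficients():
    identityCoefficients = [0.0, 1.0, 0.0, 0.0, 0.0, 1.0]
    astPoly = _convert_to_ast_polymap_coefficients(identityCoefficients)
    # For 6 coefficients N = 3, so the recovered degree is 1 and the resulting
    # PolyMap should reproduce its input points.
    points = np.array([[100.0, 250.0], [200.0, -50.0]])
    return astPoly.applyForward(points)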

188def _get_wcs_from_sip(butlerWcs): 

189 """Get wcsfit.Wcs in TPV format from the SIP-formatted input WCS. 

190 

191 Parameters 

192 ---------- 

193 butlerWcs : `lsst.afw.geom.SkyWcs` 

194 Input WCS from the calexp in SIP format. 

195 

196 Returns 

197 ------- 

198 wcs : `wcsfit.Wcs` 

199 WCS object in TPV format. 

200 """ 

201 fits_metadata = butlerWcs.getFitsMetadata() 

202 if not ((fits_metadata.get('CTYPE1') == 'RA---TAN-SIP') 

203 and (fits_metadata.get('CTYPE2') == 'DEC--TAN-SIP')): 

204 raise ValueError(f"CTYPES {fits_metadata.get('CTYPE1')} and {fits_metadata.get('CTYPE2')}" 

205 "do not match SIP convention") 

206 

207 # Correct CRPIX values to correspond to source table pixel indexing 

208 # convention 

209 crpix1 = fits_metadata.get('CRPIX1') 

210 crpix2 = fits_metadata.get('CRPIX2') 

211 fits_metadata.set('CRPIX1', crpix1 - 1) 

212 fits_metadata.set('CRPIX2', crpix2 - 1) 

213 

214 floatDict = {k: fits_metadata[k] for k in fits_metadata if isinstance(fits_metadata[k], (int, float))} 

215 

216 wcs = wcsfit.readTPVFromSIP(floatDict, 'SIP') 

217 

218 return wcs 

219 

220 
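
# Illustrative sketch (not used by the task): `_get_wcs_from_sip` only accepts
# WCSs whose FITS CTYPEs follow the SIP convention, so the plain TAN WCS built
# here from hypothetical values is rejected with a ValueError.
def _demo_sip_ctype_check():
    crpix = lsst.geom.Point2D(1000.0, 1000.0)
    crval = lsst.geom.SpherePoint(30.0, -10.0, lsst.geom.degrees)
    cdMatrix = afwgeom.makeCdMatrix(scale=0.2 * lsst.geom.arcseconds)
    tanWcs = afwgeom.makeSkyWcs(crpix=crpix, crval=crval, cdMatrix=cdMatrix)
    try:
        _get_wcs_from_sip(tanWcs)
    except ValueError:
        # Expected: CTYPE1/CTYPE2 are 'RA---TAN'/'DEC--TAN' here, not the
        # '-SIP' variants required above.
        return False
    return True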

221class GbdesAstrometricFitConnections(pipeBase.PipelineTaskConnections, 

222 dimensions=('skymap', 'tract', 'instrument', 'physical_filter')): 

223 """Middleware input/output connections for task data.""" 

224 inputCatalogRefs = pipeBase.connectionTypes.Input( 

225 doc="Source table in parquet format, per visit.", 

226 name='preSourceTable_visit', 

227 storageClass='DataFrame', 

228 dimensions=('instrument', 'visit'), 

229 deferLoad=True, 

230 multiple=True, 

231 ) 

232 inputVisitSummaries = pipeBase.connectionTypes.Input( 

233 doc=("Per-visit consolidated exposure metadata built from calexps. " 

234 "These catalogs use detector id for the id and must be sorted for " 

235 "fast lookups of a detector."), 

236 name='visitSummary', 

237 storageClass='ExposureCatalog', 

238 dimensions=('instrument', 'visit'), 

239 multiple=True, 

240 ) 

241 referenceCatalog = pipeBase.connectionTypes.PrerequisiteInput( 

242 doc="The astrometry reference catalog to match to loaded input catalog sources.", 

243 name='gaia_dr2_20200414', 

244 storageClass='SimpleCatalog', 

245 dimensions=('skypix',), 

246 deferLoad=True, 

247 multiple=True, 

248 lookupFunction=_lookup_visit_refcats, 

249 ) 

250 outputWcs = pipeBase.connectionTypes.Output( 

251 doc=("Per-tract, per-visit world coordinate systems derived from the fitted model." 

252 " These catalogs only contain entries for detectors with an output, and use" 

253 " the detector id for the catalog id, sorted on id for fast lookups of a detector."), 

254 name='gbdesAstrometricFitSkyWcsCatalog', 

255 storageClass='ExposureCatalog', 

256 dimensions=('instrument', 'visit', 'skymap', 'tract'), 

257 multiple=True 

258 ) 

259 outputCatalog = pipeBase.connectionTypes.Output( 

260 doc=("Source table with stars used in fit, along with residuals in pixel coordinates and tangent " 

261 "plane coordinates and chisq values."), 

262 name='gbdesAstrometricFit_fitStars', 

263 storageClass='ArrowNumpyDict', 

264 dimensions=('instrument', 'skymap', 'tract', 'physical_filter'), 

265 ) 

266 starCatalog = pipeBase.connectionTypes.Output( 

267 doc="Star catalog.", 

268 name='gbdesAstrometricFit_starCatalog', 

269 storageClass='ArrowNumpyDict', 

270 dimensions=('instrument', 'skymap', 'tract', 'physical_filter') 

271 ) 

272 

273 

274class GbdesAstrometricFitConfig(pipeBase.PipelineTaskConfig, 

275 pipelineConnections=GbdesAstrometricFitConnections): 

276 """Configuration for GbdesAstrometricFitTask""" 

277 sourceSelector = sourceSelectorRegistry.makeField( 

278 doc="How to select sources for cross-matching.", 

279 default='science' 

280 ) 

281 referenceSelector = pexConfig.ConfigurableField( 

282 target=ReferenceSourceSelectorTask, 

283 doc="How to down-select the loaded astrometry reference catalog.", 

284 ) 

285 matchRadius = pexConfig.Field( 

286 doc="Matching tolerance between associated objects (arcseconds).", 

287 dtype=float, 

288 default=1.0 

289 ) 

290 minMatches = pexConfig.Field( 

291 doc="Number of matches required to keep a source object.", 

292 dtype=int, 

293 default=2 

294 ) 

295 allowSelfMatches = pexConfig.Field( 

296 doc="Allow multiple sources from the same visit to be associated with the same object.", 

297 dtype=bool, 

298 default=False 

299 ) 

300 sourceFluxType = pexConfig.Field( 

301 dtype=str, 

302 doc="Source flux field to use in source selection and to get fluxes from the catalog.", 

303 default='apFlux_12_0' 

304 ) 

305 systematicError = pexConfig.Field( 

306 dtype=float, 

307 doc=("Systematic error padding added in quadrature for the science catalogs (marcsec). The default" 

308 "value is equivalent to 0.02 pixels for HSC."), 

309 default=0.0034 

310 ) 

311 referenceSystematicError = pexConfig.Field( 

312 dtype=float, 

313 doc="Systematic error padding added in quadrature for the reference catalog (marcsec).", 

314 default=0.0 

315 ) 

316 modelComponents = pexConfig.ListField( 

317 dtype=str, 

318 doc=("List of mappings to apply to transform from pixels to sky, in order of their application." 

319 "Supported options are 'INSTRUMENT/DEVICE' and 'EXPOSURE'."), 

320 default=['INSTRUMENT/DEVICE', 'EXPOSURE'] 

321 ) 

322 deviceModel = pexConfig.ListField( 

323 dtype=str, 

324 doc=("List of mappings to apply to transform from detector pixels to intermediate frame. Map names" 

325 "should match the format 'BAND/DEVICE/<map name>'."), 

326 default=['BAND/DEVICE/poly'] 

327 ) 

328 exposureModel = pexConfig.ListField( 

329 dtype=str, 

330 doc=("List of mappings to apply to transform from intermediate frame to sky coordinates. Map names" 

331 "should match the format 'EXPOSURE/<map name>'."), 

332 default=['EXPOSURE/poly'] 

333 ) 

334 devicePolyOrder = pexConfig.Field( 

335 dtype=int, 

336 doc="Order of device polynomial model.", 

337 default=4 

338 ) 

339 exposurePolyOrder = pexConfig.Field( 

340 dtype=int, 

341 doc="Order of exposure polynomial model.", 

342 default=6 

343 ) 

344 fitProperMotion = pexConfig.Field( 

345 dtype=bool, 

346 doc="Fit the proper motions of the objects.", 

347 default=False 

348 ) 

349 excludeNonPMObjects = pexConfig.Field( 

350 dtype=bool, 

351 doc="Exclude reference objects without proper motion/parallax information.", 

352 default=True 

353 ) 

354 fitReserveFraction = pexConfig.Field( 

355 dtype=float, 

356 default=0.2, 

357 doc="Fraction of objects to reserve from fit for validation." 

358 ) 

359 fitReserveRandomSeed = pexConfig.Field( 

360 dtype=int, 

361 doc="Set the random seed for selecting data points to reserve from the fit for validation.", 

362 default=1234 

363 ) 

364 

365 def setDefaults(self): 

366 # Use only stars because aperture fluxes of galaxies are biased and 

367 # depend on seeing. 

368 self.sourceSelector['science'].doUnresolved = True 

369 self.sourceSelector['science'].unresolved.name = 'extendedness' 

370 

371 # Use only isolated sources. 

372 self.sourceSelector['science'].doIsolated = True 

373 self.sourceSelector['science'].isolated.parentName = 'parentSourceId' 

374 self.sourceSelector['science'].isolated.nChildName = 'deblend_nChild' 

375 # Do not use either flux or centroid measurements with flags, 

376 # chosen from the usual QA flags for stars. 

377 self.sourceSelector['science'].doFlags = True 

378 badFlags = ['pixelFlags_edge', 

379 'pixelFlags_saturated', 

380 'pixelFlags_interpolatedCenter', 

381 'pixelFlags_interpolated', 

382 'pixelFlags_crCenter', 

383 'pixelFlags_bad', 

384 'hsmPsfMoments_flag', 

385 f'{self.sourceFluxType}_flag', 

386 ] 

387 self.sourceSelector['science'].flags.bad = badFlags 

388 

389 # Use only primary sources. 

390 self.sourceSelector['science'].doRequirePrimary = True 

391 

392 def validate(self): 

393 super().validate() 

394 

395 # Check if all components of the device and exposure models are 

396 # supported. 

397 for component in self.deviceModel: 

398 if not (('poly' in component.lower()) or ('identity' in component.lower())): 

399 raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.deviceModel, self, 

400 f'deviceModel component {component} is not supported.') 

401 

402 for component in self.exposureModel: 

403 if not (('poly' in component.lower()) or ('identity' in component.lower())): 

404 raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.exposureModel, self, 

405 f'exposureModel component {component} is not supported.') 

406 

407 
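
# Hedged configuration sketch (the override values are hypothetical): the
# validation above only accepts components containing 'poly' or 'identity',
# so this passes, while e.g. 'BAND/DEVICE/chebyshev' would raise a
# FieldValidationError.
def _demo_validate_model_config():
    config = GbdesAstrometricFitConfig()
    config.devicePolyOrder = 5
    config.deviceModel = ['BAND/DEVICE/poly']
    config.exposureModel = ['EXPOSURE/poly', 'EXPOSURE/identity']
    config.validate()
    return config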

408class GbdesAstrometricFitTask(pipeBase.PipelineTask): 

409 """Calibrate the WCS across multiple visits of the same field using the 

410 GBDES package. 

411 """ 

412 

413 ConfigClass = GbdesAstrometricFitConfig 

414 _DefaultName = 'gbdesAstrometricFit' 

415 

416 def __init__(self, **kwargs): 

417 super().__init__(**kwargs) 

418 self.makeSubtask('sourceSelector') 

419 self.makeSubtask('referenceSelector') 

420 

421 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

422 # We override runQuantum to set up the refObjLoaders 

423 inputs = butlerQC.get(inputRefs) 

424 

425 instrumentName = butlerQC.quantum.dataId['instrument'] 

426 

427 sampleRefCat = inputs['referenceCatalog'][0].get() 

428 refEpoch = sampleRefCat[0]['epoch'] 

429 

430 refConfig = LoadReferenceObjectsConfig() 

431 refConfig.anyFilterMapsToThis = 'phot_g_mean' 

432 refConfig.requireProperMotion = True 

433 refObjectLoader = ReferenceObjectLoader(dataIds=[ref.datasetRef.dataId 

434 for ref in inputRefs.referenceCatalog], 

435 refCats=inputs.pop('referenceCatalog'), 

436 config=refConfig, 

437 log=self.log) 

438 

439 # Ensure the inputs are in a consistent order 

440 inputCatVisits = np.array([inputCat.dataId['visit'] for inputCat in inputs['inputCatalogRefs']]) 

441 inputs['inputCatalogRefs'] = [inputs['inputCatalogRefs'][v] for v in inputCatVisits.argsort()] 

442 inputSumVisits = np.array([inputSum[0]['visit'] for inputSum in inputs['inputVisitSummaries']]) 

443 inputs['inputVisitSummaries'] = [inputs['inputVisitSummaries'][v] for v in inputSumVisits.argsort()] 

444 

445 output = self.run(**inputs, instrumentName=instrumentName, refEpoch=refEpoch, 

446 refObjectLoader=refObjectLoader) 

447 

448 for outputRef in outputRefs.outputWcs: 

449 visit = outputRef.dataId['visit'] 

450 butlerQC.put(output.outputWCSs[visit], outputRef) 

451 butlerQC.put(output.outputCatalog, outputRefs.outputCatalog) 

452 butlerQC.put(output.starCatalog, outputRefs.starCatalog) 

453 

454 def run(self, inputCatalogRefs, inputVisitSummaries, instrumentName="", refEpoch=None, 

455 refObjectLoader=None): 

456 """Run the WCS fit for a given set of visits 

457 

458 Parameters 

459 ---------- 

460 inputCatalogRefs : `list` 

461 List of `DeferredDatasetHandle`s pointing to visit-level source 

462 tables. 

463 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

464 List of catalogs with per-detector summary information. 

465 instrumentName : `str`, optional 

466 Name of the instrument used. This is only used for labelling. 

467 refEpoch : `float` 

468 Epoch of the reference objects in MJD. 

469 refObjectLoader : instance of 

470 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader` 

471 Reference object loader instance. 

472 

473 Returns 

474 ------- 

475 result : `lsst.pipe.base.Struct` 

476 ``outputWCSs`` : `dict` of [`int`, `lsst.afw.table.ExposureCatalog`] 

477 Dictionary of exposure catalogs (one per visit, keyed by visit id) 

478 with the WCS for each detector set by the new fitted WCS. 

479 ``fitModel`` : `wcsfit.WCSFit` 

480 Model-fitting object with final model parameters. 

481 ``outputCatalog`` : `pyarrow.Table` 

482 Catalog with fit residuals of all sources used. 

483 """ 

484 self.log.info("Gathering instrument, exposure, and field info") 

485 # Set up an instrument object 

486 instrument = wcsfit.Instrument(instrumentName) 

487 

488 # Get RA, Dec, MJD, etc., for the input visits 

489 exposureInfo, exposuresHelper, extensionInfo = self._get_exposure_info(inputVisitSummaries, 

490 instrument, refEpoch=refEpoch) 

491 

492 # Get information about the extent of the input visits 

493 fields, fieldCenter, fieldRadius = self._prep_sky(inputVisitSummaries, exposureInfo.medianEpoch) 

494 

495 self.log.info("Load catalogs and associate sources") 

496 # Set up class to associate sources into matches using a 

497 # friends-of-friends algorithm 

498 associations = wcsfit.FoFClass(fields, [instrument], exposuresHelper, 

499 [fieldRadius.asDegrees()], 

500 (self.config.matchRadius * u.arcsec).to(u.degree).value) 

501 

502 # Add the reference catalog to the associator 

503 medianEpoch = astropy.time.Time(exposureInfo.medianEpoch, format='decimalyear').mjd 

504 refObjects, refCovariance = self._load_refcat(associations, refObjectLoader, fieldCenter, fieldRadius, 

505 extensionInfo, epoch=medianEpoch) 

506 

507 # Add the science catalogs and associate new sources as they are added 

508 sourceIndices, usedColumns = self._load_catalogs_and_associate(associations, inputCatalogRefs, 

509 extensionInfo) 

510 

511 self.log.info("Fit the WCSs") 

512 # Set up a YAML-type string using the config variables and a sample 

513 # visit 

514 inputYAML = self.make_yaml(inputVisitSummaries[0]) 

515 

516 # Set the verbosity level for WCSFit from the task log level. 

517 # TODO: DM-36850, Add lsst.log to gbdes so that log messages are 

518 # properly propagated. 

519 loglevel = self.log.getEffectiveLevel() 

520 if loglevel >= self.log.WARNING: 

521 verbose = 0 

522 elif loglevel == self.log.INFO: 

523 verbose = 1 

524 else: 

525 verbose = 2 

526 

527 # Set up the WCS-fitting class using the results of the FOF associator 

528 wcsf = wcsfit.WCSFit(fields, [instrument], exposuresHelper, 

529 extensionInfo.visitIndex, extensionInfo.detectorIndex, 

530 inputYAML, extensionInfo.wcs, associations.sequence, associations.extn, 

531 associations.obj, sysErr=self.config.systematicError, 

532 refSysErr=self.config.referenceSystematicError, 

533 usePM=self.config.fitProperMotion, 

534 verbose=verbose) 

535 

536 # Add the science and reference sources 

537 self._add_objects(wcsf, inputCatalogRefs, sourceIndices, extensionInfo, usedColumns) 

538 self._add_ref_objects(wcsf, refObjects, refCovariance, extensionInfo) 

539 

540 # Do the WCS fit 

541 wcsf.fit(reserveFraction=self.config.fitReserveFraction, 

542 randomNumberSeed=self.config.fitReserveRandomSeed) 

543 self.log.info("WCS fitting done") 

544 

545 outputWCSs = self._make_outputs(wcsf, inputVisitSummaries, exposureInfo) 

546 outputCatalog = wcsf.getOutputCatalog() 

547 starCatalog = wcsf.getStarCatalog() 

548 

549 return pipeBase.Struct(outputWCSs=outputWCSs, 

550 fitModel=wcsf, 

551 outputCatalog=outputCatalog, 

552 starCatalog=starCatalog) 

553 

554 def _prep_sky(self, inputVisitSummaries, epoch, fieldName='Field'): 

555 """Get center and radius of the input tract. This assumes that all 

556 visits will be put into the same `wcsfit.Field` and fit together. 

557 

558 Parameters 

559 ---------- 

560 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

561 List of catalogs with per-detector summary information. 

562 epoch : `float` 

563 Reference epoch. 

564 fieldName : `str` 

565 Name of the field, used internally. 

566 

567 Returns 

568 ------- 

569 fields : `wcsfit.Fields` 

570 Object with field information. 

571 center : `lsst.geom.SpherePoint` 

572 Center of the field. 

573 radius : `lsst.sphgeom._sphgeom.Angle` 

574 Radius of the bounding circle of the tract. 

575 """ 

576 allDetectorCorners = [] 

577 for visSum in inputVisitSummaries: 

578 detectorCorners = [lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector() for (ra, dec) 

579 in zip(visSum['raCorners'].ravel(), visSum['decCorners'].ravel())] 

580 allDetectorCorners.extend(detectorCorners) 

581 boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(allDetectorCorners).getBoundingCircle() 

582 center = lsst.geom.SpherePoint(boundingCircle.getCenter()) 

583 ra = center.getRa().asDegrees() 

584 dec = center.getDec().asDegrees() 

585 radius = boundingCircle.getOpeningAngle() 

586 

587 # wcsfit.Fields describes a list of fields, but we assume all 

588 # observations will be fit together in one field. 

589 fields = wcsfit.Fields([fieldName], [ra], [dec], [epoch]) 

590 

591 return fields, center, radius 

592 
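
# Illustrative sketch of the bounding-circle geometry used in `_prep_sky`,
# with hypothetical corner coordinates standing in for the 'raCorners' and
# 'decCorners' columns of a visit summary. Not used by the task (`self` is
# unused).
def _demo_prep_sky_geometry(self):
    corners = [lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector()
               for ra, dec in [(149.8, 2.0), (150.2, 2.0), (150.2, 2.4), (149.8, 2.4)]]
    boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(corners).getBoundingCircle()
    center = lsst.geom.SpherePoint(boundingCircle.getCenter())
    radius = boundingCircle.getOpeningAngle()
    # The center and opening angle define the single `wcsfit.Field` in which
    # all of the visits are fit together.
    return center, radius.asDegrees()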

593 def _get_exposure_info(self, inputVisitSummaries, instrument, fieldNumber=0, instrumentNumber=0, 

594 refEpoch=None): 

595 """Get various information about the input visits to feed to the 

596 fitting routines. 

597 

598 Parameters 

599 ---------- 

600 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

601 Tables for each visit with information for detectors. 

602 instrument : `wcsfit.Instrument` 

603 Instrument object to which detector information is added. 

604 fieldNumber : `int` 

605 Index of the field for these visits. Should be zero if all data is 

606 being fit together. 

607 instrumentNumber : `int` 

608 Index of the instrument for these visits. Should be zero if all 

609 data comes from the same instrument. 

610 refEpoch : `float` 

611 Epoch of the reference objects in MJD. 

612 

613 Returns 

614 ------- 

615 exposureInfo : `lsst.pipe.base.Struct` 

616 Struct containing general properties for the visits: 

617 ``visits`` : `list` 

618 List of visit names. 

619 ``detectors`` : `list` 

620 List of all detectors in any visit. 

621 ``ras`` : `list` of float 

622 List of boresight RAs for each visit. 

623 ``decs`` : `list` of float 

624 List of boresight Decs for each visit. 

625 ``medianEpoch`` : float 

626 Median epoch of all visits in decimal-year format. 

627 exposuresHelper : `wcsfit.ExposuresHelper` 

628 Object containing information about the input visits. 

629 extensionInfo : `lsst.pipe.base.Struct` 

630 Struct containing properties for each extension: 

631 ``visit`` : `np.ndarray` 

632 Name of the visit for this extension. 

633 ``detector`` : `np.ndarray` 

634 Name of the detector for this extension. 

635 ``visitIndex`` : `np.ndarray` of `int` 

636 Index of visit for this extension. 

637 ``detectorIndex`` : `np.ndarray` of `int` 

638 Index of the detector for this extension. 

639 ``wcss`` : `np.ndarray` of `lsst.afw.geom.SkyWcs` 

640 Initial WCS for this extension. 

641 ``extensionType`` : `np.ndarray` of `str` 

642 "SCIENCE" or "REFERENCE". 

643 """ 

644 exposureNames = [] 

645 ras = [] 

646 decs = [] 

647 visits = [] 

648 detectors = [] 

649 airmasses = [] 

650 exposureTimes = [] 

651 mjds = [] 

652 observatories = [] 

653 wcss = [] 

654 

655 extensionType = [] 

656 extensionVisitIndices = [] 

657 extensionDetectorIndices = [] 

658 extensionVisits = [] 

659 extensionDetectors = [] 

660 # Get information for all the science visits 

661 for v, visitSummary in enumerate(inputVisitSummaries): 

662 visitInfo = visitSummary[0].getVisitInfo() 

663 visit = visitSummary[0]['visit'] 

664 visits.append(visit) 

665 exposureNames.append(str(visit)) 

666 raDec = visitInfo.getBoresightRaDec() 

667 ras.append(raDec.getRa().asRadians()) 

668 decs.append(raDec.getDec().asRadians()) 

669 airmasses.append(visitInfo.getBoresightAirmass()) 

670 exposureTimes.append(visitInfo.getExposureTime()) 

671 obsDate = visitInfo.getDate() 

672 obsMJD = obsDate.get(obsDate.MJD) 

673 mjds.append(obsMJD) 

674 # Get the observatory ICRS position for use in fitting parallax 

675 obsLon = visitInfo.observatory.getLongitude().asDegrees() 

676 obsLat = visitInfo.observatory.getLatitude().asDegrees() 

677 obsElev = visitInfo.observatory.getElevation() 

678 earthLocation = astropy.coordinates.EarthLocation.from_geodetic(obsLon, obsLat, obsElev) 

679 observatory_gcrs = earthLocation.get_gcrs(astropy.time.Time(obsMJD, format='mjd')) 

680 observatory_icrs = observatory_gcrs.transform_to(astropy.coordinates.ICRS()) 

681 # We want the position in AU in Cartesian coordinates 

682 observatories.append(observatory_icrs.cartesian.xyz.to(u.AU).value) 

683 

684 for row in visitSummary: 

685 detector = row['id'] 

686 if detector not in detectors: 

687 detectors.append(detector) 

688 detectorBounds = wcsfit.Bounds(row['bbox_min_x'], row['bbox_max_x'], 

689 row['bbox_min_y'], row['bbox_max_y']) 

690 instrument.addDevice(str(detector), detectorBounds) 

691 

692 detectorIndex = np.flatnonzero(detector == np.array(detectors))[0] 

693 extensionVisitIndices.append(v) 

694 extensionDetectorIndices.append(detectorIndex) 

695 extensionVisits.append(visit) 

696 extensionDetectors.append(detector) 

697 extensionType.append('SCIENCE') 

698 

699 wcs = row.getWcs() 

700 wcss.append(_get_wcs_from_sip(wcs)) 

701 

702 fieldNumbers = list(np.ones(len(exposureNames), dtype=int) * fieldNumber) 

703 instrumentNumbers = list(np.ones(len(exposureNames), dtype=int) * instrumentNumber) 

704 

705 # Set the reference epoch to be the median of the science visits. 

706 # The reference catalog will be shifted to this date. 

707 medianMJD = np.median(mjds) 

708 medianEpoch = astropy.time.Time(medianMJD, format='mjd').decimalyear 

709 

710 # Add information for the reference catalog. Most of the values are 

711 # not used. 

712 exposureNames.append('REFERENCE') 

713 visits.append(-1) 

714 fieldNumbers.append(0) 

715 if self.config.fitProperMotion: 

716 instrumentNumbers.append(-2) 

717 else: 

718 instrumentNumbers.append(-1) 

719 ras.append(0.0) 

720 decs.append(0.0) 

721 airmasses.append(0.0) 

722 exposureTimes.append(0) 

723 mjds.append((refEpoch if (refEpoch is not None) else medianMJD)) 

724 observatories.append(np.array([0, 0, 0])) 

725 identity = wcsfit.IdentityMap() 

726 icrs = wcsfit.SphericalICRS() 

727 refWcs = wcsfit.Wcs(identity, icrs, 'Identity', np.pi / 180.) 

728 wcss.append(refWcs) 

729 

730 extensionVisitIndices.append(len(exposureNames) - 1) 

731 extensionDetectorIndices.append(-1) # REFERENCE device must be -1 

732 extensionVisits.append(-1) 

733 extensionDetectors.append(-1) 

734 extensionType.append('REFERENCE') 

735 

736 # Make a table of information to use elsewhere in the class 

737 extensionInfo = pipeBase.Struct(visit=np.array(extensionVisits), 

738 detector=np.array(extensionDetectors), 

739 visitIndex=np.array(extensionVisitIndices), 

740 detectorIndex=np.array(extensionDetectorIndices), 

741 wcs=np.array(wcss), 

742 extensionType=np.array(extensionType)) 

743 

744 # Make the exposureHelper object to use in the fitting routines 

745 exposuresHelper = wcsfit.ExposuresHelper(exposureNames, 

746 fieldNumbers, 

747 instrumentNumbers, 

748 ras, 

749 decs, 

750 airmasses, 

751 exposureTimes, 

752 mjds, 

753 observatories) 

754 

755 exposureInfo = pipeBase.Struct(visits=visits, 

756 detectors=detectors, 

757 ras=ras, 

758 decs=decs, 

759 medianEpoch=medianEpoch) 

760 

761 return exposureInfo, exposuresHelper, extensionInfo 

762 
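
# Illustrative sketch of the observatory-position calculation performed above
# for each visit: a hypothetical geodetic site and MJD are converted to a
# Cartesian ICRS position in AU, which is what the parallax fit needs. Not
# used by the task (`self` is unused).
def _demo_observatory_position(self):
    earthLocation = astropy.coordinates.EarthLocation.from_geodetic(-155.48, 19.83, 4139.0)
    observatory_gcrs = earthLocation.get_gcrs(astropy.time.Time(60000.0, format='mjd'))
    observatory_icrs = observatory_gcrs.transform_to(astropy.coordinates.ICRS())
    return observatory_icrs.cartesian.xyz.to(u.AU).value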

763 def _load_refcat(self, associations, refObjectLoader, center, radius, extensionInfo, epoch=None, 

764 fieldIndex=0): 

765 """Load the reference catalog and add reference objects to the 

766 `wcsfit.FoFClass` object. 

767 

768 Parameters 

769 ---------- 

770 associations : `wcsfit.FoFClass` 

771 Object to which to add the catalog of reference objects. 

772 refObjectLoader : 

773 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader` 

774 Object set up to load reference catalog objects. 

775 center : `lsst.geom.SpherePoint` 

776 Center of the circle in which to load reference objects. 

777 radius : `lsst.sphgeom._sphgeom.Angle` 

778 Radius of the circle in which to load reference objects. 

779 extensionInfo : `lsst.pipe.base.Struct` 

780 Struct containing properties for each extension. 

781 epoch : `float` 

782 MJD to which to correct the object positions. 

783 fieldIndex : `int` 

784 Index of the field. Should be zero if all the data is fit together. 

785 

786 Returns 

787 ------- 

788 refObjects : `dict` 

789 Position and error information of reference objects. 

790 refCovariance : `list` of `float` 

791 Flattened output covariance matrix. 

792 """ 

793 formattedEpoch = astropy.time.Time(epoch, format='mjd') 

794 

795 refFilter = refObjectLoader.config.anyFilterMapsToThis 

796 skyCircle = refObjectLoader.loadSkyCircle(center, radius, refFilter, epoch=formattedEpoch) 

797 

798 selected = self.referenceSelector.run(skyCircle.refCat) 

799 # Need memory contiguity to get reference filters as a vector. 

800 if not selected.sourceCat.isContiguous(): 

801 refCat = selected.sourceCat.copy(deep=True) 

802 else: 

803 refCat = selected.sourceCat 

804 

805 if self.config.excludeNonPMObjects: 

806 hasPM = refCat['pm_raErr'] != 0 

807 refCat = refCat[hasPM] 

808 

809 ra = (refCat['coord_ra'] * u.radian).to(u.degree).to_value().tolist() 

810 dec = (refCat['coord_dec'] * u.radian).to(u.degree).to_value().tolist() 

811 raCov = ((refCat['coord_raErr'] * u.radian).to(u.degree).to_value()**2).tolist() 

812 decCov = ((refCat['coord_decErr'] * u.radian).to(u.degree).to_value()**2).tolist() 

813 

814 # TODO: DM-37316 we need the full gaia covariance here 

815 refObjects = {'ra': ra, 'dec': dec, 'raCov': raCov, 'decCov': decCov, 

816 'raDecCov': np.zeros(len(ra))} 

817 refCovariance = [] 

818 

819 if self.config.fitProperMotion: 

820 raPM = (refCat['pm_ra'] * u.radian).to(u.marcsec).to_value().tolist() 

821 decPM = (refCat['pm_dec'] * u.radian).to(u.marcsec).to_value().tolist() 

822 parallax = (refCat['parallax'] * u.radian).to(u.marcsec).to_value().tolist() 

823 cov = _make_ref_covariance_matrix(refCat) 

824 pmDict = {'raPM': raPM, 'decPM': decPM, 'parallax': parallax} 

825 refObjects.update(pmDict) 

826 refCovariance = cov 

827 

828 extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0] 

829 visitIndex = extensionInfo.visitIndex[extensionIndex] 

830 detectorIndex = extensionInfo.detectorIndex[extensionIndex] 

831 instrumentIndex = -1 # -1 indicates the reference catalog 

832 refWcs = extensionInfo.wcs[extensionIndex] 

833 

834 associations.addCatalog(refWcs, 'STELLAR', visitIndex, fieldIndex, instrumentIndex, detectorIndex, 

835 extensionIndex, np.ones(len(refCat), dtype=bool), 

836 ra, dec, np.arange(len(ra))) 

837 

838 return refObjects, refCovariance 

839 

840 def _load_catalogs_and_associate(self, associations, inputCatalogRefs, extensionInfo, 

841 fieldIndex=0, instrumentIndex=0): 

842 """Load the science catalogs and add the sources to the associator 

843 class `wcsfit.FoFClass`, associating them into matches as you go. 

844 

845 Parameters 

846 ---------- 

847 associations : `wcsfit.FoFClass` 

848 Object to which to add the catalog of reference objects. 

849 inputCatalogRefs : `list` 

850 List of DeferredDatasetHandles pointing to visit-level source 

851 tables. 

852 extensionInfo : `lsst.pipe.base.Struct` 

853 Struct containing properties for each extension. 

854 fieldIndex : `int` 

855 Index of the field for these catalogs. Should be zero assuming all 

856 data is being fit together. 

857 instrumentIndex : `int` 

858 Index of the instrument for these catalogs. Should be zero 

859 assuming all data comes from the same instrument. 

860 

861 Returns 

862 ------- 

863 sourceIndices : `list` 

864 List of boolean arrays used to select sources. 

865 columns : `list` of `str` 

866 List of columns needed from source tables. 

867 """ 

868 columns = ['detector', 'sourceId', 'x', 'xErr', 'y', 'yErr', 'ixx', 'iyy', 'ixy', 

869 f'{self.config.sourceFluxType}_instFlux', f'{self.config.sourceFluxType}_instFluxErr'] 

870 if self.sourceSelector.config.doFlags: 

871 columns.extend(self.sourceSelector.config.flags.bad) 

872 if self.sourceSelector.config.doUnresolved: 

873 columns.append(self.sourceSelector.config.unresolved.name) 

874 if self.sourceSelector.config.doIsolated: 

875 columns.append(self.sourceSelector.config.isolated.parentName) 

876 columns.append(self.sourceSelector.config.isolated.nChildName) 

877 if self.sourceSelector.config.doRequirePrimary: 

878 columns.append(self.sourceSelector.config.requirePrimary.primaryColName) 

879 

880 sourceIndices = [None] * len(extensionInfo.visit) 

881 for inputCatalogRef in inputCatalogRefs: 

882 visit = inputCatalogRef.dataId['visit'] 

883 inputCatalog = inputCatalogRef.get(parameters={'columns': columns}) 

884 # Get a sorted array of detector names 

885 detectors = np.unique(inputCatalog['detector']) 

886 

887 for detector in detectors: 

888 detectorSources = inputCatalog[inputCatalog['detector'] == detector] 

889 xCov = detectorSources['xErr']**2 

890 yCov = detectorSources['yErr']**2 

891 xyCov = (detectorSources['ixy'] * (xCov + yCov) 

892 / (detectorSources['ixx'] + detectorSources['iyy'])) 

893 # Remove sources with bad shape measurements 

894 goodShapes = xyCov**2 <= (xCov * yCov) 

895 selected = self.sourceSelector.run(detectorSources) 

896 goodInds = selected.selected & goodShapes 

897 

898 isStar = np.ones(goodInds.sum()) 

899 extensionIndex = np.flatnonzero((extensionInfo.visit == visit) 

900 & (extensionInfo.detector == detector))[0] 

901 detectorIndex = extensionInfo.detectorIndex[extensionIndex] 

902 visitIndex = extensionInfo.visitIndex[extensionIndex] 

903 

904 sourceIndices[extensionIndex] = goodInds 

905 

906 wcs = extensionInfo.wcs[extensionIndex] 

907 associations.reprojectWCS(wcs, fieldIndex) 

908 

909 associations.addCatalog(wcs, 'STELLAR', visitIndex, fieldIndex, 

910 instrumentIndex, detectorIndex, extensionIndex, isStar, 

911 detectorSources[goodInds]['x'].to_list(), 

912 detectorSources[goodInds]['y'].to_list(), 

913 np.arange(goodInds.sum())) 

914 

915 associations.sortMatches(fieldIndex, minMatches=self.config.minMatches, 

916 allowSelfMatches=self.config.allowSelfMatches) 

917 

918 return sourceIndices, columns 

919 
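
# Illustrative numeric sketch of the centroid-covariance approximation used
# above: the xy covariance is scaled from the second moments, and a source is
# kept only if the implied 2x2 covariance matrix is positive semi-definite
# (xyCov**2 <= xCov * yCov). The moment and error values are made up; `self`
# is unused.
def _demo_centroid_covariance(self):
    xErr, yErr = 0.02, 0.03
    ixx, iyy, ixy = 4.1, 3.8, 0.6
    xCov, yCov = xErr**2, yErr**2
    xyCov = ixy * (xCov + yCov) / (ixx + iyy)
    goodShape = xyCov**2 <= xCov * yCov
    return np.array([[xCov, xyCov], [xyCov, yCov]]), goodShape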

920 def make_yaml(self, inputVisitSummary, inputFile=None): 

921 """Make a YAML-type object that describes the parameters of the fit 

922 model. 

923 

924 Parameters 

925 ---------- 

926 inputVisitSummary : `lsst.afw.table.ExposureCatalog` 

927 Catalog with per-detector summary information. 

928 inputFile : `str` 

929 Path to a file that contains a basic model. 

930 

931 Returns 

932 ------- 

933 inputYAML : `wcsfit.YAMLCollector` 

934 YAML object containing the model description. 

935 """ 

936 if inputFile is not None: 

937 inputYAML = wcsfit.YAMLCollector(inputFile, 'PixelMapCollection') 

938 else: 

939 inputYAML = wcsfit.YAMLCollector('', 'PixelMapCollection') 

940 inputDict = {} 

941 modelComponents = ['INSTRUMENT/DEVICE', 'EXPOSURE'] 

942 baseMap = {'Type': 'Composite', 'Elements': modelComponents} 

943 inputDict['EXPOSURE/DEVICE/base'] = baseMap 

944 

945 xMin = str(inputVisitSummary['bbox_min_x'].min()) 

946 xMax = str(inputVisitSummary['bbox_max_x'].max()) 

947 yMin = str(inputVisitSummary['bbox_min_y'].min()) 

948 yMax = str(inputVisitSummary['bbox_max_y'].max()) 

949 

950 deviceModel = {'Type': 'Composite', 'Elements': self.config.deviceModel.list()} 

951 inputDict['INSTRUMENT/DEVICE'] = deviceModel 

952 for component in self.config.deviceModel: 

953 if 'poly' in component.lower(): 

954 componentDict = {'Type': 'Poly', 

955 'XPoly': {'OrderX': self.config.devicePolyOrder, 

956 'SumOrder': True}, 

957 'YPoly': {'OrderX': self.config.devicePolyOrder, 

958 'SumOrder': True}, 

959 'XMin': xMin, 'XMax': xMax, 'YMin': yMin, 'YMax': yMax} 

960 elif 'identity' in component.lower(): 

961 componentDict = {'Type': 'Identity'} 

962 

963 inputDict[component] = componentDict 

964 

965 exposureModel = {'Type': 'Composite', 'Elements': self.config.exposureModel.list()} 

966 inputDict['EXPOSURE'] = exposureModel 

967 for component in self.config.exposureModel: 

968 if 'poly' in component.lower(): 

969 componentDict = {'Type': 'Poly', 

970 'XPoly': {'OrderX': self.config.exposurePolyOrder, 

971 'SumOrder': 'true'}, 

972 'YPoly': {'OrderX': self.config.exposurePolyOrder, 

973 'SumOrder': 'true'}} 

974 elif 'identity' in component.lower(): 

975 componentDict = {'Type': 'Identity'} 

976 

977 inputDict[component] = componentDict 

978 

979 inputYAML.addInput(yaml.dump(inputDict)) 

980 inputYAML.addInput('Identity:\n Type: Identity\n') 

981 

982 return inputYAML 

983 
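
# Hedged example of the model description that `make_yaml` assembles for the
# default configuration (one device polynomial and one exposure polynomial);
# the bounding-box strings are hypothetical. `yaml.dump` of a dict like this
# is what gets passed to `wcsfit.YAMLCollector.addInput`. Not used by the
# task (`self` is unused).
def _demo_default_model_dict(self):
    exampleDict = {
        'EXPOSURE/DEVICE/base': {'Type': 'Composite',
                                 'Elements': ['INSTRUMENT/DEVICE', 'EXPOSURE']},
        'INSTRUMENT/DEVICE': {'Type': 'Composite', 'Elements': ['BAND/DEVICE/poly']},
        'BAND/DEVICE/poly': {'Type': 'Poly',
                             'XPoly': {'OrderX': 4, 'SumOrder': True},
                             'YPoly': {'OrderX': 4, 'SumOrder': True},
                             'XMin': '0', 'XMax': '2047', 'YMin': '0', 'YMax': '4175'},
        'EXPOSURE': {'Type': 'Composite', 'Elements': ['EXPOSURE/poly']},
        'EXPOSURE/poly': {'Type': 'Poly',
                          'XPoly': {'OrderX': 6, 'SumOrder': 'true'},
                          'YPoly': {'OrderX': 6, 'SumOrder': 'true'}},
    }
    return yaml.dump(exampleDict)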

984 def _add_objects(self, wcsf, inputCatalogRefs, sourceIndices, extensionInfo, columns): 

985 """Add science sources to the wcsfit.WCSFit object. 

986 

987 Parameters 

988 ---------- 

989 wcsf : `wcsfit.WCSFit` 

990 WCS-fitting object. 

991 inputCatalogRefs : `list` 

992 List of DeferredDatasetHandles pointing to visit-level source 

993 tables. 

994 sourceIndices : `list` 

995 List of boolean arrays used to select sources. 

996 extensionInfo : `lsst.pipe.base.Struct` 

997 Struct containing properties for each extension. 

998 columns : `list` of `str` 

999 List of columns needed from source tables. 

1000 """ 

1001 for inputCatalogRef in inputCatalogRefs: 

1002 visit = inputCatalogRef.dataId['visit'] 

1003 inputCatalog = inputCatalogRef.get(parameters={'columns': columns}) 

1004 detectors = np.unique(inputCatalog['detector']) 

1005 

1006 for detector in detectors: 

1007 detectorSources = inputCatalog[inputCatalog['detector'] == detector] 

1008 

1009 extensionIndex = np.flatnonzero((extensionInfo.visit == visit) 

1010 & (extensionInfo.detector == detector))[0] 

1011 sourceCat = detectorSources[sourceIndices[extensionIndex]] 

1012 

1013 xCov = sourceCat['xErr']**2 

1014 yCov = sourceCat['yErr']**2 

1015 xyCov = (sourceCat['ixy'] * (xCov + yCov) 

1016 / (sourceCat['ixx'] + sourceCat['iyy'])) 

1017 # TODO: add correct xyErr if DM-7101 is ever done. 

1018 

1019 d = {'x': sourceCat['x'].to_numpy(), 'y': sourceCat['y'].to_numpy(), 

1020 'xCov': xCov.to_numpy(), 'yCov': yCov.to_numpy(), 'xyCov': xyCov.to_numpy()} 

1021 

1022 wcsf.setObjects(extensionIndex, d, 'x', 'y', ['xCov', 'yCov', 'xyCov']) 

1023 

1024 def _add_ref_objects(self, wcsf, refObjects, refCovariance, extensionInfo): 

1025 """Add reference sources to the wcsfit.WCSFit object. 

1026 

1027 Parameters 

1028 ---------- 

1029 wcsf : `wcsfit.WCSFit` 

1030 WCS-fitting object. 

1031 refObjects : `dict` 

1032 Position and error information of reference objects. 

1033 refCovariance : `list` of `float` 

1034 Flattened output covariance matrix. 

1035 extensionInfo : `lsst.pipe.base.Struct` 

1036 Struct containing properties for each extension. 

1037 """ 

1038 extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0] 

1039 

1040 if self.config.fitProperMotion: 

1041 wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov'], 

1042 pmDecKey='decPM', pmRaKey='raPM', parallaxKey='parallax', pmCovKey='fullCov', 

1043 pmCov=refCovariance) 

1044 else: 

1045 wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov']) 

1046 

1047 def _make_afw_wcs(self, mapDict, centerRA, centerDec, doNormalizePixels=False, xScale=1, yScale=1): 

1048 """Make an `lsst.afw.geom.SkyWcs` from a dictionary of mappings. 

1049 

1050 Parameters 

1051 ---------- 

1052 mapDict : `dict` 

1053 Dictionary of mapping parameters. 

1054 centerRA : `lsst.geom.Angle` 

1055 RA of the tangent point. 

1056 centerDec : `lsst.geom.Angle` 

1057 Declination of the tangent point. 

1058 doNormalizePixels : `bool` 

1059 Whether to normalize pixels so that range is [-1,1]. 

1060 xScale : `float` 

1061 Factor by which to normalize x-dimension. Corresponds to width of 

1062 detector. 

1063 yScale : `float` 

1064 Factor by which to normalize y-dimension. Corresponds to height of 

1065 detector. 

1066 

1067 Returns 

1068 ------- 

1069 outWCS : `lsst.afw.geom.SkyWcs` 

1070 WCS constructed from the input mappings 

1071 """ 

1072 # Set up pixel frames 

1073 pixelFrame = astshim.Frame(2, 'Domain=PIXELS') 

1074 normedPixelFrame = astshim.Frame(2, 'Domain=NORMEDPIXELS') 

1075 

1076 if doNormalizePixels: 

1077 # Pixels will need to be rescaled before going into the mappings 

1078 normCoefficients = [-1.0, 2.0/xScale, 0, 

1079 -1.0, 0, 2.0/yScale] 

1080 normMap = _convert_to_ast_polymap_coefficients(normCoefficients) 

1081 else: 

1082 normMap = astshim.UnitMap(2) 

1083 

1084 # All of the detectors for one visit map to the same tangent plane 

1085 tangentPoint = lsst.geom.SpherePoint(centerRA, centerDec) 

1086 cdMatrix = afwgeom.makeCdMatrix(1.0 * lsst.geom.degrees, 0 * lsst.geom.degrees, True) 

1087 iwcToSkyWcs = afwgeom.makeSkyWcs(lsst.geom.Point2D(0, 0), tangentPoint, cdMatrix) 

1088 iwcToSkyMap = iwcToSkyWcs.getFrameDict().getMapping('PIXELS', 'SKY') 

1089 skyFrame = iwcToSkyWcs.getFrameDict().getFrame('SKY') 

1090 

1091 frameDict = astshim.FrameDict(pixelFrame) 

1092 frameDict.addFrame('PIXELS', normMap, normedPixelFrame) 

1093 

1094 currentFrameName = 'NORMEDPIXELS' 

1095 

1096 # Dictionary values are ordered according to the maps' application. 

1097 for m, mapElement in enumerate(mapDict.values()): 

1098 mapType = mapElement['Type'] 

1099 

1100 if mapType == 'Poly': 

1101 mapCoefficients = mapElement['Coefficients'] 

1102 astMap = _convert_to_ast_polymap_coefficients(mapCoefficients) 

1103 elif mapType == 'Identity': 

1104 astMap = astshim.UnitMap(2) 

1105 else: 

1106 raise ValueError(f"Converting map type {mapType} to WCS is not supported") 

1107 

1108 if m == len(mapDict) - 1: 

1109 newFrameName = 'IWC' 

1110 else: 

1111 newFrameName = 'INTERMEDIATE' + str(m) 

1112 newFrame = astshim.Frame(2, f'Domain={newFrameName}') 

1113 frameDict.addFrame(currentFrameName, astMap, newFrame) 

1114 currentFrameName = newFrameName 

1115 frameDict.addFrame('IWC', iwcToSkyMap, skyFrame) 

1116 

1117 outWCS = afwgeom.SkyWcs(frameDict) 

1118 return outWCS 

1119 
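
# Illustrative sketch of the frame chain built by `_make_afw_wcs`: with
# identity device and exposure mappings (the map names are hypothetical),
# pixels pass unchanged through the NORMEDPIXELS and IWC frames, so the pixel
# origin lands on the tangent point. Not used by the task.
def _demo_identity_wcs(self):
    mapDict = {'DEMO/DEVICE/identity': {'Type': 'Identity'},
               'DEMO/EXPOSURE/identity': {'Type': 'Identity'}}
    wcs = self._make_afw_wcs(mapDict, 150.0 * lsst.geom.degrees, 2.2 * lsst.geom.degrees)
    # Returns a SpherePoint at approximately (150.0, 2.2) degrees.
    return wcs.pixelToSky(lsst.geom.Point2D(0, 0))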

1120 def _make_outputs(self, wcsf, visitSummaryTables, exposureInfo): 

1121 """Make a WCS object out of the WCS models. 

1122 

1123 Parameters 

1124 ---------- 

1125 wcsf : `wcsfit.WCSFit` 

1126 WCSFit object, assumed to have fit model. 

1127 visitSummaryTables : `list` of `lsst.afw.table.ExposureCatalog` 

1128 Catalogs with per-detector summary information from which to grab 

1129 detector information. 

1130 exposureInfo : `lsst.pipe.base.Struct` 

1131 Struct containing general properties for the visits. 

1132 

1133 Returns 

1134 ------- 

1135 catalogs : `dict` of [`int`, `lsst.afw.table.ExposureCatalog`] 

1136 Dictionary of `lsst.afw.table.ExposureCatalog` objects with the WCS 

1137 set to the WCS fit in wcsf, keyed by the visit id. 

1138 """ 

1139 # Get the parameters of the fit models 

1140 mapParams = wcsf.mapCollection.getParamDict() 

1141 

1142 # Set up the schema for the output catalogs 

1143 schema = lsst.afw.table.ExposureTable.makeMinimalSchema() 

1144 schema.addField('visit', type='L', doc='Visit number') 

1145 

1146 # Pixels will need to be rescaled before going into the mappings 

1147 sampleDetector = visitSummaryTables[0][0] 

1148 xscale = sampleDetector['bbox_max_x'] - sampleDetector['bbox_min_x'] 

1149 yscale = sampleDetector['bbox_max_y'] - sampleDetector['bbox_min_y'] 

1150 

1151 catalogs = {} 

1152 for v, visitSummary in enumerate(visitSummaryTables): 

1153 visit = visitSummary[0]['visit'] 

1154 

1155 catalog = lsst.afw.table.ExposureCatalog(schema) 

1156 catalog.resize(len(exposureInfo.detectors)) 

1157 catalog['visit'] = visit 

1158 

1159 for d, detector in enumerate(visitSummary['id']): 

1160 mapName = f'{visit}/{detector}' 

1161 

1162 mapElements = wcsf.mapCollection.orderAtoms(f'{mapName}/base') 

1163 mapDict = {} 

1164 for m, mapElement in enumerate(mapElements): 

1165 mapType = wcsf.mapCollection.getMapType(mapElement) 

1166 mapDict[mapElement] = {'Type': mapType} 

1167 

1168 if mapType == 'Poly': 

1169 mapCoefficients = mapParams[mapElement] 

1170 mapDict[mapElement]['Coefficients'] = mapCoefficients 

1171 

1172 # The RA and Dec of the visit are needed for the last step of 

1173 # the mapping from the visit tangent plane to RA and Dec 

1174 outWCS = self._make_afw_wcs(mapDict, exposureInfo.ras[v] * lsst.geom.radians, 

1175 exposureInfo.decs[v] * lsst.geom.radians, 

1176 doNormalizePixels=True, 

1177 xScale=xscale, yScale=yscale) 

1178 

1179 catalog[d].setId(detector) 

1180 catalog[d].setWcs(outWCS) 

1181 catalog.sort() 

1182 catalogs[visit] = catalog 

1183 

1184 return catalogs
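
# Hedged usage sketch (the repository path, collection, dataId values, and
# detector id below are hypothetical placeholders): once the task has run,
# the per-visit WCS catalogs written to the `outputWcs` connection can be
# read back with the butler and applied per detector.
def _demo_read_output_wcs(repo):
    import lsst.daf.butler

    butler = lsst.daf.butler.Butler(repo, collections='u/user/gbdesAstrometricFit')
    wcsCatalog = butler.get('gbdesAstrometricFitSkyWcsCatalog',
                            instrument='HSC', visit=1228, tract=9813,
                            skymap='hsc_rings_v1')
    # The catalog is sorted on detector id, so `find` is a fast lookup.
    return wcsCatalog.find(42).getWcs()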