Coverage for python/lsst/drp/tasks/gbdesAstrometricFit.py: 11%

457 statements  

coverage.py v7.3.0, created at 2023-08-22 11:16 +0000

1# This file is part of drp_tasks. 

2# 

3# LSST Data Management System 

4# This product includes software developed by the 

5# LSST Project (http://www.lsst.org/). 

6# See COPYRIGHT file at the top of the source tree. 

7# 

8# This program is free software: you can redistribute it and/or modify 

9# it under the terms of the GNU General Public License as published by 

10# the Free Software Foundation, either version 3 of the License, or 

11# (at your option) any later version. 

12# 

13# This program is distributed in the hope that it will be useful, 

14# but WITHOUT ANY WARRANTY; without even the implied warranty of 

15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

16# GNU General Public License for more details. 

17# 

18# You should have received a copy of the LSST License Statement and 

19# the GNU General Public License along with this program. If not, 

20# see <https://www.lsstcorp.org/LegalNotices/>. 

21# 

22import numpy as np 

23import astropy.time 

24import astropy.units as u 

25import astropy.coordinates 

26import yaml 

27import wcsfit 

28import astshim 

29 

30import lsst.geom 

31import lsst.pex.config as pexConfig 

32import lsst.pipe.base as pipeBase 

33import lsst.sphgeom 

34import lsst.afw.table 

35import lsst.afw.geom as afwgeom 

36from lsst.meas.algorithms import (LoadReferenceObjectsConfig, ReferenceObjectLoader, 

37 ReferenceSourceSelectorTask) 

38from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry 

39 

40__all__ = ['GbdesAstrometricFitConnections', 'GbdesAstrometricFitConfig', 'GbdesAstrometricFitTask'] 

41 

42 

43def _lookup_visit_refcats(datasetType, registry, quantumDataId, collections): 

44 """Lookup function that finds all refcats for all visits that overlap a 

45 tract, rather than just the refcats that directly overlap the tract. 

46 Borrowed from jointcal. 

47 

48 Parameters 

49 ---------- 

50 datasetType : `lsst.daf.butler.DatasetType` 

51 Type of dataset being searched for. 

52 registry : `lsst.daf.butler.Registry` 

53 Data repository registry to search. 

54 quantumDataId : `lsst.daf.butler.DataCoordinate` 

55 Data ID of the quantum; expected to be something we can use as a 

56 constraint to query for overlapping visits. 

57 collections : `Iterable` [ `str` ] 

58 Collections to search. 

59 Returns 

60 ------- 

61 refs : `Iterator` [ `lsst.daf.butler.DatasetRef` ] 

62 Iterator over refcat references. 

63 """ 

64 refs = set() 

65 # Use .expanded() on the query methods below because we need data IDs with 

66 # regions, both in the outer loop over visits (queryDatasets will expand 

67 # any data ID we give it, but doing it up-front in bulk is much more 

68 # efficient) and in the data IDs of the DatasetRefs this function yields 

69 # (because the RefCatLoader relies on them to do some of its own 

70 # filtering). 

71 for visit_data_id in set(registry.queryDataIds('visit', dataId=quantumDataId).expanded()): 

72 refs.update( 

73 registry.queryDatasets( 

74 datasetType, 

75 collections=collections, 

76 dataId=visit_data_id, 

77 findFirst=True, 

78 ).expanded() 

79 ) 

80 refs = sorted(refs)

81 yield from refs 

82 

83 

84def _make_ref_covariance_matrix(refCat, inputUnit=u.radian, outputCoordUnit=u.marcsec, 

85 outputPMUnit=u.marcsec, version=1): 

86 """Make a covariance matrix for the reference catalog including proper 

87 motion and parallax. 

88 

89 The output is flattened to one dimension to match the format expected by 

90 `gbdes`. 

91 

92 Parameters 

93 ---------- 

94 refCat : `lsst.afw.table.SimpleCatalog` 

95 Catalog including proper motion and parallax measurements. 

96 inputUnit : `astropy.unit.core.Unit` 

97 Units of the input catalog 

98 outputCoordUnit : `astropy.unit.core.Unit` 

99 Units required for the coordinates in the covariance matrix. `gbdes` 

100 expects milliarcseconds. 

101 outputPMUnit : `astropy.unit.core.Unit` 

102 Units required for the proper motion/parallax in the covariance matrix. 

103 `gbdes` expects milliarcseconds. 

104 version : `int` 

105 Version of the reference catalog. Version 2 includes covariance 

106 measurements. 

107 Returns 

108 ------- 

109 cov : `list` of `float` 

110 Flattened output covariance matrix. 

111 """ 

112 cov = np.zeros((len(refCat), 25)) 

113 if version == 1: 

114 # Here is the standard ordering of components in the cov matrix, 

115 # to match the PM enumeration in C++ code of gbdes package's Match. 

116 # Each tuple gives: the array holding the 1-d error,

117 # the corresponding Gaia column name,

118 # and the ordering in the Gaia catalog;

119 # the order of the tuples is the order we want in our cov matrix.

120 raErr = (refCat['coord_raErr'] * inputUnit).to(outputCoordUnit).to_value() 

121 decErr = (refCat['coord_decErr'] * inputUnit).to(outputCoordUnit).to_value() 

122 raPMErr = (refCat['pm_raErr'] * inputUnit).to(outputPMUnit).to_value() 

123 decPMErr = (refCat['pm_decErr'] * inputUnit).to(outputPMUnit).to_value() 

124 parallaxErr = (refCat['parallaxErr'] * inputUnit).to(outputPMUnit).to_value() 

125 stdOrder = ((raErr, 'ra', 0), 

126 (decErr, 'dec', 1), 

127 (raPMErr, 'pmra', 3), 

128 (decPMErr, 'pmdec', 4), 

129 (parallaxErr, 'parallax', 2)) 

130 

131 k = 0 

132 for i, pr1 in enumerate(stdOrder): 

133 for j, pr2 in enumerate(stdOrder): 

134 if pr1[2] < pr2[2]: 

135 cov[:, k] = 0 

136 elif pr1[2] > pr2[2]: 

137 cov[:, k] = 0 

138 else: 

139 # diagonal element

140 cov[:, k] = pr1[0] * pr2[0] 

141 k = k+1 

142 

143 elif version == 2: 

144 positionParameters = ['coord_ra', 'coord_dec', 'pm_ra', 'pm_dec', 'parallax'] 

145 units = [outputCoordUnit, outputCoordUnit, outputPMUnit, outputPMUnit, outputPMUnit] 

146 k = 0 

147 for i, pi in enumerate(positionParameters): 

148 for j, pj in enumerate(positionParameters): 

149 if i == j: 

150 cov[:, k] = (refCat[f'{pi}Err']**2 * inputUnit**2).to_value(units[j] * units[j]) 

151 elif i > j: 

152 cov[:, k] = (refCat[f'{pj}_{pi}_Cov'] * inputUnit**2).to_value(units[i] * units[j]) 

153 else: 

154 cov[:, k] = (refCat[f'{pi}_{pj}_Cov'] * inputUnit**2).to_value(units[i] * units[j]) 

155 

156 k += 1 

157 return cov 

158 
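# --- Illustrative sketch (not part of the task) ------------------------------
# For a version-1 reference catalog, the flattened 5x5 covariance built above is
# purely diagonal, with rows/columns ordered (ra, dec, pmra, pmdec, parallax).
# The single-row table and its 1-sigma errors (in radians) are made up.
from astropy.table import Table

_demoRefCat = Table({'coord_raErr': [1e-9], 'coord_decErr': [2e-9],
                     'pm_raErr': [3e-9], 'pm_decErr': [4e-9], 'parallaxErr': [5e-9]})
_demoCov = _make_ref_covariance_matrix(_demoRefCat, version=1)
# _demoCov has shape (1, 25); only flattened indices 0, 6, 12, 18, 24 are non-zero,
# holding the squared errors converted to milliarcseconds.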

159 

160def _convert_to_ast_polymap_coefficients(coefficients): 

161 """Convert vector of polynomial coefficients from the format used in 

162 `gbdes` into AST format (see Poly2d::vectorIndex(i, j) in 

163 gbdes/gbutil/src/Poly2d.cpp). This assumes two input and two output 

164 coordinates. 

165 

166 Parameters 

167 ---------- 

168 coefficients : `list` 

169 Coefficients of the polynomials. 

170 degree : `int` 

171 Degree of the polynomial. 

172 

173 Returns 

174 ------- 

175 astPoly : `astshim.PolyMap` 

176 Coefficients in AST polynomial format. 

177 """ 

178 polyArray = np.zeros((len(coefficients), 4)) 

179 N = len(coefficients) / 2 

180 # Recover the polynomial degree by inverting N = (degree + 1) * (degree + 2) / 2,

181 # the number of coefficients per output coordinate, using the quadratic formula.

182 degree = int(-1.5 + 0.5 * (1 + 8 * N)**0.5) 

183 

184 for outVar in [1, 2]: 

185 for i in range(degree + 1): 

186 for j in range(degree + 1): 

187 if (i + j) > degree: 

188 continue 

189 vectorIndex = int(((i+j)*(i+j+1))/2+j + N * (outVar - 1)) 

190 polyArray[vectorIndex, 0] = coefficients[vectorIndex] 

191 polyArray[vectorIndex, 1] = outVar 

192 polyArray[vectorIndex, 2] = i 

193 polyArray[vectorIndex, 3] = j 

194 

195 astPoly = astshim.PolyMap(polyArray, 2, options="IterInverse=1,NIterInverse=10,TolInverse=1e-7") 

196 return astPoly 

197 
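# Worked example of the degree recovery above: a 2-d polynomial of degree d with
# summed order has (d + 1) * (d + 2) / 2 coefficients per output coordinate. For
# d = 2 that is 6 per output, so len(coefficients) = 12, N = 6, and
# int(-1.5 + 0.5 * (1 + 8 * 6)**0.5) = int(-1.5 + 3.5) = 2, recovering the degree.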

198 

199def _get_wcs_from_sip(butlerWcs): 

200 """Get wcsfit.Wcs in TPV format from the SIP-formatted input WCS. 

201 

202 Parameters 

203 ---------- 

204 butlerWcs : `lsst.afw.geom.SkyWcs` 

205 Input WCS from the calexp in SIP format. 

206 

207 Returns 

208 ------- 

209 wcs : `wcsfit.Wcs` 

210 WCS object in TPV format. 

211 """ 

212 fits_metadata = butlerWcs.getFitsMetadata() 

213 if not ((fits_metadata.get('CTYPE1') == 'RA---TAN-SIP') 

214 and (fits_metadata.get('CTYPE2') == 'DEC--TAN-SIP')): 

215 raise ValueError(f"CTYPES {fits_metadata.get('CTYPE1')} and {fits_metadata.get('CTYPE2')}" 

216 "do not match SIP convention") 

217 

218 # Correct CRPIX values to correspond to source table pixel indexing 

219 # convention 

220 crpix1 = fits_metadata.get('CRPIX1') 

221 crpix2 = fits_metadata.get('CRPIX2') 

222 fits_metadata.set('CRPIX1', crpix1 - 1) 

223 fits_metadata.set('CRPIX2', crpix2 - 1) 

224 

225 floatDict = {k: fits_metadata[k] for k in fits_metadata if isinstance(fits_metadata[k], (int, float))} 

226 

227 wcs = wcsfit.readTPVFromSIP(floatDict, 'SIP') 

228 

229 return wcs 

230 
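# Note on the CRPIX shift above: FITS WCS pixel coordinates are 1-based while the
# x/y columns in LSST source tables are 0-based, so a header value CRPIX1 = 1000.5
# refers to the same location as x = 999.5 in the source tables; the -1 offset
# applies that convention before the header is handed to wcsfit.readTPVFromSIP.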

231 

232class GbdesAstrometricFitConnections(pipeBase.PipelineTaskConnections, 

233 dimensions=('skymap', 'tract', 'instrument', 'physical_filter')): 

234 """Middleware input/output connections for task data.""" 

235 inputCatalogRefs = pipeBase.connectionTypes.Input( 

236 doc="Source table in parquet format, per visit.", 

237 name='preSourceTable_visit', 

238 storageClass='DataFrame', 

239 dimensions=('instrument', 'visit'), 

240 deferLoad=True, 

241 multiple=True, 

242 ) 

243 inputVisitSummaries = pipeBase.connectionTypes.Input( 

244 doc=("Per-visit consolidated exposure metadata built from calexps. " 

245 "These catalogs use detector id for the id and must be sorted for " 

246 "fast lookups of a detector."), 

247 name='visitSummary', 

248 storageClass='ExposureCatalog', 

249 dimensions=('instrument', 'visit'), 

250 multiple=True, 

251 ) 

252 referenceCatalog = pipeBase.connectionTypes.PrerequisiteInput( 

253 doc="The astrometry reference catalog to match to loaded input catalog sources.", 

254 name='gaia_dr3_20230707', 

255 storageClass='SimpleCatalog', 

256 dimensions=('skypix',), 

257 deferLoad=True, 

258 multiple=True, 

259 lookupFunction=_lookup_visit_refcats, 

260 ) 

261 outputWcs = pipeBase.connectionTypes.Output( 

262 doc=("Per-tract, per-visit world coordinate systems derived from the fitted model." 

263 " These catalogs only contain entries for detectors with an output, and use" 

264 " the detector id for the catalog id, sorted on id for fast lookups of a detector."), 

265 name='gbdesAstrometricFitSkyWcsCatalog', 

266 storageClass='ExposureCatalog', 

267 dimensions=('instrument', 'visit', 'skymap', 'tract'), 

268 multiple=True 

269 ) 

270 outputCatalog = pipeBase.connectionTypes.Output( 

271 doc=("Source table with stars used in fit, along with residuals in pixel coordinates and tangent " 

272 "plane coordinates and chisq values."), 

273 name='gbdesAstrometricFit_fitStars', 

274 storageClass='ArrowNumpyDict', 

275 dimensions=('instrument', 'skymap', 'tract', 'physical_filter'), 

276 ) 

277 starCatalog = pipeBase.connectionTypes.Output( 

278 doc="Star catalog.", 

279 name='gbdesAstrometricFit_starCatalog', 

280 storageClass='ArrowNumpyDict', 

281 dimensions=('instrument', 'skymap', 'tract', 'physical_filter') 

282 ) 

283 

284 def getSpatialBoundsConnections(self): 

285 return ("inputVisitSummaries",) 

286 

287 

288class GbdesAstrometricFitConfig(pipeBase.PipelineTaskConfig, 

289 pipelineConnections=GbdesAstrometricFitConnections): 

290 """Configuration for GbdesAstrometricFitTask""" 

291 sourceSelector = sourceSelectorRegistry.makeField( 

292 doc="How to select sources for cross-matching.", 

293 default='science' 

294 ) 

295 referenceSelector = pexConfig.ConfigurableField( 

296 target=ReferenceSourceSelectorTask, 

297 doc="How to down-select the loaded astrometry reference catalog.", 

298 ) 

299 matchRadius = pexConfig.Field( 

300 doc="Matching tolerance between associated objects (arcseconds).", 

301 dtype=float, 

302 default=1.0 

303 ) 

304 minMatches = pexConfig.Field( 

305 doc="Number of matches required to keep a source object.", 

306 dtype=int, 

307 default=2 

308 ) 

309 allowSelfMatches = pexConfig.Field( 

310 doc="Allow multiple sources from the same visit to be associated with the same object.", 

311 dtype=bool, 

312 default=False 

313 ) 

314 sourceFluxType = pexConfig.Field( 

315 dtype=str, 

316 doc="Source flux field to use in source selection and to get fluxes from the catalog.", 

317 default='apFlux_12_0' 

318 ) 

319 systematicError = pexConfig.Field( 

320 dtype=float, 

321 doc=("Systematic error padding added in quadrature for the science catalogs (marcsec). The default "

322 "value is equivalent to 0.02 pixels for HSC."), 

323 default=0.0034 

324 ) 

325 referenceSystematicError = pexConfig.Field( 

326 dtype=float, 

327 doc="Systematic error padding added in quadrature for the reference catalog (marcsec).", 

328 default=0.0 

329 ) 

330 modelComponents = pexConfig.ListField( 

331 dtype=str, 

332 doc=("List of mappings to apply to transform from pixels to sky, in order of their application." 

333 "Supported options are 'INSTRUMENT/DEVICE' and 'EXPOSURE'."), 

334 default=['INSTRUMENT/DEVICE', 'EXPOSURE'] 

335 ) 

336 deviceModel = pexConfig.ListField( 

337 dtype=str, 

338 doc=("List of mappings to apply to transform from detector pixels to intermediate frame. Map names "

339 "should match the format 'BAND/DEVICE/<map name>'."), 

340 default=['BAND/DEVICE/poly'] 

341 ) 

342 exposureModel = pexConfig.ListField( 

343 dtype=str, 

344 doc=("List of mappings to apply to transform from intermediate frame to sky coordinates. Map names "

345 "should match the format 'EXPOSURE/<map name>'."), 

346 default=['EXPOSURE/poly'] 

347 ) 

348 devicePolyOrder = pexConfig.Field( 

349 dtype=int, 

350 doc="Order of device polynomial model.", 

351 default=4 

352 ) 

353 exposurePolyOrder = pexConfig.Field( 

354 dtype=int, 

355 doc="Order of exposure polynomial model.", 

356 default=6 

357 ) 

358 fitProperMotion = pexConfig.Field( 

359 dtype=bool, 

360 doc="Fit the proper motions of the objects.", 

361 default=False 

362 ) 

363 excludeNonPMObjects = pexConfig.Field( 

364 dtype=bool, 

365 doc="Exclude reference objects without proper motion/parallax information.", 

366 default=True 

367 ) 

368 fitReserveFraction = pexConfig.Field( 

369 dtype=float, 

370 default=0.2, 

371 doc="Fraction of objects to reserve from fit for validation." 

372 ) 

373 fitReserveRandomSeed = pexConfig.Field( 

374 dtype=int, 

375 doc="Set the random seed for selecting data points to reserve from the fit for validation.", 

376 default=1234 

377 ) 

378 

379 def setDefaults(self): 

380 # Use only stars because aperture fluxes of galaxies are biased and 

381 # depend on seeing. 

382 self.sourceSelector['science'].doUnresolved = True 

383 self.sourceSelector['science'].unresolved.name = 'extendedness' 

384 

385 # Use only isolated sources. 

386 self.sourceSelector['science'].doIsolated = True 

387 self.sourceSelector['science'].isolated.parentName = 'parentSourceId' 

388 self.sourceSelector['science'].isolated.nChildName = 'deblend_nChild' 

389 # Do not use either flux or centroid measurements with flags, 

390 # chosen from the usual QA flags for stars. 

391 self.sourceSelector['science'].doFlags = True 

392 badFlags = ['pixelFlags_edge', 

393 'pixelFlags_saturated', 

394 'pixelFlags_interpolatedCenter', 

395 'pixelFlags_interpolated', 

396 'pixelFlags_crCenter', 

397 'pixelFlags_bad', 

398 'hsmPsfMoments_flag', 

399 f'{self.sourceFluxType}_flag', 

400 ] 

401 self.sourceSelector['science'].flags.bad = badFlags 

402 

403 # Use only primary sources. 

404 self.sourceSelector['science'].doRequirePrimary = True 

405 

406 def validate(self): 

407 super().validate() 

408 

409 # Check if all components of the device and exposure models are 

410 # supported. 

411 for component in self.deviceModel: 

412 if not (('poly' in component.lower()) or ('identity' in component.lower())): 

413 raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.deviceModel, self, 

414 f'deviceModel component {component} is not supported.') 

415 

416 for component in self.exposureModel: 

417 if not (('poly' in component.lower()) or ('identity' in component.lower())): 

418 raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.exposureModel, self, 

419 f'exposureModel component {component} is not supported.') 

420 
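# Illustrative config override (hypothetical values): every component listed in
# deviceModel or exposureModel must contain 'poly' or 'identity', e.g.
#
#     config.deviceModel = ['BAND/DEVICE/poly', 'BAND/DEVICE/identity']
#     config.devicePolyOrder = 5
#     config.exposureModel = ['EXPOSURE/poly']
#
# A component such as 'BAND/DEVICE/spline' would make validate() raise a
# FieldValidationError naming the offending entry.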

421 

422class GbdesAstrometricFitTask(pipeBase.PipelineTask): 

423 """Calibrate the WCS across multiple visits of the same field using the 

424 GBDES package. 

425 """ 

426 

427 ConfigClass = GbdesAstrometricFitConfig 

428 _DefaultName = 'gbdesAstrometricFit' 

429 

430 def __init__(self, **kwargs): 

431 super().__init__(**kwargs) 

432 self.makeSubtask('sourceSelector') 

433 self.makeSubtask('referenceSelector') 

434 

435 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

436 # We override runQuantum to set up the refObjLoaders 

437 inputs = butlerQC.get(inputRefs) 

438 

439 instrumentName = butlerQC.quantum.dataId['instrument'] 

440 

441 # Ensure the inputs are in a consistent order 

442 inputCatVisits = np.array([inputCat.dataId['visit'] for inputCat in inputs['inputCatalogRefs']]) 

443 inputs['inputCatalogRefs'] = [inputs['inputCatalogRefs'][v] for v in inputCatVisits.argsort()] 

444 inputSumVisits = np.array([inputSum[0]['visit'] for inputSum in inputs['inputVisitSummaries']]) 

445 inputs['inputVisitSummaries'] = [inputs['inputVisitSummaries'][v] for v in inputSumVisits.argsort()] 

446 inputRefHtm7s = np.array([inputRefCat.dataId['htm7'] for inputRefCat in inputRefs.referenceCatalog]) 

447 inputRefCatRefs = [inputRefs.referenceCatalog[htm7] for htm7 in inputRefHtm7s.argsort()] 

448 inputRefCats = np.array([inputRefCat.dataId['htm7'] for inputRefCat in inputs['referenceCatalog']]) 

449 inputs['referenceCatalog'] = [inputs['referenceCatalog'][v] for v in inputRefCats.argsort()] 

450 

451 sampleRefCat = inputs['referenceCatalog'][0].get() 

452 refEpoch = sampleRefCat[0]['epoch'] 

453 

454 refConfig = LoadReferenceObjectsConfig() 

455 refConfig.anyFilterMapsToThis = 'phot_g_mean' 

456 refConfig.requireProperMotion = True 

457 refObjectLoader = ReferenceObjectLoader(dataIds=[ref.datasetRef.dataId 

458 for ref in inputRefCatRefs], 

459 refCats=inputs.pop('referenceCatalog'), 

460 config=refConfig, 

461 log=self.log) 

462 

463 output = self.run(**inputs, instrumentName=instrumentName, refEpoch=refEpoch, 

464 refObjectLoader=refObjectLoader) 

465 

466 for outputRef in outputRefs.outputWcs: 

467 visit = outputRef.dataId['visit'] 

468 butlerQC.put(output.outputWCSs[visit], outputRef) 

469 butlerQC.put(output.outputCatalog, outputRefs.outputCatalog) 

470 butlerQC.put(output.starCatalog, outputRefs.starCatalog) 

471 

472 def run(self, inputCatalogRefs, inputVisitSummaries, instrumentName="", refEpoch=None, 

473 refObjectLoader=None): 

474 """Run the WCS fit for a given set of visits 

475 

476 Parameters 

477 ---------- 

478 inputCatalogRefs : `list` 

479 List of `DeferredDatasetHandle`s pointing to visit-level source 

480 tables. 

481 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

482 List of catalogs with per-detector summary information. 

483 instrumentName : `str`, optional 

484 Name of the instrument used. This is only used for labelling. 

485 refEpoch : `float` 

486 Epoch of the reference objects in MJD. 

487 refObjectLoader : instance of 

488 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader` 

489 Reference object loader instance.

490 

491 Returns 

492 ------- 

493 result : `lsst.pipe.base.Struct` 

494 ``outputWCSs`` : `list` of `lsst.afw.table.ExposureCatalog` 

495 List of exposure catalogs (one per visit) with the WCS for each 

496 detector set by the new fitted WCS. 

497 ``fitModel`` : `wcsfit.WCSFit` 

498 Model-fitting object with final model parameters. 

499 ``outputCatalog`` : `pyarrow.Table` 

500 Catalog with fit residuals of all sources used. 

501 """ 

502 self.log.info("Gathering instrument, exposure, and field info") 

503 # Set up an instrument object 

504 instrument = wcsfit.Instrument(instrumentName) 

505 

506 # Get RA, Dec, MJD, etc., for the input visits 

507 exposureInfo, exposuresHelper, extensionInfo = self._get_exposure_info(inputVisitSummaries, 

508 instrument) 

509 

510 # Get information about the extent of the input visits 

511 fields, fieldCenter, fieldRadius = self._prep_sky(inputVisitSummaries, exposureInfo.medianEpoch) 

512 

513 self.log.info("Load catalogs and associate sources") 

514 # Set up class to associate sources into matches using a 

515 # friends-of-friends algorithm 

516 associations = wcsfit.FoFClass(fields, [instrument], exposuresHelper, 

517 [fieldRadius.asDegrees()], 

518 (self.config.matchRadius * u.arcsec).to(u.degree).value) 

519 

520 # Add the reference catalog to the associator 

521 medianEpoch = astropy.time.Time(exposureInfo.medianEpoch, format='decimalyear').mjd 

522 refObjects, refCovariance = self._load_refcat(associations, refObjectLoader, fieldCenter, fieldRadius, 

523 extensionInfo, epoch=medianEpoch) 

524 

525 # Add the science catalogs and associate new sources as they are added 

526 sourceIndices, usedColumns = self._load_catalogs_and_associate(associations, inputCatalogRefs, 

527 extensionInfo) 

528 

529 self.log.info("Fit the WCSs") 

530 # Set up a YAML-type string using the config variables and a sample 

531 # visit 

532 inputYAML = self.make_yaml(inputVisitSummaries[0]) 

533 

534 # Set the verbosity level for WCSFit from the task log level. 

535 # TODO: DM-36850, Add lsst.log to gbdes so that log messages are 

536 # properly propagated. 

537 loglevel = self.log.getEffectiveLevel() 

538 if loglevel >= self.log.WARNING: 

539 verbose = 0 

540 elif loglevel == self.log.INFO: 

541 verbose = 1 

542 else: 

543 verbose = 2 

544 

545 # Set up the WCS-fitting class using the results of the FOF associator 

546 wcsf = wcsfit.WCSFit(fields, [instrument], exposuresHelper, 

547 extensionInfo.visitIndex, extensionInfo.detectorIndex, 

548 inputYAML, extensionInfo.wcs, associations.sequence, associations.extn, 

549 associations.obj, sysErr=self.config.systematicError, 

550 refSysErr=self.config.referenceSystematicError, 

551 usePM=self.config.fitProperMotion, 

552 verbose=verbose) 

553 

554 # Add the science and reference sources 

555 self._add_objects(wcsf, inputCatalogRefs, sourceIndices, extensionInfo, usedColumns) 

556 self._add_ref_objects(wcsf, refObjects, refCovariance, extensionInfo) 

557 

558 # Do the WCS fit 

559 wcsf.fit(reserveFraction=self.config.fitReserveFraction, 

560 randomNumberSeed=self.config.fitReserveRandomSeed) 

561 self.log.info("WCS fitting done") 

562 

563 outputWCSs = self._make_outputs(wcsf, inputVisitSummaries, exposureInfo) 

564 outputCatalog = wcsf.getOutputCatalog() 

565 starCatalog = wcsf.getStarCatalog() 

566 

567 return pipeBase.Struct(outputWCSs=outputWCSs, 

568 fitModel=wcsf, 

569 outputCatalog=outputCatalog, 

570 starCatalog=starCatalog) 

571 
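# Illustrative invocation (repository, collections, and pipeline file are
# hypothetical); the task is normally run as one step of a DRP pipeline:
#
#     pipetask run -b /repo/main -p drp_pipeline.yaml#gbdesAstrometricFit \
#         -i HSC/runs/RC2 -o u/someone/gbdesAstromFit \
#         -d "instrument='HSC' AND skymap='hsc_rings_v1' AND tract=9813"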

572 def _prep_sky(self, inputVisitSummaries, epoch, fieldName='Field'): 

573 """Get center and radius of the input tract. This assumes that all 

574 visits will be put into the same `wcsfit.Field` and fit together. 

575 

576 Parameters

577 ---------- 

578 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

579 List of catalogs with per-detector summary information. 

580 epoch : `float`

581 Reference epoch in decimal years.

582 fieldName : `str`

583 Name of the field, used internally.

584 

585 Returns 

586 ------- 

587 fields : `wcsfit.Fields` 

588 Object with field information. 

589 center : `lsst.geom.SpherePoint` 

590 Center of the field. 

591 radius : `lsst.sphgeom._sphgeom.Angle` 

592 Radius of the bounding circle of the tract. 

593 """ 

594 allDetectorCorners = [] 

595 for visSum in inputVisitSummaries: 

596 detectorCorners = [lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector() for (ra, dec) 

597 in zip(visSum['raCorners'].ravel(), visSum['decCorners'].ravel())] 

598 allDetectorCorners.extend(detectorCorners) 

599 boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(allDetectorCorners).getBoundingCircle() 

600 center = lsst.geom.SpherePoint(boundingCircle.getCenter()) 

601 ra = center.getRa().asDegrees() 

602 dec = center.getDec().asDegrees() 

603 radius = boundingCircle.getOpeningAngle() 

604 

605 # wcsfit.Fields describes a list of fields, but we assume all 

606 # observations will be fit together in one field. 

607 fields = wcsfit.Fields([fieldName], [ra], [dec], [epoch]) 

608 

609 return fields, center, radius 

610 
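# Sketch of the geometry computed above with made-up corner coordinates, using the
# same lsst.geom / lsst.sphgeom calls:
#
#     corners = [lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector()
#                for ra, dec in [(10.0, -5.0), (10.2, -5.0), (10.2, -4.8), (10.0, -4.8)]]
#     circle = lsst.sphgeom.ConvexPolygon.convexHull(corners).getBoundingCircle()
#     center = lsst.geom.SpherePoint(circle.getCenter())
#     radius = circle.getOpeningAngle()  # radius.asDegrees() is ~0.14 for this box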

611 def _get_exposure_info(self, inputVisitSummaries, instrument, fieldNumber=0, instrumentNumber=0, 

612 refEpoch=None): 

613 """Get various information about the input visits to feed to the 

614 fitting routines. 

615 

616 Parameters 

617 ---------- 

618 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog` 

619 Tables for each visit with per-detector summary information.

620 instrument : `wcsfit.Instrument` 

621 Instrument object to which detector information is added. 

622 fieldNumber : `int` 

623 Index of the field for these visits. Should be zero if all data is 

624 being fit together. 

625 instrumentNumber : `int` 

626 Index of the instrument for these visits. Should be zero if all 

627 data comes from the same instrument. 

628 refEpoch : `float` 

629 Epoch of the reference objects in MJD. 

630 

631 Returns 

632 ------- 

633 exposureInfo : `lsst.pipe.base.Struct` 

634 Struct containing general properties for the visits: 

635 ``visits`` : `list` 

636 List of visit names. 

637 ``detectors`` : `list` 

638 List of all detectors in any visit. 

639 ``ras`` : `list` of float 

640 List of boresight RAs for each visit. 

641 ``decs`` : `list` of float 

642 List of boresight Decs for each visit.

643 ``medianEpoch`` : float 

644 Median epoch of all visits in decimal-year format. 

645 exposuresHelper : `wcsfit.ExposuresHelper` 

646 Object containing information about the input visits. 

647 extensionInfo : `lsst.pipe.base.Struct` 

648 Struct containing properties for each extension: 

649 ``visit`` : `np.ndarray` 

650 Name of the visit for this extension. 

651 ``detector`` : `np.ndarray` 

652 Name of the detector for this extension. 

653 ``visitIndex`` : `np.ndarray` of `int`

654 Index of visit for this extension. 

655 ``detectorIndex`` : `np.ndarray` of `int` 

656 Index of the detector for this extension. 

657 ``wcs`` : `np.ndarray` of `wcsfit.Wcs`

658 Initial WCS for this extension. 

659 ``extensionType`` : `np.ndarray` of `str` 

660 "SCIENCE" or "REFERENCE". 

661 """ 

662 exposureNames = [] 

663 ras = [] 

664 decs = [] 

665 visits = [] 

666 detectors = [] 

667 airmasses = [] 

668 exposureTimes = [] 

669 mjds = [] 

670 observatories = [] 

671 wcss = [] 

672 

673 extensionType = [] 

674 extensionVisitIndices = [] 

675 extensionDetectorIndices = [] 

676 extensionVisits = [] 

677 extensionDetectors = [] 

678 # Get information for all the science visits 

679 for v, visitSummary in enumerate(inputVisitSummaries): 

680 visitInfo = visitSummary[0].getVisitInfo() 

681 visit = visitSummary[0]['visit'] 

682 visits.append(visit) 

683 exposureNames.append(str(visit)) 

684 raDec = visitInfo.getBoresightRaDec() 

685 ras.append(raDec.getRa().asRadians()) 

686 decs.append(raDec.getDec().asRadians()) 

687 airmasses.append(visitInfo.getBoresightAirmass()) 

688 exposureTimes.append(visitInfo.getExposureTime()) 

689 obsDate = visitInfo.getDate() 

690 obsMJD = obsDate.get(obsDate.MJD) 

691 mjds.append(obsMJD) 

692 # Get the observatory ICRS position for use in fitting parallax 

693 obsLon = visitInfo.observatory.getLongitude().asDegrees() 

694 obsLat = visitInfo.observatory.getLatitude().asDegrees() 

695 obsElev = visitInfo.observatory.getElevation() 

696 earthLocation = astropy.coordinates.EarthLocation.from_geodetic(obsLon, obsLat, obsElev) 

697 observatory_gcrs = earthLocation.get_gcrs(astropy.time.Time(obsMJD, format='mjd')) 

698 observatory_icrs = observatory_gcrs.transform_to(astropy.coordinates.ICRS()) 

699 # We want the position in AU in Cartesian coordinates 

700 observatories.append(observatory_icrs.cartesian.xyz.to(u.AU).value) 

701 

702 for row in visitSummary: 

703 detector = row['id'] 

704 if detector not in detectors: 

705 detectors.append(detector) 

706 detectorBounds = wcsfit.Bounds(row['bbox_min_x'], row['bbox_max_x'], 

707 row['bbox_min_y'], row['bbox_max_y']) 

708 instrument.addDevice(str(detector), detectorBounds) 

709 

710 detectorIndex = np.flatnonzero(detector == np.array(detectors))[0] 

711 extensionVisitIndices.append(v) 

712 extensionDetectorIndices.append(detectorIndex) 

713 extensionVisits.append(visit) 

714 extensionDetectors.append(detector) 

715 extensionType.append('SCIENCE') 

716 

717 wcs = row.getWcs() 

718 wcss.append(_get_wcs_from_sip(wcs)) 

719 

720 fieldNumbers = list(np.ones(len(exposureNames), dtype=int) * fieldNumber) 

721 instrumentNumbers = list(np.ones(len(exposureNames), dtype=int) * instrumentNumber) 

722 

723 # Set the reference epoch to be the median of the science visits. 

724 # The reference catalog will be shifted to this date. 

725 medianMJD = np.median(mjds) 

726 medianEpoch = astropy.time.Time(medianMJD, format='mjd').decimalyear 

727 

728 # Add information for the reference catalog. Most of the values are 

729 # not used. 

730 exposureNames.append('REFERENCE') 

731 visits.append(-1) 

732 fieldNumbers.append(0) 

733 if self.config.fitProperMotion: 

734 instrumentNumbers.append(-2) 

735 else: 

736 instrumentNumbers.append(-1) 

737 ras.append(0.0) 

738 decs.append(0.0) 

739 airmasses.append(0.0) 

740 exposureTimes.append(0) 

741 mjds.append((refEpoch if (refEpoch is not None) else medianMJD)) 

742 observatories.append(np.array([0, 0, 0])) 

743 identity = wcsfit.IdentityMap() 

744 icrs = wcsfit.SphericalICRS() 

745 refWcs = wcsfit.Wcs(identity, icrs, 'Identity', np.pi / 180.) 

746 wcss.append(refWcs) 

747 

748 extensionVisitIndices.append(len(exposureNames) - 1) 

749 extensionDetectorIndices.append(-1) # REFERENCE device must be -1 

750 extensionVisits.append(-1) 

751 extensionDetectors.append(-1) 

752 extensionType.append('REFERENCE') 

753 

754 # Make a table of information to use elsewhere in the class 

755 extensionInfo = pipeBase.Struct(visit=np.array(extensionVisits), 

756 detector=np.array(extensionDetectors), 

757 visitIndex=np.array(extensionVisitIndices), 

758 detectorIndex=np.array(extensionDetectorIndices), 

759 wcs=np.array(wcss), 

760 extensionType=np.array(extensionType)) 

761 

762 # Make the exposureHelper object to use in the fitting routines 

763 exposuresHelper = wcsfit.ExposuresHelper(exposureNames, 

764 fieldNumbers, 

765 instrumentNumbers, 

766 ras, 

767 decs, 

768 airmasses, 

769 exposureTimes, 

770 mjds, 

771 observatories) 

772 

773 exposureInfo = pipeBase.Struct(visits=visits, 

774 detectors=detectors, 

775 ras=ras, 

776 decs=decs, 

777 medianEpoch=medianEpoch) 

778 

779 return exposureInfo, exposuresHelper, extensionInfo 

780 
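# Sketch of the observatory-position calculation used above for the parallax fit
# (site longitude/latitude/elevation and the MJD are made up):
#
#     loc = astropy.coordinates.EarthLocation.from_geodetic(-70.75, -30.24, 2663.0)
#     gcrs = loc.get_gcrs(astropy.time.Time(60000.0, format='mjd'))
#     icrs = gcrs.transform_to(astropy.coordinates.ICRS())
#     xyz = icrs.cartesian.xyz.to(u.AU).value  # barycentric position, ~1 AU long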

781 def _load_refcat(self, associations, refObjectLoader, center, radius, extensionInfo, epoch=None, 

782 fieldIndex=0): 

783 """Load the reference catalog and add reference objects to the 

784 `wcsfit.FoFClass` object. 

785 

786 Parameters 

787 ---------- 

788 associations : `wcsfit.FoFClass` 

789 Object to which to add the catalog of reference objects. 

790 refObjectLoader : 

791 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader` 

792 Object set up to load reference catalog objects. 

793 center : `lsst.geom.SpherePoint` 

794 Center of the circle in which to load reference objects. 

795 radius : `lsst.sphgeom._sphgeom.Angle` 

796 Radius of the circle in which to load reference objects. 

797 extensionInfo : `lsst.pipe.base.Struct` 

798 Struct containing properties for each extension. 

799 epoch : `float` 

800 MJD to which to correct the object positions. 

801 fieldIndex : `int` 

802 Index of the field. Should be zero if all the data is fit together. 

803 

804 Returns 

805 ------- 

806 refObjects : `dict` 

807 Position and error information of reference objects. 

808 refCovariance : `list` of `float` 

809 Flattened output covariance matrix. 

810 """ 

811 formattedEpoch = astropy.time.Time(epoch, format='mjd') 

812 

813 refFilter = refObjectLoader.config.anyFilterMapsToThis 

814 skyCircle = refObjectLoader.loadSkyCircle(center, radius, refFilter, epoch=formattedEpoch) 

815 

816 selected = self.referenceSelector.run(skyCircle.refCat) 

817 # Need memory contiguity to get reference filters as a vector. 

818 if not selected.sourceCat.isContiguous(): 

819 refCat = selected.sourceCat.copy(deep=True) 

820 else: 

821 refCat = selected.sourceCat 

822 

823 # In Gaia DR3, missing values are denoted by NaNs. 

824 finiteInd = np.isfinite(refCat['coord_ra']) & np.isfinite(refCat['coord_dec']) 

825 refCat = refCat[finiteInd] 

826 

827 if self.config.excludeNonPMObjects: 

828 # Gaia DR2 has zeros for missing data, while Gaia DR3 has NaNs: 

829 hasPM = ((refCat['pm_raErr'] != 0) & np.isfinite(refCat['pm_raErr']) 

830 & np.isfinite(refCat['pm_decErr'])) 

831 refCat = refCat[hasPM] 

832 

833 ra = (refCat['coord_ra'] * u.radian).to(u.degree).to_value().tolist() 

834 dec = (refCat['coord_dec'] * u.radian).to(u.degree).to_value().tolist() 

835 raCov = ((refCat['coord_raErr'] * u.radian).to(u.degree).to_value()**2).tolist() 

836 decCov = ((refCat['coord_decErr'] * u.radian).to(u.degree).to_value()**2).tolist() 

837 

838 # Get refcat version from refcat metadata 

839 refCatMetadata = refObjectLoader.refCats[0].get().getMetadata() 

840 refCatVersion = refCatMetadata['REFCAT_FORMAT_VERSION'] 

841 if refCatVersion == 2: 

842 raDecCov = (refCat['coord_ra_coord_dec_Cov'] * u.radian**2).to(u.degree**2).to_value().tolist() 

843 else: 

844 raDecCov = np.zeros(len(ra)) 

845 

846 refObjects = {'ra': ra, 'dec': dec, 'raCov': raCov, 'decCov': decCov, 'raDecCov': raDecCov} 

847 refCovariance = [] 

848 

849 if self.config.fitProperMotion: 

850 raPM = (refCat['pm_ra'] * u.radian).to(u.marcsec).to_value().tolist() 

851 decPM = (refCat['pm_dec'] * u.radian).to(u.marcsec).to_value().tolist() 

852 parallax = (refCat['parallax'] * u.radian).to(u.marcsec).to_value().tolist() 

853 cov = _make_ref_covariance_matrix(refCat, version=refCatVersion) 

854 pmDict = {'raPM': raPM, 'decPM': decPM, 'parallax': parallax} 

855 refObjects.update(pmDict) 

856 refCovariance = cov 

857 

858 extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0] 

859 visitIndex = extensionInfo.visitIndex[extensionIndex] 

860 detectorIndex = extensionInfo.detectorIndex[extensionIndex] 

861 instrumentIndex = -1 # -1 indicates the reference catalog 

862 refWcs = extensionInfo.wcs[extensionIndex] 

863 

864 associations.addCatalog(refWcs, 'STELLAR', visitIndex, fieldIndex, instrumentIndex, detectorIndex, 

865 extensionIndex, np.ones(len(refCat), dtype=bool), 

866 ra, dec, np.arange(len(ra))) 

867 

868 return refObjects, refCovariance 

869 

870 def _load_catalogs_and_associate(self, associations, inputCatalogRefs, extensionInfo, 

871 fieldIndex=0, instrumentIndex=0): 

872 """Load the science catalogs and add the sources to the associator 

873 class `wcsfit.FoFClass`, associating them into matches as you go. 

874 

875 Parameters 

876 ---------- 

877 associations : `wcsfit.FoFClass` 

878 Object to which to add the science source catalogs.

879 inputCatalogRefs : `list` 

880 List of DeferredDatasetHandles pointing to visit-level source 

881 tables. 

882 extensionInfo : `lsst.pipe.base.Struct` 

883 Struct containing properties for each extension. 

884 fieldIndex : `int` 

885 Index of the field for these catalogs. Should be zero assuming all 

886 data is being fit together. 

887 instrumentIndex : `int` 

888 Index of the instrument for these catalogs. Should be zero 

889 assuming all data comes from the same instrument. 

890 

891 Returns 

892 ------- 

893 sourceIndices : `list` 

894 List of boolean arrays used to select sources. 

895 columns : `list` of `str` 

896 List of columns needed from source tables. 

897 """ 

898 columns = ['detector', 'sourceId', 'x', 'xErr', 'y', 'yErr', 'ixx', 'iyy', 'ixy', 

899 f'{self.config.sourceFluxType}_instFlux', f'{self.config.sourceFluxType}_instFluxErr'] 

900 if self.sourceSelector.config.doFlags: 

901 columns.extend(self.sourceSelector.config.flags.bad) 

902 if self.sourceSelector.config.doUnresolved: 

903 columns.append(self.sourceSelector.config.unresolved.name) 

904 if self.sourceSelector.config.doIsolated: 

905 columns.append(self.sourceSelector.config.isolated.parentName) 

906 columns.append(self.sourceSelector.config.isolated.nChildName) 

907 if self.sourceSelector.config.doRequirePrimary: 

908 columns.append(self.sourceSelector.config.requirePrimary.primaryColName) 

909 

910 sourceIndices = [None] * len(extensionInfo.visit) 

911 for inputCatalogRef in inputCatalogRefs: 

912 visit = inputCatalogRef.dataId['visit'] 

913 inputCatalog = inputCatalogRef.get(parameters={'columns': columns}) 

914 # Get a sorted array of the detector ids present in this catalog

915 detectors = np.unique(inputCatalog['detector']) 

916 

917 for detector in detectors: 

918 detectorSources = inputCatalog[inputCatalog['detector'] == detector] 

919 xCov = detectorSources['xErr']**2 

920 yCov = detectorSources['yErr']**2 

921 xyCov = (detectorSources['ixy'] * (xCov + yCov) 

922 / (detectorSources['ixx'] + detectorSources['iyy'])) 

923 # Remove sources with bad shape measurements 

924 goodShapes = xyCov**2 <= (xCov * yCov) 

925 selected = self.sourceSelector.run(detectorSources) 

926 goodInds = selected.selected & goodShapes 

927 

928 isStar = np.ones(goodInds.sum()) 

929 extensionIndex = np.flatnonzero((extensionInfo.visit == visit) 

930 & (extensionInfo.detector == detector))[0] 

931 detectorIndex = extensionInfo.detectorIndex[extensionIndex] 

932 visitIndex = extensionInfo.visitIndex[extensionIndex] 

933 

934 sourceIndices[extensionIndex] = goodInds 

935 

936 wcs = extensionInfo.wcs[extensionIndex] 

937 associations.reprojectWCS(wcs, fieldIndex) 

938 

939 associations.addCatalog(wcs, 'STELLAR', visitIndex, fieldIndex, 

940 instrumentIndex, detectorIndex, extensionIndex, isStar, 

941 detectorSources[goodInds]['x'].to_list(), 

942 detectorSources[goodInds]['y'].to_list(), 

943 np.arange(goodInds.sum())) 

944 

945 associations.sortMatches(fieldIndex, minMatches=self.config.minMatches, 

946 allowSelfMatches=self.config.allowSelfMatches) 

947 

948 return sourceIndices, columns 

949 
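# Note on the shape cut above: the centroid cross term is approximated as
# xyCov = ixy * (xCov + yCov) / (ixx + iyy), and requiring xyCov**2 <= xCov * yCov
# keeps the 2x2 covariance matrix [[xCov, xyCov], [xyCov, yCov]] positive
# semi-definite (non-negative determinant), i.e. a physically meaningful error
# ellipse. The same expression is reused in _add_objects below.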

950 def make_yaml(self, inputVisitSummary, inputFile=None): 

951 """Make a YAML-type object that describes the parameters of the fit 

952 model. 

953 

954 Parameters 

955 ---------- 

956 inputVisitSummary : `lsst.afw.table.ExposureCatalog` 

957 Catalog with per-detector summary information. 

958 inputFile : `str` 

959 Path to a file that contains a basic model. 

960 

961 Returns 

962 ------- 

963 inputYAML : `wcsfit.YAMLCollector` 

964 YAML object containing the model description. 

965 """ 

966 if inputFile is not None: 

967 inputYAML = wcsfit.YAMLCollector(inputFile, 'PixelMapCollection') 

968 else: 

969 inputYAML = wcsfit.YAMLCollector('', 'PixelMapCollection') 

970 inputDict = {} 

971 modelComponents = ['INSTRUMENT/DEVICE', 'EXPOSURE'] 

972 baseMap = {'Type': 'Composite', 'Elements': modelComponents} 

973 inputDict['EXPOSURE/DEVICE/base'] = baseMap 

974 

975 xMin = str(inputVisitSummary['bbox_min_x'].min()) 

976 xMax = str(inputVisitSummary['bbox_max_x'].max()) 

977 yMin = str(inputVisitSummary['bbox_min_y'].min()) 

978 yMax = str(inputVisitSummary['bbox_max_y'].max()) 

979 

980 deviceModel = {'Type': 'Composite', 'Elements': self.config.deviceModel.list()} 

981 inputDict['INSTRUMENT/DEVICE'] = deviceModel 

982 for component in self.config.deviceModel: 

983 if 'poly' in component.lower(): 

984 componentDict = {'Type': 'Poly', 

985 'XPoly': {'OrderX': self.config.devicePolyOrder, 

986 'SumOrder': True}, 

987 'YPoly': {'OrderX': self.config.devicePolyOrder, 

988 'SumOrder': True}, 

989 'XMin': xMin, 'XMax': xMax, 'YMin': yMin, 'YMax': yMax} 

990 elif 'identity' in component.lower(): 

991 componentDict = {'Type': 'Identity'} 

992 

993 inputDict[component] = componentDict 

994 

995 exposureModel = {'Type': 'Composite', 'Elements': self.config.exposureModel.list()} 

996 inputDict['EXPOSURE'] = exposureModel 

997 for component in self.config.exposureModel: 

998 if 'poly' in component.lower(): 

999 componentDict = {'Type': 'Poly', 

1000 'XPoly': {'OrderX': self.config.exposurePolyOrder, 

1001 'SumOrder': 'true'}, 

1002 'YPoly': {'OrderX': self.config.exposurePolyOrder, 

1003 'SumOrder': 'true'}} 

1004 elif 'identity' in component.lower(): 

1005 componentDict = {'Type': 'Identity'} 

1006 

1007 inputDict[component] = componentDict 

1008 

1009 inputYAML.addInput(yaml.dump(inputDict)) 

1010 inputYAML.addInput('Identity:\n Type: Identity\n') 

1011 

1012 return inputYAML 

1013 
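# Illustrative sketch of the model description assembled above for the default
# configuration (the XMin/XMax/YMin/YMax bounds are hypothetical; the real values
# come from the visit summary bounding boxes):
#
#     EXPOSURE/DEVICE/base: {Type: Composite, Elements: [INSTRUMENT/DEVICE, EXPOSURE]}
#     INSTRUMENT/DEVICE: {Type: Composite, Elements: [BAND/DEVICE/poly]}
#     BAND/DEVICE/poly:
#       Type: Poly
#       XPoly: {OrderX: 4, SumOrder: true}
#       YPoly: {OrderX: 4, SumOrder: true}
#       XMin: '0', XMax: '2047', YMin: '0', YMax: '4175'
#     EXPOSURE: {Type: Composite, Elements: [EXPOSURE/poly]}
#     EXPOSURE/poly:
#       Type: Poly
#       XPoly: {OrderX: 6, SumOrder: 'true'}
#       YPoly: {OrderX: 6, SumOrder: 'true'}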

1014 def _add_objects(self, wcsf, inputCatalogRefs, sourceIndices, extensionInfo, columns): 

1015 """Add science sources to the wcsfit.WCSFit object. 

1016 

1017 Parameters 

1018 ---------- 

1019 wcsf : `wcsfit.WCSFit` 

1020 WCS-fitting object. 

1021 inputCatalogRefs : `list` 

1022 List of DeferredDatasetHandles pointing to visit-level source 

1023 tables. 

1024 sourceIndices : `list` 

1025 List of boolean arrays used to select sources. 

1026 extensionInfo : `lsst.pipe.base.Struct` 

1027 Struct containing properties for each extension. 

1028 columns : `list` of `str` 

1029 List of columns needed from source tables. 

1030 """ 

1031 for inputCatalogRef in inputCatalogRefs: 

1032 visit = inputCatalogRef.dataId['visit'] 

1033 inputCatalog = inputCatalogRef.get(parameters={'columns': columns}) 

1034 detectors = np.unique(inputCatalog['detector']) 

1035 

1036 for detector in detectors: 

1037 detectorSources = inputCatalog[inputCatalog['detector'] == detector] 

1038 

1039 extensionIndex = np.flatnonzero((extensionInfo.visit == visit) 

1040 & (extensionInfo.detector == detector))[0] 

1041 sourceCat = detectorSources[sourceIndices[extensionIndex]] 

1042 

1043 xCov = sourceCat['xErr']**2 

1044 yCov = sourceCat['yErr']**2 

1045 xyCov = (sourceCat['ixy'] * (xCov + yCov) 

1046 / (sourceCat['ixx'] + sourceCat['iyy'])) 

1047 # TODO: add correct xyErr if DM-7101 is ever done. 

1048 

1049 d = {'x': sourceCat['x'].to_numpy(), 'y': sourceCat['y'].to_numpy(), 

1050 'xCov': xCov.to_numpy(), 'yCov': yCov.to_numpy(), 'xyCov': xyCov.to_numpy()} 

1051 

1052 wcsf.setObjects(extensionIndex, d, 'x', 'y', ['xCov', 'yCov', 'xyCov']) 

1053 

1054 def _add_ref_objects(self, wcsf, refObjects, refCovariance, extensionInfo): 

1055 """Add reference sources to the wcsfit.WCSFit object. 

1056 

1057 Parameters 

1058 ---------- 

1059 wcsf : `wcsfit.WCSFit` 

1060 WCS-fitting object. 

1061 refObjects : `dict` 

1062 Position and error information of reference objects. 

1063 refCovariance : `list` of `float` 

1064 Flattened output covariance matrix. 

1065 extensionInfo : `lsst.pipe.base.Struct` 

1066 Struct containing properties for each extension. 

1067 """ 

1068 extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0] 

1069 

1070 if self.config.fitProperMotion: 

1071 wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov'], 

1072 pmDecKey='decPM', pmRaKey='raPM', parallaxKey='parallax', pmCovKey='fullCov', 

1073 pmCov=refCovariance) 

1074 else: 

1075 wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov']) 

1076 

1077 def _make_afw_wcs(self, mapDict, centerRA, centerDec, doNormalizePixels=False, xScale=1, yScale=1): 

1078 """Make an `lsst.afw.geom.SkyWcs` from a dictionary of mappings. 

1079 

1080 Parameters 

1081 ---------- 

1082 mapDict : `dict` 

1083 Dictionary of mapping parameters. 

1084 centerRA : `lsst.geom.Angle` 

1085 RA of the tangent point. 

1086 centerDec : `lsst.geom.Angle` 

1087 Declination of the tangent point. 

1088 doNormalizePixels : `bool` 

1089 Whether to normalize pixels so that range is [-1,1]. 

1090 xScale : `float` 

1091 Factor by which to normalize x-dimension. Corresponds to width of 

1092 detector. 

1093 yScale : `float` 

1094 Factor by which to normalize y-dimension. Corresponds to height of 

1095 detector. 

1096 

1097 Returns 

1098 ------- 

1099 outWCS : `lsst.afw.geom.SkyWcs` 

1100 WCS constructed from the input mappings.

1101 """ 

1102 # Set up pixel frames 

1103 pixelFrame = astshim.Frame(2, 'Domain=PIXELS') 

1104 normedPixelFrame = astshim.Frame(2, 'Domain=NORMEDPIXELS') 

1105 

1106 if doNormalizePixels: 

1107 # Pixels will need to be rescaled before going into the mappings 

1108 normCoefficients = [-1.0, 2.0/xScale, 0, 

1109 -1.0, 0, 2.0/yScale] 

1110 normMap = _convert_to_ast_polymap_coefficients(normCoefficients) 

1111 else: 

1112 normMap = astshim.UnitMap(2) 

1113 

1114 # All of the detectors for one visit map to the same tangent plane 

1115 tangentPoint = lsst.geom.SpherePoint(centerRA, centerDec) 

1116 cdMatrix = afwgeom.makeCdMatrix(1.0 * lsst.geom.degrees, 0 * lsst.geom.degrees, True) 

1117 iwcToSkyWcs = afwgeom.makeSkyWcs(lsst.geom.Point2D(0, 0), tangentPoint, cdMatrix) 

1118 iwcToSkyMap = iwcToSkyWcs.getFrameDict().getMapping('PIXELS', 'SKY') 

1119 skyFrame = iwcToSkyWcs.getFrameDict().getFrame('SKY') 

1120 

1121 frameDict = astshim.FrameDict(pixelFrame) 

1122 frameDict.addFrame('PIXELS', normMap, normedPixelFrame) 

1123 

1124 currentFrameName = 'NORMEDPIXELS' 

1125 

1126 # Dictionary values are ordered according to the maps' application. 

1127 for m, mapElement in enumerate(mapDict.values()): 

1128 mapType = mapElement['Type'] 

1129 

1130 if mapType == 'Poly': 

1131 mapCoefficients = mapElement['Coefficients'] 

1132 astMap = _convert_to_ast_polymap_coefficients(mapCoefficients) 

1133 elif mapType == 'Identity': 

1134 astMap = astshim.UnitMap(2) 

1135 else: 

1136 raise ValueError(f"Converting map type {mapType} to WCS is not supported") 

1137 

1138 if m == len(mapDict) - 1: 

1139 newFrameName = 'IWC' 

1140 else: 

1141 newFrameName = 'INTERMEDIATE' + str(m) 

1142 newFrame = astshim.Frame(2, f'Domain={newFrameName}') 

1143 frameDict.addFrame(currentFrameName, astMap, newFrame) 

1144 currentFrameName = newFrameName 

1145 frameDict.addFrame('IWC', iwcToSkyMap, skyFrame) 

1146 

1147 outWCS = afwgeom.SkyWcs(frameDict) 

1148 return outWCS 

1149 
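# Summary of the frame chain assembled above:
#     PIXELS --(unit map, or pixel normalization to [-1, 1])--> NORMEDPIXELS
#            --(Poly/Identity maps from the fitted model)--> INTERMEDIATE0, ..., IWC
#            --(TAN projection about the visit tangent point)--> SKY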

1150 def _make_outputs(self, wcsf, visitSummaryTables, exposureInfo): 

1151 """Make a WCS object out of the WCS models. 

1152 

1153 Parameters 

1154 ---------- 

1155 wcsf : `wcsfit.WCSFit` 

1156 WCSFit object, assumed to have fit model. 

1157 visitSummaryTables : `list` of `lsst.afw.table.ExposureCatalog` 

1158 Catalogs with per-detector summary information from which to grab 

1159 detector information. 

1160 exposureInfo : `lsst.pipe.base.Struct`

1161 Struct containing general properties for the visits.

1162 

1163 Returns 

1164 ------- 

1165 catalogs : `dict` of [`int`, `lsst.afw.table.ExposureCatalog`]

1166 Dictionary of `lsst.afw.table.ExposureCatalog` objects with the WCS

1167 set to the WCS fit in wcsf, keyed by the visit number.

1168 """ 

1169 # Get the parameters of the fit models 

1170 mapParams = wcsf.mapCollection.getParamDict() 

1171 

1172 # Set up the schema for the output catalogs 

1173 schema = lsst.afw.table.ExposureTable.makeMinimalSchema() 

1174 schema.addField('visit', type='L', doc='Visit number') 

1175 

1176 # Pixels will need to be rescaled before going into the mappings 

1177 sampleDetector = visitSummaryTables[0][0] 

1178 xscale = sampleDetector['bbox_max_x'] - sampleDetector['bbox_min_x'] 

1179 yscale = sampleDetector['bbox_max_y'] - sampleDetector['bbox_min_y'] 

1180 

1181 catalogs = {} 

1182 for v, visitSummary in enumerate(visitSummaryTables): 

1183 visit = visitSummary[0]['visit'] 

1184 

1185 catalog = lsst.afw.table.ExposureCatalog(schema) 

1186 catalog.resize(len(exposureInfo.detectors)) 

1187 catalog['visit'] = visit 

1188 

1189 for d, detector in enumerate(visitSummary['id']): 

1190 mapName = f'{visit}/{detector}' 

1191 

1192 mapElements = wcsf.mapCollection.orderAtoms(f'{mapName}/base') 

1193 mapDict = {} 

1194 for m, mapElement in enumerate(mapElements): 

1195 mapType = wcsf.mapCollection.getMapType(mapElement) 

1196 mapDict[mapElement] = {'Type': mapType} 

1197 

1198 if mapType == 'Poly': 

1199 mapCoefficients = mapParams[mapElement] 

1200 mapDict[mapElement]['Coefficients'] = mapCoefficients 

1201 

1202 # The RA and Dec of the visit are needed for the last step of 

1203 # the mapping from the visit tangent plane to RA and Dec 

1204 outWCS = self._make_afw_wcs(mapDict, exposureInfo.ras[v] * lsst.geom.radians, 

1205 exposureInfo.decs[v] * lsst.geom.radians, 

1206 doNormalizePixels=True, 

1207 xScale=xscale, yScale=yscale) 

1208 

1209 catalog[d].setId(detector) 

1210 catalog[d].setWcs(outWCS) 

1211 catalog.sort() 

1212 catalogs[visit] = catalog 

1213 

1214 return catalogs