# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import copy
import os
import re
import traceback
import warnings
import weakref
from deprecated.sphinx import deprecated

from astro_metadata_translator import fix_header
from lsst.utils import doImport
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from lsst.utils import getPackageDir
from ._instrument import Instrument

__all__ = ["CameraMapper", "exposureFromImage"]

class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file and a filter description (Filter
    class static configuration) as another policy file.

    Information from the camera geometry and defects is inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass and optionally the
    metadata translator class:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    translatorClass: The `~astro_metadata_translator.MetadataTranslator`
    class to use for fixing metadata values. If it is not set, an attempt
    will be made to infer the class from ``MakeRawVisitInfoClass``; failing
    that, the metadata fixup will try to infer the translator class from the
    header itself.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making it suitable for path expansion
    (e.g. removing commas). The default implementation does nothing. Note
    that this method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename. The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    .. todo::

        Instead of auto-loading the camera at construction time, load it from
        the calibration registry

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
    """
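    # A minimal subclass sketch (hedged illustration only: the "MyCam" names,
    # the ID packing, and the single integer "ccd" key are hypothetical, not
    # obs_base API):
    #
    #     class MyCamMapper(CameraMapper):
    #         packageName = "obs_mycam"
    #
    #         def _extractDetectorName(self, dataId):
    #             return "ccd%02d" % dataId["ccd"]
    #
    #         def _computeCcdExposureId(self, dataId):
    #             # e.g. pack visit and ccd into one 64-bit integer
    #             return dataId["visit"]*100 + dataId["ccd"]
    #
    #         def _computeCoaddExposureId(self, dataId, singleFilter):
    #             raise NotImplementedError()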

    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # Class to use for metadata translations
    translatorClass = None

    # Gen3 instrument corresponding to this mapper
    # Can be a class or a string with the full name of the class
    _gen3instrument = None

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated
        # in the policy, use that. Otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        # Assign a metadata translator if one has not been defined by
        # subclass. We can sometimes infer one from the RawVisitInfo
        # class.
        if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
            self.translatorClass = self.makeRawVisitInfo.metadataTranslator

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional derived datasets as conveniences, e.g., reading the header
        of an image, or retrieving only the size of a catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for
                        # this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw
                                FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            # TODO: remove in DM-27177
                            @deprecated(reason="Replaced with getFilterLabel. Will be removed after v22.",
                                        category=FutureWarning, version="v22")
                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            # TODO: deprecate in DM-27177, remove in DM-27811
                            def getFilterLabel(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                storedFilter = fitsReader.readFilterLabel()

                                # Apply standardization used by full Exposure
                                try:
                                    # mapping is local to enclosing scope
                                    idFilter = mapping.need(['filter'], dataId)['filter']
                                except dafPersist.NoResults:
                                    idFilter = None
                                bestFilter = self._getBestFilter(storedFilter, idFilter)
                                if bestFilter is not None:
                                    return bestFilter
                                else:
                                    return storedFilter

                            setMethods("filterLabel", bypassImpl=getFilterLabel)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)

                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog

                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog

                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
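    # The derived dataset types set up above are available through the usual
    # Butler interface; a sketch of a gen2 session (the repo path, dataset
    # types, and ID keys below are illustrative assumptions):
    #
    #     import lsst.daf.persistence as dafPersist
    #     butler = dafPersist.Butler(root="/path/to/repo")
    #     md = butler.get("raw_md", visit=12345, ccd=42)       # header only
    #     paths = butler.get("raw_filename", visit=12345, ccd=42)
    #     nrows = butler.get("src_len", visit=12345, ccd=42)   # catalog length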

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case ``dataId`` must contain filter.
        """
        raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        # Calling PosixStorage directly is not the long term solution in this
        # function, this is work-in-progress on epic DM-6225. The plan is for
        # parentSearch to be changed to 'search', and search only the storage
        # associated with this mapper. All searching of parents will be
        # handled by traversing the container of repositories in Butler.

        def firstElement(list):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return list[0] if list is not None and len(list) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))

    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier
        """
        return iter(self.keyDict.keys())

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
        """

        # not sure if this is how we want to do this. what if None was
        # intended?
        if level == '':
            level = self.getDefaultLevel()

        if datasetType is None:
            keyDict = copy.copy(self.keyDict)
        else:
            keyDict = self.mappings[datasetType].keys()
        if level is not None and level in self.levels:
            keyDict = copy.copy(keyDict)
            for lev in self.levels[level]:
                if lev in keyDict:
                    del keyDict[lev]
        return keyDict
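    # Sketch: with a 'levels' policy that maps level "ccd" to the key set
    # {"amp"}, asking for raw keys at the "ccd" level drops the amplifier key
    # (key names here are illustrative, not fixed by obs_base):
    #
    #     mapper.getKeys("raw", None)   # e.g. {"visit": int, "ccd": str, "amp": int}
    #     mapper.getKeys("raw", "ccd")  # e.g. {"visit": int, "ccd": str}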

    def getDefaultLevel(self):
        return self.defaultLevel

    def getDefaultSubLevel(self, level):
        if level in self.defaultSubLevels:
            return self.defaultSubLevels[level]
        return None

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        className = str(cls)
        className = className[className.find('.'):-1]
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ''
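    # The first regex strips the "Mapper" suffix and the leading letter is
    # then lower-cased, so a hypothetical subclass MyCamMapper reports
    # "myCam":
    #
    #     re.search(r'(\w+)Mapper', "<class 'lsst.obs.mycam.MyCamMapper'>").group(1)
    #     # -> 'MyCam', returned as 'myCam'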

    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError('class variable packageName must not be None')
        return cls.packageName

    @classmethod
    def getGen3Instrument(cls):
        """Return the gen3 Instrument class equivalent for this gen2 Mapper.

        Returns
        -------
        instr : `type`
            A `~lsst.obs.base.Instrument` class.
        """
        if cls._gen3instrument is None:
            raise NotImplementedError("Please provide a specific implementation for your instrument"
                                      " to enable conversion of this gen2 repository to gen3")
        if isinstance(cls._gen3instrument, str):
            # Given a string to convert to an instrument class
            cls._gen3instrument = doImport(cls._gen3instrument)
        if not issubclass(cls._gen3instrument, Instrument):
            raise ValueError(f"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
                             " but that is not an lsst.obs.base.Instrument")
        return cls._gen3instrument

    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package"""
        return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        actualId = self._transformId(dataId)
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=actualId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object.
        """
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)

    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel

        bfKernel is a calibration product that is a numpy array,
        unlike other calibration products that are all images;
        all calibration images are sent through _standardizeExposure
        due to CalibrationMapping, but we don't want that to happen to
        bfKernel
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image"""
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True)

    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy

###############################################################################
#
# Utility functions
#
###############################################################################

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : `str`
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder,
        # New Butler expects the registry to be in the repo folder. To support
        # Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix
        # registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """

        return dataId.copy()
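    # A subclass override might canonicalize colloquial keys without mutating
    # its input; a sketch (the "ccdname" alias is hypothetical):
    #
    #     def _transformId(self, dataId):
    #         actualId = dataId.copy()
    #         if "ccdname" in actualId:
    #             actualId["ccd"] = actualId.pop("ccdname").replace(",", "")
    #         return actualId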

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path
        actualId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Pathname
        """

        # Transform outside the try block so a failure in _transformId
        # propagates directly instead of leaving transformedId unbound in the
        # exception handler below.
        transformedId = self._transformId(actualId)
        try:
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
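    # Expansion is ordinary %-formatting against the transformed ID; with an
    # illustrative template and ID:
    #
    #     "raw/v%(visit)07d/c%(ccd)02d.fits" % {"visit": 12345, "ccd": 7}
    #     # -> 'raw/v0012345/c07.fits'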

    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename

        The default implementation converts spaces to underscores.
        """
        return ccdName.replace(" ", "_")

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """

        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    @staticmethod
    def _resolveFilters(definitions, idFilter, filterLabel):
        """Identify the filter(s) consistent with partial filter information.

        Parameters
        ----------
        definitions : `lsst.obs.base.FilterDefinitionCollection`
            The filter definitions in which to search for filters.
        idFilter : `str` or `None`
            The filter information provided in a data ID.
        filterLabel : `lsst.afw.image.FilterLabel` or `None`
            The filter information provided by an exposure; may be incomplete.

        Returns
        -------
        filters : `set` [`lsst.obs.base.FilterDefinition`]
            The set of filters consistent with ``idFilter``
            and ``filterLabel``.
        """
        # Assume none of the filter constraints are actually wrong or
        # contradictory. Then taking the intersection of all constraints will
        # give a unique result if one exists.
        matches = set(definitions)
        if idFilter is not None:
            matches.intersection_update(definitions.findAll(idFilter))
        if filterLabel is not None and filterLabel.hasPhysicalLabel():
            matches.intersection_update(definitions.findAll(filterLabel.physicalLabel))
        if filterLabel is not None and filterLabel.hasBandLabel():
            matches.intersection_update(definitions.findAll(filterLabel.bandLabel))
        return matches
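    # Sketch of the narrowing behaviour, assuming hypothetical definitions
    # where band "i" has two physical filters, "i" and "i2":
    #
    #     CameraMapper._resolveFilters(defs, idFilter="i", filterLabel=None)
    #     # -> {i, i2} (ambiguous)
    #     CameraMapper._resolveFilters(defs, "i", afwImage.FilterLabel(physical="i2"))
    #     # -> {i2} (unique)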

    def _getBestFilter(self, storedLabel, idFilter):
        """Estimate the most complete filter information consistent with the
        file or registry.

        Parameters
        ----------
        storedLabel : `lsst.afw.image.FilterLabel` or `None`
            The filter previously stored in the file.
        idFilter : `str` or `None`
            The filter implied by the data ID, if any.

        Returns
        -------
        bestFilter : `lsst.afw.image.FilterLabel` or `None`
            The complete filter to describe the dataset. May be equal to
            ``storedLabel``. `None` if no recommendation can be generated.
        """
        try:
            # getGen3Instrument returns class; need to construct it.
            filterDefinitions = self.getGen3Instrument()().filterDefinitions
        except NotImplementedError:
            filterDefinitions = None

        if filterDefinitions is not None:
            definitions = self._resolveFilters(filterDefinitions, idFilter, storedLabel)
            self.log.debug("Matching filters for id=%r and label=%r are %s.",
                           idFilter, storedLabel, definitions)
            if len(definitions) == 1:
                newLabel = list(definitions)[0].makeFilterLabel()
                return newLabel
            elif definitions:
                # Some instruments have many filters for the same band, of
                # which one is known by band name and the others always by
                # afw name (e.g., i, i2).
                nonAfw = {f for f in definitions if f.afw_name is None}
                if len(nonAfw) == 1:
                    newLabel = list(nonAfw)[0].makeFilterLabel()
                    self.log.debug("Assuming %r is the correct match.", newLabel)
                    return newLabel

                self.log.warn("Multiple matches for filter %r with data ID %r.", storedLabel, idFilter)
                # Can we at least add a band?
                # Never expect multiple definitions with same physical filter.
                bands = {d.band for d in definitions}  # None counts as separate result!
                if len(bands) == 1 and storedLabel is None:
                    band = list(bands)[0]
                    return afwImage.FilterLabel(band=band)
                else:
                    return None
            else:
                # Unknown filter, nothing to be done.
                self.log.warn("Cannot reconcile filter %r with data ID %r.", storedLabel, idFilter)
                return None

        # Not practical to recommend a FilterLabel without filterDefinitions

        return None

    def _setFilter(self, mapping, item, dataId):
        """Set the filter information in an Exposure.

        The Exposure should already have had a filter loaded, but the reader
        (in ``afw``) had to act on incomplete information. This method
        cross-checks the filter against the data ID and the standard list
        of filters.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the data ID filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """
        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        itemFilter = item.getFilterLabel()  # may be None
        try:
            idFilter = mapping.need(['filter'], dataId)['filter']
        except dafPersist.NoResults:
            idFilter = None

        bestFilter = self._getBestFilter(itemFilter, idFilter)
        if bestFilter is not None:
            if bestFilter != itemFilter:
                item.setFilterLabel(bestFilter)
            # Already using bestFilter, avoid unnecessary edits
        elif itemFilter is None:
            # Old Filter cleanup, without the benefit of FilterDefinition
            if self.filters is not None and idFilter in self.filters:
                idFilter = self.filters[idFilter]
            try:
                # TODO: remove in DM-27177; at that point may not be able
                # to process IDs without FilterDefinition.
                with warnings.catch_warnings():
                    warnings.filterwarnings("ignore", category=FutureWarning)
                    item.setFilter(afwImage.Filter(idFilter))
            except pexExcept.NotFoundError:
                self.log.warn("Filter %s not defined. Set to UNKNOWN.", idFilter)

    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage

        dataId : `dict`
            Dataset identifier
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo, setFilter=filter)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # We can only create a WCS if it doesn't already have one and
        # we have either a VisitInfo or exposure metadata.
        # Do not calculate a WCS if this is an amplifier exposure
        if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
                (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # See DM-14372 for why this is debug and not warn (e.g. calib
            # files without wcs metadata).
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e.args[0])
        # ensure any WCS values stripped from the metadata are removed in the
        # exposure
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Always try to use metadata first, to strip WCS keys from it.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warn(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warn(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )

    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
        """
        mapping = self.mappings[datasetType]
        recipeName = mapping.recipe
        storageType = mapping.storage
        if storageType not in self._writeRecipes:
            return dafBase.PropertySet()
        if recipeName not in self._writeRecipes[storageType]:
            raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                               (datasetType, storageType, recipeName))
        recipe = self._writeRecipes[storageType][recipeName].deepCopy()
        seed = hash(tuple(dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
                recipe.set(plane + ".scaling.seed", seed)
        return recipe
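    # Note on the seed above: when a recipe leaves <plane>.scaling.seed at 0,
    # it is filled in from the data ID, so the same dataId fuzzes identically
    # within one interpreter session (str hashing is salted per session, so
    # the value is not stable across processes).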

    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which it uses to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])

1445 

1446def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True, setFilter=False): 

1447 """Generate an Exposure from an image-like object 

1448 

1449 If the image is a DecoratedImage then also set its metadata 

1450 (Image and MaskedImage are missing the necessary metadata 

1451 and Exposure already has those set) 

1452 

1453 Parameters 

1454 ---------- 

1455 image : Image-like object 

1456 Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or 

1457 Exposure. 

1458 dataId : `dict`, optional 

1459 The data ID identifying the visit of the image. 

1460 mapper : `lsst.obs.base.CameraMapper`, optional 

1461 The mapper with which to convert the image. 

1462 logger : `lsst.log.Log`, optional 

1463 An existing logger to which to send output. 

1464 setVisitInfo : `bool`, optional 

1465 If `True`, create and attach a `lsst.afw.image.VisitInfo` to the 

1466 result. Ignored if ``image`` is an `~lsst.afw.image.Exposure` with an 

1467 existing ``VisitInfo``. 

1468 setFilter : `bool`, optional 

1469 If `True`, create and attach a `lsst.afw.image.FilterLabel` to the 

1470 result. Converts non-``FilterLabel`` information provided in ``image``. 

1471 Ignored if ``image`` is an `~lsst.afw.image.Exposure` with existing 

1472 filter information. 

1473 

1474 Returns 

1475 ------- 

1476 `lsst.afw.image.Exposure` 

1477 Exposure containing input image. 

1478 """ 

1479 translatorClass = None 

1480 if mapper is not None: 

1481 translatorClass = mapper.translatorClass 

1482 

1483 metadata = None 

1484 if isinstance(image, afwImage.MaskedImage): 

1485 exposure = afwImage.makeExposure(image) 

1486 elif isinstance(image, afwImage.DecoratedImage): 

1487 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage())) 

1488 metadata = image.getMetadata() 

1489 fix_header(metadata, translator_class=translatorClass) 

1490 exposure.setMetadata(metadata) 

1491 elif isinstance(image, afwImage.Exposure): 

1492 exposure = image 

1493 metadata = exposure.getMetadata() 

1494 fix_header(metadata, translator_class=translatorClass) 

1495 else: # Image 

1496 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image)) 

1497 

1498 if metadata is not None: 

1499 # set filter if we can 

1500 if setFilter and mapper is not None and exposure.getFilterLabel() is None: 

1501 # Translate whatever was in the metadata 

1502 if 'FILTER' in metadata: 

1503 oldFilter = metadata['FILTER'] 

1504 idFilter = dataId['filter'] if 'filter' in dataId else None 

1505 # oldFilter may not be physical, but _getBestFilter always goes 

1506 # through the FilterDefinitions instead of returning 

1507 # unvalidated input. 

1508 filter = mapper._getBestFilter(afwImage.FilterLabel(physical=oldFilter), idFilter) 

1509 if filter is not None: 

1510 exposure.setFilterLabel(filter) 

1511 # set VisitInfo if we can 

1512 if setVisitInfo and exposure.getInfo().getVisitInfo() is None: 

1513 if mapper is None: 

1514 if not logger: 

1515 logger = lsstLog.Log.getLogger("CameraMapper") 

1516 logger.warn("I can only set the VisitInfo if you provide a mapper") 

1517 else: 

1518 exposureId = mapper._computeCcdExposureId(dataId) 

1519 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId) 

1520 

1521 exposure.getInfo().setVisitInfo(visitInfo) 

1522 

1523 return exposure 
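# A minimal use of exposureFromImage (a sketch: the input image is synthetic
# and no mapper or dataId is supplied, so no filter or VisitInfo is attached):
#
#     import lsst.afw.image as afwImage
#     exp = exposureFromImage(afwImage.ImageF(64, 64))
#     assert isinstance(exp, afwImage.ExposureF)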

def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the
    # default value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
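# Sketch of the defaulting behaviour: a recipe whose planes specify only
# "compression: {algorithm: GZIP_SHUFFLE}" (as in the YAML example in
# _initWriteRecipes) comes back as a PropertySet padded with the schema
# defaults above, e.g. image.compression.rows == 1 and
# image.scaling.bitpix == 0 for every plane.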