# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import copy
import os
import re
import traceback
import warnings
import weakref
from deprecated.sphinx import deprecated

from astro_metadata_translator import fix_header
from lsst.utils import doImport, getPackageDir
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from ._instrument import Instrument

__all__ = ["CameraMapper", "exposureFromImage"]


class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file and a filter description (Filter
    class static configuration) as another policy file.

    Information from the camera geometry and defects is inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass and optionally the
    metadata translator class:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    translatorClass: The `~astro_metadata_translator.MetadataTranslator`
    class to use for fixing metadata values. If it is not set, an attempt
    will be made to infer the class from ``MakeRawVisitInfoClass``; failing
    that, the metadata fixup will try to infer the translator class from the
    header itself.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making suitable for path expansion (e.g. removing
    commas). The default implementation does nothing. Note that this
    method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename. The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    .. todo::

        Instead of auto-loading the camera at construction time, load it from
        the calibration registry

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
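
    Examples
    --------
    A minimal sketch of a concrete subclass; the package name, policy file,
    and exposure-ID packing here are hypothetical (real mappers, e.g. in
    obs_cfht, differ in detail)::

        class MyCamMapper(CameraMapper):
            packageName = "obs_mycam"  # hypothetical package

            def __init__(self, **kwargs):
                policyFile = dafPersist.Policy.defaultPolicyFile(
                    "obs_mycam", "MyCamMapper.yaml", "policy")
                policy = dafPersist.Policy(policyFile)
                super().__init__(policy, os.path.dirname(policyFile), **kwargs)

            def _extractDetectorName(self, dataId):
                return "ccd%02d" % dataId["ccd"]

            def _computeCcdExposureId(self, dataId):
                # hypothetical packing: 6 bits reserved for the CCD number
                return dataId["visit"] * 64 + dataId["ccd"]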

    """
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # Class to use for metadata translations
    translatorClass = None

    # Gen3 instrument corresponding to this mapper
    # Can be a class or a string with the full name of the class
    _gen3instrument = None

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated
        # in the policy, use that. And otherwise, the calibs are in the
        # regular root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
            if calibStorage is None:
                calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        # Assign a metadata translator if one has not been defined by
        # subclass. We can sometimes infer one from the RawVisitInfo
        # class.
        if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
            self.translatorClass = self.makeRawVisitInfo.metadataTranslator

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
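
        Examples
        --------
        The derived dataset types are most conveniently used through a
        Butler. A sketch, assuming a ``raw`` dataset, a valid ``dataId``,
        and a `lsst.daf.persistence.Butler` for a repository managed by
        this mapper:

        >>> md = butler.get("raw_md", dataId)           # FITS header only
        >>> paths = butler.get("raw_filename", dataId)  # file path(s) on disk
        >>> wcs = butler.get("raw_wcs", dataId)         # just the SkyWcs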

        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling
                        # for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping

                    # The closures below hold the mapper via weakref.proxy so
                    # that storing them as attributes of self does not create
                    # a reference cycle (self -> closure -> self).
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw
                                FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            # TODO: remove in DM-27177
                            @deprecated(reason="Replaced with getFilterLabel. Will be removed after v22.",
                                        category=FutureWarning)
                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            # TODO: deprecate in DM-27177, remove in DM-27811
                            def getFilterLabel(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                storedFilter = fitsReader.readFilterLabel()

                                # Apply standardization used by full Exposure
                                try:
                                    # mapping is local to enclosing scope
                                    idFilter = mapping.need(['filter'], dataId)['filter']
                                except dafPersist.NoResults:
                                    idFilter = None
                                bestFilter = self._getBestFilter(storedFilter, idFilter)
                                if bestFilter is not None:
                                    return bestFilter
                                else:
                                    return storedFilter

                            setMethods("filterLabel", bypassImpl=getFilterLabel)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)

                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog

                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog

                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case dataId must contain filter.
        """
        raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.

        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        # Calling PosixStorage directly is not the long term solution in this
        # function, this is work-in-progress on epic DM-6225. The plan is for
        # parentSearch to be changed to 'search', and search only the storage
        # associated with this mapper. All searching of parents will be
        # handled by traversing the container of repositories in Butler.

        def firstElement(list):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return list[0] if list is not None and len(list) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))

    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier
        """
        return iter(self.keyDict.keys())

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
        """

        # not sure if this is how we want to do this. what if None was
        # intended?
        if level == '':
            level = self.getDefaultLevel()

        if datasetType is None:
            keyDict = copy.copy(self.keyDict)
        else:
            keyDict = self.mappings[datasetType].keys()
        if level is not None and level in self.levels:
            keyDict = copy.copy(keyDict)
            for lev in self.levels[level]:
                if lev in keyDict:
                    del keyDict[lev]
        return keyDict

    def getDefaultLevel(self):
        return self.defaultLevel

    def getDefaultSubLevel(self, level):
        if level in self.defaultSubLevels:
            return self.defaultSubLevels[level]
        return None

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        className = str(cls)
        className = className[className.find('.'):-1]
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ''

    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError('class variable packageName must not be None')
        return cls.packageName

    @classmethod
    def getGen3Instrument(cls):
        """Return the gen3 Instrument class equivalent for this gen2 Mapper.

        Returns
        -------
        instr : `type`
            A `~lsst.obs.base.Instrument` class.
        """
        if cls._gen3instrument is None:
            raise NotImplementedError("Please provide a specific implementation for your instrument"
                                      " to enable conversion of this gen2 repository to gen3")
        if isinstance(cls._gen3instrument, str):
            # Given a string to convert to an instrument class
            cls._gen3instrument = doImport(cls._gen3instrument)
        if not issubclass(cls._gen3instrument, Instrument):
            raise ValueError(f"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
                             " but that is not an lsst.obs.base.Instrument")
        return cls._gen3instrument

    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package"""
        return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        actualId = self._transformId(dataId)
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=actualId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object.
        """
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)

    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel

        bfKernel is a calibration product that is a numpy array,
        unlike other calibration products that are all images;
        all calibration images are sent through _standardizeExposure
        due to CalibrationMapping, but we don't want that to happen to
        bfKernel
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image"""
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True)

    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy

###############################################################################
#
# Utility functions
#
###############################################################################

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : `str`
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder,
        # New Butler expects the registry to be in the repo folder. To support
        # Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix
        # registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """

        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path
        actualId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Pathname
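
        Examples
        --------
        With the default ``_transformId`` (which just copies the ID), this
        is plain %-interpolation; the template and ID here are hypothetical:

        >>> mapper._mapActualToPath("raw/v%(visit)07d/c%(ccd)02d.fits",
        ...                         dict(visit=12345, ccd=3))
        'raw/v0012345/c03.fits'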

        """

        transformedId = self._transformId(actualId)
        try:
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))

    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename

        The default implementation converts spaces to underscores.
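
        Examples
        --------
        >>> CameraMapper.getShortCcdName("R:1,2 S:3,4")
        'R:1,2_S:3,4'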

        """
        return ccdName.replace(" ", "_")

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """

        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    @staticmethod
    def _resolveFilters(definitions, idFilter, filterLabel):
        """Identify the filter(s) consistent with partial filter information.

        Parameters
        ----------
        definitions : `lsst.obs.base.FilterDefinitionCollection`
            The filter definitions in which to search for filters.
        idFilter : `str` or `None`
            The filter information provided in a data ID.
        filterLabel : `lsst.afw.image.FilterLabel` or `None`
            The filter information provided by an exposure; may be incomplete.

        Returns
        -------
        filters : `set` [`lsst.obs.base.FilterDefinition`]
            The set of filters consistent with ``idFilter``
            and ``filterLabel``.
        """
        # Assume none of the filter constraints is actually
        # wrong/contradictory. Then taking the intersection of all
        # constraints will give a unique result if one exists.
        matches = set(definitions)
        if idFilter is not None:
            matches.intersection_update(definitions.findAll(idFilter))
        if filterLabel is not None and filterLabel.hasPhysicalLabel():
            matches.intersection_update(definitions.findAll(filterLabel.physicalLabel))
        if filterLabel is not None and filterLabel.hasBandLabel():
            matches.intersection_update(definitions.findAll(filterLabel.bandLabel))
        return matches

    def _getBestFilter(self, storedLabel, idFilter):
        """Estimate the most complete filter information consistent with the
        file or registry.

        Parameters
        ----------
        storedLabel : `lsst.afw.image.FilterLabel` or `None`
            The filter previously stored in the file.
        idFilter : `str` or `None`
            The filter implied by the data ID, if any.

        Returns
        -------
        bestFilter : `lsst.afw.image.FilterLabel` or `None`
            The complete filter to describe the dataset. May be equal to
            ``storedLabel``. `None` if no recommendation can be generated.
        """
        try:
            # getGen3Instrument returns class; need to construct it.
            filterDefinitions = self.getGen3Instrument()().filterDefinitions
        except NotImplementedError:
            filterDefinitions = None

        if filterDefinitions is not None:
            definitions = self._resolveFilters(filterDefinitions, idFilter, storedLabel)
            self.log.debug("Matching filters for id=%r and label=%r are %s.",
                           idFilter, storedLabel, definitions)
            if len(definitions) == 1:
                newLabel = list(definitions)[0].makeFilterLabel()
                return newLabel
            elif definitions:
                self.log.warn("Multiple matches for filter %r with data ID %r.", storedLabel, idFilter)
                # Can we at least add a band?
                # Never expect multiple definitions with same physical filter.
                bands = {d.band for d in definitions}  # None counts as separate result!
                if len(bands) == 1 and storedLabel is None:
                    band = list(bands)[0]
                    return afwImage.FilterLabel(band=band)
                else:
                    return None
            else:
                # Unknown filter, nothing to be done.
                self.log.warn("Cannot reconcile filter %r with data ID %r.", storedLabel, idFilter)
                return None

        # Not practical to recommend a FilterLabel without filterDefinitions

        return None

    def _setFilter(self, mapping, item, dataId):
        """Set the filter information in an Exposure.

        The Exposure should already have had a filter loaded, but the reader
        (in ``afw``) had to act on incomplete information. This method
        cross-checks the filter against the data ID and the standard list
        of filters.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the data ID filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """
        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        itemFilter = item.getFilterLabel()  # may be None
        try:
            idFilter = mapping.need(['filter'], dataId)['filter']
        except dafPersist.NoResults:
            idFilter = None

        bestFilter = self._getBestFilter(itemFilter, idFilter)
        if bestFilter is not None:
            if bestFilter != itemFilter:
                item.setFilterLabel(bestFilter)
            # Already using bestFilter, avoid unnecessary edits
        elif itemFilter is None:
            # Old Filter cleanup, without the benefit of FilterDefinition
            if self.filters is not None and idFilter in self.filters:
                idFilter = self.filters[idFilter]
            try:
                # TODO: remove in DM-27177; at that point may not be able
                # to process IDs without FilterDefinition.
                with warnings.catch_warnings():
                    warnings.filterwarnings("ignore", category=FutureWarning)
                    item.setFilter(afwImage.Filter(idFilter))
            except pexExcept.NotFoundError:
                self.log.warn("Filter %s not defined. Set to UNKNOWN.", idFilter)

    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        dataId : `dict`
            Dataset identifier
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
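
        Examples
        --------
        This is how ``std_raw`` (above) standardizes a raw image; ``item``
        and ``dataId`` are supplied by the butler machinery:

        >>> exposure = self._standardizeExposure(
        ...     self.exposures['raw'], item, dataId,
        ...     trimmed=False, setVisitInfo=True)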

        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # We can only create a WCS if it doesn't already have one and
        # we have either a VisitInfo or exposure metadata.
        # Do not calculate a WCS if this is an amplifier exposure
        if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
                (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # See DM-14372 for why this is debug and not warn (e.g. calib
            # files without wcs metadata).
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e.args[0])
        # ensure any WCS values stripped from the metadata are removed in the
        # exposure
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Always try to use metadata first, to strip WCS keys from it.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warn(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warn(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:

        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )

    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
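
        Examples
        --------
        A sketch; the dataset type must use a storage type with write
        recipes (currently ``FitsStorage``), and the data ID here is
        hypothetical:

        >>> settings = mapper.getImageCompressionSettings(
        ...     "calexp", dict(visit=12345, ccd=3))

        The returned `~lsst.daf.base.PropertySet` carries the "image",
        "mask" and "variance" compression and scaling entries described
        in `_initWriteRecipes`.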

        """
        mapping = self.mappings[datasetType]
        recipeName = mapping.recipe
        storageType = mapping.storage
        if storageType not in self._writeRecipes:
            return dafBase.PropertySet()
        if recipeName not in self._writeRecipes[storageType]:
            raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                               (datasetType, storageType, recipeName))
        recipe = self._writeRecipes[storageType][recipeName].deepCopy()
        # A recipe seed of 0 is a request for a deterministic seed, derived
        # from the data ID so that repeated writes of the same dataset are
        # reproducible.
        seed = hash(tuple(dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
                recipe.set(plane + ".scaling.seed", seed)
        return recipe

    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, to configure FITS image
        compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])


def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Data identifier; used to compute the exposure ID when setting
        VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper supplying the metadata translator and the VisitInfo maker.
    logger : `lsst.log.Log`, optional
        Logger for warnings; one is created if needed and not provided.
    setVisitInfo : `bool`, optional
        Attach a VisitInfo built from the image metadata, if possible?

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
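
    Examples
    --------
    A plain `lsst.afw.image.Image` is wrapped into a bare Exposure (a
    minimal sketch; with no mapper supplied, no VisitInfo is attached):

    >>> import lsst.afw.image as afwImage
    >>> exp = exposureFromImage(afwImage.ImageF(16, 16))
    >>> type(exp).__name__
    'ExposureF'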

    """
    translatorClass = None
    if mapper is not None:
        translatorClass = mapper.translatorClass

    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        exposure = image
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
    else:  # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # set VisitInfo if we can
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure


def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
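
    Examples
    --------
    A sketch of how the schema defaults are filled in; ``recipes`` here
    would be a hypothetical `~lsst.daf.persistence.Policy` holding only
    ``default.image.compression.algorithm: GZIP_SHUFFLE`` (and likewise
    for mask and variance):

    >>> validated = validateRecipeFitsStorage(recipes)
    >>> validated["default"].getScalar("image.compression.algorithm")
    'GZIP_SHUFFLE'
    >>> validated["default"].getScalar("image.scaling.bitpix")  # default
    0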

    """
    # Schemas define what should be there, and the default values (and by the
    # default value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated