Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of obs_base. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22import copy 

23import os 

24import re 

25import traceback 

26import weakref 

27 

28from deprecated.sphinx import deprecated 

29 

30from astro_metadata_translator import fix_header 

31import lsst.daf.persistence as dafPersist 

32from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping 

33import lsst.daf.base as dafBase 

34import lsst.afw.geom as afwGeom 

35import lsst.afw.image as afwImage 

36import lsst.afw.table as afwTable 

37from lsst.afw.fits import readMetadata 

38import lsst.afw.cameraGeom as afwCameraGeom 

39import lsst.log as lsstLog 

40import lsst.pex.exceptions as pexExcept 

41from .exposureIdInfo import ExposureIdInfo 

42from .makeRawVisitInfo import MakeRawVisitInfo 

43from .utils import createInitialSkyWcs, InitialSkyWcsError 

44from lsst.utils import getPackageDir 

45 

46__all__ = ["CameraMapper", "exposureFromImage"] 

47 

48 

class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them.  This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it.  See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs.  Each CCD is in turn composed of one or more amplifiers
    (amps).  A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file, a filter description (Filter class
    static configuration) as another policy file.

    Information from the camera geometry and defects are inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images.  The first is a registry of all raw exposures.  This must contain
    the time of the observation.  One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries.  The second is an optional registry of all calibration
    data.  This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass and optionally the
    metadata translator class:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    translatorClass: The `~astro_metadata_translator.MetadataTranslator`
    class to use for fixing metadata values.  If it is not set an attempt
    will be made to infer the class from ``MakeRawVisitInfoClass``, failing
    that the metadata fixup will try to infer the translator class from the
    header itself.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making suitable for path expansion (e.g. removing
    commas).  The default implementation does nothing.  Note that this
    method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename.  The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    .. todo::

       Instead of auto-loading the camera at construction time, load it from
       the calibration registry

    Parameters
    ----------
    policy : daf_persistence.Policy,
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
    """
    # Name of the package providing this mapper; subclasses must set it or
    # instantiation fails (checked in __init__).
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # Class to use for metadata translations; None means "infer" (see
    # class docstring and the end of __init__).
    translatorClass = None

186 

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Set up roots, registries, dataset mappings and camera geometry.

        See the class docstring for parameter descriptions.
        """
        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        # Provisional root; overwritten below once the root string has been
        # normalized through LogicalLocation.
        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        # Repository-level policy overrides the merged per-camera policy.
        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        # Final root assignment: the LogicalLocation-normalized string
        # (replaces the provisional value set above).
        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        # NOTE(review): the constructor's ``provided`` argument is NOT
        # forwarded here (``provided=None`` is passed) — confirm this is
        # intentional rather than a dropped parameter.
        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        # Assign a metadata translator if one has not been defined by
        # subclass. We can sometimes infer one from the RawVisitInfo
        # class.
        if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
            self.translatorClass = self.makeRawVisitInfo.metadataTranslator

288 

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        # ``subPolicy=subPolicy`` binds the *current* loop
                        # value as a default argument, avoiding the usual
                        # late-binding closure pitfall.  ``mapper`` and
                        # ``mapping`` are accepted for signature
                        # compatibility with the other map closures but are
                        # unused in this body.
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            # NB: this loop variable shadows the outer
                            # ``name`` (the policy section name) inside the
                            # closure body.
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    # ``weakref.proxy(self)`` keeps the closures stored on
                    # ``self`` from creating a reference cycle; ``mapping``
                    # is again bound via a default argument.
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                # hdu=1: bbox comes from the first extension,
                                # not the primary header.
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)

                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog

                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            # Map without the bbox key, then attach the bbox
                            # geometry as additional data on the location.
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog

                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))

543 

544 def _computeCcdExposureId(self, dataId): 

545 """Compute the 64-bit (long) identifier for a CCD exposure. 

546 

547 Subclasses must override 

548 

549 Parameters 

550 ---------- 

551 dataId : `dict` 

552 Data identifier with visit, ccd. 

553 """ 

554 raise NotImplementedError() 

555 

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case dataId must contain filter.
        """
        raise NotImplementedError()

570 

571 def _search(self, path): 

572 """Search for path in the associated repository's storage. 

573 

574 Parameters 

575 ---------- 

576 path : string 

577 Path that describes an object in the repository associated with 

578 this mapper. 

579 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 

580 indicator will be stripped when searching and so will match 

581 filenames without the HDU indicator, e.g. 'foo.fits'. The path 

582 returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 

583 

584 Returns 

585 ------- 

586 string 

587 The path for this object in the repository. Will return None if the 

588 object can't be found. If the input argument path contained an HDU 

589 indicator, the returned path will also contain the HDU indicator. 

590 """ 

591 return self.rootStorage.search(path) 

592 

593 def backup(self, datasetType, dataId): 

594 """Rename any existing object with the given type and dataId. 

595 

596 The CameraMapper implementation saves objects in a sequence of e.g.: 

597 

598 - foo.fits 

599 - foo.fits~1 

600 - foo.fits~2 

601 

602 All of the backups will be placed in the output repo, however, and will 

603 not be removed if they are found elsewhere in the _parent chain. This 

604 means that the same file will be stored twice if the previous version 

605 was found in an input repo. 

606 """ 

607 

608 # Calling PosixStorage directly is not the long term solution in this 

609 # function, this is work-in-progress on epic DM-6225. The plan is for 

610 # parentSearch to be changed to 'search', and search only the storage 

611 # associated with this mapper. All searching of parents will be handled 

612 # by traversing the container of repositories in Butler. 

613 

614 def firstElement(list): 

615 """Get the first element in the list, or None if that can't be 

616 done. 

617 """ 

618 return list[0] if list is not None and len(list) else None 

619 

620 n = 0 

621 newLocation = self.map(datasetType, dataId, write=True) 

622 newPath = newLocation.getLocations()[0] 

623 path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True) 

624 path = firstElement(path) 

625 oldPaths = [] 

626 while path is not None: 

627 n += 1 

628 oldPaths.append((n, path)) 

629 path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True) 

630 path = firstElement(path) 

631 for n, oldPath in reversed(oldPaths): 

632 self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n)) 

633 

634 def keys(self): 

635 """Return supported keys. 

636 

637 Returns 

638 ------- 

639 iterable 

640 List of keys usable in a dataset identifier 

641 """ 

642 return iter(self.keyDict.keys()) 

643 

644 def getKeys(self, datasetType, level): 

645 """Return a dict of supported keys and their value types for a given 

646 dataset type at a given level of the key hierarchy. 

647 

648 Parameters 

649 ---------- 

650 datasetType : `str` 

651 Dataset type or None for all dataset types. 

652 level : `str` or None 

653 Level or None for all levels or '' for the default level for the 

654 camera. 

655 

656 Returns 

657 ------- 

658 `dict` 

659 Keys are strings usable in a dataset identifier, values are their 

660 value types. 

661 """ 

662 

663 # not sure if this is how we want to do this. what if None was intended? 

664 if level == '': 

665 level = self.getDefaultLevel() 

666 

667 if datasetType is None: 

668 keyDict = copy.copy(self.keyDict) 

669 else: 

670 keyDict = self.mappings[datasetType].keys() 

671 if level is not None and level in self.levels: 

672 keyDict = copy.copy(keyDict) 

673 for l in self.levels[level]: 

674 if l in keyDict: 

675 del keyDict[l] 

676 return keyDict 

677 

    def getDefaultLevel(self):
        """Return the default level of the key hierarchy (policy
        ``defaultLevel``, captured at construction)."""
        return self.defaultLevel

680 

681 def getDefaultSubLevel(self, level): 

682 if level in self.defaultSubLevels: 

683 return self.defaultSubLevels[level] 

684 return None 

685 

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        # str(cls) looks like "<class 'package.module.SomethingMapper'>";
        # trim up to the first '.' and drop the trailing "'>".
        className = str(cls)
        className = className[className.find('.'):-1]
        # Prefer the "<Name>Mapper" convention; otherwise fall back to the
        # last component of the dotted class path.
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        # NOTE(review): assumes one of the two patterns matched; if neither
        # does, m is None and .group(1) raises AttributeError.
        name = m.group(1)
        # Lower-case the first character (camelCase camera name).
        return name[:1].lower() + name[1:] if name else ''

696 

697 @classmethod 

698 def getPackageName(cls): 

699 """Return the name of the package containing this CameraMapper.""" 

700 if cls.packageName is None: 

701 raise ValueError('class variable packageName must not be None') 

702 return cls.packageName 

703 

    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package"""
        # This classmethod intentionally shares its name with the
        # module-level ``getPackageDir`` imported from lsst.utils; the bare
        # name below resolves to that module-level function.
        return getPackageDir(cls.getPackageName())

708 

709 def map_camera(self, dataId, write=False): 

710 """Map a camera dataset.""" 

711 if self.camera is None: 

712 raise RuntimeError("No camera dataset available.") 

713 actualId = self._transformId(dataId) 

714 return dafPersist.ButlerLocation( 

715 pythonType="lsst.afw.cameraGeom.CameraConfig", 

716 cppType="Config", 

717 storageName="ConfigStorage", 

718 locationList=self.cameraDataLocation or "ignored", 

719 dataId=actualId, 

720 mapper=self, 

721 storage=self.rootStorage 

722 ) 

723 

724 def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId): 

725 """Return the (preloaded) camera object. 

726 """ 

727 if self.camera is None: 

728 raise RuntimeError("No camera dataset available.") 

729 return self.camera 

730 

731 def map_expIdInfo(self, dataId, write=False): 

732 return dafPersist.ButlerLocation( 

733 pythonType="lsst.obs.base.ExposureIdInfo", 

734 cppType=None, 

735 storageName="Internal", 

736 locationList="ignored", 

737 dataId=dataId, 

738 mapper=self, 

739 storage=self.rootStorage 

740 ) 

741 

742 def bypass_expIdInfo(self, datasetType, pythonType, location, dataId): 

743 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 

744 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId) 

745 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId) 

746 return ExposureIdInfo(expId=expId, expBits=expBits) 

747 

748 def std_bfKernel(self, item, dataId): 

749 """Disable standardization for bfKernel 

750 

751 bfKernel is a calibration product that is numpy array, 

752 unlike other calibration products that are all images; 

753 all calibration images are sent through _standardizeExposure 

754 due to CalibrationMapping, but we don't want that to happen to bfKernel 

755 """ 

756 return item 

757 

758 def std_raw(self, item, dataId): 

759 """Standardize a raw dataset by converting it to an Exposure instead 

760 of an Image""" 

761 return self._standardizeExposure(self.exposures['raw'], item, dataId, 

762 trimmed=False, setVisitInfo=True) 

763 

764 def map_skypolicy(self, dataId): 

765 """Map a sky policy.""" 

766 return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy", 

767 "Internal", None, None, self, 

768 storage=self.rootStorage) 

769 

770 def std_skypolicy(self, item, dataId): 

771 """Standardize a sky policy by returning the one we use.""" 

772 return self.skypolicy 

773 

774############################################################################### 

775# 

776# Utility functions 

777# 

778############################################################################### 

779 

def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                   posixIfNoSql=True):
    """Set up a registry (usually SQLite3), trying a number of possible
    paths.

    Parameters
    ----------
    name : string
        Name of registry.
    description: `str`
        Description of registry (for log messages)
    path : string
        Path for registry.
    policy : string
        Policy that contains the registry name, used if path is None.
    policyKey : string
        Key in policy for registry path.
    storage : Storage subclass
        Repository Storage to look in.
    searchParents : bool, optional
        True if the search for a registry should follow any Butler v1
        _parent symlinks.
    posixIfNoSql : bool, optional
        If an sqlite registry is not found, will create a posix registry if
        this is True.

    Returns
    -------
    lsst.daf.persistence.Registry
        Registry object
    """
    # NOTE(review): ``searchParents`` is not referenced in this body —
    # confirm whether the parent search is handled by ``storage``.
    #
    # Resolve the registry path from the policy when the caller did not
    # supply one explicitly.
    if path is None and policyKey in policy:
        path = dafPersist.LogicalLocation(policy[policyKey]).locString()
        if os.path.isabs(path):
            raise RuntimeError("Policy should not indicate an absolute path for registry.")
        if not storage.exists(path):
            # Not directly at the policy path; fall back to an instance
            # search (which also looks in the repository root).
            newPath = storage.instanceSearch(path)

            newPath = newPath[0] if newPath is not None and len(newPath) else None
            if newPath is None:
                self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                              path)
            path = newPath
    else:
        # NOTE(review): this branch also runs when a non-None ``path`` was
        # passed in, discarding it before the default search below —
        # confirm that is intended.
        self.log.warn("Unable to locate registry at policy path: %s", path)
        path = None

    # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the
    # registry to be in the repo folder. To support Old API, check to see if path starts with
    # root, and if so, strip root from path. Currently only works with PosixStorage.
    try:
        root = storage.root
        if path and (path.startswith(root)):
            path = path[len(root + '/'):]
    except AttributeError:
        # Not every Storage implementation exposes ``root``.
        pass

    # determine if there is an sqlite registry and if not, try the posix registry.
    registry = None

    def search(filename, description):
        """Search for file in storage

        Parameters
        ----------
        filename : `str`
            Filename to search for
        description : `str`
            Description of file, for error message.

        Returns
        -------
        path : `str` or `None`
            Path to file, or None
        """
        result = storage.instanceSearch(filename)
        if result:
            return result[0]
        self.log.debug("Unable to locate %s: %s", description, filename)
        return None

    # Search for a suitable registry database, in order of preference:
    # PostgreSQL dump in root, SQLite3 in root, SQLite3 in the current dir.
    if path is None:
        path = search("%s.pgsql" % name, "%s in root" % description)
    if path is None:
        path = search("%s.sqlite3" % name, "%s in root" % description)
    if path is None:
        path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

    if path is not None:
        # Re-resolve the path if it is not directly visible in storage.
        if not storage.exists(path):
            newPath = storage.instanceSearch(path)
            newPath = newPath[0] if newPath is not None and len(newPath) else None
            if newPath is not None:
                path = newPath
        # Fetch a local copy of the database file and open the registry
        # from it.
        localFileObj = storage.getLocalFile(path)
        self.log.info("Loading %s registry from %s", description, localFileObj.name)
        registry = dafPersist.Registry.create(localFileObj.name)
        localFileObj.close()
    elif not registry and posixIfNoSql:
        # No SQL registry found anywhere: fall back to a filesystem-scan
        # based registry if allowed. Failures are deliberately swallowed —
        # a mapper can operate without a registry.
        try:
            self.log.info("Loading Posix %s registry from %s", description, storage.root)
            registry = dafPersist.PosixRegistry(storage.root)
        except Exception:
            registry = None

    return registry

887 

def _transformId(self, dataId):
    """Generate a standard ID dict from a camera-specific ID dict.

    Canonical keys include:
    - amp: amplifier name
    - ccd: CCD name (in LSST this is a combination of raft and sensor)
    The default implementation returns a copy of its input.

    Parameters
    ----------
    dataId : `dict`
        Dataset identifier; this must not be modified

    Returns
    -------
    `dict`
        Transformed dataset identifier.
    """
    # Shallow copy: the caller's dict is never mutated.
    return copy.copy(dataId)

908 

def _mapActualToPath(self, template, actualId):
    """Convert a template path to an actual path, using the actual data
    identifier. This implementation is usually sufficient but can be
    overridden by the subclass.

    Parameters
    ----------
    template : `str`
        Template path, with %-style (mapping-key) substitution fields.
    actualId : `dict`
        Dataset identifier.

    Returns
    -------
    `str`
        Pathname

    Raises
    ------
    RuntimeError
        If the identifier cannot be transformed or the template cannot
        be formatted with it.
    """
    transformedId = None
    try:
        transformedId = self._transformId(actualId)
        return template % transformedId
    except Exception as e:
        # Report the transformed id when available; fall back to the raw
        # id if _transformId itself failed. (Previously referencing the
        # unbound ``transformedId`` here raised a NameError that masked
        # the original error.)
        shownId = actualId if transformedId is None else transformedId
        raise RuntimeError("Failed to format %r with data %r: %s" % (template, shownId, e))

932 

@staticmethod
def getShortCcdName(ccdName):
    """Convert a CCD name to a form useful as a filename

    The default implementation converts spaces to underscores.
    """
    # Split on single spaces and rejoin with underscores; equivalent to
    # replacing every space character.
    return "_".join(ccdName.split(" "))

940 

def _extractDetectorName(self, dataId):
    """Extract the detector (CCD) name from the dataset identifier.

    The name in question is the detector name used by lsst.afw.cameraGeom.

    Parameters
    ----------
    dataId : `dict`
        Dataset identifier.

    Returns
    -------
    `str`
        Detector name

    Raises
    ------
    NotImplementedError
        Always, in this base implementation; camera-specific subclasses
        must override.
    """
    raise NotImplementedError("No _extractDetectorName() function specified")

957 

@deprecated("This method is no longer used for ISR (will be removed after v11)", category=FutureWarning)
def _extractAmpId(self, dataId):
    """Extract the amplifier identifier from a dataset identifier.

    .. note:: Deprecated in 11_0

    amplifier identifier has two parts: the detector name for the CCD
    containing the amplifier and index of the amplifier in the detector.

    Parameters
    ----------
    dataId : `dict`
        Dataset identifier

    Returns
    -------
    `tuple`
        Amplifier identifier
    """
    trDataId = self._transformId(dataId)
    # (ccd name, amplifier index); 'amp' is coerced to int.
    return (trDataId["ccd"], int(trDataId['amp']))

980 

def _setAmpDetector(self, item, dataId, trimmed=True):
    """Set the detector object in an Exposure for an amplifier.

    Defects are also added to the Exposure based on the detector object.

    Parameters
    ----------
    item : `lsst.afw.image.Exposure`
        Exposure to set the detector in.
    dataId : `dict`
        Dataset identifier
    trimmed : `bool`
        Should detector be marked as trimmed? (ignored)
    """
    # Amplifier-level exposures share the CCD-level detector handling.
    return self._setCcdDetector(item, dataId, trimmed)

997 

def _setCcdDetector(self, item, dataId, trimmed=True):
    """Set the detector object in an Exposure for a CCD.

    Parameters
    ----------
    item : `lsst.afw.image.Exposure`
        Exposure to set the detector in.
    dataId : `dict`
        Dataset identifier
    trimmed : `bool`
        Should detector be marked as trimmed? (ignored)
    """
    # Respect a detector that is already attached to the exposure.
    if item.getDetector() is not None:
        return

    name = self._extractDetectorName(dataId)
    item.setDetector(self.camera[name])

1016 

def _setFilter(self, mapping, item, dataId):
    """Set the filter object in an Exposure. If the Exposure had a FILTER
    keyword, this was already processed during load. But if it didn't,
    use the filter from the registry.

    Parameters
    ----------
    mapping : `lsst.obs.base.Mapping`
        Where to get the filter from.
    item : `lsst.afw.image.Exposure`
        Exposure to set the filter in.
    dataId : `dict`
        Dataset identifier.
    """
    # Only real Exposure flavors carry a filter.
    exposureTypes = (afwImage.ExposureU, afwImage.ExposureI,
                     afwImage.ExposureF, afwImage.ExposureD)
    if not isinstance(item, exposureTypes):
        return

    # A known filter was already set during load; leave it alone.
    if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
        return

    # Look the filter up in the registry, then map it through any
    # camera-specific alias table.
    filterName = mapping.need(['filter'], dataId)['filter']
    if self.filters is not None and filterName in self.filters:
        filterName = self.filters[filterName]
    try:
        item.setFilter(afwImage.Filter(filterName))
    except pexExcept.NotFoundError:
        self.log.warn("Filter %s not defined. Set to UNKNOWN." % (filterName))

1047 

def _standardizeExposure(self, mapping, item, dataId, filter=True,
                         trimmed=True, setVisitInfo=True):
    """Default standardization function for images.

    This sets the Detector from the camera geometry
    and optionally set the Filter. In both cases this saves
    having to persist some data in each exposure (or image).

    Parameters
    ----------
    mapping : `lsst.obs.base.Mapping`
        Where to get the values from.
    item : image-like object
        Can be any of lsst.afw.image.Exposure,
        lsst.afw.image.DecoratedImage, lsst.afw.image.Image
        or lsst.afw.image.MaskedImage

    dataId : `dict`
        Dataset identifier
    filter : `bool`
        Set filter? Ignored if item is already an exposure
    trimmed : `bool`
        Should detector be marked as trimmed?
    setVisitInfo : `bool`
        Should Exposure have its VisitInfo filled out from the metadata?

    Returns
    -------
    `lsst.afw.image.Exposure`
        The standardized Exposure.
    """
    try:
        # Promote plain images to an Exposure (no-op when already one).
        exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                     setVisitInfo=setVisitInfo)
    except Exception as e:
        self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
        raise

    # Attach the detector appropriate to the mapping level.
    if mapping.level.lower() == "amp":
        self._setAmpDetector(exposure, dataId, trimmed)
    elif mapping.level.lower() == "ccd":
        self._setCcdDetector(exposure, dataId, trimmed)

    # We can only create a WCS if it doesn't already have one and
    # we have either a VisitInfo or exposure metadata.
    # Do not calculate a WCS if this is an amplifier exposure
    if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
            (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
        self._createInitialSkyWcs(exposure)

    if filter:
        self._setFilter(mapping, exposure, dataId)

    return exposure

1102 

def _createSkyWcsFromMetadata(self, exposure):
    """Create a SkyWcs from the FITS header metadata in an Exposure.

    On failure the exposure's WCS is left unset; in all cases the
    (possibly stripped) metadata is written back to the exposure.

    Parameters
    ----------
    exposure : `lsst.afw.image.Exposure`
        The exposure to get metadata from, and attach the SkyWcs to.
    """
    metadata = exposure.getMetadata()
    # Repair known header problems before interpreting WCS keywords.
    fix_header(metadata, translator_class=self.translatorClass)
    try:
        # strip=True removes the WCS keywords from ``metadata`` as they
        # are consumed.
        wcs = afwGeom.makeSkyWcs(metadata, strip=True)
        exposure.setWcs(wcs)
    except pexExcept.TypeError as e:
        # See DM-14372 for why this is debug and not warn (e.g. calib files without wcs metadata).
        self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                       " %s", e.args[0])
    # ensure any WCS values stripped from the metadata are removed in the exposure
    exposure.setMetadata(metadata)

1122 

def _createInitialSkyWcs(self, exposure):
    """Create a SkyWcs from the boresight and camera geometry.

    If the boresight or camera geometry do not support this method of
    WCS creation, this falls back on the header metadata-based version
    (typically a purely linear FITS crval/crpix/cdmatrix WCS).

    Parameters
    ----------
    exposure : `lsst.afw.image.Exposure`
        The exposure to get data from, and attach the SkyWcs to.
    """
    # Always use try to use metadata first, to strip WCS keys from it.
    self._createSkyWcsFromMetadata(exposure)

    # Without a VisitInfo there is no boresight; keep the metadata WCS.
    if exposure.getInfo().getVisitInfo() is None:
        msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
        self.log.warn(msg)
        return
    try:
        newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
        exposure.setWcs(newSkyWcs)
    except InitialSkyWcsError as e:
        # Keep the metadata-based WCS attached above; log the failure
        # (and its root cause, if chained) for debugging.
        msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
        self.log.warn(msg, e)
        self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
        if e.__context__ is not None:
            self.log.debug("Root-cause Exception was: %s",
                           traceback.TracebackException.from_exception(e.__context__))

1152 

def _makeCamera(self, policy, repositoryDir):
    """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
    the camera geometry

    Also set self.cameraDataLocation, if relevant (else it can be left
    None).

    This implementation assumes that policy contains an entry "camera"
    that points to the subdirectory in this package of camera data;
    specifically, that subdirectory must contain:
    - a file named `camera.py` that contains persisted camera config
    - ampInfo table FITS files, as required by
    lsst.afw.cameraGeom.makeCameraFromPath

    Parameters
    ----------
    policy : `lsst.daf.persistence.Policy`
        Policy with per-camera defaults already merged
        (PexPolicy only for backward compatibility).
    repositoryDir : `str`
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    """
    if 'camera' not in policy:
        raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")

    # Remember where the persisted camera config lives.
    configPath = os.path.join(repositoryDir, policy['camera'], "camera.py")
    self.cameraDataLocation = os.path.normpath(configPath)

    config = afwCameraGeom.CameraConfig()
    config.load(self.cameraDataLocation)
    # The ampInfo FITS tables sit next to camera.py.
    return afwCameraGeom.makeCameraFromPath(
        cameraConfig=config,
        ampInfoPath=os.path.dirname(self.cameraDataLocation),
        shortNameFunc=self.getShortCcdName,
        pupilFactoryClass=self.PupilFactoryClass,
    )

1190 

def getRegistry(self):
    """Get the registry used by this mapper.

    Returns
    -------
    Registry or None
        The registry used by this mapper for this mapper's repository.
    """
    registry = self.registry
    return registry

1200 

def getImageCompressionSettings(self, datasetType, dataId):
    """Stuff image compression settings into a daf.base.PropertySet

    This goes into the ButlerLocation's "additionalData", which gets
    passed into the boost::persistence framework.

    Parameters
    ----------
    datasetType : `str`
        Type of dataset for which to get the image compression settings.
    dataId : `dict`
        Dataset identifier.

    Returns
    -------
    additionalData : `lsst.daf.base.PropertySet`
        Image compression settings.

    Raises
    ------
    RuntimeError
        If the mapping names a recipe that is not defined for its
        storage type.
    """
    mapping = self.mappings[datasetType]
    recipeName = mapping.recipe
    storageType = mapping.storage
    # Storage types without recipes get an empty PropertySet (defaults).
    if storageType not in self._writeRecipes:
        return dafBase.PropertySet()
    if recipeName not in self._writeRecipes[storageType]:
        raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                           (datasetType, storageType, recipeName))
    # Work on a copy so the cached recipe is not mutated.
    recipe = self._writeRecipes[storageType][recipeName].deepCopy()
    # Derive a per-dataset fuzzing seed from the dataId, kept in the
    # positive 32-bit range. A recipe seed of 0 means "choose for me".
    # NOTE(review): hash() of str keys is randomized per process
    # (PYTHONHASHSEED), so this seed is not reproducible across runs —
    # confirm that is acceptable.
    seed = hash(tuple(dataId.items())) % 2**31
    for plane in ("image", "mask", "variance"):
        if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
            recipe.set(plane + ".scaling.seed", seed)
    return recipe

1233 

def _initWriteRecipes(self):
    """Read the recipes for writing files

    These recipes are currently used for configuring FITS compression,
    but they could have wider uses for configuring different flavors
    of the storage types. A recipe is referred to by a symbolic name,
    which has associated settings. These settings are stored as a
    `PropertySet` so they can easily be passed down to the
    boost::persistence framework as the "additionalData" parameter.

    The list of recipes is written in YAML. A default recipe and
    some other convenient recipes are in obs_base/policy/writeRecipes.yaml
    and these may be overridden or supplemented by the individual obs_*
    packages' own policy/writeRecipes.yaml files.

    Recipes are grouped by the storage type. Currently, only the
    ``FitsStorage`` storage type uses recipes, which uses it to
    configure FITS image compression.

    Each ``FitsStorage`` recipe for FITS compression should define
    "image", "mask" and "variance" entries, each of which may contain
    "compression" and "scaling" entries. Defaults will be provided for
    any missing elements under "compression" and "scaling".

    The allowed entries under "compression" are:

    * algorithm (string): compression algorithm to use
    * rows (int): number of rows per tile (0 = entire dimension)
    * columns (int): number of columns per tile (0 = entire dimension)
    * quantizeLevel (float): cfitsio quantization level

    The allowed entries under "scaling" are:

    * algorithm (string): scaling algorithm to use
    * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
    * fuzz (bool): fuzz the values when quantising floating-point values?
    * seed (long): seed for random number generator when fuzzing
    * maskPlanes (list of string): mask planes to ignore when doing
      statistics
    * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
    * quantizePad: number of stdev to allow on the low side (for
      STDEV_POSITIVE/NEGATIVE)
    * bscale: manually specified BSCALE (for MANUAL scaling)
    * bzero: manually specified BSCALE (for MANUAL scaling)

    A very simple example YAML recipe:

    FitsStorage:
      default:
        image: &default
          compression:
            algorithm: GZIP_SHUFFLE
        mask: *default
        variance: *default
    """
    # Base recipes ship with obs_base; the concrete obs_* package may
    # supply a supplements file alongside its own policy.
    recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
    recipes = dafPersist.Policy(recipesFile)
    supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
    validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
    if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
        supplements = dafPersist.Policy(supplementsFile)
        # Don't allow overrides, only supplements
        for entry in validationMenu:
            intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
            if intersection:
                raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                   (supplementsFile, entry, recipesFile, intersection))
        recipes.update(supplements)

    # Validate each storage-type section and cache the result; every
    # section must provide a "default" recipe.
    self._writeRecipes = {}
    for storageType in recipes.names(True):
        if "default" not in recipes[storageType]:
            raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                               (storageType, recipesFile))
        self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])

1309 

1310 

def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Dataset identifier, used to compute the exposure id for the
        VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper providing ``_computeCcdExposureId`` and
        ``makeRawVisitInfo``; without it no VisitInfo can be attached.
    logger : `lsst.log.Log`, optional
        Logger for warnings; a default "CameraMapper" logger is created
        if needed.
    setVisitInfo : `bool`, optional
        Attempt to attach a VisitInfo built from the image metadata?

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    # Use the mapper's metadata translator, when available, so fix_header
    # can apply instrument-specific corrections.
    translatorClass = None
    if mapper is not None:
        translatorClass = mapper.translatorClass

    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        # Carry the DecoratedImage's (fixed-up) metadata over.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Already an Exposure; just fix its metadata in place.
        exposure = image
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
    else:  # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # set VisitInfo if we can
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure

1362 

1363 

def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the default
    # value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    # Flatten each recipe into a PropertySet keyed as
    # "<plane>.<settings>.<key>", filling in schema defaults for
    # anything not specified.
    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # Section absent entirely: take all defaults.
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce supplied values to the schema default's type.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated