# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import copy
import os
import re
import traceback
import weakref

from astro_metadata_translator import fix_header
from lsst.utils import doImport, getPackageDir
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from ._instrument import Instrument

__all__ = ["CameraMapper", "exposureFromImage"]


class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file and a filter description (Filter
    class static configuration) as another policy file.

    Information from the camera geometry and defects is inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass and optionally the
    metadata translator class:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    translatorClass: the `~astro_metadata_translator.MetadataTranslator`
    class to use for fixing metadata values. If it is not set, an attempt
    will be made to infer the class from ``MakeRawVisitInfoClass``; failing
    that, the metadata fixup will try to infer the translator class from the
    header itself.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making suitable for path expansion (e.g. removing
    commas). The default implementation returns a copy of its input,
    otherwise unchanged. Note that this method should not modify its input
    parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename. The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    .. todo::

        Instead of auto-loading the camera at construction time, load it from
        the calibration registry

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
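    Examples
    --------
    A minimal subclass sketch (hypothetical camera and package names, for
    illustration only; a real mapper also ships a policy file describing its
    mappings)::

        class MyCameraMapper(CameraMapper):
            packageName = "obs_mycamera"

            def _extractDetectorName(self, dataId):
                return "CCD%(ccd)02d" % dataId

            def _computeCcdExposureId(self, dataId):
                return dataId["visit"] * 100 + dataId["ccd"]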

    """
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # Class to use for metadata translations
    translatorClass = None

    # Gen3 instrument corresponding to this mapper
    # Can be a class or a string with the full name of the class
    _gen3instrument = None

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated
        # in the policy, use that. Otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        # Assign a metadata translator if one has not been defined by
        # subclass. We can sometimes infer one from the RawVisitInfo
        # class.
        if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
            self.translatorClass = self.makeRawVisitInfo.metadataTranslator

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets as conveniences, e.g., reading the
        header of an image, or retrieving only the size of a catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged.
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data.
        provided : `list` of `str`
            Keys provided by the mapper.
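        Examples
        --------
        For a policy that defines a ``raw`` dataset, this method attaches
        ``map_raw`` and ``query_raw``, plus derived accessors such as
        ``bypass_raw_filename`` and ``bypass_raw_md``. A hedged usage sketch
        through a gen2 butler (hypothetical dataId keys)::

            butler.get("raw_filename", visit=123, ccd=1)  # path(s) on disk
            butler.get("raw_md", visit=123, ccd=1)  # FITS header only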

        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

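                    # A hedged sketch of the kind of "composite" policy entry
                    # handled below (hypothetical dataset and component names,
                    # for illustration):
                    #
                    #   datasets:
                    #     fooComposite:
                    #       composite:
                    #         bar: {datasetType: fooBar}
                    #         baz: {datasetType: fooBaz, inputOnly: true}
                    #       assembler: lsst.obs.foo.assembleFoo
                    #       python: lsst.obs.foo.FooComposite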

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for
                        # this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw
                                FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)

                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog

                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

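                    # A hedged usage sketch for the "<datasetType>_sub"
                    # mapping set up below, via a gen2 butler (hypothetical
                    # dataset type and dataId keys):
                    #
                    #   bbox = afwGeom.Box2I(afwGeom.Point2I(0, 0),
                    #                        afwGeom.Extent2I(100, 100))
                    #   sub = butler.get("calexp_sub", visit=123, ccd=1,
                    #                    bbox=bbox)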

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog

                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case dataId must contain filter.
        """
        raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        # Calling PosixStorage directly is not the long term solution in this
        # function, this is work-in-progress on epic DM-6225. The plan is for
        # parentSearch to be changed to 'search', and search only the storage
        # associated with this mapper. All searching of parents will be
        # handled by traversing the container of repositories in Butler.

        def firstElement(seq):
            """Get the first element in the sequence, or None if that can't
            be done.
            """
            return seq[0] if seq is not None and len(seq) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))

    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier.
        """
        return iter(self.keyDict.keys())

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
        """

        # not sure if this is how we want to do this. what if None was
        # intended?
        if level == '':
            level = self.getDefaultLevel()

        if datasetType is None:
            keyDict = copy.copy(self.keyDict)
        else:
            keyDict = self.mappings[datasetType].keys()
        if level is not None and level in self.levels:
            keyDict = copy.copy(keyDict)
            for lev in self.levels[level]:
                if lev in keyDict:
                    del keyDict[lev]
        return keyDict

    def getDefaultLevel(self):
        return self.defaultLevel

    def getDefaultSubLevel(self, level):
        if level in self.defaultSubLevels:
            return self.defaultSubLevels[level]
        return None

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
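        # A hedged illustration (hypothetical subclass name): a subclass
        # named ``MegacamMapper`` would yield "megacam", since the regex
        # extracts the name preceding "Mapper" and the first letter is
        # lower-cased.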

        className = str(cls)
        className = className[className.find('.'):-1]
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ''

    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError('class variable packageName must not be None')
        return cls.packageName

    @classmethod
    def getGen3Instrument(cls):
        """Return the gen3 Instrument class equivalent for this gen2 Mapper.

        Returns
        -------
        instr : `type`
            A `~lsst.obs.base.Instrument` class.
        """
        if cls._gen3instrument is None:
            raise NotImplementedError("Please provide a specific implementation for your instrument"
                                      " to enable conversion of this gen2 repository to gen3")
        if isinstance(cls._gen3instrument, str):
            # Given a string to convert to an instrument class
            cls._gen3instrument = doImport(cls._gen3instrument)
        if not issubclass(cls._gen3instrument, Instrument):
            raise ValueError(f"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
                             " but that is not an lsst.obs.base.Instrument")
        return cls._gen3instrument

    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package."""
        return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        actualId = self._transformId(dataId)
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=actualId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object.
        """
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)

    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel

        bfKernel is a calibration product that is a numpy array, unlike the
        other calibration products, which are all images. All calibration
        images are sent through _standardizeExposure due to
        CalibrationMapping, but we don't want that to happen to bfKernel.
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image"""
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True)

    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy

###############################################################################
#
# Utility functions
#
###############################################################################

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : `str`
            Description of registry (for log messages).
        path : string
            Path for registry.
        policy : Policy
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder,
        # New Butler expects the registry to be in the repo folder. To support
        # Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix
        # registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:

        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)

        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """

        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path
        actualId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Pathname
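
        Examples
        --------
        A hedged illustration, assuming the default (identity) `_transformId`
        (hypothetical template and dataId)::

            >>> mapper._mapActualToPath("raw/v%(visit)07d_f%(filter)s.fits.gz",
            ...                         {"visit": 123, "filter": "g"})
            'raw/v0000123_fg.fits.gz'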

        """

        try:
            transformedId = self._transformId(actualId)
            return template % transformedId
        except Exception as e:
            # Report the untransformed ID: transformedId may be unbound here
            # if _transformId itself raised.
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, actualId, e))

    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename

        The default implementation converts spaces to underscores.
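
        For example::

            >>> CameraMapper.getShortCcdName("R:1,2 S:3,4")
            'R:1,2_S:3,4'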

        """
        return ccdName.replace(" ", "_")

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """

        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure. If the Exposure had a FILTER
        keyword, this was already processed during load. But if it didn't,
        use the filter from the registry.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """

        if not isinstance(item, (afwImage.ExposureU, afwImage.ExposureI,
                                 afwImage.ExposureF, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        try:
            item.setFilter(afwImage.Filter(filterName))
        except pexExcept.NotFoundError:
            self.log.warn("Filter %s not defined. Set to UNKNOWN.", filterName)

    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry and optionally sets
        the Filter. In both cases this saves having to persist some data in
        each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        dataId : `dict`
            Dataset identifier
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s", item, e)
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # We can only create a WCS if it doesn't already have one and
        # we have either a VisitInfo or exposure metadata.
        # Do not calculate a WCS if this is an amplifier exposure
        if (mapping.level.lower() != "amp" and exposure.getWcs() is None
                and (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict())):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # See DM-14372 for why this is debug and not warn (e.g. calib
            # files without wcs metadata).
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e.args[0])
        # ensure any WCS values stripped from the metadata are removed in the
        # exposure
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Always try to use the metadata first, to strip the WCS keys from it.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warn(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warn(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry.

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:

        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )

    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
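
        Examples
        --------
        A hedged usage sketch (hypothetical dataset type and dataId keys);
        the returned `~lsst.daf.base.PropertySet` carries entries such as
        ``image.compression.algorithm``::

            settings = mapper.getImageCompressionSettings(
                "calexp", {"visit": 123, "ccd": 1})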

        """
        mapping = self.mappings[datasetType]
        recipeName = mapping.recipe
        storageType = mapping.storage
        if storageType not in self._writeRecipes:
            return dafBase.PropertySet()
        if recipeName not in self._writeRecipes[storageType]:
            raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                               (datasetType, storageType, recipeName))
        recipe = self._writeRecipes[storageType][recipeName].deepCopy()
        # Derive a deterministic per-dataId seed, used wherever the recipe
        # requested one (scaling.seed == 0).
        seed = hash(tuple(dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
                recipe.set(plane + ".scaling.seed", seed)
        return recipe

    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, to configure FITS image
        compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])


def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set).

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Dataset identifier; used (with ``mapper``) to compute the exposure ID
        when setting the VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper used to fix the metadata and to create the VisitInfo.
    logger : `lsst.log.Log`, optional
        Logger to use; a default one is created if needed.
    setVisitInfo : `bool`, optional
        Attach a VisitInfo to the returned Exposure, if one can be made?

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
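
    Examples
    --------
    A minimal sketch (no mapper is supplied, so no VisitInfo is attached)::

        >>> import lsst.afw.image as afwImage
        >>> exposure = exposureFromImage(afwImage.ImageF(10, 10),
        ...                              setVisitInfo=False)
        >>> isinstance(exposure, afwImage.ExposureF)
        True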

    """
    translatorClass = None
    if mapper is not None:
        translatorClass = mapper.translatorClass

    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        exposure = image
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
    else:  # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # set VisitInfo if we can
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure


def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the
    # default value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated