# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import copy
import os
import re
import traceback
import weakref

from astro_metadata_translator import fix_header
from lsst.utils import doImport, getPackageDir
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from ._instrument import Instrument

__all__ = ["CameraMapper", "exposureFromImage"]


class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file and a filter description (Filter
    class static configuration) as another policy file.

    Information from the camera geometry and defects is inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass and optionally the
    metadata translator class:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    translatorClass: The `~astro_metadata_translator.MetadataTranslator`
    class to use for fixing metadata values. If it is not set, an attempt
    will be made to infer the class from ``MakeRawVisitInfoClass``; failing
    that, the metadata fixup will try to infer the translator class from the
    header itself.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making it suitable for path expansion
    (e.g. removing commas). The default implementation does nothing.
    Note that this method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename. The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    .. todo::

       Instead of auto-loading the camera at construction time, load it from
       the calibration registry

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
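
    Examples
    --------
    A minimal concrete subclass (all names below are illustrative, not
    taken from a real obs package) typically needs little more than the
    required class variables and abstract methods::

        class MyCamMapper(CameraMapper):
            packageName = "obs_mycam"

            def _extractDetectorName(self, dataId):
                return "CCD%(ccd)02d" % dataId

            def _computeCcdExposureId(self, dataId):
                return int(dataId["visit"]) * 100 + int(dataId["ccd"])

            def _computeCoaddExposureId(self, dataId, singleFilter):
                raise NotImplementedError()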

    """
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # Class to use for metadata translations
    translatorClass = None

    # Gen3 instrument corresponding to this mapper
    # Can be a class or a string with the full name of the class
    _gen3instrument = None

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated
        # in the policy, use that. Otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        # Assign a metadata translator if one has not been defined by
        # subclass. We can sometimes infer one from the RawVisitInfo
        # class.
        if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
            self.translatorClass = self.makeRawVisitInfo.metadataTranslator

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog; see the note below.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
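
        Notes
        -----
        For a policy dataset type ``calexp`` stored via ``FitsStorage``
        (an illustrative name; any dataset type in the policy behaves the
        same way), this creates not only ``map_calexp``, ``query_calexp``
        and ``std_calexp``, but also derived conveniences such as
        ``bypass_calexp_md`` (header only), ``bypass_calexp_filename``
        (path on disk) and, for exposures, ``bypass_calexp_wcs``.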

        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for
                        # this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
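                    # The closures below bind the current ``mapping`` through
                    # a default argument (a late-binding closure would see
                    # only the final loop iteration) and take ``self`` as a
                    # weakref.proxy to avoid a reference cycle with the
                    # mapper.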

                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw
                                FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            # TODO: deprecate in DM-27170, remove in DM-27177
                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            # TODO: deprecate in DM-27177, remove in DM-27811
                            def getFilterLabel(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilterLabel()

                            setMethods("filterLabel", bypassImpl=getFilterLabel)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)

                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog

                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog

                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
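
        Notes
        -----
        A typical override packs the integer components of the data
        identifier into disjoint ranges, for example (illustrative only,
        not the scheme of any particular camera)::

            def _computeCcdExposureId(self, dataId):
                return int(dataId["visit"]) * 256 + int(dataId["ccd"])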

        """
        raise NotImplementedError()

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in
            which case ``dataId`` must contain filter.
        """
        raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        # Calling PosixStorage directly is not the long term solution in this
        # function, this is work-in-progress on epic DM-6225. The plan is for
        # parentSearch to be changed to 'search', and search only the storage
        # associated with this mapper. All searching of parents will be
        # handled by traversing the container of repositories in Butler.

        def firstElement(list):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return list[0] if list is not None and len(list) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))

    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier
        """
        return iter(self.keyDict.keys())

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
        """

        # not sure if this is how we want to do this. what if None was
        # intended?
        if level == '':
            level = self.getDefaultLevel()

        if datasetType is None:
            keyDict = copy.copy(self.keyDict)
        else:
            keyDict = self.mappings[datasetType].keys()
        if level is not None and level in self.levels:
            keyDict = copy.copy(keyDict)
            for lev in self.levels[level]:
                if lev in keyDict:
                    del keyDict[lev]
        return keyDict

    def getDefaultLevel(self):
        return self.defaultLevel

    def getDefaultSubLevel(self, level):
        if level in self.defaultSubLevels:
            return self.defaultSubLevels[level]
        return None

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for.
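
        The name is derived from the class name, lower-camel-cased: a
        hypothetical subclass named ``MyCamMapper``, for example, would
        yield ``"myCam"``.
        """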

        className = str(cls)
        className = className[className.find('.'):-1]
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ''

    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError('class variable packageName must not be None')
        return cls.packageName

    @classmethod
    def getGen3Instrument(cls):
        """Return the gen3 Instrument class equivalent for this gen2 Mapper.

        Returns
        -------
        instr : `type`
            A `~lsst.obs.base.Instrument` class.
        """
        if cls._gen3instrument is None:
            raise NotImplementedError("Please provide a specific implementation for your instrument"
                                      " to enable conversion of this gen2 repository to gen3")
        if isinstance(cls._gen3instrument, str):
            # Given a string to convert to an instrument class
            cls._gen3instrument = doImport(cls._gen3instrument)
        if not issubclass(cls._gen3instrument, Instrument):
            raise ValueError(f"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
                             " but that is not an lsst.obs.base.Instrument")
        return cls._gen3instrument

    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package"""
        return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        actualId = self._transformId(dataId)
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=actualId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object.
        """
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)

    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel

        bfKernel is a calibration product that is a numpy array,
        unlike other calibration products that are all images;
        all calibration images are sent through _standardizeExposure
        due to CalibrationMapping, but we don't want that to happen to
        bfKernel
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image"""
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True)

    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy

###############################################################################
#
# Utility functions
#
###############################################################################

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : `str`
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder,
        # New Butler expects the registry to be in the repo folder. To support
        # Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix
        # registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """

        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path
        actualId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Pathname
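
        Examples
        --------
        With the default `_transformId` this is plain ``%`` interpolation
        (the template below is illustrative, not from a real policy):

        >>> mapper._mapActualToPath("raw/v%(visit)07d/c%(ccd)02d.fits",
        ...                         dict(visit=1, ccd=3))  # doctest: +SKIP
        'raw/v0000001/c03.fits'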

        """

        try:
            transformedId = self._transformId(actualId)
            return template % transformedId
        except Exception as e:
            # NB: report actualId here; transformedId may be unbound if
            # _transformId itself raised.
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, actualId, e))

    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename

        The default implementation converts spaces to underscores.
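
        Examples
        --------
        >>> CameraMapper.getShortCcdName("R:1,2 S:3,4")
        'R:1,2_S:3,4'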

        """
        return ccdName.replace(" ", "_")

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """

        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure. If the Exposure had a FILTER
        keyword, this was already processed during load. But if it didn't,
        use the filter from the registry.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """

        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        try:
            item.setFilter(afwImage.Filter(filterName))
        except pexExcept.NotFoundError:
            self.log.warn("Filter %s not defined. Set to UNKNOWN.", filterName)

    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        dataId : `dict`
            Dataset identifier
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s", item, e)
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # We can only create a WCS if it doesn't already have one and
        # we have either a VisitInfo or exposure metadata.
        # Do not calculate a WCS if this is an amplifier exposure
        if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
                (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # See DM-14372 for why this is debug and not warn (e.g. calib
            # files without wcs metadata).
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e.args[0])
        # ensure any WCS values stripped from the metadata are removed in the
        # exposure
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Always try to use the metadata first, to strip WCS keys from it.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warn(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warn(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )

    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
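
        Notes
        -----
        The returned `~lsst.daf.base.PropertySet` uses dotted keys of the
        form ``<plane>.<settings>.<key>``, e.g.
        ``image.compression.algorithm`` or ``mask.scaling.bitpix`` (see
        `validateRecipeFitsStorage`). A scaling seed of zero is replaced
        by a hash of the dataId, so fuzzing is deterministic per dataset.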

        """
        mapping = self.mappings[datasetType]
        recipeName = mapping.recipe
        storageType = mapping.storage
        if storageType not in self._writeRecipes:
            return dafBase.PropertySet()
        if recipeName not in self._writeRecipes[storageType]:
            raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                               (datasetType, storageType, recipeName))
        recipe = self._writeRecipes[storageType][recipeName].deepCopy()
        seed = hash(tuple(dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
                recipe.set(plane + ".scaling.seed", seed)
        return recipe

    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes; it uses them to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe::

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])


def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Data identifier; used only to compute the exposure ID when setting
        the VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper used to compute the exposure ID and make the VisitInfo.
    logger : `lsst.log.Log`, optional
        Logger for warnings; a default one is created if needed.
    setVisitInfo : `bool`, optional
        Attach a VisitInfo to the returned Exposure, if possible?

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
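
    Examples
    --------
    A bare `~lsst.afw.image.Image` is wrapped in a MaskedImage and then an
    Exposure (no VisitInfo can be set without a mapper):

    >>> import lsst.afw.image as afwImage
    >>> exp = exposureFromImage(afwImage.ImageF(10, 10))
    >>> isinstance(exp, afwImage.ExposureF)
    True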

    """
    translatorClass = None
    if mapper is not None:
        translatorClass = mapper.translatorClass

    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        exposure = image
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
    else:  # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # set VisitInfo if we can
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure


def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the
    # default value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognized keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated