Coverage for python/lsst/obs/base/cameraMapper.py: 9%

605 statements  

coverage.py v6.4.1, created at 2022-06-05 02:41 -0700

1# This file is part of obs_base. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22import copy 

23import os 

24import re 

25import traceback 

26import warnings 

27import weakref 

28 

29import lsst.afw.cameraGeom as afwCameraGeom 

30import lsst.afw.geom as afwGeom 

31import lsst.afw.image as afwImage 

32import lsst.daf.base as dafBase 

33import lsst.daf.persistence as dafPersist 

34import lsst.log as lsstLog 

35import lsst.pex.exceptions as pexExcept 

36from astro_metadata_translator import fix_header 

37from deprecated.sphinx import deprecated 

38from lsst.afw.fits import readMetadata 

39from lsst.afw.table import Schema 

40from lsst.utils import doImportType, getPackageDir 

41 

42from ._instrument import Instrument 

43from .exposureIdInfo import ExposureIdInfo 

44from .makeRawVisitInfo import MakeRawVisitInfo 

45from .mapping import CalibrationMapping, DatasetMapping, ExposureMapping, ImageMapping 

46from .utils import InitialSkyWcsError, createInitialSkyWcs 

47 

48__all__ = ["CameraMapper", "exposureFromImage"] 

49 

50 

51class CameraMapper(dafPersist.Mapper): 

52 

53 """CameraMapper is a base class for mappers that handle images from a 

54 camera and products derived from them. This provides an abstraction layer 

55 between the data on disk and the code. 

56 

57 Public methods: keys, queryMetadata, getDatasetTypes, map, 

58 canStandardize, standardize 

59 

60 Mappers for specific data sources (e.g., CFHT Megacam, LSST 

61 simulations, etc.) should inherit this class. 

62 

63 The CameraMapper manages datasets within a "root" directory. Note that 

64 writing to a dataset present in the input root will hide the existing 

65 dataset but not overwrite it. See #2160 for design discussion. 

66 

67 A camera is assumed to consist of one or more rafts, each composed of

68 multiple CCDs. Each CCD is in turn composed of one or more amplifiers

69 (amps). A camera is also assumed to have a camera geometry description

70 (CameraGeom object) as a policy file and a filter description (Filter

71 class static configuration) as another policy file.

72

73 Information from the camera geometry and defects is inserted into all

74 Exposure objects returned.

75 

76 The mapper uses one or two registries to retrieve metadata about the 

77 images. The first is a registry of all raw exposures. This must contain 

78 the time of the observation. One or more tables (or the equivalent) 

79 within the registry are used to look up data identifier components that 

80 are not specified by the user (e.g. filter) and to return results for 

81 metadata queries. The second is an optional registry of all calibration 

82 data. This should contain validity start and end entries for each 

83 calibration dataset in the same timescale as the observation time. 

84 

85 Subclasses will typically set MakeRawVisitInfoClass and optionally the 

86 metadata translator class: 

87 

88 MakeRawVisitInfoClass: a class variable that points to a subclass of 

89 MakeRawVisitInfo, a functor that creates an 

90 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 

91 

92 translatorClass: The `~astro_metadata_translator.MetadataTranslator`

93 class to use for fixing metadata values. If it is not set, an attempt

94 will be made to infer the class from ``MakeRawVisitInfoClass``; failing

95 that, the metadata fixup will try to infer the translator class from

96 the header itself.

97 

98 Subclasses must provide the following methods: 

99 

100 _extractDetectorName(self, dataId): returns the detector name for a CCD 

101 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 

102 a dataset identifier referring to that CCD or a subcomponent of it. 

103 

104 _computeCcdExposureId(self, dataId): see below 

105 

106 _computeCoaddExposureId(self, dataId, singleFilter): see below 

107 

108 Subclasses may also need to override the following methods: 

109 

110 _transformId(self, dataId): transformation of a data identifier 

111 from colloquial usage (e.g., "ccdname") to proper/actual usage 

112 (e.g., "ccd"), including making suitable for path expansion (e.g. removing 

113 commas). The default implementation does nothing. Note that this 

114 method should not modify its input parameter. 

115 

116 getShortCcdName(self, ccdName): a static method that returns a shortened 

117 name suitable for use as a filename. The default version converts spaces 

118 to underscores. 

119 

120 _mapActualToPath(self, template, actualId): convert a template path to an 

121 actual path, using the actual dataset identifier. 

122 

123 The mapper's behaviors are largely specified by the policy file. 

124 See the MapperDictionary.paf for descriptions of the available items. 

125 

126 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 

127 mappings (see Mappings class). 

128 

129 Common default mappings for all subclasses can be specified in the 

130 "policy/{images,exposures,calibrations,datasets}.yaml" files. This 

131 provides a simple way to add a product to all camera mappers. 

132 
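A hedged sketch of such a centrally-declared entry (the dataset name and
template are illustrative; the key names follow the existing obs_base
policy files):

    datasets:
      myCatalog:
        persistable: SourceCatalog
        python: lsst.afw.table.SourceCatalog
        storage: FitsCatalogStorage
        template: results/myCatalog-%(visit)d.fits
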

133 Functions to map (provide a path to the data given a dataset 

134 identifier dictionary) and standardize (convert data into some standard 

135 format or type) may be provided in the subclass as "map_{dataset type}" 

136 and "std_{dataset type}", respectively. 

137 

138 If non-Exposure datasets cannot be retrieved using standard 

139 daf_persistence methods alone, a "bypass_{dataset type}" function may be 

140 provided in the subclass to return the dataset instead of using the 

141 "datasets" subpolicy. 

142 

143 Implementations of map_camera and bypass_camera that should typically be 

144 sufficient are provided in this base class. 

145 
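A hedged sketch of these per-dataset hooks in a subclass (the ``fooBar``
dataset type and package name are illustrative, not part of this
package):

    class MyMapper(CameraMapper):
        packageName = "obs_mycam"  # hypothetical package

        def map_fooBar(self, dataId, write=False):
            # delegate to the mapping built from the policy entry
            return self.mappings["fooBar"].map(self, dataId, write)

        def std_fooBar(self, item, dataId):
            return item  # no standardization needed
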

146 Notes 

147 ----- 

148 .. todo:: 

149 

150 Instead of auto-loading the camera at construction time, load it from 

151 the calibration registry 

152 

153 Parameters 

154 ---------- 

155 policy : daf_persistence.Policy 

156 Policy with per-camera defaults already merged. 

157 repositoryDir : string 

158 Policy repository for the subclassing module (obtained with 

159 getRepositoryPath() on the per-camera default dictionary). 

160 root : string, optional 

161 Path to the root directory for data. 

162 registry : string, optional 

163 Path to registry with data's metadata. 

164 calibRoot : string, optional 

165 Root directory for calibrations. 

166 calibRegistry : string, optional 

167 Path to registry with calibrations' metadata. 

168 provided : list of string, optional 

169 Keys provided by the mapper. 

170 parentRegistry : Registry subclass, optional 

171 Registry from a parent repository that may be used to look up 

172 data's metadata. 

173 repositoryCfg : daf_persistence.RepositoryCfg or None, optional 

174 The configuration information for the repository this mapper is 

175 being used with. 

176 """ 

177 

178 packageName = None 

179 

180 # a class or subclass of MakeRawVisitInfo, a functor that makes an 

181 # lsst.afw.image.VisitInfo from the FITS metadata of a raw image 

182 MakeRawVisitInfoClass = MakeRawVisitInfo 

183 

184 # a class or subclass of PupilFactory 

185 PupilFactoryClass = afwCameraGeom.PupilFactory 

186 

187 # Class to use for metadata translations 

188 translatorClass = None 

189 

190 # Gen3 instrument corresponding to this mapper 

191 # Can be a class or a string with the full name of the class 

192 _gen3instrument = None 

193 

194 def __init__( 

195 self, 

196 policy, 

197 repositoryDir, 

198 root=None, 

199 registry=None, 

200 calibRoot=None, 

201 calibRegistry=None, 

202 provided=None, 

203 parentRegistry=None, 

204 repositoryCfg=None, 

205 ): 

206 

207 dafPersist.Mapper.__init__(self) 

208 

209 self.log = lsstLog.Log.getLogger("lsst.CameraMapper") 

210 

211 if root: 

212 self.root = root 

213 elif repositoryCfg: 

214 self.root = repositoryCfg.root 

215 else: 

216 self.root = None 

217 

218 repoPolicy = repositoryCfg.policy if repositoryCfg else None 

219 if repoPolicy is not None: 

220 policy.update(repoPolicy) 

221 

222 # Levels 

223 self.levels = dict() 

224 if "levels" in policy: 

225 levelsPolicy = policy["levels"] 

226 for key in levelsPolicy.names(True): 

227 self.levels[key] = set(levelsPolicy.asArray(key)) 

228 self.defaultLevel = policy["defaultLevel"] 

229 self.defaultSubLevels = dict() 

230 if "defaultSubLevels" in policy: 

231 self.defaultSubLevels = policy["defaultSubLevels"] 

232 

233 # Root directories 

234 if root is None: 

235 root = "." 

236 root = dafPersist.LogicalLocation(root).locString() 

237 

238 self.rootStorage = dafPersist.Storage.makeFromURI(uri=root) 

239 

240 # If the calibRoot is passed in, use that. If not, and it's indicated in

241 # the policy, use that. Otherwise, the calibs are in the regular

242 # root.

243 # If the location indicated by the calib root does not exist, do not 

244 # create it. 

245 calibStorage = None 

246 if calibRoot is not None: 

247 calibRoot = dafPersist.Storage.absolutePath(root, calibRoot) 

248 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot, create=False) 

249 else: 

250 calibRoot = policy.get("calibRoot", None) 

251 if calibRoot: 

252 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot, create=False) 

253 if calibStorage is None: 

254 calibStorage = self.rootStorage 

255 

256 self.root = root 

257 

258 # Registries 

259 self.registry = self._setupRegistry( 

260 "registry", 

261 "exposure", 

262 registry, 

263 policy, 

264 "registryPath", 

265 self.rootStorage, 

266 searchParents=False, 

267 posixIfNoSql=(not parentRegistry), 

268 ) 

269 if not self.registry: 

270 self.registry = parentRegistry 

271 needCalibRegistry = policy.get("needCalibRegistry", None) 

272 if needCalibRegistry: 

273 if calibStorage: 

274 self.calibRegistry = self._setupRegistry( 

275 "calibRegistry", 

276 "calib", 

277 calibRegistry, 

278 policy, 

279 "calibRegistryPath", 

280 calibStorage, 

281 posixIfNoSql=False, 

282 ) # NB never use posix for calibs 

283 else: 

284 raise RuntimeError( 

285 "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " 

286 f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}" 

287 ) 

288 else: 

289 self.calibRegistry = None 

290 

291 # Dict of valid keys and their value types 

292 self.keyDict = dict() 

293 

294 self._initMappings(policy, self.rootStorage, calibStorage, provided=None) 

295 self._initWriteRecipes() 

296 

297 # Camera geometry 

298 self.cameraDataLocation = None # path to camera geometry config file 

299 self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir) 

300 

301 # Filter translation table 

302 self.filters = None 

303 

304 # verify that the class variable packageName is set before attempting 

305 # to instantiate an instance 

306 if self.packageName is None: 

307 raise ValueError("class variable packageName must not be None") 

308 

309 self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log) 

310 

311 # Assign a metadata translator if one has not been defined by 

312 # subclass. We can sometimes infer one from the RawVisitInfo 

313 # class. 

314 if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"): 

315 self.translatorClass = self.makeRawVisitInfo.metadataTranslator 

316 

317 def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None): 

318 """Initialize mappings 

319 

320 For each of the dataset types that we want to be able to read, there 

321 are methods that can be created to support them: 

322 * map_<dataset> : determine the path for dataset 

323 * std_<dataset> : standardize the retrieved dataset 

324 * bypass_<dataset> : retrieve the dataset (bypassing the usual 

325 retrieval machinery) 

326 * query_<dataset> : query the registry 

327 

328 Besides the dataset types explicitly listed in the policy, we create 

329 additional, derived datasets for additional conveniences, 

330 e.g., reading the header of an image, retrieving only the size of a 

331 catalog. 

332 

333 Parameters 

334 ---------- 

335 policy : `lsst.daf.persistence.Policy` 

336 Policy with per-camera defaults already merged 

337 rootStorage : `Storage subclass instance` 

338 Interface to persisted repository data. 

339 calibStorage : `Storage subclass instance`

340 Interface to persisted calib repository data.

341 provided : `list` of `str` 

342 Keys provided by the mapper 
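
Examples
--------
A hedged sketch of the derived accessors this method creates, assuming a
gen2 butler and a policy-declared ``calexp`` dataset (dataset and data ID
names are illustrative):

    md = butler.get("calexp_md", dataId)           # header metadata only
    paths = butler.get("calexp_filename", dataId)  # resolved path(s)
    bbox = butler.get("calexp_bbox", dataId)       # bounding box only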

343 """ 

344 # Sub-dictionaries (for exposure/calibration/dataset types) 

345 imgMappingPolicy = dafPersist.Policy( 

346 dafPersist.Policy.defaultPolicyFile("obs_base", "ImageMappingDefaults.yaml", "policy") 

347 ) 

348 expMappingPolicy = dafPersist.Policy( 

349 dafPersist.Policy.defaultPolicyFile("obs_base", "ExposureMappingDefaults.yaml", "policy") 

350 ) 

351 calMappingPolicy = dafPersist.Policy( 

352 dafPersist.Policy.defaultPolicyFile("obs_base", "CalibrationMappingDefaults.yaml", "policy") 

353 ) 

354 dsMappingPolicy = dafPersist.Policy() 

355 

356 # Mappings 

357 mappingList = ( 

358 ("images", imgMappingPolicy, ImageMapping), 

359 ("exposures", expMappingPolicy, ExposureMapping), 

360 ("calibrations", calMappingPolicy, CalibrationMapping), 

361 ("datasets", dsMappingPolicy, DatasetMapping), 

362 ) 

363 self.mappings = dict() 

364 for name, defPolicy, cls in mappingList: 

365 if name in policy: 

366 datasets = policy[name] 

367 

368 # Centrally-defined datasets 

369 defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml") 

370 if os.path.exists(defaultsPath): 

371 datasets.merge(dafPersist.Policy(defaultsPath)) 

372 

373 mappings = dict() 

374 setattr(self, name, mappings) 

375 for datasetType in datasets.names(True): 

376 subPolicy = datasets[datasetType] 

377 subPolicy.merge(defPolicy) 

378 

379 if not hasattr(self, "map_" + datasetType) and "composite" in subPolicy: 

380 

381 def compositeClosure( 

382 dataId, write=False, mapper=None, mapping=None, subPolicy=subPolicy 

383 ): 

384 components = subPolicy.get("composite") 

385 assembler = subPolicy["assembler"] if "assembler" in subPolicy else None 

386 disassembler = subPolicy["disassembler"] if "disassembler" in subPolicy else None 

387 python = subPolicy["python"] 

388 butlerComposite = dafPersist.ButlerComposite( 

389 assembler=assembler, 

390 disassembler=disassembler, 

391 python=python, 

392 dataId=dataId, 

393 mapper=self, 

394 ) 

395 for name, component in components.items(): 

396 butlerComposite.add( 

397 id=name, 

398 datasetType=component.get("datasetType"), 

399 setter=component.get("setter", None), 

400 getter=component.get("getter", None), 

401 subset=component.get("subset", False), 

402 inputOnly=component.get("inputOnly", False), 

403 ) 

404 return butlerComposite 

405 

406 setattr(self, "map_" + datasetType, compositeClosure) 

407 # for now at least, don't set up any other handling for 

408 # this dataset type. 

409 continue 

410 

411 if name == "calibrations": 

412 mapping = cls( 

413 datasetType, 

414 subPolicy, 

415 self.registry, 

416 self.calibRegistry, 

417 calibStorage, 

418 provided=provided, 

419 dataRoot=rootStorage, 

420 ) 

421 else: 

422 mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided) 

423 

424 if datasetType in self.mappings: 

425 raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}") 

426 self.keyDict.update(mapping.keys()) 

427 mappings[datasetType] = mapping 

428 self.mappings[datasetType] = mapping 

429 if not hasattr(self, "map_" + datasetType): 

430 

431 def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping): 

432 return mapping.map(mapper, dataId, write) 

433 

434 setattr(self, "map_" + datasetType, mapClosure) 

435 if not hasattr(self, "query_" + datasetType): 

436 

437 def queryClosure(format, dataId, mapping=mapping): 

438 return mapping.lookup(format, dataId) 

439 

440 setattr(self, "query_" + datasetType, queryClosure) 

441 if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType): 

442 

443 def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping): 

444 return mapping.standardize(mapper, item, dataId) 

445 

446 setattr(self, "std_" + datasetType, stdClosure) 

447 

448 def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None): 

449 """Set convenience methods on CameraMapper""" 

450 mapName = "map_" + datasetType + "_" + suffix 

451 bypassName = "bypass_" + datasetType + "_" + suffix 

452 queryName = "query_" + datasetType + "_" + suffix 

453 if not hasattr(self, mapName): 

454 setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType)) 

455 if not hasattr(self, bypassName): 

456 if bypassImpl is None and hasattr(self, "bypass_" + datasetType): 

457 bypassImpl = getattr(self, "bypass_" + datasetType) 

458 if bypassImpl is not None: 

459 setattr(self, bypassName, bypassImpl) 

460 if not hasattr(self, queryName): 

461 setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType)) 

462 

463 # Filename of dataset 

464 setMethods( 

465 "filename", 

466 bypassImpl=lambda datasetType, pythonType, location, dataId: [ 

467 os.path.join(location.getStorage().root, p) for p in location.getLocations() 

468 ], 

469 ) 

470 # Metadata from FITS file 

471 if subPolicy["storage"] == "FitsStorage": # a FITS image 

472 

473 def getMetadata(datasetType, pythonType, location, dataId): 

474 md = readMetadata(location.getLocationsWithRoot()[0]) 

475 fix_header(md, translator_class=self.translatorClass) 

476 return md 

477 

478 setMethods("md", bypassImpl=getMetadata) 

479 

480 # Add support for configuring FITS compression 

481 addName = "add_" + datasetType 

482 if not hasattr(self, addName): 

483 setattr(self, addName, self.getImageCompressionSettings) 

484 

485 if name == "exposures": 

486 

487 def getSkyWcs(datasetType, pythonType, location, dataId): 

488 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0]) 

489 return fitsReader.readWcs() 

490 

491 setMethods("wcs", bypassImpl=getSkyWcs) 

492 

493 def getRawHeaderWcs(datasetType, pythonType, location, dataId): 

494 """Create a SkyWcs from the un-modified raw 

495 FITS WCS header keys.""" 

496 if datasetType[:3] != "raw": 

497 raise dafPersist.NoResults( 

498 "Can only get header WCS for raw exposures.", datasetType, dataId 

499 ) 

500 return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])) 

501 

502 setMethods("header_wcs", bypassImpl=getRawHeaderWcs) 

503 

504 def getPhotoCalib(datasetType, pythonType, location, dataId): 

505 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0]) 

506 return fitsReader.readPhotoCalib() 

507 

508 setMethods("photoCalib", bypassImpl=getPhotoCalib) 

509 

510 def getVisitInfo(datasetType, pythonType, location, dataId): 

511 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0]) 

512 return fitsReader.readVisitInfo() 

513 

514 setMethods("visitInfo", bypassImpl=getVisitInfo) 

515 

516 # TODO: remove in DM-27177 

517 @deprecated( 

518 reason="Replaced with getFilterLabel. Will be removed after v22.", 

519 category=FutureWarning, 

520 version="v22", 

521 ) 

522 def getFilter(datasetType, pythonType, location, dataId): 

523 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0]) 

524 return fitsReader.readFilter() 

525 

526 setMethods("filter", bypassImpl=getFilter) 

527 

528 # TODO: deprecate in DM-27177, remove in DM-27811 

529 def getFilterLabel(datasetType, pythonType, location, dataId): 

530 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0]) 

531 storedFilter = fitsReader.readFilterLabel() 

532 

533 # Apply standardization used by full Exposure 

534 try: 

535 # mapping is local to enclosing scope 

536 idFilter = mapping.need(["filter"], dataId)["filter"] 

537 except dafPersist.NoResults: 

538 idFilter = None 

539 bestFilter = self._getBestFilter(storedFilter, idFilter) 

540 if bestFilter is not None: 

541 return bestFilter 

542 else: 

543 return storedFilter 

544 

545 setMethods("filterLabel", bypassImpl=getFilterLabel) 

546 

547 setMethods( 

548 "detector", 

549 mapImpl=lambda dataId, write=False: dafPersist.ButlerLocation( 

550 pythonType="lsst.afw.cameraGeom.CameraConfig", 

551 cppType="Config", 

552 storageName="Internal", 

553 locationList="ignored", 

554 dataId=dataId, 

555 mapper=self, 

556 storage=None, 

557 ), 

558 bypassImpl=lambda datasetType, pythonType, location, dataId: self.camera[ 

559 self._extractDetectorName(dataId) 

560 ], 

561 ) 

562 

563 def getBBox(datasetType, pythonType, location, dataId): 

564 md = readMetadata(location.getLocationsWithRoot()[0], hdu=1) 

565 fix_header(md, translator_class=self.translatorClass) 

566 return afwImage.bboxFromMetadata(md) 

567 

568 setMethods("bbox", bypassImpl=getBBox) 

569 

570 elif name == "images": 

571 

572 def getBBox(datasetType, pythonType, location, dataId): 

573 md = readMetadata(location.getLocationsWithRoot()[0]) 

574 fix_header(md, translator_class=self.translatorClass) 

575 return afwImage.bboxFromMetadata(md) 

576 

577 setMethods("bbox", bypassImpl=getBBox) 

578 

579 if subPolicy["storage"] == "FitsCatalogStorage": # a FITS catalog 

580 

581 def getMetadata(datasetType, pythonType, location, dataId): 

582 md = readMetadata( 

583 os.path.join(location.getStorage().root, location.getLocations()[0]), hdu=1 

584 ) 

585 fix_header(md, translator_class=self.translatorClass) 

586 return md 

587 

588 setMethods("md", bypassImpl=getMetadata) 

589 

590 # Sub-images 

591 if subPolicy["storage"] == "FitsStorage": 

592 

593 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping): 

594 subId = dataId.copy() 

595 del subId["bbox"] 

596 loc = mapping.map(mapper, subId, write) 

597 bbox = dataId["bbox"] 

598 llcX = bbox.getMinX() 

599 llcY = bbox.getMinY() 

600 width = bbox.getWidth() 

601 height = bbox.getHeight() 

602 loc.additionalData.set("llcX", llcX) 

603 loc.additionalData.set("llcY", llcY) 

604 loc.additionalData.set("width", width) 

605 loc.additionalData.set("height", height) 

606 if "imageOrigin" in dataId: 

607 loc.additionalData.set("imageOrigin", dataId["imageOrigin"]) 

608 return loc 

609 

610 def querySubClosure(key, format, dataId, mapping=mapping): 

611 subId = dataId.copy() 

612 del subId["bbox"] 

613 return mapping.lookup(format, subId) 

614 

615 setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure) 

616 

617 if subPolicy["storage"] == "FitsCatalogStorage": 

618 # Length of catalog 

619 

620 def getLen(datasetType, pythonType, location, dataId): 

621 md = readMetadata( 

622 os.path.join(location.getStorage().root, location.getLocations()[0]), hdu=1 

623 ) 

624 fix_header(md, translator_class=self.translatorClass) 

625 return md["NAXIS2"] 

626 

627 setMethods("len", bypassImpl=getLen) 

628 

629 # Schema of catalog 

630 if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets: 

631 setMethods( 

632 "schema", 

633 bypassImpl=lambda datasetType, pythonType, location, dataId: Schema.readFits( 

634 os.path.join(location.getStorage().root, location.getLocations()[0]) 

635 ), 

636 ) 

637 

638 def _computeCcdExposureId(self, dataId): 

639 """Compute the 64-bit (long) identifier for a CCD exposure. 

640 

641 Subclasses must override 

642 

643 Parameters 

644 ---------- 

645 dataId : `dict` 

646 Data identifier with visit, ccd. 

647 """ 

648 raise NotImplementedError() 

649 

650 def _computeCoaddExposureId(self, dataId, singleFilter): 

651 """Compute the 64-bit (long) identifier for a coadd. 

652 

653 Subclasses must override 

654 

655 Parameters 

656 ---------- 

657 dataId : `dict` 

658 Data identifier with tract and patch. 

659 singleFilter : `bool` 

660 True means the desired ID is for a single-filter coadd, in which 

661 case dataId must contain filter. 

662 """ 

663 raise NotImplementedError() 

664 

665 def _search(self, path): 

666 """Search for path in the associated repository's storage. 

667 

668 Parameters 

669 ---------- 

670 path : string 

671 Path that describes an object in the repository associated with 

672 this mapper. 

673 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 

674 indicator will be stripped when searching and so will match 

675 filenames without the HDU indicator, e.g. 'foo.fits'. The path 

676 returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 

677 

678 Returns 

679 ------- 

680 string 

681 The path for this object in the repository. Will return None if the 

682 object can't be found. If the input argument path contained an HDU 

683 indicator, the returned path will also contain the HDU indicator. 

684 """ 

685 return self.rootStorage.search(path) 

686 

687 def backup(self, datasetType, dataId): 

688 """Rename any existing object with the given type and dataId. 

689 

690 The CameraMapper implementation saves objects in a sequence of e.g.: 

691 

692 - foo.fits 

693 - foo.fits~1 

694 - foo.fits~2 

695 

696 All of the backups will be placed in the output repo, however, and will 

697 not be removed if they are found elsewhere in the _parent chain. This 

698 means that the same file will be stored twice if the previous version 

699 was found in an input repo. 

700 """ 

701 

702 # Calling PosixStorage directly is not the long term solution in this 

703 # function, this is work-in-progress on epic DM-6225. The plan is for 

704 # parentSearch to be changed to 'search', and search only the storage 

705 # associated with this mapper. All searching of parents will be handled 

706 # by traversing the container of repositories in Butler. 

707 

708 def firstElement(seq):

709 """Get the first element in the sequence, or None if that can't be

710 done.

711 """

712 return seq[0] if seq is not None and len(seq) else None

713 

714 n = 0 

715 newLocation = self.map(datasetType, dataId, write=True) 

716 newPath = newLocation.getLocations()[0] 

717 path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True) 

718 path = firstElement(path) 

719 oldPaths = [] 

720 while path is not None: 

721 n += 1 

722 oldPaths.append((n, path)) 

723 path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True) 

724 path = firstElement(path) 

725 for n, oldPath in reversed(oldPaths): 

726 self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n)) 
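
# A hedged usage sketch: calling backup() before re-writing a dataset keeps
# the prior version in the output repo (dataset name is illustrative):
#
#     mapper.backup("calexp", dataId)
#     # a subsequent write produces e.g. calexp.fits, while the previous
#     # file survives alongside it as calexp.fits~1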

727 

728 def keys(self): 

729 """Return supported keys. 

730 

731 Returns 

732 ------- 

733 iterable 

734 List of keys usable in a dataset identifier 

735 """ 

736 return iter(self.keyDict.keys()) 

737 

738 def getKeys(self, datasetType, level): 

739 """Return a dict of supported keys and their value types for a given 

740 dataset type at a given level of the key hierarchy. 

741 

742 Parameters 

743 ---------- 

744 datasetType : `str` 

745 Dataset type or None for all dataset types. 

746 level : `str` or None 

747 Level or None for all levels or '' for the default level for the 

748 camera. 

749 

750 Returns 

751 ------- 

752 `dict` 

753 Keys are strings usable in a dataset identifier, values are their 

754 value types. 

755 """ 

756 

757 # not sure if this is how we want to do this. what if None was 

758 # intended? 

759 if level == "": 

760 level = self.getDefaultLevel() 

761 

762 if datasetType is None: 

763 keyDict = copy.copy(self.keyDict) 

764 else: 

765 keyDict = self.mappings[datasetType].keys() 

766 if level is not None and level in self.levels: 

767 keyDict = copy.copy(keyDict) 

768 for lev in self.levels[level]: 

769 if lev in keyDict: 

770 del keyDict[lev] 

771 return keyDict 

772 

773 def getDefaultLevel(self): 

774 return self.defaultLevel 

775 

776 def getDefaultSubLevel(self, level): 

777 if level in self.defaultSubLevels: 

778 return self.defaultSubLevels[level] 

779 return None 

780 

781 @classmethod 

782 def getCameraName(cls): 

783 """Return the name of the camera that this CameraMapper is for.""" 

784 className = str(cls) 

785 className = className[className.find(".") : -1] 

786 m = re.search(r"(\w+)Mapper", className) 

787 if m is None: 

788 m = re.search(r"class '[\w.]*?(\w+)'", className) 

789 name = m.group(1) 

790 return name[:1].lower() + name[1:] if name else "" 
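# For example, a hypothetical subclass named ``FooMapper`` yields "foo":
# the regex drops the "Mapper" suffix and the first letter is lowercased.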

791 

792 @classmethod 

793 def getPackageName(cls): 

794 """Return the name of the package containing this CameraMapper.""" 

795 if cls.packageName is None: 

796 raise ValueError("class variable packageName must not be None") 

797 return cls.packageName 

798 

799 @classmethod 

800 def getGen3Instrument(cls): 

801 """Return the gen3 Instrument class equivalent for this gen2 Mapper. 

802 

803 Returns 

804 ------- 

805 instr : `type` 

806 A `~lsst.obs.base.Instrument` class. 

807 """ 

808 if cls._gen3instrument is None: 

809 raise NotImplementedError( 

810 "Please provide a specific implementation for your instrument" 

811 " to enable conversion of this gen2 repository to gen3" 

812 ) 

813 if isinstance(cls._gen3instrument, str): 

814 # Given a string to convert to an instrument class 

815 cls._gen3instrument = doImportType(cls._gen3instrument) 

816 if not issubclass(cls._gen3instrument, Instrument): 

817 raise ValueError( 

818 f"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}" 

819 " but that is not an lsst.obs.base.Instrument" 

820 ) 

821 return cls._gen3instrument 

822 

823 @classmethod 

824 def getPackageDir(cls): 

825 """Return the base directory of this package""" 

826 return getPackageDir(cls.getPackageName()) 

827 

828 def map_camera(self, dataId, write=False): 

829 """Map a camera dataset.""" 

830 if self.camera is None: 

831 raise RuntimeError("No camera dataset available.") 

832 actualId = self._transformId(dataId) 

833 return dafPersist.ButlerLocation( 

834 pythonType="lsst.afw.cameraGeom.CameraConfig", 

835 cppType="Config", 

836 storageName="ConfigStorage", 

837 locationList=self.cameraDataLocation or "ignored", 

838 dataId=actualId, 

839 mapper=self, 

840 storage=self.rootStorage, 

841 ) 

842 

843 def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId): 

844 """Return the (preloaded) camera object.""" 

845 if self.camera is None: 

846 raise RuntimeError("No camera dataset available.") 

847 return self.camera 

848 

849 def map_expIdInfo(self, dataId, write=False): 

850 return dafPersist.ButlerLocation( 

851 pythonType="lsst.obs.base.ExposureIdInfo", 

852 cppType=None, 

853 storageName="Internal", 

854 locationList="ignored", 

855 dataId=dataId, 

856 mapper=self, 

857 storage=self.rootStorage, 

858 ) 

859 

860 def bypass_expIdInfo(self, datasetType, pythonType, location, dataId): 

861 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 

862 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId) 

863 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId) 

864 return ExposureIdInfo(expId=expId, expBits=expBits) 

865 

866 def std_bfKernel(self, item, dataId): 

867 """Disable standardization for bfKernel 

868 

869 bfKernel is a calibration product that is a numpy array, 

870 unlike other calibration products that are all images; 

871 all calibration images are sent through _standardizeExposure 

872 due to CalibrationMapping, but we don't want that to happen to bfKernel 

873 """ 

874 return item 

875 

876 def std_raw(self, item, dataId): 

877 """Standardize a raw dataset by converting it to an Exposure instead 

878 of an Image""" 

879 return self._standardizeExposure( 

880 self.exposures["raw"], item, dataId, trimmed=False, setVisitInfo=True, setExposureId=True 

881 ) 

882 

883 def map_skypolicy(self, dataId): 

884 """Map a sky policy.""" 

885 return dafPersist.ButlerLocation( 

886 "lsst.pex.policy.Policy", "Policy", "Internal", None, None, self, storage=self.rootStorage 

887 ) 

888 

889 def std_skypolicy(self, item, dataId): 

890 """Standardize a sky policy by returning the one we use.""" 

891 return self.skypolicy 

892 

893 ########################################################################## 

894 # 

895 # Utility functions 

896 # 

897 ########################################################################## 

898 

899 def _setupRegistry( 

900 self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True 

901 ): 

902 """Set up a registry (usually SQLite3), trying a number of possible 

903 paths. 

904 

905 Parameters 

906 ---------- 

907 name : string 

908 Name of registry. 

909 description : `str`

910 Description of registry (for log messages) 

911 path : string 

912 Path for registry. 

913 policy : Policy

914 Policy that contains the registry name, used if path is None.

915 policyKey : string 

916 Key in policy for registry path. 

917 storage : Storage subclass 

918 Repository Storage to look in. 

919 searchParents : bool, optional 

920 True if the search for a registry should follow any Butler v1 

921 _parent symlinks. 

922 posixIfNoSql : bool, optional 

923 If an sqlite registry is not found, will create a posix registry if 

924 this is True. 

925 

926 Returns 

927 ------- 

928 lsst.daf.persistence.Registry 

929 Registry object 

930 """ 

931 if path is None and policyKey in policy: 

932 path = dafPersist.LogicalLocation(policy[policyKey]).locString() 

933 if os.path.isabs(path): 

934 raise RuntimeError("Policy should not indicate an absolute path for registry.") 

935 if not storage.exists(path): 

936 newPath = storage.instanceSearch(path) 

937 

938 newPath = newPath[0] if newPath is not None and len(newPath) else None 

939 if newPath is None: 

940 self.log.warning( 

941 "Unable to locate registry at policy path (also looked in root): %s", path 

942 ) 

943 path = newPath 

944 else: 

945 self.log.warning("Unable to locate registry at policy path: %s", path) 

946 path = None 

947 

948 # The old Butler API indicated the registry WITH the repo folder; the

949 # new Butler expects the registry to be in the repo folder. To support

950 # the old API, check to see if path starts with root and, if so, strip

951 # root from path. Currently only works with PosixStorage.

952 try: 

953 root = storage.root 

954 if path and (path.startswith(root)): 

955 path = path[len(root + "/") :] 

956 except AttributeError: 

957 pass 

958 

959 # determine if there is an sqlite registry and if not, try the posix 

960 # registry. 

961 registry = None 

962 

963 def search(filename, description): 

964 """Search for file in storage 

965 

966 Parameters 

967 ---------- 

968 filename : `str` 

969 Filename to search for 

970 description : `str` 

971 Description of file, for error message. 

972 

973 Returns 

974 ------- 

975 path : `str` or `None` 

976 Path to file, or None 

977 """ 

978 result = storage.instanceSearch(filename) 

979 if result: 

980 return result[0] 

981 self.log.debug("Unable to locate %s: %s", description, filename) 

982 return None 

983 

984 # Search for a suitable registry database 

985 if path is None: 

986 path = search("%s.pgsql" % name, "%s in root" % description) 

987 if path is None: 

988 path = search("%s.sqlite3" % name, "%s in root" % description) 

989 if path is None: 

990 path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description) 

991 

992 if path is not None: 

993 if not storage.exists(path): 

994 newPath = storage.instanceSearch(path) 

995 newPath = newPath[0] if newPath is not None and len(newPath) else None 

996 if newPath is not None: 

997 path = newPath 

998 localFileObj = storage.getLocalFile(path) 

999 self.log.info("Loading %s registry from %s", description, localFileObj.name) 

1000 registry = dafPersist.Registry.create(localFileObj.name) 

1001 localFileObj.close() 

1002 elif not registry and posixIfNoSql: 

1003 try: 

1004 self.log.info("Loading Posix %s registry from %s", description, storage.root) 

1005 registry = dafPersist.PosixRegistry(storage.root) 

1006 except Exception: 

1007 registry = None 

1008 

1009 return registry 

1010 

1011 def _transformId(self, dataId): 

1012 """Generate a standard ID dict from a camera-specific ID dict. 

1013 

1014 Canonical keys include: 

1015 - amp: amplifier name 

1016 - ccd: CCD name (in LSST this is a combination of raft and sensor) 

1017 The default implementation returns a copy of its input. 

1018 

1019 Parameters 

1020 ---------- 

1021 dataId : `dict` 

1022 Dataset identifier; this must not be modified 

1023 

1024 Returns 

1025 ------- 

1026 `dict` 

1027 Transformed dataset identifier. 

1028 """ 

1029 

1030 return dataId.copy() 

1031 

1032 def _mapActualToPath(self, template, actualId): 

1033 """Convert a template path to an actual path, using the actual data 

1034 identifier. This implementation is usually sufficient but can be 

1035 overridden by the subclass. 

1036 

1037 Parameters 

1038 ---------- 

1039 template : `str` 

1040 Template path 

1041 actualId : `dict` 

1042 Dataset identifier 

1043 

1044 Returns 

1045 ------- 

1046 `str` 

1047 Pathname 

1048 """ 

1049 

1050 try: 

1051 transformedId = self._transformId(actualId) 

1052 return template % transformedId 

1053 except Exception as e: 

1054 raise RuntimeError("Failed to format %r with data %r: %s" % (template, actualId, e))  # actualId: transformedId may be unbound if _transformId raised
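
# A hedged illustration of the %-substitution performed above (template and
# keys are illustrative, not from any shipped policy):
#
#     "raw/v%(visit)07d_f%(filter)s.fits" % {"visit": 903334, "filter": "g"}
#     # -> 'raw/v0903334_fg.fits'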

1055 

1056 @staticmethod 

1057 def getShortCcdName(ccdName): 

1058 """Convert a CCD name to a form useful as a filename 

1059 

1060 The default implementation converts spaces to underscores. 
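
For example, ``"R:1,2 S:3,4"`` becomes ``"R:1,2_S:3,4"``.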

1061 """ 

1062 return ccdName.replace(" ", "_") 

1063 

1064 def _extractDetectorName(self, dataId): 

1065 """Extract the detector (CCD) name from the dataset identifier. 

1066 

1067 The name in question is the detector name used by lsst.afw.cameraGeom. 

1068 

1069 Parameters 

1070 ---------- 

1071 dataId : `dict` 

1072 Dataset identifier. 

1073 

1074 Returns 

1075 ------- 

1076 `str` 

1077 Detector name 

1078 """ 

1079 raise NotImplementedError("No _extractDetectorName() function specified") 

1080 

1081 def _setAmpDetector(self, item, dataId, trimmed=True): 

1082 """Set the detector object in an Exposure for an amplifier. 

1083 

1084 Defects are also added to the Exposure based on the detector object. 

1085 

1086 Parameters 

1087 ---------- 

1088 item : `lsst.afw.image.Exposure` 

1089 Exposure to set the detector in. 

1090 dataId : `dict` 

1091 Dataset identifier 

1092 trimmed : `bool` 

1093 Should detector be marked as trimmed? (ignored) 

1094 """ 

1095 

1096 return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed) 

1097 

1098 def _setCcdDetector(self, item, dataId, trimmed=True): 

1099 """Set the detector object in an Exposure for a CCD. 

1100 

1101 Parameters 

1102 ---------- 

1103 item : `lsst.afw.image.Exposure` 

1104 Exposure to set the detector in. 

1105 dataId : `dict` 

1106 Dataset identifier 

1107 trimmed : `bool` 

1108 Should detector be marked as trimmed? (ignored) 

1109 """ 

1110 if item.getDetector() is not None: 

1111 return 

1112 

1113 detectorName = self._extractDetectorName(dataId) 

1114 detector = self.camera[detectorName] 

1115 item.setDetector(detector) 

1116 

1117 @staticmethod 

1118 def _resolveFilters(definitions, idFilter, filterLabel): 

1119 """Identify the filter(s) consistent with partial filter information. 

1120 

1121 Parameters 

1122 ---------- 

1123 definitions : `lsst.obs.base.FilterDefinitionCollection` 

1124 The filter definitions in which to search for filters. 

1125 idFilter : `str` or `None` 

1126 The filter information provided in a data ID. 

1127 filterLabel : `lsst.afw.image.FilterLabel` or `None` 

1128 The filter information provided by an exposure; may be incomplete. 

1129 

1130 Returns 

1131 ------- 

1132 filters : `set` [`lsst.obs.base.FilterDefinition`] 

1133 The set of filters consistent with ``idFilter`` 

1134 and ``filterLabel``. 

1135 """ 

1136 # Assume none of the filter constraints are actually wrong/contradictory. 

1137 # Then taking the intersection of all constraints will give a unique 

1138 # result if one exists. 

1139 matches = set(definitions) 

1140 if idFilter is not None: 

1141 matches.intersection_update(definitions.findAll(idFilter)) 

1142 if filterLabel is not None and filterLabel.hasPhysicalLabel(): 

1143 matches.intersection_update(definitions.findAll(filterLabel.physicalLabel)) 

1144 if filterLabel is not None and filterLabel.hasBandLabel(): 

1145 matches.intersection_update(definitions.findAll(filterLabel.bandLabel)) 

1146 return matches 
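
# A hedged illustration of the intersection above: a data ID filter of "i"
# combined with a stored physical filter of "HSC-I2" (both names are
# illustrative) keeps only the definitions consistent with both:
#
#     matches = CameraMapper._resolveFilters(
#         defs, "i", afwImage.FilterLabel(physical="HSC-I2"))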

1147 

1148 def _getBestFilter(self, storedLabel, idFilter): 

1149 """Estimate the most complete filter information consistent with the 

1150 file or registry. 

1151 

1152 Parameters 

1153 ---------- 

1154 storedLabel : `lsst.afw.image.FilterLabel` or `None` 

1155 The filter previously stored in the file. 

1156 idFilter : `str` or `None` 

1157 The filter implied by the data ID, if any. 

1158 

1159 Returns 

1160 ------- 

1161 bestFilter : `lsst.afw.image.FilterLabel` or `None`

1162 The complete filter to describe the dataset. May be equal to 

1163 ``storedLabel``. `None` if no recommendation can be generated. 

1164 """ 

1165 try: 

1166 # getGen3Instrument returns class; need to construct it. 

1167 filterDefinitions = self.getGen3Instrument()().filterDefinitions 

1168 except NotImplementedError: 

1169 filterDefinitions = None 

1170 

1171 if filterDefinitions is not None: 

1172 definitions = self._resolveFilters(filterDefinitions, idFilter, storedLabel) 

1173 self.log.debug( 

1174 "Matching filters for id=%r and label=%r are %s.", idFilter, storedLabel, definitions 

1175 ) 

1176 if len(definitions) == 1: 

1177 newLabel = list(definitions)[0].makeFilterLabel() 

1178 return newLabel 

1179 elif definitions: 

1180 # Some instruments have many filters for the same band, of 

1181 # which one is known by band name and the others always by 

1182 # afw name (e.g., i, i2). 

1183 nonAfw = {f for f in definitions if f.afw_name is None} 

1184 if len(nonAfw) == 1: 

1185 newLabel = list(nonAfw)[0].makeFilterLabel() 

1186 self.log.debug("Assuming %r is the correct match.", newLabel) 

1187 return newLabel 

1188 

1189 self.log.warning("Multiple matches for filter %r with data ID %r.", storedLabel, idFilter) 

1190 # Can we at least add a band? 

1191 # Never expect multiple definitions with same physical filter. 

1192 bands = {d.band for d in definitions} # None counts as separate result! 

1193 if len(bands) == 1 and storedLabel is None: 

1194 band = list(bands)[0] 

1195 return afwImage.FilterLabel(band=band) 

1196 else: 

1197 return None 

1198 else: 

1199 # Unknown filter, nothing to be done. 

1200 self.log.warning("Cannot reconcile filter %r with data ID %r.", storedLabel, idFilter) 

1201 return None 

1202 

1203 # Not practical to recommend a FilterLabel without filterDefinitions 

1204 

1205 return None 

1206 

1207 def _setFilter(self, mapping, item, dataId): 

1208 """Set the filter information in an Exposure. 

1209 

1210 The Exposure should already have had a filter loaded, but the reader 

1211 (in ``afw``) had to act on incomplete information. This method 

1212 cross-checks the filter against the data ID and the standard list 

1213 of filters. 

1214 

1215 Parameters 

1216 ---------- 

1217 mapping : `lsst.obs.base.Mapping` 

1218 Where to get the data ID filter from. 

1219 item : `lsst.afw.image.Exposure` 

1220 Exposure to set the filter in. 

1221 dataId : `dict` 

1222 Dataset identifier. 

1223 """ 

1224 if not ( 

1225 isinstance(item, afwImage.ExposureU) 

1226 or isinstance(item, afwImage.ExposureI) 

1227 or isinstance(item, afwImage.ExposureF) 

1228 or isinstance(item, afwImage.ExposureD) 

1229 ): 

1230 return 

1231 

1232 itemFilter = item.getFilterLabel() # may be None 

1233 try: 

1234 idFilter = mapping.need(["filter"], dataId)["filter"] 

1235 except dafPersist.NoResults: 

1236 idFilter = None 

1237 

1238 bestFilter = self._getBestFilter(itemFilter, idFilter) 

1239 if bestFilter is not None: 

1240 if bestFilter != itemFilter: 

1241 item.setFilterLabel(bestFilter) 

1242 # Already using bestFilter, avoid unnecessary edits 

1243 elif itemFilter is None: 

1244 # Old Filter cleanup, without the benefit of FilterDefinition 

1245 if self.filters is not None and idFilter in self.filters: 

1246 idFilter = self.filters[idFilter] 

1247 try: 

1248 # TODO: remove in DM-27177; at that point may not be able 

1249 # to process IDs without FilterDefinition. 

1250 with warnings.catch_warnings(): 

1251 warnings.filterwarnings("ignore", category=FutureWarning) 

1252 item.setFilter(afwImage.Filter(idFilter)) 

1253 except pexExcept.NotFoundError: 

1254 self.log.warning("Filter %s not defined. Set to UNKNOWN.", idFilter) 

1255 

1256 def _standardizeExposure( 

1257 self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True, setExposureId=False 

1258 ): 

1259 """Default standardization function for images. 

1260 

1261 This sets the Detector from the camera geometry 

1262 and optionally set the Filter. In both cases this saves 

1263 having to persist some data in each exposure (or image). 

1264 

1265 Parameters 

1266 ---------- 

1267 mapping : `lsst.obs.base.Mapping` 

1268 Where to get the values from. 

1269 item : image-like object 

1270 Can be any of lsst.afw.image.Exposure, 

1271 lsst.afw.image.DecoratedImage, lsst.afw.image.Image 

1272 or lsst.afw.image.MaskedImage 

1273 

1274 dataId : `dict` 

1275 Dataset identifier 

1276 filter : `bool` 

1277 Set filter? Ignored if item is already an exposure 

1278 trimmed : `bool` 

1279 Should detector be marked as trimmed? 

1280 setVisitInfo : `bool` 

1281 Should Exposure have its VisitInfo filled out from the metadata? 

1282 setExposureId : `bool` 

1283 Should Exposure have its exposure ID filled out from the data ID? 

1284 

1285 Returns 

1286 ------- 

1287 `lsst.afw.image.Exposure` 

1288 The standardized Exposure. 

1289 """ 

1290 try: 

1291 exposure = exposureFromImage( 

1292 item, 

1293 dataId, 

1294 mapper=self, 

1295 logger=self.log, 

1296 setVisitInfo=setVisitInfo, 

1297 setFilter=filter, 

1298 setExposureId=setExposureId, 

1299 ) 

1300 except Exception as e: 

1301 self.log.error("Could not turn item=%r into an exposure: %s", item, e) 

1302 raise 

1303 

1304 if mapping.level.lower() == "amp": 

1305 self._setAmpDetector(exposure, dataId, trimmed) 

1306 elif mapping.level.lower() == "ccd": 

1307 self._setCcdDetector(exposure, dataId, trimmed) 

1308 

1309 # We can only create a WCS if it doesn't already have one and 

1310 # we have either a VisitInfo or exposure metadata. 

1311 # Do not calculate a WCS if this is an amplifier exposure 

1312 if ( 

1313 mapping.level.lower() != "amp" 

1314 and exposure.getWcs() is None 

1315 and (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()) 

1316 ): 

1317 self._createInitialSkyWcs(exposure) 

1318 

1319 if filter: 

1320 self._setFilter(mapping, exposure, dataId) 

1321 

1322 return exposure 

1323 

1324 def _createSkyWcsFromMetadata(self, exposure): 

1325 """Create a SkyWcs from the FITS header metadata in an Exposure. 

1326 

1327 Parameters 

1328 ---------- 

1329 exposure : `lsst.afw.image.Exposure` 

1330 The exposure to get metadata from, and attach the SkyWcs to. 

1331 """ 

1332 metadata = exposure.getMetadata() 

1333 fix_header(metadata, translator_class=self.translatorClass) 

1334 try: 

1335 wcs = afwGeom.makeSkyWcs(metadata, strip=True) 

1336 exposure.setWcs(wcs) 

1337 except pexExcept.TypeError as e: 

1338 # See DM-14372 for why this is debug and not warn (e.g. calib 

1339 # files without wcs metadata). 

1340 self.log.debug( 

1341 "wcs set to None; missing information found in metadata to create a valid wcs: %s", 

1342 e.args[0], 

1343 ) 

1344 # ensure any WCS values stripped from the metadata are removed in the 

1345 # exposure 

1346 exposure.setMetadata(metadata) 

1347 

1348 def _createInitialSkyWcs(self, exposure): 

1349 """Create a SkyWcs from the boresight and camera geometry. 

1350 

1351 If the boresight or camera geometry do not support this method of 

1352 WCS creation, this falls back on the header metadata-based version 

1353 (typically a purely linear FITS crval/crpix/cdmatrix WCS). 

1354 

1355 Parameters 

1356 ---------- 

1357 exposure : `lsst.afw.image.Exposure` 

1358 The exposure to get data from, and attach the SkyWcs to. 

1359 """ 

1360 # Always try to use the metadata first, to strip WCS keys from it. 

1361 self._createSkyWcsFromMetadata(exposure) 

1362 

1363 if exposure.getInfo().getVisitInfo() is None: 

1364 msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs." 

1365 self.log.warning(msg) 

1366 return 

1367 try: 

1368 newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector()) 

1369 exposure.setWcs(newSkyWcs) 

1370 except InitialSkyWcsError as e: 

1371 msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s" 

1372 self.log.warning(msg, e) 

1373 self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e)) 

1374 if e.__context__ is not None: 

1375 self.log.debug( 

1376 "Root-cause Exception was: %s", traceback.TracebackException.from_exception(e.__context__) 

1377 ) 

1378 

1379 def _makeCamera(self, policy, repositoryDir): 

1380 """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing 

1381 the camera geometry 

1382 

1383 Also set self.cameraDataLocation, if relevant (else it can be left 

1384 None). 

1385 

1386 This implementation assumes that policy contains an entry "camera" 

1387 that points to the subdirectory in this package of camera data; 

1388 specifically, that subdirectory must contain: 

1389 - a file named `camera.py` that contains persisted camera config 

1390 - ampInfo table FITS files, as required by 

1391 lsst.afw.cameraGeom.makeCameraFromPath 

1392 

1393 Parameters 

1394 ---------- 

1395 policy : `lsst.daf.persistence.Policy` 

1396 Policy with per-camera defaults already merged 

1397 (PexPolicy only for backward compatibility). 

1398 repositoryDir : `str` 

1399 Policy repository for the subclassing module (obtained with 

1400 getRepositoryPath() on the per-camera default dictionary). 

1401 """ 

1402 if "camera" not in policy: 

1403 raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera") 

1404 cameraDataSubdir = policy["camera"] 

1405 self.cameraDataLocation = os.path.normpath(os.path.join(repositoryDir, cameraDataSubdir, "camera.py")) 

1406 cameraConfig = afwCameraGeom.CameraConfig() 

1407 cameraConfig.load(self.cameraDataLocation) 

1408 ampInfoPath = os.path.dirname(self.cameraDataLocation) 

1409 return afwCameraGeom.makeCameraFromPath( 

1410 cameraConfig=cameraConfig, 

1411 ampInfoPath=ampInfoPath, 

1412 shortNameFunc=self.getShortCcdName, 

1413 pupilFactoryClass=self.PupilFactoryClass, 

1414 ) 

1415 

1416 def getRegistry(self): 

1417 """Get the registry used by this mapper. 

1418 

1419 Returns 

1420 ------- 

1421 Registry or None 

1422 The registry used by this mapper for this mapper's repository. 

1423 """ 

1424 return self.registry 

1425 

1426 def getImageCompressionSettings(self, datasetType, dataId): 

1427 """Stuff image compression settings into a daf.base.PropertySet 

1428 

1429 This goes into the ButlerLocation's "additionalData", which gets 

1430 passed into the boost::persistence framework. 

1431 

1432 Parameters 

1433 ---------- 

1434 datasetType : `str` 

1435 Type of dataset for which to get the image compression settings. 

1436 dataId : `dict` 

1437 Dataset identifier. 

1438 

1439 Returns 

1440 ------- 

1441 additionalData : `lsst.daf.base.PropertySet` 

1442 Image compression settings. 

1443 """ 

1444 mapping = self.mappings[datasetType] 

1445 recipeName = mapping.recipe 

1446 storageType = mapping.storage 

1447 if storageType not in self._writeRecipes: 

1448 return dafBase.PropertySet() 

1449 if recipeName not in self._writeRecipes[storageType]: 

1450 raise RuntimeError( 

1451 "Unrecognized write recipe for datasetType %s (storage type %s): %s" 

1452 % (datasetType, storageType, recipeName) 

1453 ) 

1454 recipe = self._writeRecipes[storageType][recipeName].deepCopy() 

1455 seed = hash(tuple(dataId.items())) % 2**31 

1456 for plane in ("image", "mask", "variance"): 

1457 if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0: 

1458 recipe.set(plane + ".scaling.seed", seed) 

1459 return recipe 
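
# A hedged note on the seed above: it is derived from the data ID, so
# identical data IDs fuzz identically within a process. Because Python
# randomizes str hashing per process, the value is not stable across runs
# unless PYTHONHASHSEED is fixed:
#
#     seed = hash(tuple({"visit": 1, "ccd": 2}.items())) % 2**31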

1460 

1461 def _initWriteRecipes(self): 

1462 """Read the recipes for writing files 

1463 

1464 These recipes are currently used for configuring FITS compression, 

1465 but they could have wider uses for configuring different flavors 

1466 of the storage types. A recipe is referred to by a symbolic name, 

1467 which has associated settings. These settings are stored as a 

1468 `PropertySet` so they can easily be passed down to the 

1469 boost::persistence framework as the "additionalData" parameter. 

1470 

1471 The list of recipes is written in YAML. A default recipe and 

1472 some other convenient recipes are in obs_base/policy/writeRecipes.yaml 

1473 and these may be overridden or supplemented by the individual obs_* 

1474 packages' own policy/writeRecipes.yaml files. 

1475 

1476 Recipes are grouped by the storage type. Currently, only the

1477 ``FitsStorage`` storage type uses recipes, and it uses them to

1478 configure FITS image compression.

1479 

1480 Each ``FitsStorage`` recipe for FITS compression should define 

1481 "image", "mask" and "variance" entries, each of which may contain 

1482 "compression" and "scaling" entries. Defaults will be provided for 

1483 any missing elements under "compression" and "scaling". 

1484 

1485 The allowed entries under "compression" are: 

1486 

1487 * algorithm (string): compression algorithm to use 

1488 * rows (int): number of rows per tile (0 = entire dimension) 

1489 * columns (int): number of columns per tile (0 = entire dimension) 

1490 * quantizeLevel (float): cfitsio quantization level 

1491 

1492 The allowed entries under "scaling" are: 

1493 

1494 * algorithm (string): scaling algorithm to use 

1495 * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64) 

1496 * fuzz (bool): fuzz the values when quantizing floating-point values? 

1497 * seed (long): seed for random number generator when fuzzing 

1498 * maskPlanes (list of string): mask planes to ignore when doing 

1499 statistics 

1500 * quantizeLevel: divisor of the standard deviation for STDEV_* scaling 

1501 * quantizePad: number of stdev to allow on the low side (for 

1502 STDEV_POSITIVE/NEGATIVE) 

1503 * bscale: manually specified BSCALE (for MANUAL scaling) 

1504 * bzero: manually specified BZERO (for MANUAL scaling) 

1505 

1506 A very simple example YAML recipe: 

1507 

1508 FitsStorage: 

1509 default: 

1510 image: &default 

1511 compression: 

1512 algorithm: GZIP_SHUFFLE 

1513 mask: *default 

1514 variance: *default 
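
A fuller sketch that also configures lossy scaling (the recipe name
``lossyBasic`` and the numerical values here are illustrative, not
recommendations):

    FitsStorage:
      lossyBasic:
        image: &lossyBasic
          compression:
            algorithm: GZIP_SHUFFLE
          scaling:
            algorithm: STDEV_POSITIVE
            bitpix: 32
            fuzz: true
            quantizeLevel: 10.0
        mask:
          compression:
            algorithm: GZIP_SHUFFLE
        variance: *lossyBasic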

1515 """ 

1516 recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml") 

1517 recipes = dafPersist.Policy(recipesFile) 

1518 supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml") 

1519 validationMenu = { 

1520 "FitsStorage": validateRecipeFitsStorage, 

1521 } 

1522 if os.path.exists(supplementsFile) and supplementsFile != recipesFile: 

1523 supplements = dafPersist.Policy(supplementsFile) 

1524 # Don't allow overrides, only supplements 

1525 for entry in validationMenu: 

1526 intersection = set(recipes[entry].names()).intersection(set(supplements.names())) 

1527 if intersection: 

1528 raise RuntimeError( 

1529 "Recipes provided in %s section %s may not override those in %s: %s" 

1530 % (supplementsFile, entry, recipesFile, intersection) 

1531 ) 

1532 recipes.update(supplements) 
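# A sketch of a per-package supplement file (policy/writeRecipes.yaml in an
# obs_* package; the recipe name "myLossless" is an assumption). It may add
# new recipe names but, per the check above, may not redefine "default" or
# any other name shipped with obs_base:
#
#     FitsStorage:
#       myLossless:
#         image: &myLossless
#           compression:
#             algorithm: GZIP_SHUFFLE
#         mask: *myLossless
#         variance: *myLossless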

1533 

1534 self._writeRecipes = {} 

1535 for storageType in recipes.names(True): 

1536 if "default" not in recipes[storageType]: 

1537 raise RuntimeError( 

1538 "No 'default' recipe defined for storage type %s in %s" % (storageType, recipesFile) 

1539 ) 

1540 self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType]) 

1541 

1542 

1543def exposureFromImage( 

1544 image, dataId=None, mapper=None, logger=None, setVisitInfo=True, setFilter=False, setExposureId=False 

1545): 

1546 """Generate an Exposure from an image-like object 

1547 

1548 If the image is a DecoratedImage, its metadata is also copied to 

1549 the result (Image and MaskedImage lack the necessary metadata, 

1550 while an Exposure already carries its own). 

1551 

1552 Parameters 

1553 ---------- 

1554 image : image-like object 

1555 One of `lsst.afw.image.DecoratedImage`, `~lsst.afw.image.Image`, 

1556 `~lsst.afw.image.MaskedImage`, or `~lsst.afw.image.Exposure`. 

1557 dataId : `dict`, optional 

1558 The data ID identifying the visit of the image. 

1559 mapper : `lsst.obs.base.CameraMapper`, optional 

1560 The mapper with which to convert the image. 

1561 logger : `lsst.log.Log`, optional 

1562 An existing logger to which to send output. 

1563 setVisitInfo : `bool`, optional 

1564 If `True`, create and attach a `lsst.afw.image.VisitInfo` to the 

1565 result. Ignored if ``image`` is an `~lsst.afw.image.Exposure` with an 

1566 existing ``VisitInfo``. 

1567 setFilter : `bool`, optional 

1568 If `True`, create and attach a `lsst.afw.image.FilterLabel` to the 

1569 result. Converts non-``FilterLabel`` information provided in ``image``. 

1570 Ignored if ``image`` is an `~lsst.afw.image.Exposure` with existing 

1571 filter information. 

1572 setExposureId : `bool`, optional 

1573 If `True`, create and set an exposure ID from ``dataId``. Ignored if 

1574 ``image`` is an `~lsst.afw.image.Exposure` with an existing ID. 

1575 

1576 Returns 

1577 ------- 

1578 `lsst.afw.image.Exposure` 

1579 Exposure containing the input image. 

1580 """ 

1581 translatorClass = None 

1582 if mapper is not None: 

1583 translatorClass = mapper.translatorClass 

1584 

1585 metadata = None 

1586 if isinstance(image, afwImage.MaskedImage): 

1587 exposure = afwImage.makeExposure(image) 

1588 elif isinstance(image, afwImage.DecoratedImage): 

1589 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage())) 

1590 metadata = image.getMetadata() 

1591 fix_header(metadata, translator_class=translatorClass) 

1592 exposure.setMetadata(metadata) 

1593 elif isinstance(image, afwImage.Exposure): 

1594 exposure = image 

1595 metadata = exposure.getMetadata() 

1596 fix_header(metadata, translator_class=translatorClass) 

1597 else: # Image 

1598 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image)) 

1599 

1600 # set exposure ID if we can 

1601 if setExposureId and not exposure.info.hasId() and mapper is not None: 

1602 try: 

1603 exposure.info.id = mapper._computeCcdExposureId(dataId) 

1604 except NotImplementedError: 

1605 if logger is not None: 

1606 logger.warning("Could not set exposure ID; mapper does not support it.") 

1607 

1608 if metadata is not None: 

1609 # set filter if we can 

1610 if setFilter and mapper is not None and exposure.getFilterLabel() is None: 

1611 # Translate whatever was in the metadata 

1612 if "FILTER" in metadata: 

1613 oldFilter = metadata["FILTER"] 

1614 idFilter = dataId["filter"] if dataId and "filter" in dataId else None 

1615 # oldFilter may not be physical, but _getBestFilter always goes 

1616 # through the FilterDefinitions instead of returning 

1617 # unvalidated input. 

1618 filterLabel = mapper._getBestFilter(afwImage.FilterLabel(physical=oldFilter), idFilter) 

1619 if filterLabel is not None: 

1620 exposure.setFilterLabel(filterLabel) 

1621 # set VisitInfo if we can 

1622 if setVisitInfo and exposure.getInfo().getVisitInfo() is None: 

1623 if mapper is None: 

1624 if not logger: 

1625 logger = lsstLog.Log.getLogger("lsst.CameraMapper") 

1626 logger.warning("Cannot set the VisitInfo unless a mapper is provided") 

1627 else: 

1628 exposureId = mapper._computeCcdExposureId(dataId) 

1629 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId) 

1630 

1631 exposure.getInfo().setVisitInfo(visitInfo) 

1632 

1633 return exposure 

1634 

1635 
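# A minimal usage sketch for exposureFromImage (the file name, dataId, and
# concrete mapper are assumptions for illustration):
#
#     import lsst.afw.image as afwImage
#
#     dimage = afwImage.DecoratedImageF("raw.fits")
#     exp = exposureFromImage(dimage, dataId={"visit": 1, "ccd": 0},
#                             mapper=mapper, setVisitInfo=True)
#     assert exp.getMetadata() is not None  # copied from the DecoratedImage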

1636def validateRecipeFitsStorage(recipes): 

1637 """Validate recipes for FitsStorage 

1638 

1639 The recipes are supplemented with default values where appropriate. 

1640 

1641 TODO: replace this custom validation code with Cerberus (DM-11846) 

1642 

1643 Parameters 

1644 ---------- 

1645 recipes : `lsst.daf.persistence.Policy` 

1646 FitsStorage recipes to validate. 

1647 

1648 Returns 

1649 ------- 

1650 validated : `lsst.daf.base.PropertySet` 

1651 Validated FitsStorage recipe. 

1652 

1653 Raises 

1654 ------ 

1655 RuntimeError 

1656 If validation fails. 

1657 """ 

1658 # Schemas define the allowed keys and their default values; the type 

1659 # of each default also fixes the expected type of the entry. 

1660 compressionSchema = { 

1661 "algorithm": "NONE", 

1662 "rows": 1, 

1663 "columns": 0, 

1664 "quantizeLevel": 0.0, 

1665 } 

1666 scalingSchema = { 

1667 "algorithm": "NONE", 

1668 "bitpix": 0, 

1669 "maskPlanes": ["NO_DATA"], 

1670 "seed": 0, 

1671 "quantizeLevel": 4.0, 

1672 "quantizePad": 5.0, 

1673 "fuzz": True, 

1674 "bscale": 1.0, 

1675 "bzero": 0.0, 

1676 } 

1677 

1678 def checkUnrecognized(entry, allowed, description): 

1679 """Check to see if the entry contains unrecognised keywords""" 

1680 unrecognized = set(entry.keys()) - set(allowed) 

1681 if unrecognized: 

1682 raise RuntimeError( 

1683 "Unrecognized entries when parsing image compression recipe %s: %s" 

1684 % (description, unrecognized) 

1685 ) 

1686 

1687 validated = {} 

1688 for name in recipes.names(True): 

1689 checkUnrecognized(recipes[name], ["image", "mask", "variance"], name) 

1690 rr = dafBase.PropertySet() 

1691 validated[name] = rr 

1692 for plane in ("image", "mask", "variance"): 

1693 checkUnrecognized(recipes[name][plane], ["compression", "scaling"], name + "->" + plane) 

1694 

1695 for settings, schema in (("compression", compressionSchema), ("scaling", scalingSchema)): 

1696 prefix = plane + "." + settings 

1697 if settings not in recipes[name][plane]: 

1698 for key in schema: 

1699 rr.set(prefix + "." + key, schema[key]) 

1700 continue 

1701 entry = recipes[name][plane][settings] 

1702 checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings) 

1703 for key in schema: 

1704 value = type(schema[key])(entry[key]) if key in entry else schema[key] 

1705 rr.set(prefix + "." + key, value) 

1706 return validated
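# An illustrative sketch of the defaulting behaviour (the recipe content is
# an assumption): given a Policy equivalent to
#
#     myRecipe:
#       image:
#         compression:
#           algorithm: GZIP
#       mask: {}
#       variance: {}
#
# the validated PropertySet for "myRecipe" would contain, among others:
#
#     image.compression.algorithm = "GZIP"   # from the recipe
#     image.compression.rows = 1             # schema default
#     image.scaling.algorithm = "NONE"       # schema default
#     mask.compression.algorithm = "NONE"    # schema default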