lsst.obs.base  13.0-24-gedf0888
cameraMapper.py
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 from builtins import str
24 import copy
25 import os
26 import pyfits # required by _makeDefectsDict until defects are written as AFW tables
27 import re
28 import weakref
29 import lsst.daf.persistence as dafPersist
30 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
31 import lsst.afw.geom as afwGeom
32 import lsst.afw.image as afwImage
33 import lsst.afw.table as afwTable
34 import lsst.afw.cameraGeom as afwCameraGeom
35 import lsst.log as lsstLog
36 import lsst.pex.policy as pexPolicy
37 from .exposureIdInfo import ExposureIdInfo
38 from .makeRawVisitInfo import MakeRawVisitInfo
39 from lsst.utils import getPackageDir
40 
41 """This module defines the CameraMapper base class."""
42 
43 
44 class CameraMapper(dafPersist.Mapper):
45 
46  """CameraMapper is a base class for mappers that handle images from a
47  camera and products derived from them. This provides an abstraction layer
48  between the data on disk and the code.
49 
50  Public methods: keys, queryMetadata, getDatasetTypes, map,
51  canStandardize, standardize
52 
53  Mappers for specific data sources (e.g., CFHT Megacam, LSST
54  simulations, etc.) should inherit this class.
55 
56  The CameraMapper manages datasets within a "root" directory. Note that
57  writing to a dataset present in the input root will hide the existing
58  dataset but not overwrite it. See #2160 for design discussion.
59 
60  A camera is assumed to consist of one or more rafts, each composed of
61  multiple CCDs. Each CCD is in turn composed of one or more amplifiers
62  (amps). A camera is also assumed to have a camera geometry description
63  (CameraGeom object) as a policy file, a filter description (Filter class
64  static configuration) as another policy file, and an optional defects
65  description directory.
66 
 67  Information from the camera geometry, as well as any defects, is inserted
 68  into all Exposure objects returned.
69 
70  The mapper uses one or two registries to retrieve metadata about the
71  images. The first is a registry of all raw exposures. This must contain
72  the time of the observation. One or more tables (or the equivalent)
73  within the registry are used to look up data identifier components that
74  are not specified by the user (e.g. filter) and to return results for
75  metadata queries. The second is an optional registry of all calibration
76  data. This should contain validity start and end entries for each
77  calibration dataset in the same timescale as the observation time.
78 
79  Subclasses will typically set MakeRawVisitInfoClass:
80 
81  MakeRawVisitInfoClass: a class variable that points to a subclass of
82  MakeRawVisitInfo, a functor that creates an
83  lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
84 
85  Subclasses must provide the following methods:
86 
87  _extractDetectorName(self, dataId): returns the detector name for a CCD
88  (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
89  a dataset identifier referring to that CCD or a subcomponent of it.
90 
91  _computeCcdExposureId(self, dataId): see below
92 
93  _computeCoaddExposureId(self, dataId, singleFilter): see below
94 
95  Subclasses may also need to override the following methods:
96 
97  _transformId(self, dataId): transformation of a data identifier
98  from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"),
99  including making suitable for path expansion (e.g. removing commas).
100  The default implementation does nothing. Note that this
101  method should not modify its input parameter.
102 
103  getShortCcdName(self, ccdName): a static method that returns a shortened name
104  suitable for use as a filename. The default version converts spaces to underscores.
105 
106  _getCcdKeyVal(self, dataId): return a CCD key and value
107  by which to look up defects in the defects registry.
 108  The default implementation returns ("ccd", detector name).
109 
110  _mapActualToPath(self, template, actualId): convert a template path to an
111  actual path, using the actual dataset identifier.
112 
113  The mapper's behaviors are largely specified by the policy file.
114  See the MapperDictionary.paf for descriptions of the available items.
115 
116  The 'exposures', 'calibrations', and 'datasets' subpolicies configure
117  mappings (see Mappings class).
118 
119  Common default mappings for all subclasses can be specified in the
120  "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides
121  a simple way to add a product to all camera mappers.
122 
123  Functions to map (provide a path to the data given a dataset
124  identifier dictionary) and standardize (convert data into some standard
125  format or type) may be provided in the subclass as "map_{dataset type}"
126  and "std_{dataset type}", respectively.
127 
128  If non-Exposure datasets cannot be retrieved using standard
129  daf_persistence methods alone, a "bypass_{dataset type}" function may be
130  provided in the subclass to return the dataset instead of using the
131  "datasets" subpolicy.
132 
133  Implementations of map_camera and bypass_camera that should typically be
134  sufficient are provided in this base class.
135 
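 A rough, purely illustrative sketch of a minimal subclass follows; the
 package name, policy file name, and detector naming scheme below are
 hypothetical, not part of this base class:

     class ExampleMapper(CameraMapper):
         packageName = "obs_example"  # hypothetical obs package

         def __init__(self, **kwargs):
             policyFile = dafPersist.Policy.defaultPolicyFile(
                 "obs_example", "ExampleMapper.yaml", "policy")
             policy = dafPersist.Policy(policyFile)
             CameraMapper.__init__(self, policy,
                                   os.path.dirname(policyFile), **kwargs)

         def _extractDetectorName(self, dataId):
             # hypothetical detector naming scheme
             return "CCD%(ccd)02d" % dataId

         # _computeCcdExposureId and _computeCoaddExposureId must also be
         # provided; see those methods for sketches.
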
136  @todo
 137  * Handle defects the same way as all other calibration products, using the calibration registry
138  * Instead of auto-loading the camera at construction time, load it from the calibration registry
139  * Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention
140  of pyfits from this package.
141  """
142  packageName = None
143 
144  # a class or subclass of MakeRawVisitInfo, a functor that makes an
145  # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
146  MakeRawVisitInfoClass = MakeRawVisitInfo
147 
148  # a class or subclass of PupilFactory
149  PupilFactoryClass = afwCameraGeom.PupilFactory
150 
151  def __init__(self, policy, repositoryDir,
152  root=None, registry=None, calibRoot=None, calibRegistry=None,
153  provided=None, parentRegistry=None, repositoryCfg=None):
154  """Initialize the CameraMapper.
155 
156  Parameters
157  ----------
 158  policy : daf_persistence.Policy
 159  Policy with per-camera defaults already merged (a pexPolicy.Policy is
 160  also accepted, for backward compatibility only).
161  repositoryDir : string
162  Policy repository for the subclassing module (obtained with
163  getRepositoryPath() on the per-camera default dictionary).
164  root : string, optional
165  Path to the root directory for data.
166  registry : string, optional
167  Path to registry with data's metadata.
168  calibRoot : string, optional
169  Root directory for calibrations.
170  calibRegistry : string, optional
171  Path to registry with calibrations' metadata.
172  provided : list of string, optional
173  Keys provided by the mapper.
174  parentRegistry : Registry subclass, optional
175  Registry from a parent repository that may be used to look up
176  data's metadata.
177  repositoryCfg : daf_persistence.RepositoryCfg or None, optional
178  The configuration information for the repository this mapper is
179  being used with.
180  """
181 
182  dafPersist.Mapper.__init__(self)
183 
184  self.log = lsstLog.Log.getLogger("CameraMapper")
185 
186  if root:
187  self.root = root
188  elif repositoryCfg:
189  self.root = repositoryCfg.root
190  else:
191  self.root = None
192  if isinstance(policy, pexPolicy.Policy):
193  policy = dafPersist.Policy(policy)
194 
195  repoPolicy = repositoryCfg.policy if repositoryCfg else None
196  if repoPolicy is not None:
197  policy.update(repoPolicy)
198 
199  defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
200  "MapperDictionary.paf",
201  "policy")
202  dictPolicy = dafPersist.Policy(defaultPolicyFile)
203  policy.merge(dictPolicy)
204 
205  # Levels
206  self.levels = dict()
207  if 'levels' in policy:
208  levelsPolicy = policy['levels']
209  for key in levelsPolicy.names(True):
210  self.levels[key] = set(levelsPolicy.asArray(key))
211  self.defaultLevel = policy['defaultLevel']
212  self.defaultSubLevels = dict()
213  if 'defaultSubLevels' in policy:
214  self.defaultSubLevels = policy['defaultSubLevels']
215 
216  # Root directories
217  if root is None:
218  root = "."
219  root = dafPersist.LogicalLocation(root).locString()
220 
221  self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
222 
 223  # If calibRoot is passed in, use it. Otherwise, if the policy indicates a calib root, use that.
 224  # Failing both, the calibs live in the regular root.
 225  # If the location indicated by the calib root does not exist, do not create it.
226  calibStorage = None
227  if calibRoot is not None and dafPersist.Storage.storageExists(uri=calibRoot):
228  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot)
229  elif 'calibRoot' in policy:
230  calibRoot = policy['calibRoot']
231  calibRoot = dafPersist.LogicalLocation(calibRoot).locString()
232  if dafPersist.Storage.exists(uri=calibRoot):
233  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot)
234  if calibStorage is None:
235  calibStorage = self.rootStorage
236 
237  self.root = root
238 
239  # Registries
240  self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
241  self.rootStorage, searchParents=False,
242  posixIfNoSql=(not parentRegistry))
243  if not self.registry:
244  self.registry = parentRegistry
245  needCalibRegistry = policy.get('needCalibRegistry', None)
246  if needCalibRegistry:
247  if calibStorage:
248  self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
249  "calibRegistryPath", calibStorage)
250  else:
251  raise RuntimeError(
252  "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
253  "calibRoot ivar:%s or policy['calibRoot']:%s" %
254  (calibRoot, policy.get('calibRoot', None)))
255  else:
256  self.calibRegistry = None
257 
258  # Dict of valid keys and their value types
259  self.keyDict = dict()
260 
261  self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
262 
263  # Camera geometry
264  self.cameraDataLocation = None # path to camera geometry config file
265  self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)
266 
267  # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
268  # camera package, which is on the local filesystem.
269  self.defectRegistry = None
270  if 'defects' in policy:
271  self.defectPath = os.path.join(repositoryDir, policy['defects'])
272  defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
273  self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
274 
275  # Filter translation table
276  self.filters = None
277 
278  # Skytile policy
279  self.skypolicy = policy['skytiles']
280 
281  # verify that the class variable packageName is set before attempting
282  # to instantiate an instance
283  if self.packageName is None:
284  raise ValueError('class variable packageName must not be None')
285 
 286  self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)
 287 
288  def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
289  """Initialize mappings
290 
291  For each of the dataset types that we want to be able to read, there are
292  methods that can be created to support them:
293  * map_<dataset> : determine the path for dataset
294  * std_<dataset> : standardize the retrieved dataset
295  * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery)
296  * query_<dataset> : query the registry
297 
298  Besides the dataset types explicitly listed in the policy, we create
299  additional, derived datasets for additional conveniences, e.g., reading
300  the header of an image, retrieving only the size of a catalog.
301 
302  @param policy (Policy) Policy with per-camera defaults already merged
303  @param rootStorage (Storage subclass instance) Interface to persisted repository data
 304  @param calibStorage (Storage subclass instance) Interface to persisted calib repository data
305  @param provided (list of strings) Keys provided by the mapper
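
 As an illustration only: for a dataset type declared in the policy under
 the hypothetical name "calexp", this method would attach methods such as
 the following (which derived variants appear depends on the storage type):

     mapper.map_calexp(dataId)            # ButlerLocation for the dataset
     mapper.query_calexp(format, dataId)  # registry lookup
     mapper.std_calexp(item, dataId)      # standardization hook
     mapper.map_calexp_md(dataId)         # derived: FITS header only
     mapper.map_calexp_sub(dataId)        # derived: sub-image read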
306  """
307  # Sub-dictionaries (for exposure/calibration/dataset types)
308  imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
309  "obs_base", "ImageMappingDictionary.paf", "policy"))
310  expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
311  "obs_base", "ExposureMappingDictionary.paf", "policy"))
312  calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
313  "obs_base", "CalibrationMappingDictionary.paf", "policy"))
314  dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
315  "obs_base", "DatasetMappingDictionary.paf", "policy"))
316 
317  # Mappings
318  mappingList = (
319  ("images", imgMappingPolicy, ImageMapping),
320  ("exposures", expMappingPolicy, ExposureMapping),
321  ("calibrations", calMappingPolicy, CalibrationMapping),
322  ("datasets", dsMappingPolicy, DatasetMapping)
323  )
324  self.mappings = dict()
325  for name, defPolicy, cls in mappingList:
326  if name in policy:
327  datasets = policy[name]
328 
329  # Centrally-defined datasets
330  defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
331  if os.path.exists(defaultsPath):
332  datasets.merge(dafPersist.Policy(defaultsPath))
333 
334  mappings = dict()
335  setattr(self, name, mappings)
336  for datasetType in datasets.names(True):
337  subPolicy = datasets[datasetType]
338  subPolicy.merge(defPolicy)
339 
340  if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
341  def compositeClosure(dataId, write=False, mapper=None, mapping=None, subPolicy=subPolicy):
342  components = subPolicy.get('composite')
343  assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
344  disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
345  python = subPolicy['python']
346  butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
347  disassembler=disassembler,
348  python=python,
349  dataId=dataId,
350  mapper=self)
351  for name, component in components.items():
352  butlerComposite.add(id=name,
353  datasetType=component.get('datasetType'),
354  setter=component.get('setter', None),
355  getter=component.get('getter', None),
356  subset=component.get('subset', False),
357  inputOnly=component.get('inputOnly', False))
358  return butlerComposite
359  setattr(self, "map_" + datasetType, compositeClosure)
360  # for now at least, don't set up any other handling for this dataset type.
361  continue
362 
363  if name == "calibrations":
364  mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
365  provided=provided)
366  else:
367  mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
368  self.keyDict.update(mapping.keys())
369  mappings[datasetType] = mapping
370  self.mappings[datasetType] = mapping
371  if not hasattr(self, "map_" + datasetType):
372  def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
373  return mapping.map(mapper, dataId, write)
374  setattr(self, "map_" + datasetType, mapClosure)
375  if not hasattr(self, "query_" + datasetType):
376  def queryClosure(format, dataId, mapping=mapping):
377  return mapping.lookup(format, dataId)
378  setattr(self, "query_" + datasetType, queryClosure)
379  if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
380  def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
381  return mapping.standardize(mapper, item, dataId)
382  setattr(self, "std_" + datasetType, stdClosure)
383 
384  def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
385  """Set convenience methods on CameraMapper"""
386  mapName = "map_" + datasetType + "_" + suffix
387  bypassName = "bypass_" + datasetType + "_" + suffix
388  queryName = "query_" + datasetType + "_" + suffix
389  if not hasattr(self, mapName):
390  setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
391  if not hasattr(self, bypassName):
392  if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
393  bypassImpl = getattr(self, "bypass_" + datasetType)
394  if bypassImpl is not None:
395  setattr(self, bypassName, bypassImpl)
396  if not hasattr(self, queryName):
397  setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
398 
399  # Filename of dataset
400  setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
401  [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
402 
403  # Metadata from FITS file
404  if subPolicy["storage"] == "FitsStorage": # a FITS image
405  setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
406  afwImage.readMetadata(location.getLocationsWithRoot()[0]))
407  if name == "exposures":
408  setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
409  afwImage.makeWcs(
410  afwImage.readMetadata(location.getLocationsWithRoot()[0])))
411  setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
412  afwImage.Calib(
413  afwImage.readMetadata(location.getLocationsWithRoot()[0])))
414  setMethods("visitInfo",
415  bypassImpl=lambda datasetType, pythonType, location, dataId:
416  afwImage.VisitInfo(
417  afwImage.readMetadata(location.getLocationsWithRoot()[0])))
418  if subPolicy["storage"] == "FitsCatalogStorage": # a FITS catalog
419  setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
420  afwImage.readMetadata(os.path.join(location.getStorage().root,
421  location.getLocations()[0]), hdu=1))
422 
423  # Sub-images
424  if subPolicy["storage"] == "FitsStorage":
425  def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
426  subId = dataId.copy()
427  del subId['bbox']
428  loc = mapping.map(mapper, subId, write)
429  bbox = dataId['bbox']
430  llcX = bbox.getMinX()
431  llcY = bbox.getMinY()
432  width = bbox.getWidth()
433  height = bbox.getHeight()
434  loc.additionalData.set('llcX', llcX)
435  loc.additionalData.set('llcY', llcY)
436  loc.additionalData.set('width', width)
437  loc.additionalData.set('height', height)
438  if 'imageOrigin' in dataId:
439  loc.additionalData.set('imageOrigin',
440  dataId['imageOrigin'])
441  return loc
442  def querySubClosure(key, format, dataId, mapping=mapping):
443  subId = dataId.copy()
444  del subId['bbox']
445  return mapping.lookup(format, subId)
446  setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
447 
448  if subPolicy["storage"] == "FitsCatalogStorage":
449  # Length of catalog
450  setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
451  afwImage.readMetadata(os.path.join(location.getStorage().root,
452  location.getLocations()[0]),
453  hdu=1).get("NAXIS2"))
454 
455  # Schema of catalog
456  if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
457  setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
458  afwTable.Schema.readFits(os.path.join(location.getStorage().root,
459  location.getLocations()[0])))
460 
461  def _computeCcdExposureId(self, dataId):
462  """Compute the 64-bit (long) identifier for a CCD exposure.
463 
464  Subclasses must override
465 
466  @param dataId (dict) Data identifier with visit, ccd
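
 A hedged sketch of a possible override (the bit layout is hypothetical):

     def _computeCcdExposureId(self, dataId):
         # hypothetical layout: low 8 bits for ccd, the rest for visit
         return (dataId["visit"] << 8) + dataId["ccd"]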
467  """
468  raise NotImplementedError()
469 
470  def _computeCoaddExposureId(self, dataId, singleFilter):
471  """Compute the 64-bit (long) identifier for a coadd.
472 
473  Subclasses must override
474 
475  @param dataId (dict) Data identifier with tract and patch.
476  @param singleFilter (bool) True means the desired ID is for a single-
477  filter coadd, in which case dataId
478  must contain filter.
479  """
480  raise NotImplementedError()
481 
482  def _search(self, path):
483  """Search for path in the associated repository's storage.
484 
485  Parameters
486  ----------
487  path : string
488  Path that describes an object in the repository associated with
489  this mapper.
490  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
491  indicator will be stripped when searching and so will match
492  filenames without the HDU indicator, e.g. 'foo.fits'. The path
493  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
494 
495  Returns
496  -------
497  string
498  The path for this object in the repository. Will return None if the
499  object can't be found. If the input argument path contained an HDU
500  indicator, the returned path will also contain the HDU indicator.
501  """
502  # it would be better if storage was an instance, instead of having to demux the root URI every time.
503  return dafPersist.Storage.search(self.root, path)
504 
505  def backup(self, datasetType, dataId):
506  """Rename any existing object with the given type and dataId.
507 
508  The CameraMapper implementation saves objects in a sequence of e.g.:
509  foo.fits
510  foo.fits~1
511  foo.fits~2
512  All of the backups will be placed in the output repo, however, and will
513  not be removed if they are found elsewhere in the _parent chain. This
514  means that the same file will be stored twice if the previous version was
515  found in an input repo.
516  """
517 
518  # Calling PosixStorage directly is not the long term solution in this
519  # function, this is work-in-progress on epic DM-6225. The plan is for
520  # parentSearch to be changed to 'search', and search only the storage
521  # associated with this mapper. All searching of parents will be handled
522  # by traversing the container of repositories in Butler.
523 
524  def firstElement(list):
525  """Get the first element in the list, or None if that can't be done.
526  """
527  return list[0] if list is not None and len(list) else None
528 
529  n = 0
530  newLocation = self.map(datasetType, dataId, write=True)
531  newPath = newLocation.getLocations()[0]
532  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
533  path = firstElement(path)
534  oldPaths = []
535  while path is not None:
536  n += 1
537  oldPaths.append((n, path))
538  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
539  path = firstElement(path)
540  for n, oldPath in reversed(oldPaths):
541  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
542 
543  def keys(self):
544  """Return supported keys.
545  @return (iterable) List of keys usable in a dataset identifier"""
546  return iter(self.keyDict.keys())
547 
548  def getKeys(self, datasetType, level):
549  """Return a dict of supported keys and their value types for a given dataset
550  type at a given level of the key hierarchy.
551 
552  @param datasetType (str) dataset type or None for all dataset types
553  @param level (str) level or None for all levels or '' for the default level for the camera
554  @return (dict) dict keys are strings usable in a dataset identifier; values are their value types"""
555 
556  # not sure if this is how we want to do this. what if None was intended?
557  if level == '':
558  level = self.getDefaultLevel()
559 
560  if datasetType is None:
561  keyDict = copy.copy(self.keyDict)
562  else:
563  keyDict = self.mappings[datasetType].keys()
564  if level is not None and level in self.levels:
565  keyDict = copy.copy(keyDict)
566  for l in self.levels[level]:
567  if l in keyDict:
568  del keyDict[l]
569  return keyDict
570 
571  def getDefaultLevel(self):
572  return self.defaultLevel
573 
574  def getDefaultSubLevel(self, level):
575  if level in self.defaultSubLevels:
576  return self.defaultSubLevels[level]
577  return None
578 
579  @classmethod
580  def getCameraName(cls):
581  """Return the name of the camera that this CameraMapper is for."""
582  className = str(cls)
583  className = className[className.find('.'):-1]
584  m = re.search(r'(\w+)Mapper', className)
585  if m is None:
586  m = re.search(r"class '[\w.]*?(\w+)'", className)
587  name = m.group(1)
588  return name[:1].lower() + name[1:] if name else ''
589 
590  @classmethod
591  def getPackageName(cls):
592  """Return the name of the package containing this CameraMapper."""
593  if cls.packageName is None:
594  raise ValueError('class variable packageName must not be None')
595  return cls.packageName
596 
597  def map_camera(self, dataId, write=False):
598  """Map a camera dataset."""
599  if self.camera is None:
600  raise RuntimeError("No camera dataset available.")
601  actualId = self._transformId(dataId)
602  return dafPersist.ButlerLocation(
603  pythonType="lsst.afw.cameraGeom.CameraConfig",
604  cppType="Config",
605  storageName="ConfigStorage",
606  locationList=self.cameraDataLocation or "ignored",
607  dataId=actualId,
608  mapper=self,
609  storage=self.rootStorage
610  )
611 
612  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
613  """Return the (preloaded) camera object.
614  """
615  if self.camera is None:
616  raise RuntimeError("No camera dataset available.")
617  return self.camera
618 
619  def map_defects(self, dataId, write=False):
620  """Map defects dataset.
621 
622  @return a very minimal ButlerLocation containing just the locationList field
623  (just enough information that bypass_defects can use it).
624  """
625  defectFitsPath = self._defectLookup(dataId=dataId)
626  if defectFitsPath is None:
627  raise RuntimeError("No defects available for dataId=%s" % (dataId,))
628 
629  return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
630  dataId, self,
631  storage=self.rootStorage)
632 
633  def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
634  """Return a defect based on the butler location returned by map_defects
635 
636  @param[in] butlerLocation: a ButlerLocation with locationList = path to defects FITS file
637  @param[in] dataId: the usual data ID; "ccd" must be set
638 
639  Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation
640  into an object, which is what we want for now, since that conversion is a bit tricky.
641  """
642  detectorName = self._extractDetectorName(dataId)
643  defectsFitsPath = butlerLocation.locationList[0]
644  with pyfits.open(defectsFitsPath) as hduList:
645  for hdu in hduList[1:]:
646  if hdu.header["name"] != detectorName:
647  continue
648 
649  defectList = []
650  for data in hdu.data:
651  bbox = afwGeom.Box2I(
652  afwGeom.Point2I(int(data['x0']), int(data['y0'])),
653  afwGeom.Extent2I(int(data['width']), int(data['height'])),
654  )
655  defectList.append(afwImage.DefectBase(bbox))
656  return defectList
657 
658  raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
659 
660  def map_expIdInfo(self, dataId, write=False):
661  return dafPersist.ButlerLocation(
662  pythonType="lsst.obs.base.ExposureIdInfo",
663  cppType=None,
664  storageName="Internal",
665  locationList="ignored",
666  dataId=dataId,
667  mapper=self,
668  storage=self.rootStorage
669  )
670 
671  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
672  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
673  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
674  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
675  return ExposureIdInfo(expId=expId, expBits=expBits)
676 
677  def std_bfKernel(self, item, dataId):
678  """Disable standardization for bfKernel
679 
 680  bfKernel is a calibration product that is a numpy array,
 681  unlike other calibration products, which are all images;
682  all calibration images are sent through _standardizeExposure
683  due to CalibrationMapping, but we don't want that to happen to bfKernel
684  """
685  return item
686 
687  def std_raw(self, item, dataId):
688  """Standardize a raw dataset by converting it to an Exposure instead of an Image"""
689  exposure = exposureFromImage(item)
690  exposureId = self._computeCcdExposureId(dataId)
691  md = exposure.getMetadata()
692  visitInfo = self.makeRawVisitInfo(md=md, exposureId=exposureId)
693  exposure.getInfo().setVisitInfo(visitInfo)
694  return self._standardizeExposure(self.exposures['raw'], exposure, dataId,
695  trimmed=False)
696 
697  def map_skypolicy(self, dataId):
698  """Map a sky policy."""
699  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
700  "Internal", None, None, self,
701  storage=self.rootStorage)
702 
703  def std_skypolicy(self, item, dataId):
704  """Standardize a sky policy by returning the one we use."""
705  return self.skypolicy
706 
707 ###############################################################################
708 #
709 # Utility functions
710 #
711 ###############################################################################
712 
713  def _getCcdKeyVal(self, dataId):
 714  """Return the CCD key and value used to look up a defect in the defect registry
715 
716  The default implementation simply returns ("ccd", full detector name)
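
 A subclass whose defect registry is keyed differently could override it;
 a minimal sketch (the "ccdSerial" key is hypothetical):

     def _getCcdKeyVal(self, dataId):
         return ("ccdSerial", self._extractDetectorName(dataId))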
717  """
718  return ("ccd", self._extractDetectorName(dataId))
719 
720  def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
721  posixIfNoSql=True):
722  """Set up a registry (usually SQLite3), trying a number of possible
723  paths.
724 
725  Parameters
726  ----------
727  name : string
728  Name of registry.
 729  description : string
 730  Description of registry (for log messages).
 731  path : string
 732  Path for registry.
 733  policy : Policy
 734  Policy that contains the registry path, used if path is None.
735  policyKey : string
736  Key in policy for registry path.
737  storage : Storage subclass
738  Repository Storage to look in.
739  searchParents : bool, optional
740  True if the search for a registry should follow any Butler v1
741  _parent symlinks.
 742  posixIfNoSql : bool, optional
 743  If True and no SQLite registry is found, fall back to creating a
 744  POSIX registry.
745 
746  Returns
747  -------
748  lsst.daf.persistence.Registry
749  Registry object
750  """
751  if path is None and policyKey in policy:
752  path = dafPersist.LogicalLocation(policy[policyKey]).locString()
753  if os.path.isabs(path):
754  raise RuntimeError("Policy should not indicate an absolute path for registry.")
755  if not storage.exists(path):
756  newPath = storage.instanceSearch(path)
757 
758  newPath = newPath[0] if newPath is not None and len(newPath) else None
759  if newPath is None:
760  self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
761  path)
762  path = newPath
763  else:
764  self.log.warn("Unable to locate registry at policy path: %s", path)
765  path = None
766 
 767  # The old Butler API specified the registry location including the repo folder; the new Butler
 768  # expects the registry to be inside the repo folder, given relative to it. To support the old API,
 769  # check whether path starts with root and, if so, strip root from path.
770  root = storage.root
771  if path and (path.startswith(root)):
772  path = path[len(root + '/'):]
773 
774  # determine if there is an sqlite registry and if not, try the posix registry.
775  registry = None
776 
777  def search(filename, description):
778  """Search for file in storage
779 
780  Parameters
781  ----------
782  filename : `str`
783  Filename to search for
784  description : `str`
785  Description of file, for error message.
786 
787  Returns
788  -------
789  path : `str` or `None`
790  Path to file, or None
791  """
792  result = storage.instanceSearch(filename)
793  if result:
794  return result[0]
795  self.log.debug("Unable to locate %s: %s", description, filename)
796  return None
797 
798  # Search for a suitable registry database
799  if path is None:
800  path = search("%s.pgsql" % name, "%s in root" % description)
801  if path is None:
802  path = search("%s.sqlite3" % name, "%s in root" % description)
803  if path is None:
804  path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
805 
806  if path is not None:
807  if not storage.exists(path):
808  newPath = storage.instanceSearch(path)
809  newPath = newPath[0] if newPath is not None and len(newPath) else None
810  if newPath is not None:
811  path = newPath
812  self.log.debug("Loading %s registry from %s", description, path)
813  registry = dafPersist.Registry.create(storage.getLocalFile(path))
814  elif not registry and posixIfNoSql:
815  self.log.info("Loading Posix %s registry from %s", description, storage.root)
816  registry = dafPersist.PosixRegistry(storage.root)
817 
818  return registry
819 
820  def _transformId(self, dataId):
821  """Generate a standard ID dict from a camera-specific ID dict.
822 
823  Canonical keys include:
824  - amp: amplifier name
825  - ccd: CCD name (in LSST this is a combination of raft and sensor)
826  The default implementation returns a copy of its input.
827 
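 A hedged sketch of a possible override, mapping the colloquial "ccdname"
 key mentioned above onto "ccd" and stripping commas for path expansion:

     def _transformId(self, dataId):
         actualId = dataId.copy()
         if "ccdname" in actualId:
             actualId["ccd"] = actualId.pop("ccdname").replace(",", "")
         return actualId
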
828  @param dataId[in] (dict) Dataset identifier; this must not be modified
829  @return (dict) Transformed dataset identifier"""
830 
831  return dataId.copy()
832 
833  def _mapActualToPath(self, template, actualId):
834  """Convert a template path to an actual path, using the actual data
835  identifier. This implementation is usually sufficient but can be
836  overridden by the subclass.
837  @param template (string) Template path
838  @param actualId (dict) Dataset identifier
839  @return (string) Pathname"""
840 
841  try:
842  transformedId = self._transformId(actualId)
843  return template % transformedId
844  except Exception as e:
845  raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
846 
847  @staticmethod
848  def getShortCcdName(ccdName):
849  """Convert a CCD name to a form useful as a filename
850 
851  The default implementation converts spaces to underscores.
852  """
853  return ccdName.replace(" ", "_")
854 
855  def _extractDetectorName(self, dataId):
856  """Extract the detector (CCD) name from the dataset identifier.
857 
858  The name in question is the detector name used by lsst.afw.cameraGeom.
859 
860  @param dataId (dict) Dataset identifier
861  @return (string) Detector name
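
 A hedged sketch of a possible implementation, assuming the data ID
 carries hypothetical "raft" and "sensor" keys:

     def _extractDetectorName(self, dataId):
         return "R:%(raft)s S:%(sensor)s" % self._transformId(dataId)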
862  """
863  raise NotImplementedError("No _extractDetectorName() function specified")
864 
865  def _extractAmpId(self, dataId):
 866  """Extract the amplifier identifier from a dataset identifier.
 867 
 868  @warning this is deprecated; DO NOT USE IT
 869 
 870  The amplifier identifier has two parts: the detector name for the CCD
 871  containing the amplifier and the index of the amplifier in the detector.
 872  @param dataId (dict) Dataset identifier
 873  @return (tuple) Amplifier identifier"""
874 
875  trDataId = self._transformId(dataId)
876  return (trDataId["ccd"], int(trDataId['amp']))
877 
878  def _setAmpDetector(self, item, dataId, trimmed=True):
879  """Set the detector object in an Exposure for an amplifier.
880  Defects are also added to the Exposure based on the detector object.
881  @param[in,out] item (lsst.afw.image.Exposure)
882  @param dataId (dict) Dataset identifier
883  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
884 
885  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
886 
887  def _setCcdDetector(self, item, dataId, trimmed=True):
888  """Set the detector object in an Exposure for a CCD.
889  @param[in,out] item (lsst.afw.image.Exposure)
890  @param dataId (dict) Dataset identifier
891  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
892 
893  if item.getDetector() is not None:
894  return
895 
896  detectorName = self._extractDetectorName(dataId)
897  detector = self.camera[detectorName]
898  item.setDetector(detector)
899 
900  def _setFilter(self, mapping, item, dataId):
901  """Set the filter object in an Exposure. If the Exposure had a FILTER
902  keyword, this was already processed during load. But if it didn't,
903  use the filter from the registry.
904  @param mapping (lsst.obs.base.Mapping)
905  @param[in,out] item (lsst.afw.image.Exposure)
906  @param dataId (dict) Dataset identifier"""
907 
908  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
909  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
910  return
911 
912  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
913  return
914 
915  actualId = mapping.need(['filter'], dataId)
916  filterName = actualId['filter']
917  if self.filters is not None and filterName in self.filters:
918  filterName = self.filters[filterName]
919  item.setFilter(afwImage.Filter(filterName))
920 
921  # Default standardization function for exposures
922  def _standardizeExposure(self, mapping, item, dataId, filter=True,
923  trimmed=True):
924  """Default standardization function for images.
925 
926  This sets the Detector from the camera geometry
 927  and optionally sets the Filter. In both cases this saves
928  having to persist some data in each exposure (or image).
929 
930  @param mapping (lsst.obs.base.Mapping)
931  @param[in,out] item image-like object; any of lsst.afw.image.Exposure,
932  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
933  or lsst.afw.image.MaskedImage
934  @param dataId (dict) Dataset identifier
935  @param filter (bool) Set filter? Ignored if item is already an exposure
936  @param trimmed (bool) Should detector be marked as trimmed?
937  @return (lsst.afw.image.Exposure) the standardized Exposure"""
938  if not hasattr(item, "getMaskedImage"):
939  try:
940  item = exposureFromImage(item)
941  except Exception as e:
942  self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
943  raise
944 
945  if mapping.level.lower() == "amp":
946  self._setAmpDetector(item, dataId, trimmed)
947  elif mapping.level.lower() == "ccd":
948  self._setCcdDetector(item, dataId, trimmed)
949 
950  if filter:
951  self._setFilter(mapping, item, dataId)
952 
953  return item
954 
955  def _defectLookup(self, dataId):
956  """Find the defects for a given CCD.
957  @param dataId (dict) Dataset identifier
958  @return (string) path to the defects file or None if not available"""
959  if self.defectRegistry is None:
960  return None
961  if self.registry is None:
962  raise RuntimeError("No registry for defect lookup")
963 
964  ccdKey, ccdVal = self._getCcdKeyVal(dataId)
965 
966  dataIdForLookup = {'visit': dataId['visit']}
967  # .lookup will fail in a posix registry because there is no template to provide.
968  rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
969  if len(rows) == 0:
970  return None
971  assert len(rows) == 1
972  taiObs = rows[0][0]
973 
974  # Lookup the defects for this CCD serial number that are valid at the exposure midpoint.
975  rows = self.defectRegistry.executeQuery(("path",), ("defect",),
976  [(ccdKey, "?")],
977  ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
978  (ccdVal, taiObs))
979  if not rows or len(rows) == 0:
980  return None
981  if len(rows) == 1:
982  return os.path.join(self.defectPath, rows[0][0])
983  else:
984  raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
985  (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
986 
987  def _makeCamera(self, policy, repositoryDir):
988  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry
989 
990  Also set self.cameraDataLocation, if relevant (else it can be left None).
991 
992  This implementation assumes that policy contains an entry "camera" that points to the
993  subdirectory in this package of camera data; specifically, that subdirectory must contain:
994  - a file named `camera.py` that contains persisted camera config
995  - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath
996 
997  @param policy (daf_persistence.Policy, or pexPolicy.Policy (only for backward compatibility))
998  Policy with per-camera defaults already merged
999  @param repositoryDir (string) Policy repository for the subclassing
1000  module (obtained with getRepositoryPath() on the
1001  per-camera default dictionary)
1002  """
1003  if isinstance(policy, pexPolicy.Policy):
1004  policy = dafPersist.Policy(pexPolicy=policy)
1005  if 'camera' not in policy:
1006  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1007  cameraDataSubdir = policy['camera']
1008  self.cameraDataLocation = os.path.normpath(
1009  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1010  cameraConfig = afwCameraGeom.CameraConfig()
1011  cameraConfig.load(self.cameraDataLocation)
1012  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1013  return afwCameraGeom.makeCameraFromPath(
1014  cameraConfig=cameraConfig,
1015  ampInfoPath=ampInfoPath,
1016  shortNameFunc=self.getShortCcdName,
1017  pupilFactoryClass=self.PupilFactoryClass
1018  )
1019 
1020  def getRegistry(self):
1021  """Get the registry used by this mapper.
1022 
1023  Returns
1024  -------
1025  Registry or None
1026  The registry used by this mapper for this mapper's repository.
1027  """
1028  return self.registry
1029 
 1030 def exposureFromImage(image):
 1031  """Generate an Exposure from an image-like object
1032 
1033  If the image is a DecoratedImage then also set its WCS and metadata
1034  (Image and MaskedImage are missing the necessary metadata
1035  and Exposure already has those set)
1036 
1037  @param[in] image Image-like object (lsst.afw.image.DecoratedImage, Image, MaskedImage or Exposure)
1038  @return (lsst.afw.image.Exposure) Exposure containing input image
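
 A minimal usage sketch (the file name is hypothetical):

     dimg = afwImage.DecoratedImageF("raw.fits")  # image plus FITS header
     exposure = exposureFromImage(dimg)  # Exposure with WCS and metadata set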
1039  """
1040  if isinstance(image, afwImage.MaskedImage):
1041  exposure = afwImage.makeExposure(image)
1042  elif isinstance(image, afwImage.DecoratedImage):
1043  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1044  metadata = image.getMetadata()
1045  wcs = afwImage.makeWcs(metadata, True)
1046  exposure.setWcs(wcs)
1047  exposure.setMetadata(metadata)
1048  elif isinstance(image, afwImage.Exposure):
1049  # Exposure
1050  exposure = image
1051  else:
1052  # Image
1053  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1054 
1055  return exposure