lsst.obs.base  13.0-13-gdd29b46+6
 All Classes Namespaces Files Functions Variables
cameraMapper.py
Go to the documentation of this file.
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 from builtins import str
24 import copy
25 import os
26 import pyfits # required by _makeDefectsDict until defects are written as AFW tables
27 import re
28 import weakref
29 import lsst.daf.persistence as dafPersist
30 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
31 import lsst.afw.geom as afwGeom
32 import lsst.afw.image as afwImage
33 import lsst.afw.table as afwTable
34 import lsst.afw.cameraGeom as afwCameraGeom
35 import lsst.log as lsstLog
36 import lsst.pex.policy as pexPolicy
37 from .exposureIdInfo import ExposureIdInfo
38 from .makeRawVisitInfo import MakeRawVisitInfo
39 from lsst.utils import getPackageDir
40 
41 """This module defines the CameraMapper base class."""
42 
43 
44 class CameraMapper(dafPersist.Mapper):
45 
46  """CameraMapper is a base class for mappers that handle images from a
47  camera and products derived from them. This provides an abstraction layer
48  between the data on disk and the code.
49 
50  Public methods: keys, queryMetadata, getDatasetTypes, map,
51  canStandardize, standardize
52 
53  Mappers for specific data sources (e.g., CFHT Megacam, LSST
54  simulations, etc.) should inherit this class.
55 
56  The CameraMapper manages datasets within a "root" directory. Note that
57  writing to a dataset present in the input root will hide the existing
58  dataset but not overwrite it. See #2160 for design discussion.
59 
60  A camera is assumed to consist of one or more rafts, each composed of
61  multiple CCDs. Each CCD is in turn composed of one or more amplifiers
62  (amps). A camera is also assumed to have a camera geometry description
63  (CameraGeom object) as a policy file, a filter description (Filter class
64  static configuration) as another policy file, and an optional defects
65  description directory.
66 
67  Information from the camera geometry and defects are inserted into all
68  Exposure objects returned.
69 
70  The mapper uses one or two registries to retrieve metadata about the
71  images. The first is a registry of all raw exposures. This must contain
72  the time of the observation. One or more tables (or the equivalent)
73  within the registry are used to look up data identifier components that
74  are not specified by the user (e.g. filter) and to return results for
75  metadata queries. The second is an optional registry of all calibration
76  data. This should contain validity start and end entries for each
77  calibration dataset in the same timescale as the observation time.
78 
79  Subclasses will typically set MakeRawVisitInfoClass:
80 
81  MakeRawVisitInfoClass: a class variable that points to a subclass of
82  MakeRawVisitInfo, a functor that creates an
83  lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
84 
85  Subclasses must provide the following methods:
86 
87  _extractDetectorName(self, dataId): returns the detector name for a CCD
88  (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
89  a dataset identifier referring to that CCD or a subcomponent of it.
90 
91  _computeCcdExposureId(self, dataId): see below
92 
93  _computeCoaddExposureId(self, dataId, singleFilter): see below
94 
95  Subclasses may also need to override the following methods:
96 
97  _transformId(self, dataId): transformation of a data identifier
98  from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"),
99  including making suitable for path expansion (e.g. removing commas).
100  The default implementation does nothing. Note that this
101  method should not modify its input parameter.
102 
103  getShortCcdName(self, ccdName): a static method that returns a shortened name
104  suitable for use as a filename. The default version converts spaces to underscores.
105 
106  _getCcdKeyVal(self, dataId): return a CCD key and value
107  by which to look up defects in the defects registry.
108  The default value returns ("ccd", detector name)
109 
110  _mapActualToPath(self, template, actualId): convert a template path to an
111  actual path, using the actual dataset identifier.
112 
113  The mapper's behaviors are largely specified by the policy file.
114  See the MapperDictionary.paf for descriptions of the available items.
115 
116  The 'exposures', 'calibrations', and 'datasets' subpolicies configure
117  mappings (see Mappings class).
118 
119  Common default mappings for all subclasses can be specified in the
120  "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides
121  a simple way to add a product to all camera mappers.
122 
123  Functions to map (provide a path to the data given a dataset
124  identifier dictionary) and standardize (convert data into some standard
125  format or type) may be provided in the subclass as "map_{dataset type}"
126  and "std_{dataset type}", respectively.
127 
128  If non-Exposure datasets cannot be retrieved using standard
129  daf_persistence methods alone, a "bypass_{dataset type}" function may be
130  provided in the subclass to return the dataset instead of using the
131  "datasets" subpolicy.
132 
133  Implementations of map_camera and bypass_camera that should typically be
134  sufficient are provided in this base class.
135 
136  @todo
137  * Handle defects the same way as all other calibration products, using the calibration registry
138  * Instead of auto-loading the camera at construction time, load it from the calibration registry
139  * Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention
140  of pyfits from this package.
141  """
    # Name of the package providing this mapper; subclasses MUST override
    # (a None value is rejected at construction time and by getPackageName).
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory
150 
151  def __init__(self, policy, repositoryDir,
152  root=None, registry=None, calibRoot=None, calibRegistry=None,
153  provided=None, parentRegistry=None, repositoryCfg=None):
154  """Initialize the CameraMapper.
155 
156  Parameters
157  ----------
158  policy : daf_persistence.Policy,
159  Can also be pexPolicy.Policy, only for backward compatibility.
160  Policy with per-camera defaults already merged.
161  repositoryDir : string
162  Policy repository for the subclassing module (obtained with
163  getRepositoryPath() on the per-camera default dictionary).
164  root : string, optional
165  Path to the root directory for data.
166  registry : string, optional
167  Path to registry with data's metadata.
168  calibRoot : string, optional
169  Root directory for calibrations.
170  calibRegistry : string, optional
171  Path to registry with calibrations' metadata.
172  provided : list of string, optional
173  Keys provided by the mapper.
174  parentRegistry : Registry subclass, optional
175  Registry from a parent repository that may be used to look up
176  data's metadata.
177  repositoryCfg : daf_persistence.RepositoryCfg or None, optional
178  The configuration information for the repository this mapper is
179  being used with.
180  """
181 
182  dafPersist.Mapper.__init__(self)
183 
184  self.log = lsstLog.Log.getLogger("CameraMapper")
185 
186  if root:
187  self.root = root
188  elif repositoryCfg:
189  self.root = repositoryCfg.root
190  else:
191  self.root = None
192  if isinstance(policy, pexPolicy.Policy):
193  policy = dafPersist.Policy(policy)
194 
195  repoPolicy = repositoryCfg.policy if repositoryCfg else None
196  if repoPolicy is not None:
197  policy.update(repoPolicy)
198 
199  defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
200  "MapperDictionary.paf",
201  "policy")
202  dictPolicy = dafPersist.Policy(defaultPolicyFile)
203  policy.merge(dictPolicy)
204 
205  # Levels
206  self.levels = dict()
207  if 'levels' in policy:
208  levelsPolicy = policy['levels']
209  for key in levelsPolicy.names(True):
210  self.levels[key] = set(levelsPolicy.asArray(key))
211  self.defaultLevel = policy['defaultLevel']
212  self.defaultSubLevels = dict()
213  if 'defaultSubLevels' in policy:
214  self.defaultSubLevels = policy['defaultSubLevels']
215 
216  # Root directories
217  if root is None:
218  root = "."
219  root = dafPersist.LogicalLocation(root).locString()
220 
221  self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
222 
223  # If the calibRoot is passed in, use that. If not and it's indicated in the policy, use that. And
224  # otherwise, the calibs are in the regular root.
225  # If the location indicated by the calib root does not exist, do not create it.
226  calibStorage = None
227  if calibRoot is not None and dafPersist.Storage.storageExists(uri=calibRoot):
228  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot)
229  elif 'calibRoot' in policy:
230  calibRoot = policy['calibRoot']
231  calibRoot = dafPersist.LogicalLocation(calibRoot).locString()
232  if dafPersist.Storage.exists(uri=calibRoot):
233  calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot)
234  if calibStorage is None:
235  calibStorage = self.rootStorage
236 
237  self.root = root
238 
239  # Registries
240  self.registry = self._setupRegistry("registry", registry, policy, "registryPath", self.rootStorage,
241  searchParents=False, posixIfNoSql=(not parentRegistry))
242  if not self.registry:
243  self.registry = parentRegistry
244  needCalibRegistry = policy.get('needCalibRegistry', None)
245  if needCalibRegistry:
246  if calibStorage:
247  self.calibRegistry = self._setupRegistry("calibRegistry", calibRegistry, policy,
248  "calibRegistryPath", calibStorage)
249  else:
250  raise RuntimeError(
251  "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
252  "calibRoot ivar:%s or policy['calibRoot']:%s" %
253  (calibRoot, policy.get('calibRoot', None)))
254  else:
255  self.calibRegistry = None
256 
257  # Dict of valid keys and their value types
258  self.keyDict = dict()
259 
260  self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
261 
262  # Camera geometry
263  self.cameraDataLocation = None # path to camera geometry config file
264  self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)
265 
266  # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
267  # camera package, which is on the local filesystem.
268  self.defectRegistry = None
269  if 'defects' in policy:
270  self.defectPath = os.path.join(repositoryDir, policy['defects'])
271  defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
272  self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
273 
274  # Filter translation table
275  self.filters = None
276 
277  # Skytile policy
278  self.skypolicy = policy['skytiles']
279 
280  # verify that the class variable packageName is set before attempting
281  # to instantiate an instance
282  if self.packageName is None:
283  raise ValueError('class variable packageName must not be None')
284 
286 
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there are
        methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g., reading
        the header of an image, retrieving only the size of a catalog.

        @param policy (Policy) Policy with per-camera defaults already merged
        @param rootStorage (Storage subclass instance) Interface to persisted repository data
        @param calibStorage (Storage subclass instance) Interface to persisted calib repository data
        @param provided (list of strings) Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))

        # Mappings: each policy section pairs a default dictionary with the
        # Mapping subclass that implements it.
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                # e.g. self.images, self.exposures, self.calibrations, self.datasets
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    # Composite datasets only get a map_ method that returns a
                    # ButlerComposite describing their components; no other
                    # handlers are generated for them.
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        # subPolicy is bound as a default argument so each
                        # closure captures the current loop value (avoids the
                        # late-binding pitfall).
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None, subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        # Calibrations additionally need the calib registry and storage.
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    # Default handlers; ``mapping`` is bound as a default
                    # argument in each closure to freeze the current loop value.
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])

                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(location.getLocationsWithRoot()[0]))
                        if name == "exposures":
                            # Derived sub-products read straight from the FITS header.
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.makeWcs(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        # hdu=1: catalog metadata lives in the first extension, not the primary HDU
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            # Map without the 'bbox' key, then attach the bbox
                            # geometry as additional data on the location.
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            # Registry lookups ignore the 'bbox' key.
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]),
                                                         hdu=1).get("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
459 
460  def _computeCcdExposureId(self, dataId):
461  """Compute the 64-bit (long) identifier for a CCD exposure.
462 
463  Subclasses must override
464 
465  @param dataId (dict) Data identifier with visit, ccd
466  """
467  raise NotImplementedError()
468 
469  def _computeCoaddExposureId(self, dataId, singleFilter):
470  """Compute the 64-bit (long) identifier for a coadd.
471 
472  Subclasses must override
473 
474  @param dataId (dict) Data identifier with tract and patch.
475  @param singleFilter (bool) True means the desired ID is for a single-
476  filter coadd, in which case dataId
477  must contain filter.
478  """
479  raise NotImplementedError()
480 
481  def _search(self, path):
482  """Search for path in the associated repository's storage.
483 
484  Parameters
485  ----------
486  path : string
487  Path that describes an object in the repository associated with
488  this mapper.
489  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
490  indicator will be stripped when searching and so will match
491  filenames without the HDU indicator, e.g. 'foo.fits'. The path
492  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
493 
494  Returns
495  -------
496  string
497  The path for this object in the repository. Will return None if the
498  object can't be found. If the input argument path contained an HDU
499  indicator, the returned path will also contain the HDU indicator.
500  """
501  # it would be better if storage was an instance, instead of having to demux the root URI every time.
502  return dafPersist.Storage.search(self.root, path)
503 
504  def backup(self, datasetType, dataId):
505  """Rename any existing object with the given type and dataId.
506 
507  The CameraMapper implementation saves objects in a sequence of e.g.:
508  foo.fits
509  foo.fits~1
510  foo.fits~2
511  All of the backups will be placed in the output repo, however, and will
512  not be removed if they are found elsewhere in the _parent chain. This
513  means that the same file will be stored twice if the previous version was
514  found in an input repo.
515  """
516 
517  # Calling PosixStorage directly is not the long term solution in this
518  # function, this is work-in-progress on epic DM-6225. The plan is for
519  # parentSearch to be changed to 'search', and search only the storage
520  # associated with this mapper. All searching of parents will be handled
521  # by traversing the container of repositories in Butler.
522 
523  def firstElement(list):
524  """Get the first element in the list, or None if that can't be done.
525  """
526  return list[0] if list is not None and len(list) else None
527 
528  n = 0
529  newLocation = self.map(datasetType, dataId, write=True)
530  newPath = newLocation.getLocations()[0]
531  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
532  path = firstElement(path)
533  oldPaths = []
534  while path is not None:
535  n += 1
536  oldPaths.append((n, path))
537  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
538  path = firstElement(path)
539  for n, oldPath in reversed(oldPaths):
540  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
541 
542  def keys(self):
543  """Return supported keys.
544  @return (iterable) List of keys usable in a dataset identifier"""
545  return iter(self.keyDict.keys())
546 
547  def getKeys(self, datasetType, level):
548  """Return a dict of supported keys and their value types for a given dataset
549  type at a given level of the key hierarchy.
550 
551  @param datasetType (str) dataset type or None for all dataset types
552  @param level (str) level or None for all levels or '' for the default level for the camera
553  @return (dict) dict keys are strings usable in a dataset identifier; values are their value types"""
554 
555  # not sure if this is how we want to do this. what if None was intended?
556  if level == '':
557  level = self.getDefaultLevel()
558 
559  if datasetType is None:
560  keyDict = copy.copy(self.keyDict)
561  else:
562  keyDict = self.mappings[datasetType].keys()
563  if level is not None and level in self.levels:
564  keyDict = copy.copy(keyDict)
565  for l in self.levels[level]:
566  if l in keyDict:
567  del keyDict[l]
568  return keyDict
569 
570  def getDefaultLevel(self):
571  return self.defaultLevel
572 
573  def getDefaultSubLevel(self, level):
574  if level in self.defaultSubLevels:
575  return self.defaultSubLevels[level]
576  return None
577 
578  @classmethod
579  def getCameraName(cls):
580  """Return the name of the camera that this CameraMapper is for."""
581  className = str(cls)
582  className = className[className.find('.'):-1]
583  m = re.search(r'(\w+)Mapper', className)
584  if m is None:
585  m = re.search(r"class '[\w.]*?(\w+)'", className)
586  name = m.group(1)
587  return name[:1].lower() + name[1:] if name else ''
588 
589  @classmethod
590  def getPackageName(cls):
591  """Return the name of the package containing this CameraMapper."""
592  if cls.packageName is None:
593  raise ValueError('class variable packageName must not be None')
594  return cls.packageName
595 
596  def map_camera(self, dataId, write=False):
597  """Map a camera dataset."""
598  if self.camera is None:
599  raise RuntimeError("No camera dataset available.")
600  actualId = self._transformId(dataId)
601  return dafPersist.ButlerLocation(
602  pythonType="lsst.afw.cameraGeom.CameraConfig",
603  cppType="Config",
604  storageName="ConfigStorage",
605  locationList=self.cameraDataLocation or "ignored",
606  dataId=actualId,
607  mapper=self,
608  storage=self.rootStorage
609  )
610 
611  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
612  """Return the (preloaded) camera object.
613  """
614  if self.camera is None:
615  raise RuntimeError("No camera dataset available.")
616  return self.camera
617 
618  def map_defects(self, dataId, write=False):
619  """Map defects dataset.
620 
621  @return a very minimal ButlerLocation containing just the locationList field
622  (just enough information that bypass_defects can use it).
623  """
624  defectFitsPath = self._defectLookup(dataId=dataId)
625  if defectFitsPath is None:
626  raise RuntimeError("No defects available for dataId=%s" % (dataId,))
627 
628  return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
629  dataId, self,
630  storage=self.rootStorage)
631 
632  def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
633  """Return a defect based on the butler location returned by map_defects
634 
635  @param[in] butlerLocation: a ButlerLocation with locationList = path to defects FITS file
636  @param[in] dataId: the usual data ID; "ccd" must be set
637 
638  Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation
639  into an object, which is what we want for now, since that conversion is a bit tricky.
640  """
641  detectorName = self._extractDetectorName(dataId)
642  defectsFitsPath = butlerLocation.locationList[0]
643  with pyfits.open(defectsFitsPath) as hduList:
644  for hdu in hduList[1:]:
645  if hdu.header["name"] != detectorName:
646  continue
647 
648  defectList = []
649  for data in hdu.data:
650  bbox = afwGeom.Box2I(
651  afwGeom.Point2I(int(data['x0']), int(data['y0'])),
652  afwGeom.Extent2I(int(data['width']), int(data['height'])),
653  )
654  defectList.append(afwImage.DefectBase(bbox))
655  return defectList
656 
657  raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
658 
659  def map_expIdInfo(self, dataId, write=False):
660  return dafPersist.ButlerLocation(
661  pythonType="lsst.obs.base.ExposureIdInfo",
662  cppType=None,
663  storageName="Internal",
664  locationList="ignored",
665  dataId=dataId,
666  mapper=self,
667  storage=self.rootStorage
668  )
669 
670  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
671  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
672  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
673  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
674  return ExposureIdInfo(expId=expId, expBits=expBits)
675 
676  def std_bfKernel(self, item, dataId):
677  """Disable standardization for bfKernel
678 
679  bfKernel is a calibration product that is numpy array,
680  unlike other calibration products that are all images;
681  all calibration images are sent through _standardizeExposure
682  due to CalibrationMapping, but we don't want that to happen to bfKernel
683  """
684  return item
685 
686  def std_raw(self, item, dataId):
687  """Standardize a raw dataset by converting it to an Exposure instead of an Image"""
688  exposure = exposureFromImage(item)
689  exposureId = self._computeCcdExposureId(dataId)
690  md = exposure.getMetadata()
691  visitInfo = self.makeRawVisitInfo(md=md, exposureId=exposureId)
692  exposure.getInfo().setVisitInfo(visitInfo)
693  return self._standardizeExposure(self.exposures['raw'], exposure, dataId,
694  trimmed=False)
695 
696  def map_skypolicy(self, dataId):
697  """Map a sky policy."""
698  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
699  "Internal", None, None, self,
700  storage=self.rootStorage)
701 
702  def std_skypolicy(self, item, dataId):
703  """Standardize a sky policy by returning the one we use."""
704  return self.skypolicy
705 
706 ###############################################################################
707 #
708 # Utility functions
709 #
710 ###############################################################################
711 
712  def _getCcdKeyVal(self, dataId):
713  """Return CCD key and value used to look a defect in the defect registry
714 
715  The default implementation simply returns ("ccd", full detector name)
716  """
717  return ("ccd", self._extractDetectorName(dataId))
718 
    def _setupRegistry(self, name, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry if
            this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        # NOTE(review): searchParents is documented above but never referenced
        # in this body — confirm whether it should influence instanceSearch.
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                # NOTE(review): this branch runs when the policy-specified path
                # DOES exist in storage, yet it warns and discards the path;
                # the condition looks inverted — confirm intent before relying
                # on policy-specified registry paths.
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
        # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
        # root from path.
        root = storage.root
        if path and (path.startswith(root)):
            path = path[len(root + '/'):]

        # determine if there is an sqlite registry and if not, try the posix registry.
        registry = None

        # First fallback: "<name>.sqlite3" directly in the repository.
        if path is None:
            path = "%s.sqlite3" % name
            newPath = storage.instanceSearch(path)
            newPath = newPath[0] if newPath is not None and len(newPath) else None
            if newPath is None:
                self.log.info("Unable to locate %s registry in root: %s", name, path)
            path = newPath
        # Second fallback: "<name>.sqlite3" relative to the current directory.
        if path is None:
            path = os.path.join(".", "%s.sqlite3" % name)
            newPath = storage.instanceSearch(path)
            newPath = newPath[0] if newPath is not None and len(newPath) else None
            if newPath is None:
                self.log.info("Unable to locate %s registry in current dir: %s", name, path)
            path = newPath
        if path is not None:
            # Resolve one more time in case the path is not directly visible
            # in this storage, then load the SQLite registry.
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            self.log.debug("Loading %s registry from %s", name, path)
            registry = dafPersist.Registry.create(storage.getLocalFile(path))
        elif not registry and posixIfNoSql:
            # No sqlite registry found anywhere; fall back to scanning the
            # POSIX filesystem if the caller allows it.
            self.log.info("Loading Posix registry from %s", storage.root)
            registry = dafPersist.PosixRegistry(storage.root)

        return registry
801 
802  def _transformId(self, dataId):
803  """Generate a standard ID dict from a camera-specific ID dict.
804 
805  Canonical keys include:
806  - amp: amplifier name
807  - ccd: CCD name (in LSST this is a combination of raft and sensor)
808  The default implementation returns a copy of its input.
809 
810  @param dataId[in] (dict) Dataset identifier; this must not be modified
811  @return (dict) Transformed dataset identifier"""
812 
813  return dataId.copy()
814 
815  def _mapActualToPath(self, template, actualId):
816  """Convert a template path to an actual path, using the actual data
817  identifier. This implementation is usually sufficient but can be
818  overridden by the subclass.
819  @param template (string) Template path
820  @param actualId (dict) Dataset identifier
821  @return (string) Pathname"""
822 
823  try:
824  transformedId = self._transformId(actualId)
825  return template % transformedId
826  except Exception as e:
827  raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
828 
829  @staticmethod
830  def getShortCcdName(ccdName):
831  """Convert a CCD name to a form useful as a filename
832 
833  The default implementation converts spaces to underscores.
834  """
835  return ccdName.replace(" ", "_")
836 
837  def _extractDetectorName(self, dataId):
838  """Extract the detector (CCD) name from the dataset identifier.
839 
840  The name in question is the detector name used by lsst.afw.cameraGeom.
841 
842  @param dataId (dict) Dataset identifier
843  @return (string) Detector name
844  """
845  raise NotImplementedError("No _extractDetectorName() function specified")
846 
847  def _extractAmpId(self, dataId):
848  """Extract the amplifier identifer from a dataset identifier.
849 
850  @warning this is deprecated; DO NOT USE IT
851 
852  amplifier identifier has two parts: the detector name for the CCD
853  containing the amplifier and index of the amplifier in the detector.
854  @param dataId (dict) Dataset identifer
855  @return (tuple) Amplifier identifier"""
856 
857  trDataId = self._transformId(dataId)
858  return (trDataId["ccd"], int(trDataId['amp']))
859 
860  def _setAmpDetector(self, item, dataId, trimmed=True):
861  """Set the detector object in an Exposure for an amplifier.
862  Defects are also added to the Exposure based on the detector object.
863  @param[in,out] item (lsst.afw.image.Exposure)
864  @param dataId (dict) Dataset identifier
865  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
866 
867  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
868 
869  def _setCcdDetector(self, item, dataId, trimmed=True):
870  """Set the detector object in an Exposure for a CCD.
871  @param[in,out] item (lsst.afw.image.Exposure)
872  @param dataId (dict) Dataset identifier
873  @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
874 
875  detectorName = self._extractDetectorName(dataId)
876  detector = self.camera[detectorName]
877  item.setDetector(detector)
878 
879  def _setFilter(self, mapping, item, dataId):
880  """Set the filter object in an Exposure. If the Exposure had a FILTER
881  keyword, this was already processed during load. But if it didn't,
882  use the filter from the registry.
883  @param mapping (lsst.obs.base.Mapping)
884  @param[in,out] item (lsst.afw.image.Exposure)
885  @param dataId (dict) Dataset identifier"""
886 
887  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
888  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
889  return
890 
891  actualId = mapping.need(['filter'], dataId)
892  filterName = actualId['filter']
893  if self.filters is not None and filterName in self.filters:
894  filterName = self.filters[filterName]
895  item.setFilter(afwImage.Filter(filterName))
896 
897  # Default standardization function for exposures
898  def _standardizeExposure(self, mapping, item, dataId, filter=True,
899  trimmed=True):
900  """Default standardization function for images.
901 
902  This sets the Detector from the camera geometry
903  and optionally set the Fiter. In both cases this saves
904  having to persist some data in each exposure (or image).
905 
906  @param mapping (lsst.obs.base.Mapping)
907  @param[in,out] item image-like object; any of lsst.afw.image.Exposure,
908  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
909  or lsst.afw.image.MaskedImage
910  @param dataId (dict) Dataset identifier
911  @param filter (bool) Set filter? Ignored if item is already an exposure
912  @param trimmed (bool) Should detector be marked as trimmed?
913  @return (lsst.afw.image.Exposure) the standardized Exposure"""
914  if not hasattr(item, "getMaskedImage"):
915  try:
916  item = exposureFromImage(item)
917  except Exception as e:
918  self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
919  raise
920 
921  if mapping.level.lower() == "amp":
922  self._setAmpDetector(item, dataId, trimmed)
923  elif mapping.level.lower() == "ccd":
924  self._setCcdDetector(item, dataId, trimmed)
925 
926  if filter:
927  self._setFilter(mapping, item, dataId)
928 
929  return item
930 
    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.

        @param dataId (dict) Dataset identifier
        @return (string) path to the defects file or None if not available"""
        if self.defectRegistry is None:
            # No defect registry configured for this mapper.
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        # Find the observation time for this visit; it selects which defect
        # validity interval applies.
        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup will fail in a posix registry because there is no template to provide.
        # NOTE(review): ('taiObs') and ('raw_visit') are plain parenthesised
        # strings, not 1-tuples -- apparently accepted by Registry.lookup;
        # confirm against the daf_persistence API.
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        # A visit should match exactly one raw_visit row.
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Lookup the defects for this CCD serial number that are valid at the exposure midpoint.
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            # Paths in the defect registry are relative to self.defectPath.
            return os.path.join(self.defectPath, rows[0][0])
        else:
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
962 
963  def _makeCamera(self, policy, repositoryDir):
964  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry
965 
966  Also set self.cameraDataLocation, if relevant (else it can be left None).
967 
968  This implementation assumes that policy contains an entry "camera" that points to the
969  subdirectory in this package of camera data; specifically, that subdirectory must contain:
970  - a file named `camera.py` that contains persisted camera config
971  - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath
972 
973  @param policy (daf_persistence.Policy, or pexPolicy.Policy (only for backward compatibility))
974  Policy with per-camera defaults already merged
975  @param repositoryDir (string) Policy repository for the subclassing
976  module (obtained with getRepositoryPath() on the
977  per-camera default dictionary)
978  """
979  if isinstance(policy, pexPolicy.Policy):
980  policy = dafPersist.Policy(pexPolicy=policy)
981  if 'camera' not in policy:
982  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
983  cameraDataSubdir = policy['camera']
984  self.cameraDataLocation = os.path.normpath(
985  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
986  cameraConfig = afwCameraGeom.CameraConfig()
987  cameraConfig.load(self.cameraDataLocation)
988  ampInfoPath = os.path.dirname(self.cameraDataLocation)
989  return afwCameraGeom.makeCameraFromPath(
990  cameraConfig=cameraConfig,
991  ampInfoPath=ampInfoPath,
992  shortNameFunc=self.getShortCcdName,
993  pupilFactoryClass=self.PupilFactoryClass
994  )
995 
996  def getRegistry(self):
997  """Get the registry used by this mapper.
998 
999  Returns
1000  -------
1001  Registry or None
1002  The registry used by this mapper for this mapper's repository.
1003  """
1004  return self.registry
1005 
1007  """Generate an Exposure from an image-like object
1008 
1009  If the image is a DecoratedImage then also set its WCS and metadata
1010  (Image and MaskedImage are missing the necessary metadata
1011  and Exposure already has those set)
1012 
1013  @param[in] image Image-like object (lsst.afw.image.DecoratedImage, Image, MaskedImage or Exposure)
1014  @return (lsst.afw.image.Exposure) Exposure containing input image
1015  """
1016  if hasattr(image, "getVariance"):
1017  # MaskedImage
1018  exposure = afwImage.makeExposure(image)
1019  elif hasattr(image, "getImage"):
1020  # DecoratedImage
1021  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1022  metadata = image.getMetadata()
1023  wcs = afwImage.makeWcs(metadata, True)
1024  exposure.setWcs(wcs)
1025  exposure.setMetadata(metadata)
1026  elif hasattr(image, "getMaskedImage"):
1027  # Exposure
1028  exposure = image
1029  else:
1030  # Image
1031  exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1032 
1033  return exposure
def _getCcdKeyVal
Utility functions.