Coverage for python/lsst/summit/utils/bestEffort.py: 15%
101 statements
« prev ^ index » next coverage.py v7.2.7, created at 2023-07-14 22:09 +0000
1# This file is part of summit_utils.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22import os
23import logging
24from lsst.utils import getPackageDir
25from lsst.ip.isr import IsrTask
26import lsst.daf.butler as dafButler
27from lsst.daf.butler.registry import ConflictingDefinitionError
29from lsst.summit.utils.quickLook import QuickLookIsrTask
30from lsst.summit.utils.butlerUtils import getLatissDefaultCollections
# TODO: add attempt for fringe once registry & templates are fixed

# The output run collection into which generated quickLookExp images are
# put() when doWrite is True. Incrementing the trailing number forces a
# bulk regeneration of all quickLook outputs (see getExposure docstring).
CURRENT_RUN = "LATISS/runs/quickLook/1"
37class BestEffortIsr():
38 """Class for getting an assembled image with the maximum amount of isr.
40 BestEffortIsr.getExposure(dataId) returns an assembled image with as much
41 isr performed as possible, dictated by the calibration products available,
42 and optionally interpolates over cosmic rays. If an image image already
43 exists in the butler, it is returned (for the sake of speed), otherwise it
44 is generated and put(). Calibration products are loaded and cached to
45 improve performance.
47 This class uses the ``quickLookIsrTask``, see docs there for details.
49 defaultExtraIsrOptions is a dict of options applied to all images.
51 Parameters
52 ----------
53 repoString : `str`, optional
54 The Butler repo root.
55 extraCollections : `list` of `str`, optional
56 Extra collections to add to the butler init. Collections are prepended.
57 defaultExtraIsrOptions : `dict`, optional
58 A dict of extra isr config options to apply. Each key should be an
59 attribute of an isrTaskConfigClass.
60 doRepairCosmics : `bool`, optional
61 Repair cosmic ray hits?
62 doWrite : `bool`, optional
63 Write the outputs to the quickLook rerun/collection?
65 Raises
66 ------
67 FileNotFoundError:
68 Raised when a butler cannot be automatically instantiated using
69 the DAF_BUTLER_REPOSITORY_INDEX environment variable.
70 """
71 _datasetName = 'quickLookExp'
73 def __init__(self, *,
74 extraCollections=[],
75 defaultExtraIsrOptions={},
76 doRepairCosmics=True,
77 doWrite=True,
78 embargo=False,
79 repoString=None):
80 self.log = logging.getLogger(__name__)
82 collections = getLatissDefaultCollections()
83 self.collections = extraCollections + collections
84 self.log.info(f'Instantiating butler with collections={self.collections}')
86 if repoString is None:
87 repoString = "LATISS" if not embargo else "/repo/embargo"
88 try:
89 self.butler = dafButler.Butler(repoString, collections=self.collections,
90 instrument='LATISS',
91 run=CURRENT_RUN if doWrite else None)
92 except (FileNotFoundError, RuntimeError):
93 # Depending on the value of DAF_BUTLER_REPOSITORY_INDEX and whether
94 # it is present and blank, or just not set, both these exception
95 # types can be raised, see
96 # tests/test_butlerUtils.py:ButlerInitTestCase
97 # for details and tests which confirm these have not changed
98 raise FileNotFoundError # unify exception type
100 quickLookIsrConfig = QuickLookIsrTask.ConfigClass()
101 quickLookIsrConfig.doRepairCosmics = doRepairCosmics
102 self.doWrite = doWrite # the task, as run by run() method, can't do the write, so we handle in here
103 self.quickLookIsrTask = QuickLookIsrTask(config=quickLookIsrConfig)
105 self.defaultExtraIsrOptions = defaultExtraIsrOptions
107 self._cache = {}
108 self._cacheIsForDetector = None
110 def _applyConfigOverrides(self, config, overrides):
111 """Update a config class with a dict of options.
113 Parameters
114 ----------
115 config : `lsst.pex.config.Config`
116 The config class to update.
117 overrides : `dict`
118 The override options as a dict.
120 Raises
121 ------
122 ValueError
123 Raised if the override option isn't found in the config.
124 """
125 for option, value in overrides.items():
126 if hasattr(config, option):
127 setattr(config, option, value)
128 self.log.info(f"Set isr config override {option} to {value}")
129 else:
130 raise ValueError(f"Override option {option} not found in isrConfig")
132 @staticmethod
133 def updateDataId(expIdOrDataId, **kwargs):
134 """Sanitize the expIdOrDataId to allow support both expIds and dataIds
136 Supports expId as an integer, or a complete or partial dict. The dict
137 is updated with the supplied kwargs.
139 Parameters
140 ----------
141 expIdOrDataId : `int` or `dict` or `lsst.daf.butler.DataCoordinate` or
142 `lsst.daf.butler.DimensionRecord`
143 The exposure id as an int, or the dataId as as dict, or an
144 expRecord or a dataCoordinate.
146 Returns
147 -------
148 dataId : `dict`
149 The sanitized dataId.
150 """
151 match expIdOrDataId:
152 case int() as expId:
153 dataId = {"expId": expId}
154 dataId.update(**kwargs)
155 return dataId
156 case dafButler.DataCoordinate() as dataId:
157 return dafButler.DataCoordinate.standardize(dataId, **kwargs)
158 case dafButler.DimensionRecord() as record:
159 return dafButler.DataCoordinate.standardize(record.dataId, **kwargs)
160 case dict() as dataId:
161 dataId.update(**kwargs)
162 return dataId
163 raise RuntimeError(f"Invalid expId or dataId type {expIdOrDataId}: {type(expIdOrDataId)}")
165 def clearCache(self):
166 """Clear the internal cache of loaded calibration products.
168 Only necessary if you want to use an existing bestEffortIsr object
169 after adding new calibration products to the calibration collection.
170 """
171 self._cache = {}
173 def getExposure(self, expIdOrDataId, extraIsrOptions={}, skipCosmics=False, forceRemake=False,
174 **kwargs):
175 """Get the postIsr and cosmic-repaired image for this dataId.
177 Note that when using the forceRemake option the image will not be
178 written to the repo for reuse.
180 Parameters
181 ----------
182 expIdOrDataId : `dict`
183 The dataId
184 extraIsrOptions : `dict`, optional
185 extraIsrOptions is a dict of extra isr options applied to this
186 image only.
187 skipCosmics : `bool`, optional # XXX THIS CURRENTLY DOESN'T WORK!
188 Skip doing cosmic ray repair for this image?
189 forceRemake : `bool`
190 Remake the exposure even if there is a pre-existing one in the
191 repo. Images that are force-remade are never written, as this is
192 assumed to be used for testing/debug purposes, as opposed to normal
193 operation. For updating individual images, removal from the
194 registry can be used, and for bulk-updates the overall run number
195 can be incremented.
197 Returns
198 -------
199 exp : `lsst.afw.image.Exposure`
200 The postIsr exposure
201 """
202 dataId = self.updateDataId(expIdOrDataId, **kwargs)
203 if 'detector' not in dataId:
204 raise ValueError('dataId must contain a detector. Either specify a detector as a kwarg,'
205 ' or use a fully-qualified dataId')
207 if not forceRemake:
208 try:
209 exp = self.butler.get(self._datasetName, dataId)
210 self.log.info("Found a ready-made quickLookExp in the repo. Returning that.")
211 return exp
212 except LookupError:
213 pass
215 try:
216 raw = self.butler.get('raw', dataId)
217 except LookupError:
218 raise RuntimeError(f"Failed to retrieve raw for exp {dataId}") from None
220 # default options that are probably good for most engineering time
221 isrConfig = IsrTask.ConfigClass()
222 packageDir = getPackageDir("summit_utils")
223 isrConfig.load(os.path.join(packageDir, "config", "quickLookIsr.py"))
225 # apply general overrides
226 self._applyConfigOverrides(isrConfig, self.defaultExtraIsrOptions)
227 # apply per-image overrides
228 self._applyConfigOverrides(isrConfig, extraIsrOptions)
230 isrParts = ['camera', 'bias', 'dark', 'flat', 'defects', 'linearizer', 'crosstalk', 'bfKernel',
231 'bfGains', 'ptc']
233 if self._cacheIsForDetector != dataId['detector']:
234 self.clearCache()
235 self._cacheIsForDetector = dataId['detector']
237 isrDict = {}
238 # we build a cache of all the isr components which will be used to save
239 # the IO time on subsequent calls. This assumes people will not update
240 # calibration products while this object lives, but this is a fringe
241 # use case, and if they do, all they would need to do would be call
242 # .clearCache() and this will rebuild with the new products.
243 for component in isrParts:
244 if component in self._cache and component != 'flat':
245 self.log.info(f"Using {component} from cache...")
246 isrDict[component] = self._cache[component]
247 continue
248 if self.butler.exists(component, dataId):
249 try:
250 # TODO: add caching for flats
251 item = self.butler.get(component, dataId=dataId)
252 self._cache[component] = item
253 isrDict[component] = self._cache[component]
254 self.log.info(f"Loaded {component} to cache")
255 except Exception: # now that we log the exception, we can catch all errors
256 # the product *should* exist but the get() failed, so log
257 # a very loud warning inc. the traceback as this is a sign
258 # of butler/database failures or something like that.
259 self.log.critical(f'Failed to find expected data product {component}!')
260 self.log.exception(f'Finding failure for {component}:')
261 else:
262 self.log.debug('No %s found for %s', component, dataId)
264 quickLookExp = self.quickLookIsrTask.run(raw, **isrDict, isrBaseConfig=isrConfig).outputExposure
266 if self.doWrite and not forceRemake:
267 try:
268 self.butler.put(quickLookExp, self._datasetName, dataId)
269 self.log.info(f'Put {self._datasetName} for {dataId}')
270 except ConflictingDefinitionError:
271 # TODO: DM-34302 fix this message so that it's less scary for
272 # users. Do this by having daemons know they're daemons.
273 self.log.warning('Skipped putting existing exp into collection! (ignore if there was a race)')
274 pass
276 return quickLookExp