Coverage for python/lsst/summit/utils/bestEffort.py: 15% (101 statements)
coverage.py v7.5.0, created at 2024-05-01 05:37 -0700
# This file is part of summit_utils.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import logging
import os

import lsst.daf.butler as dafButler
from lsst.daf.butler.registry import ConflictingDefinitionError
from lsst.ip.isr import IsrTask
from lsst.summit.utils.butlerUtils import getLatissDefaultCollections
from lsst.summit.utils.quickLook import QuickLookIsrTask
from lsst.utils import getPackageDir

# TODO: add attempt for fringe once registry & templates are fixed

CURRENT_RUN = "LATISS/runs/quickLook/1"


class BestEffortIsr:
    """Class for getting an assembled image with the maximum amount of isr.

    BestEffortIsr.getExposure(dataId) returns an assembled image with as much
    isr performed as possible, dictated by the calibration products available,
    and optionally interpolates over cosmic rays. If an image already exists
    in the butler, it is returned (for the sake of speed), otherwise it is
    generated and put(). Calibration products are loaded and cached to
    improve performance.

    This class uses the ``quickLookIsrTask``; see the docs there for details.

    defaultExtraIsrOptions is a dict of options applied to all images.

    Parameters
    ----------
    repoString : `str`, optional
        The Butler repo root.
    extraCollections : `list` of `str`, optional
        Extra collections to add to the butler init. Collections are
        prepended.
    defaultExtraIsrOptions : `dict`, optional
        A dict of extra isr config options to apply. Each key should be an
        attribute of an isrTaskConfigClass.
    doRepairCosmics : `bool`, optional
        Repair cosmic ray hits?
    doWrite : `bool`, optional
        Write the outputs to the quickLook rerun/collection?
    embargo : `bool`, optional
        Use the embargo repo (``/repo/embargo``) instead of the default repo?

    Raises
    ------
    FileNotFoundError
        Raised when a butler cannot be automatically instantiated using
        the DAF_BUTLER_REPOSITORY_INDEX environment variable.
    """

    _datasetName = "quickLookExp"

    def __init__(
        self,
        *,
        extraCollections=[],
        defaultExtraIsrOptions={},
        doRepairCosmics=True,
        doWrite=True,
        embargo=False,
        repoString=None,
    ):
        self.log = logging.getLogger(__name__)

        collections = getLatissDefaultCollections()
        self.collections = extraCollections + collections
        self.log.info(f"Instantiating butler with collections={self.collections}")

        if repoString is None:
            repoString = "LATISS" if not embargo else "/repo/embargo"
        try:
            self.butler = dafButler.Butler(
                repoString,
                collections=self.collections,
                instrument="LATISS",
                run=CURRENT_RUN if doWrite else None,
            )
        except (FileNotFoundError, RuntimeError):
            # Depending on the value of DAF_BUTLER_REPOSITORY_INDEX, and
            # whether it is present and blank or just not set, both these
            # exception types can be raised; see
            # tests/test_butlerUtils.py:ButlerInitTestCase for details and
            # for tests which confirm these have not changed.
            raise FileNotFoundError  # unify exception type

        quickLookIsrConfig = QuickLookIsrTask.ConfigClass()
        quickLookIsrConfig.doRepairCosmics = doRepairCosmics
        # the task, as run via its run() method, can't do the write itself,
        # so we handle that here
        self.doWrite = doWrite
        self.quickLookIsrTask = QuickLookIsrTask(config=quickLookIsrConfig)

        self.defaultExtraIsrOptions = defaultExtraIsrOptions

        self._cache = {}
        self._cacheIsForDetector = None

    def _applyConfigOverrides(self, config, overrides):
        """Update a config class with a dict of options.

        Parameters
        ----------
        config : `lsst.pex.config.Config`
            The config class to update.
        overrides : `dict`
            The override options as a dict.

        Raises
        ------
        ValueError
            Raised if the override option isn't found in the config.
        """
        for option, value in overrides.items():
            if hasattr(config, option):
                setattr(config, option, value)
                self.log.info(f"Set isr config override {option} to {value}")
            else:
                raise ValueError(f"Override option {option} not found in isrConfig")
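
    # Illustrative sketch, not part of the original module: the overrides
    # dict maps isr config field names to values, so, assuming ``doFlat`` is
    # a field on the isr config in use, a call would look like
    #
    #     self._applyConfigOverrides(isrConfig, {"doFlat": False})
    #
    # An unknown key raises ValueError rather than being silently ignored.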

    @staticmethod
    def updateDataId(expIdOrDataId, **kwargs):
        """Sanitize the expIdOrDataId to support both expIds and dataIds.

        Supports an expId as an integer, or a complete or partial dict. The
        dict is updated with the supplied kwargs.

        Parameters
        ----------
        expIdOrDataId : `int` or `dict` or `lsst.daf.butler.DataCoordinate` or
                        `lsst.daf.butler.DimensionRecord`
            The exposure id as an int, or the dataId as a dict, or an
            expRecord or a dataCoordinate.

        Returns
        -------
        dataId : `dict` or `lsst.daf.butler.DataCoordinate`
            The sanitized dataId.
        """
        match expIdOrDataId:
            case int() as expId:
                dataId = {"expId": expId}
                dataId.update(**kwargs)
                return dataId
            case dafButler.DataCoordinate() as dataId:
                return dafButler.DataCoordinate.standardize(dataId, **kwargs)
            case dafButler.DimensionRecord() as record:
                return dafButler.DataCoordinate.standardize(record.dataId, **kwargs)
            case dict() as dataId:
                dataId.update(**kwargs)
                return dataId
        raise RuntimeError(f"Invalid expId or dataId type {expIdOrDataId}: {type(expIdOrDataId)}")
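
    # Illustrative sketch, not part of the original module, using
    # hypothetical values to show the forms updateDataId() accepts:
    #
    #     BestEffortIsr.updateDataId(2023052800123, detector=0)
    #         -> {"expId": 2023052800123, "detector": 0}
    #     BestEffortIsr.updateDataId({"day_obs": 20230528, "seq_num": 123}, detector=0)
    #         -> {"day_obs": 20230528, "seq_num": 123, "detector": 0}
    #
    # DataCoordinate and DimensionRecord inputs are standardized via
    # dafButler.DataCoordinate.standardize() instead.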

    def clearCache(self):
        """Clear the internal cache of loaded calibration products.

        Only necessary if you want to use an existing bestEffortIsr object
        after adding new calibration products to the calibration collection.
        """
        self._cache = {}
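
    # Illustrative sketch, not part of the original module: if calibration
    # products are updated while this object is alive, clearing the cache
    # makes the next getExposure() call reload them:
    #
    #     bestEffortIsr.clearCache()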

    def getExposure(self, expIdOrDataId, extraIsrOptions={}, skipCosmics=False, forceRemake=False, **kwargs):
        """Get the postIsr and cosmic-repaired image for this dataId.

        Note that when using the forceRemake option the image will not be
        written to the repo for reuse.

        Parameters
        ----------
        expIdOrDataId : `int` or `dict` or `lsst.daf.butler.DataCoordinate` or
                        `lsst.daf.butler.DimensionRecord`
            The exposure id or dataId, in any of the forms accepted by
            ``updateDataId``.
        extraIsrOptions : `dict`, optional
            A dict of extra isr options applied to this image only.
        skipCosmics : `bool`, optional
            Skip doing cosmic ray repair for this image?
            XXX THIS CURRENTLY DOESN'T WORK!
        forceRemake : `bool`, optional
            Remake the exposure even if there is a pre-existing one in the
            repo. Images that are force-remade are never written, as this is
            assumed to be used for testing/debug purposes, as opposed to
            normal operation. For updating individual images, removal from
            the registry can be used, and for bulk updates the overall run
            number can be incremented.

        Returns
        -------
        exp : `lsst.afw.image.Exposure`
            The postIsr exposure.
        """
        dataId = self.updateDataId(expIdOrDataId, **kwargs)
        if "detector" not in dataId:
            raise ValueError(
                "dataId must contain a detector. Either specify a detector as a kwarg,"
                " or use a fully-qualified dataId"
            )

        if not forceRemake:
            try:
                exp = self.butler.get(self._datasetName, dataId)
                self.log.info("Found a ready-made quickLookExp in the repo. Returning that.")
                return exp
            except LookupError:
                pass

        try:
            raw = self.butler.get("raw", dataId)
        except LookupError:
            raise RuntimeError(f"Failed to retrieve raw for exp {dataId}") from None

        # default options that are probably good for most engineering time
        isrConfig = IsrTask.ConfigClass()
        packageDir = getPackageDir("summit_utils")
        isrConfig.load(os.path.join(packageDir, "config", "quickLookIsr.py"))

        # apply general overrides
        self._applyConfigOverrides(isrConfig, self.defaultExtraIsrOptions)
        # apply per-image overrides
        self._applyConfigOverrides(isrConfig, extraIsrOptions)

        isrParts = [
            "camera",
            "bias",
            "dark",
            "flat",
            "defects",
            "linearizer",
            "crosstalk",
            "bfKernel",
            "bfGains",
            "ptc",
        ]

        if self._cacheIsForDetector != dataId["detector"]:
            self.clearCache()
            self._cacheIsForDetector = dataId["detector"]

        isrDict = {}
        # We build a cache of all the isr components, which is used to save
        # the IO time on subsequent calls. This assumes people will not
        # update calibration products while this object lives, but that is a
        # fringe use case, and if they do, all they need to do is call
        # .clearCache() and the cache will be rebuilt with the new products.
        for component in isrParts:
            if component in self._cache and component != "flat":
                self.log.info(f"Using {component} from cache...")
                isrDict[component] = self._cache[component]
                continue
            if self.butler.exists(component, dataId):
                try:
                    # TODO: add caching for flats
                    item = self.butler.get(component, dataId=dataId)
                    self._cache[component] = item
                    isrDict[component] = self._cache[component]
                    self.log.info(f"Loaded {component} to cache")
                except Exception:
                    # Now that we log the exception we can catch all errors:
                    # the product *should* exist but the get() failed, so log
                    # a very loud warning, including the traceback, as this
                    # is a sign of butler/database failures or similar.
                    self.log.critical(f"Failed to find expected data product {component}!")
                    self.log.exception(f"Finding failure for {component}:")
            else:
                self.log.debug("No %s found for %s", component, dataId)

        quickLookExp = self.quickLookIsrTask.run(raw, **isrDict, isrBaseConfig=isrConfig).outputExposure

        if self.doWrite and not forceRemake:
            try:
                self.butler.put(quickLookExp, self._datasetName, dataId)
                self.log.info(f"Put {self._datasetName} for {dataId}")
            except ConflictingDefinitionError:
                # TODO: DM-34302 fix this message so that it's less scary for
                # users. Do this by having daemons know they're daemons.
                self.log.warning("Skipped putting existing exp into collection! (ignore if there was a race)")

        return quickLookExp
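

# Minimal usage sketch, added for illustration and not part of the original
# module. The dataId values below are hypothetical and must correspond to a
# raw that exists in the repo being used; instantiation requires a reachable
# butler repo.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    bestEffortIsr = BestEffortIsr(doWrite=False)
    exp = bestEffortIsr.getExposure({"day_obs": 20230528, "seq_num": 123}, detector=0)
    print(exp.getDimensions())  # the assembled, post-isr exposure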