Coverage for python / lsst / summit / utils / bestEffort.py: 15%
103 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-05-01 09:04 +0000
1# This file is part of summit_utils.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22import importlib.resources
23import logging
24from typing import Any
26import lsst.afw.image as afwImage
27import lsst.daf.butler as dafButler
28from lsst.daf.butler.registry import ConflictingDefinitionError
29from lsst.ip.isr import IsrTask
30from lsst.pex.config import Config
31from lsst.summit.utils.butlerUtils import getLatissDefaultCollections
32from lsst.summit.utils.quickLook import QuickLookIsrTask
# TODO: add attempt for fringe once registry & templates are fixed

# Run collection that generated quickLookExp datasets are written to (when
# doWrite is enabled). Incrementing the trailing number forces a bulk remake
# of all images, per the BestEffortIsr.getExposure docstring.
CURRENT_RUN = "LATISS/runs/quickLook/1"
39class BestEffortIsr:
40 """Class for getting an assembled image with the maximum amount of isr.
42 BestEffortIsr.getExposure(dataId) returns an assembled image with as much
43 isr performed as possible, dictated by the calibration products available,
44 and optionally interpolates over cosmic rays. If an image image already
45 exists in the butler, it is returned (for the sake of speed), otherwise it
46 is generated and put(). Calibration products are loaded and cached to
47 improve performance.
49 This class uses the ``quickLookIsrTask``, see docs there for details.
51 defaultExtraIsrOptions is a dict of options applied to all images.
53 Parameters
54 ----------
55 repoString : `str`, optional
56 The Butler repo root.
57 extraCollections : `list` of `str`, optional
58 Extra collections to add to the butler init. Collections are prepended.
59 defaultExtraIsrOptions : `dict`, optional
60 A dict of extra isr config options to apply. Each key should be an
61 attribute of an isrTaskConfigClass.
62 doRepairCosmics : `bool`, optional
63 Repair cosmic ray hits?
64 doWrite : `bool`, optional
65 Write the outputs to the quickLook rerun/collection?
67 Raises
68 ------
69 FileNotFoundError:
70 Raised when a butler cannot be automatically instantiated using
71 the DAF_BUTLER_REPOSITORY_INDEX environment variable.
72 """
74 _datasetName = "quickLookExp"
76 def __init__(
77 self,
78 *,
79 extraCollections: list[str] = [],
80 defaultExtraIsrOptions: dict = {},
81 doRepairCosmics: bool = True,
82 doWrite: bool = True,
83 embargo: bool = False,
84 repoString: str | None = None,
85 ):
86 self.log = logging.getLogger(__name__)
88 collections = getLatissDefaultCollections()
89 self.collections = extraCollections + collections
90 self.log.info(f"Instantiating butler with collections={self.collections}")
92 if repoString is None:
93 repoString = "LATISS" if not embargo else "/repo/embargo"
94 try:
95 self.butler = dafButler.Butler.from_config(
96 repoString,
97 collections=self.collections,
98 instrument="LATISS",
99 run=CURRENT_RUN if doWrite else None,
100 )
101 except (FileNotFoundError, RuntimeError):
102 # Depending on the value of DAF_BUTLER_REPOSITORY_INDEX and whether
103 # it is present and blank, or just not set, both these exception
104 # types can be raised, see
105 # tests/test_butlerUtils.py:ButlerInitTestCase
106 # for details and tests which confirm these have not changed
107 raise FileNotFoundError # unify exception type
109 quickLookIsrConfig = QuickLookIsrTask.ConfigClass()
110 quickLookIsrConfig.doRepairCosmics = doRepairCosmics
111 self.doWrite = doWrite # the task, as run by run() method, can't do the write, so we handle in here
112 self.quickLookIsrTask = QuickLookIsrTask(config=quickLookIsrConfig)
114 self.defaultExtraIsrOptions = defaultExtraIsrOptions
116 self._cache: dict = {}
117 self._cacheIsForDetector: int | None = None
119 def _applyConfigOverrides(self, config: Config, overrides: dict) -> None:
120 """Update a config class with a dict of options.
122 Parameters
123 ----------
124 config : `lsst.pex.config.Config`
125 The config class to update.
126 overrides : `dict`
127 The override options as a dict.
129 Raises
130 ------
131 ValueError
132 Raised if the override option isn't found in the config.
133 """
134 for option, value in overrides.items():
135 if hasattr(config, option):
136 setattr(config, option, value)
137 self.log.info(f"Set isr config override {option} to {value}")
138 else:
139 raise ValueError(f"Override option {option} not found in isrConfig")
141 @staticmethod
142 def updateDataId(
143 expIdOrDataId: int | dict | dafButler.DataCoordinate | dafButler.DimensionRecord,
144 **kwargs: Any,
145 ) -> dict | dafButler.DataCoordinate:
146 """Sanitize the expIdOrDataId to allow support both expIds and dataIds
148 Supports expId as an integer, or a complete or partial dict. The dict
149 is updated with the supplied kwargs.
151 Parameters
152 ----------
153 expIdOrDataId : `int` or `dict` or `lsst.daf.butler.DataCoordinate` or
154 `lsst.daf.butler.DimensionRecord`
155 The exposure id as an int, or the dataId as as dict, or an
156 expRecord or a dataCoordinate.
158 Returns
159 -------
160 dataId : `dict`
161 The sanitized dataId.
162 """
163 match expIdOrDataId:
164 case int() as expId:
165 dataId = {"expId": expId}
166 dataId.update(**kwargs)
167 return dataId
168 case dafButler.DataCoordinate() as dataId:
169 return dafButler.DataCoordinate.standardize(dataId, **kwargs)
170 case dafButler.DimensionRecord() as record:
171 return dafButler.DataCoordinate.standardize(record.dataId, **kwargs)
172 case dict() as dataId:
173 dataId.update(**kwargs)
174 return dataId
175 raise RuntimeError(f"Invalid expId or dataId type {expIdOrDataId}: {type(expIdOrDataId)}")
177 def clearCache(self) -> None:
178 """Clear the internal cache of loaded calibration products.
180 Only necessary if you want to use an existing bestEffortIsr object
181 after adding new calibration products to the calibration collection.
182 """
183 self._cache = {}
185 def getExposure(
186 self,
187 expIdOrDataId: int | dict | dafButler.DataCoordinate | dafButler.DimensionRecord,
188 extraIsrOptions: dict = {},
189 skipCosmics: bool = False,
190 forceRemake: bool = False,
191 **kwargs: Any,
192 ) -> afwImage.Exposure:
193 """Get the postIsr and cosmic-repaired image for this dataId.
195 Note that when using the forceRemake option the image will not be
196 written to the repo for reuse.
198 Parameters
199 ----------
200 expIdOrDataId : `dict`
201 The dataId
202 extraIsrOptions : `dict`, optional
203 extraIsrOptions is a dict of extra isr options applied to this
204 image only.
205 skipCosmics : `bool`, optional # XXX THIS CURRENTLY DOESN'T WORK!
206 Skip doing cosmic ray repair for this image?
207 forceRemake : `bool`
208 Remake the exposure even if there is a pre-existing one in the
209 repo. Images that are force-remade are never written, as this is
210 assumed to be used for testing/debug purposes, as opposed to normal
211 operation. For updating individual images, removal from the
212 registry can be used, and for bulk-updates the overall run number
213 can be incremented.
215 Returns
216 -------
217 exp : `lsst.afw.image.Exposure`
218 The postIsr exposure
219 """
220 dataId = self.updateDataId(expIdOrDataId, **kwargs)
221 if "detector" not in dataId:
222 raise ValueError(
223 "dataId must contain a detector. Either specify a detector as a kwarg,"
224 " or use a fully-qualified dataId"
225 )
227 if not forceRemake:
228 try:
229 exp = self.butler.get(self._datasetName, dataId)
230 self.log.info("Found a ready-made quickLookExp in the repo. Returning that.")
231 return exp
232 except LookupError:
233 pass
235 try:
236 raw = self.butler.get("raw", dataId)
237 except LookupError:
238 raise RuntimeError(f"Failed to retrieve raw for exp {dataId}") from None
240 # default options that are probably good for most engineering time
241 isrConfig = IsrTask.ConfigClass()
242 with importlib.resources.path("lsst.summit.utils", "resources/config/quickLookIsr.py") as cfgPath:
243 isrConfig.load(cfgPath)
245 # apply general overrides
246 self._applyConfigOverrides(isrConfig, self.defaultExtraIsrOptions)
247 # apply per-image overrides
248 self._applyConfigOverrides(isrConfig, extraIsrOptions)
250 isrParts = [
251 "camera",
252 "bias",
253 "dark",
254 "flat",
255 "defects",
256 "linearizer",
257 "crosstalk",
258 "bfKernel",
259 "bfGains",
260 "ptc",
261 ]
263 if self._cacheIsForDetector != dataId["detector"]:
264 self.clearCache()
265 self._cacheIsForDetector = dataId["detector"] # type: ignore
267 isrDict = {}
268 # we build a cache of all the isr components which will be used to save
269 # the IO time on subsequent calls. This assumes people will not update
270 # calibration products while this object lives, but this is a fringe
271 # use case, and if they do, all they would need to do would be call
272 # .clearCache() and this will rebuild with the new products.
273 for component in isrParts:
274 if component in self._cache and component != "flat":
275 self.log.info(f"Using {component} from cache...")
276 isrDict[component] = self._cache[component]
277 continue
278 if self.butler.exists(component, dataId):
279 try:
280 # TODO: add caching for flats
281 item = self.butler.get(component, dataId=dataId)
282 self._cache[component] = item
283 isrDict[component] = self._cache[component]
284 self.log.info(f"Loaded {component} to cache")
285 except Exception: # now that we log the exception, we can catch all errors
286 # the product *should* exist but the get() failed, so log
287 # a very loud warning inc. the traceback as this is a sign
288 # of butler/database failures or something like that.
289 self.log.critical(f"Failed to find expected data product {component}!")
290 self.log.exception(f"Finding failure for {component}:")
291 else:
292 self.log.debug("No %s found for %s", component, dataId)
294 quickLookExp = self.quickLookIsrTask.run(raw, **isrDict, isrBaseConfig=isrConfig).outputExposure
296 if self.doWrite and not forceRemake:
297 try:
298 self.butler.put(quickLookExp, self._datasetName, dataId)
299 self.log.info(f"Put {self._datasetName} for {dataId}")
300 except ConflictingDefinitionError:
301 # TODO: DM-34302 fix this message so that it's less scary for
302 # users. Do this by having daemons know they're daemons.
303 self.log.warning("Skipped putting existing exp into collection! (ignore if there was a race)")
304 pass
306 return quickLookExp