Coverage for python/lsst/summit/utils/bestEffort.py: 17% (104 statements)
« prev ^ index » next — coverage.py v7.5.1, created at 2024-05-12 03:04 -0700
# This file is part of summit_utils.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
22import logging
23import os
24from typing import Any
26import lsst.afw.image as afwImage
27import lsst.daf.butler as dafButler
28from lsst.daf.butler.registry import ConflictingDefinitionError
29from lsst.ip.isr import IsrTask
30from lsst.pex.config import Config
31from lsst.summit.utils.butlerUtils import getLatissDefaultCollections
32from lsst.summit.utils.quickLook import QuickLookIsrTask
33from lsst.utils import getPackageDir
35# TODO: add attempt for fringe once registry & templates are fixed
37CURRENT_RUN = "LATISS/runs/quickLook/1"
40class BestEffortIsr:
41 """Class for getting an assembled image with the maximum amount of isr.
43 BestEffortIsr.getExposure(dataId) returns an assembled image with as much
44 isr performed as possible, dictated by the calibration products available,
45 and optionally interpolates over cosmic rays. If an image image already
46 exists in the butler, it is returned (for the sake of speed), otherwise it
47 is generated and put(). Calibration products are loaded and cached to
48 improve performance.
50 This class uses the ``quickLookIsrTask``, see docs there for details.
52 defaultExtraIsrOptions is a dict of options applied to all images.
54 Parameters
55 ----------
56 repoString : `str`, optional
57 The Butler repo root.
58 extraCollections : `list` of `str`, optional
59 Extra collections to add to the butler init. Collections are prepended.
60 defaultExtraIsrOptions : `dict`, optional
61 A dict of extra isr config options to apply. Each key should be an
62 attribute of an isrTaskConfigClass.
63 doRepairCosmics : `bool`, optional
64 Repair cosmic ray hits?
65 doWrite : `bool`, optional
66 Write the outputs to the quickLook rerun/collection?
68 Raises
69 ------
70 FileNotFoundError:
71 Raised when a butler cannot be automatically instantiated using
72 the DAF_BUTLER_REPOSITORY_INDEX environment variable.
73 """
75 _datasetName = "quickLookExp"
77 def __init__(
78 self,
79 *,
80 extraCollections: list[str] = [],
81 defaultExtraIsrOptions: dict = {},
82 doRepairCosmics: bool = True,
83 doWrite: bool = True,
84 embargo: bool = False,
85 repoString: str | None = None,
86 ):
87 self.log = logging.getLogger(__name__)
89 collections = getLatissDefaultCollections()
90 self.collections = extraCollections + collections
91 self.log.info(f"Instantiating butler with collections={self.collections}")
93 if repoString is None:
94 repoString = "LATISS" if not embargo else "/repo/embargo"
95 try:
96 self.butler = dafButler.Butler(
97 repoString,
98 collections=self.collections,
99 instrument="LATISS",
100 run=CURRENT_RUN if doWrite else None,
101 )
102 except (FileNotFoundError, RuntimeError):
103 # Depending on the value of DAF_BUTLER_REPOSITORY_INDEX and whether
104 # it is present and blank, or just not set, both these exception
105 # types can be raised, see
106 # tests/test_butlerUtils.py:ButlerInitTestCase
107 # for details and tests which confirm these have not changed
108 raise FileNotFoundError # unify exception type
110 quickLookIsrConfig = QuickLookIsrTask.ConfigClass()
111 quickLookIsrConfig.doRepairCosmics = doRepairCosmics
112 self.doWrite = doWrite # the task, as run by run() method, can't do the write, so we handle in here
113 self.quickLookIsrTask = QuickLookIsrTask(config=quickLookIsrConfig)
115 self.defaultExtraIsrOptions = defaultExtraIsrOptions
117 self._cache = {}
118 self._cacheIsForDetector = None
120 def _applyConfigOverrides(self, config: Config, overrides: dict) -> None:
121 """Update a config class with a dict of options.
123 Parameters
124 ----------
125 config : `lsst.pex.config.Config`
126 The config class to update.
127 overrides : `dict`
128 The override options as a dict.
130 Raises
131 ------
132 ValueError
133 Raised if the override option isn't found in the config.
134 """
135 for option, value in overrides.items():
136 if hasattr(config, option):
137 setattr(config, option, value)
138 self.log.info(f"Set isr config override {option} to {value}")
139 else:
140 raise ValueError(f"Override option {option} not found in isrConfig")
142 @staticmethod
143 def updateDataId(
144 expIdOrDataId: int | dict | dafButler.DataCoordinate | dafButler.DimensionRecord,
145 **kwargs: Any,
146 ) -> dict | dafButler.DataCoordinate:
147 """Sanitize the expIdOrDataId to allow support both expIds and dataIds
149 Supports expId as an integer, or a complete or partial dict. The dict
150 is updated with the supplied kwargs.
152 Parameters
153 ----------
154 expIdOrDataId : `int` or `dict` or `lsst.daf.butler.DataCoordinate` or
155 `lsst.daf.butler.DimensionRecord`
156 The exposure id as an int, or the dataId as as dict, or an
157 expRecord or a dataCoordinate.
159 Returns
160 -------
161 dataId : `dict`
162 The sanitized dataId.
163 """
164 match expIdOrDataId:
165 case int() as expId:
166 dataId = {"expId": expId}
167 dataId.update(**kwargs)
168 return dataId
169 case dafButler.DataCoordinate() as dataId:
170 return dafButler.DataCoordinate.standardize(dataId, **kwargs)
171 case dafButler.DimensionRecord() as record:
172 return dafButler.DataCoordinate.standardize(record.dataId, **kwargs)
173 case dict() as dataId:
174 dataId.update(**kwargs)
175 return dataId
176 raise RuntimeError(f"Invalid expId or dataId type {expIdOrDataId}: {type(expIdOrDataId)}")
178 def clearCache(self) -> None:
179 """Clear the internal cache of loaded calibration products.
181 Only necessary if you want to use an existing bestEffortIsr object
182 after adding new calibration products to the calibration collection.
183 """
184 self._cache = {}
186 def getExposure(
187 self,
188 expIdOrDataId: int | dict | dafButler.DataCoordinate | dafButler.DimensionRecord,
189 extraIsrOptions: dict = {},
190 skipCosmics: bool = False,
191 forceRemake: bool = False,
192 **kwargs: Any,
193 ) -> afwImage.Exposure:
194 """Get the postIsr and cosmic-repaired image for this dataId.
196 Note that when using the forceRemake option the image will not be
197 written to the repo for reuse.
199 Parameters
200 ----------
201 expIdOrDataId : `dict`
202 The dataId
203 extraIsrOptions : `dict`, optional
204 extraIsrOptions is a dict of extra isr options applied to this
205 image only.
206 skipCosmics : `bool`, optional # XXX THIS CURRENTLY DOESN'T WORK!
207 Skip doing cosmic ray repair for this image?
208 forceRemake : `bool`
209 Remake the exposure even if there is a pre-existing one in the
210 repo. Images that are force-remade are never written, as this is
211 assumed to be used for testing/debug purposes, as opposed to normal
212 operation. For updating individual images, removal from the
213 registry can be used, and for bulk-updates the overall run number
214 can be incremented.
216 Returns
217 -------
218 exp : `lsst.afw.image.Exposure`
219 The postIsr exposure
220 """
221 dataId = self.updateDataId(expIdOrDataId, **kwargs)
222 if "detector" not in dataId:
223 raise ValueError(
224 "dataId must contain a detector. Either specify a detector as a kwarg,"
225 " or use a fully-qualified dataId"
226 )
228 if not forceRemake:
229 try:
230 exp = self.butler.get(self._datasetName, dataId)
231 self.log.info("Found a ready-made quickLookExp in the repo. Returning that.")
232 return exp
233 except LookupError:
234 pass
236 try:
237 raw = self.butler.get("raw", dataId)
238 except LookupError:
239 raise RuntimeError(f"Failed to retrieve raw for exp {dataId}") from None
241 # default options that are probably good for most engineering time
242 isrConfig = IsrTask.ConfigClass()
243 packageDir = getPackageDir("summit_utils")
244 isrConfig.load(os.path.join(packageDir, "config", "quickLookIsr.py"))
246 # apply general overrides
247 self._applyConfigOverrides(isrConfig, self.defaultExtraIsrOptions)
248 # apply per-image overrides
249 self._applyConfigOverrides(isrConfig, extraIsrOptions)
251 isrParts = [
252 "camera",
253 "bias",
254 "dark",
255 "flat",
256 "defects",
257 "linearizer",
258 "crosstalk",
259 "bfKernel",
260 "bfGains",
261 "ptc",
262 ]
264 if self._cacheIsForDetector != dataId["detector"]:
265 self.clearCache()
266 self._cacheIsForDetector = dataId["detector"]
268 isrDict = {}
269 # we build a cache of all the isr components which will be used to save
270 # the IO time on subsequent calls. This assumes people will not update
271 # calibration products while this object lives, but this is a fringe
272 # use case, and if they do, all they would need to do would be call
273 # .clearCache() and this will rebuild with the new products.
274 for component in isrParts:
275 if component in self._cache and component != "flat":
276 self.log.info(f"Using {component} from cache...")
277 isrDict[component] = self._cache[component]
278 continue
279 if self.butler.exists(component, dataId):
280 try:
281 # TODO: add caching for flats
282 item = self.butler.get(component, dataId=dataId)
283 self._cache[component] = item
284 isrDict[component] = self._cache[component]
285 self.log.info(f"Loaded {component} to cache")
286 except Exception: # now that we log the exception, we can catch all errors
287 # the product *should* exist but the get() failed, so log
288 # a very loud warning inc. the traceback as this is a sign
289 # of butler/database failures or something like that.
290 self.log.critical(f"Failed to find expected data product {component}!")
291 self.log.exception(f"Finding failure for {component}:")
292 else:
293 self.log.debug("No %s found for %s", component, dataId)
295 quickLookExp = self.quickLookIsrTask.run(raw, **isrDict, isrBaseConfig=isrConfig).outputExposure
297 if self.doWrite and not forceRemake:
298 try:
299 self.butler.put(quickLookExp, self._datasetName, dataId)
300 self.log.info(f"Put {self._datasetName} for {dataId}")
301 except ConflictingDefinitionError:
302 # TODO: DM-34302 fix this message so that it's less scary for
303 # users. Do this by having daemons know they're daemons.
304 self.log.warning("Skipped putting existing exp into collection! (ignore if there was a race)")
305 pass
307 return quickLookExp