Coverage for python/lsst/summit/utils/bestEffort.py: 14%
96 statements
« prev ^ index » next coverage.py v6.5.0, created at 2023-03-16 11:10 +0000
1# This file is part of summit_utils.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22import logging
23from lsst.ip.isr import IsrTask
24import lsst.daf.butler as dafButler
25from lsst.daf.butler.registry import ConflictingDefinitionError
27from lsst.summit.utils.quickLook import QuickLookIsrTask
28from lsst.summit.utils.butlerUtils import getLatissDefaultCollections, datasetExists
# TODO: add attempt for fringe once registry & templates are fixed

# Output run collection that generated quickLookExp images are put() into
# (see BestEffortIsr.getExposure); bump the trailing number for bulk remakes.
CURRENT_RUN = "LATISS/runs/quickLook/1"
# Acceptable repo roots per the class docstring; to be removed once DM-33849
# is done. NOTE(review): not referenced in this chunk — presumably validated
# elsewhere; confirm before removing.
ALLOWED_REPOS = ['/repo/main', '/repo/LATISS', '/readonly/repo/main']
36class BestEffortIsr():
37 """Class for getting an assembled image with the maximum amount of isr.
39 BestEffortIsr.getExposure(dataId) returns an assembled image with as much
40 isr performed as possible, dictated by the calibration products available,
41 and optionally interpolates over cosmic rays. If an image image already
42 exists in the butler, it is returned (for the sake of speed), otherwise it
43 is generated and put(). Calibration products are loaded and cached to
44 improve performance.
46 This class uses the ``quickLookIsrTask``, see docs there for details.
48 Acceptable repodir values are currently listed in ALLOWED_REPOS. This will
49 be updated (removed) once DM-33849 is done.
51 defaultExtraIsrOptions is a dict of options applied to all images.
53 Parameters
54 ----------
55 repoDir : `str`
56 The repo root. Will be removed after DM-33849.
57 extraCollections : `list` of `str`, optional
58 Extra collections to add to the butler init. Collections are prepended.
59 defaultExtraIsrOptions : `dict`, optional
60 A dict of extra isr config options to apply. Each key should be an
61 attribute of an isrTaskConfigClass.
62 doRepairCosmics : `bool`, optional
63 Repair cosmic ray hits?
64 doWrite : `bool`, optional
65 Write the outputs to the quickLook rerun/collection?
67 Raises
68 ------
69 FileNotFoundError:
70 Raised when a butler cannot be automatically instantiated using
71 the DAF_BUTLER_REPOSITORY_INDEX environment variable.
72 """
73 _datasetName = 'quickLookExp'
75 def __init__(self, *,
76 extraCollections=[],
77 defaultExtraIsrOptions={},
78 doRepairCosmics=True,
79 doWrite=True,
80 embargo=False):
81 self.log = logging.getLogger(__name__)
83 collections = getLatissDefaultCollections()
84 self.collections = extraCollections + collections
85 self.log.info(f'Instantiating butler with collections={self.collections}')
86 try:
87 repoString = "LATISS" if not embargo else "/repo/embargo"
88 self.butler = dafButler.Butler(repoString, collections=self.collections,
89 instrument='LATISS',
90 run=CURRENT_RUN if doWrite else None)
91 except(FileNotFoundError, RuntimeError):
92 # Depending on the value of DAF_BUTLER_REPOSITORY_INDEX and whether
93 # it is present and blank, or just not set, both these exception
94 # types can be raised, see
95 # tests/test_butlerUtils.py:ButlerInitTestCase
96 # for details and tests which confirm these have not changed
97 raise FileNotFoundError # unify exception type
99 quickLookIsrConfig = QuickLookIsrTask.ConfigClass()
100 quickLookIsrConfig.doRepairCosmics = doRepairCosmics
101 self.doWrite = doWrite # the task, as run by run() method, can't do the write, so we handle in here
102 self.quickLookIsrTask = QuickLookIsrTask(config=quickLookIsrConfig)
104 self.defaultExtraIsrOptions = defaultExtraIsrOptions
106 self._cache = {}
108 def _applyConfigOverrides(self, config, overrides):
109 """Update a config class with a dict of options.
111 Parameters
112 ----------
113 config : `lsst.pex.config.Config`
114 The config class to update.
115 overrides : `dict`
116 The override options as a dict.
118 Raises
119 ------
120 ValueError
121 Raised if the override option isn't found in the config.
122 """
123 for option, value in overrides.items():
124 if hasattr(config, option):
125 setattr(config, option, value)
126 self.log.info(f"Set isr config override {option} to {value}")
127 else:
128 raise ValueError(f"Override option {option} not found in isrConfig")
130 @staticmethod
131 def updateDataId(expIdOrDataId, **kwargs):
132 """Sanitize the expIdOrDataId to allow support both expIds and dataIds
134 Supports expId as an integer, or a complete or partial dict. The dict
135 is updated with the supplied kwargs.
137 Parameters
138 ----------
139 expIdOrDataId : `int` or `dict` or `lsst.daf.butler.DataCoordinate` or
140 `lsst.daf.butler.DimensionRecord`
141 The exposure id as an int, or the dataId as as dict, or an
142 expRecord or a dataCoordinate.
144 Returns
145 -------
146 dataId : `dict`
147 The sanitized dataId.
148 """
149 match expIdOrDataId:
150 case int() as expId:
151 dataId = {"expId": expId}
152 dataId.update(**kwargs)
153 return dataId
154 case dafButler.DataCoordinate() as dataId:
155 return dafButler.DataCoordinate.standardize(dataId, **kwargs)
156 case dafButler.DimensionRecord() as record:
157 return dafButler.DataCoordinate.standardize(record.dataId, **kwargs)
158 case dict() as dataId:
159 dataId.update(**kwargs)
160 return dataId
161 raise RuntimeError(f"Invalid expId or dataId type {expIdOrDataId}: {type(expIdOrDataId)}")
163 def clearCache(self):
164 """Clear the internal cache of loaded calibration products.
166 Only necessary if you want to use an existing bestEffortIsr object
167 after adding new calibration products to the calibration collection.
168 """
169 self._cache = {}
171 def getExposure(self, expIdOrDataId, extraIsrOptions={}, skipCosmics=False, forceRemake=False,
172 **kwargs):
173 """Get the postIsr and cosmic-repaired image for this dataId.
175 Note that when using the forceRemake option the image will not be
176 written to the repo for reuse.
178 Parameters
179 ----------
180 expIdOrDataId : `dict`
181 The dataId
182 extraIsrOptions : `dict`, optional
183 extraIsrOptions is a dict of extra isr options applied to this
184 image only.
185 skipCosmics : `bool`, optional # XXX THIS CURRENTLY DOESN'T WORK!
186 Skip doing cosmic ray repair for this image?
187 forceRemake : `bool`
188 Remake the exposure even if there is a pre-existing one in the
189 repo. Images that are force-remade are never written, as this is
190 assumed to be used for testing/debug purposes, as opposed to normal
191 operation. For updating individual images, removal from the
192 registry can be used, and for bulk-updates the overall run number
193 can be incremented.
195 Returns
196 -------
197 exp : `lsst.afw.image.Exposure`
198 The postIsr exposure
199 """
200 dataId = self.updateDataId(expIdOrDataId, **kwargs)
202 if not forceRemake:
203 try:
204 exp = self.butler.get(self._datasetName, dataId)
205 self.log.info("Found a ready-made quickLookExp in the repo. Returning that.")
206 return exp
207 except LookupError:
208 pass
210 try:
211 raw = self.butler.get('raw', dataId)
212 except LookupError:
213 raise RuntimeError(f"Failed to retrieve raw for exp {dataId}") from None
215 # default options that are probably good for most engineering time
216 isrConfig = IsrTask.ConfigClass()
217 isrConfig.doWrite = False # this task writes separately, no need for this
218 isrConfig.doSaturation = True # saturation very important for roundness measurement in qfm
219 isrConfig.doSaturationInterpolation = True
220 isrConfig.overscan.fitType = 'MEDIAN_PER_ROW'
221 isrConfig.overscan.doParallelOverscan = True
223 # apply general overrides
224 self._applyConfigOverrides(isrConfig, self.defaultExtraIsrOptions)
225 # apply per-image overrides
226 self._applyConfigOverrides(isrConfig, extraIsrOptions)
228 isrParts = ['camera', 'bias', 'dark', 'flat', 'defects', 'linearizer', 'crosstalk', 'bfKernel',
229 'bfGains', 'ptc']
231 isrDict = {}
232 # we build a cache of all the isr components which will be used to save
233 # the IO time on subsequent calls. This assumes people will not update
234 # calibration products while this object lives, but this is a fringe
235 # use case, and if they do, all they would need to do would be call
236 # .clearCache() and this will rebuild with the new products.
237 for component in isrParts:
238 if component in self._cache and component != 'flat':
239 self.log.info(f"Using {component} from cache...")
240 isrDict[component] = self._cache[component]
241 continue
242 if datasetExists(self.butler, component, dataId):
243 try:
244 # TODO: add caching for flats
245 item = self.butler.get(component, dataId=dataId)
246 self._cache[component] = item
247 isrDict[component] = self._cache[component]
248 self.log.info(f"Loaded {component} to cache")
249 except Exception: # now that we log the exception, we can catch all errors
250 # the product *should* exist but the get() failed, so log
251 # a very loud warning inc. the traceback as this is a sign
252 # of butler/database failures or something like that.
253 self.log.critical(f'Failed to find expected data product {component}!')
254 self.log.exception(f'Finding failure for {component}:')
255 else:
256 self.log.debug('No %s found for %s', component, dataId)
258 quickLookExp = self.quickLookIsrTask.run(raw, **isrDict, isrBaseConfig=isrConfig).outputExposure
260 if self.doWrite and not forceRemake:
261 try:
262 self.butler.put(quickLookExp, self._datasetName, dataId)
263 self.log.info(f'Put {self._datasetName} for {dataId}')
264 except ConflictingDefinitionError:
265 # TODO: DM-34302 fix this message so that it's less scary for
266 # users. Do this by having daemons know they're daemons.
267 self.log.warning('Skipped putting existing exp into collection! (ignore if there was a race)')
268 pass
270 return quickLookExp