# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = (
    "FitsExposureFormatter",
    "FitsImageFormatter",
    "FitsMaskFormatter",
    "FitsMaskedImageFormatter",
    "standardizeAmplifierParameters",
)

import warnings
from abc import abstractmethod
from collections.abc import Set
from typing import ClassVar

from lsst.afw.cameraGeom import AmplifierGeometryComparison, AmplifierIsolator
from lsst.afw.image import (
    ExposureFitsReader,
    ExposureInfo,
    FilterLabel,
    ImageFitsReader,
    MaskedImageFitsReader,
    MaskFitsReader,
)

# Needed for ApCorrMap to resolve properly
from lsst.afw.math import BoundedField  # noqa: F401
from lsst.daf.base import PropertySet
from lsst.daf.butler import Formatter
from lsst.utils.classes import cached_getter
from lsst.utils.introspection import find_outside_stacklevel


class FitsImageFormatterBase(Formatter):
    """Base class formatter for image-like storage classes stored via FITS.

    Notes
    -----
    This class makes no assumptions about how many HDUs are used to represent
    the image on disk, and includes no support for writing. It's really just
    a collection of miscellaneous boilerplate common to all FITS image
    formatters.

    Concrete subclasses must implement `readComponent`, `readFull`, and
    `write` (even if just to disable them by raising an exception).
    """

    extension = ".fits"
    supportedExtensions: ClassVar[Set[str]] = frozenset({".fits", ".fits.gz", ".fits.fz", ".fz", ".fit"})

    unsupportedParameters: ClassVar[Set[str]] = frozenset()
    """Support all parameters."""

    @property
    @cached_getter
    def checked_parameters(self):
        """The parameters passed by the butler user, after checking them
        against the storage class and transforming `None` into an empty
        `dict` (`dict`).

        This is computed on first use and then cached. It should never be
        accessed when writing. Subclasses that need additional checking
        should delegate to `super` and then check the result before
        returning it.
        """
        parameters = self.fileDescriptor.parameters
        if parameters is None:
            parameters = {}
        self.fileDescriptor.storageClass.validateParameters(parameters)
        return parameters

    def read(self, component=None):
        # Docstring inherited.
        if component is not None:
            return self.readComponent(component)
        return self.readFull()
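
    # A hedged illustration of this dispatch (the ``butler`` object and the
    # "calexp" dataset type below are hypothetical, not part of this module):
    #
    #     butler.get("calexp", dataId)       # read(component=None) -> readFull()
    #     butler.get("calexp.wcs", dataId)   # read(component="wcs") -> readComponent("wcs")
    #
    # The datastore constructs the formatter and calls `read`; which
    # components are available depends on the concrete subclass and its
    # storage class.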

    @abstractmethod
    def readComponent(self, component):
        """Read a component dataset.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file.

        Returns
        -------
        obj : component-dependent
            In-memory component object.

        Raises
        ------
        KeyError
            Raised if the requested component cannot be handled.
        """
        raise NotImplementedError()

    @abstractmethod
    def readFull(self):
        """Read the full dataset (while still accounting for parameters).

        Returns
        -------
        obj : component-dependent
            In-memory component object.
        """
        raise NotImplementedError()


class ReaderFitsImageFormatterBase(FitsImageFormatterBase):
    """Base class formatter for image-like storage classes stored via FITS
    backed by a "reader" object similar to `lsst.afw.image.ImageFitsReader`.

    Notes
    -----
    This class includes no support for writing.

    Concrete subclasses must provide at least the `ReaderClass` attribute
    and a `write` implementation (even just to disable writing by raising).

    The provided implementation of `readComponent` handles only the 'bbox',
    'dimensions', and 'xy0' components common to all image-like storage
    classes. Subclasses with additional components should handle them first,
    then delegate to ``super()`` for these (or, if necessary, delegate first
    and catch `KeyError`).

    The provided implementation of `readFull` handles only parameters that
    can be forwarded directly to the reader class (usually ``bbox`` and
    ``origin``). Concrete subclasses that need to handle additional
    parameters should generally reimplement without delegating (the
    implementation is trivial).
    """


class StandardFitsImageFormatterBase(ReaderFitsImageFormatterBase):
    """Base class interface for image-like storage stored via FITS,
    written using LSST code.

    Notes
    -----
    Concrete subclasses must provide at least the `ReaderClass` attribute.

    The provided implementation of `readComponent` handles only the 'bbox',
    'dimensions', and 'xy0' components common to all image-like storage
    classes. Subclasses with additional components should handle them first,
    then delegate to ``super()`` for these (or, if necessary, delegate first
    and catch `KeyError`).

    The provided implementation of `readFull` handles only parameters that
    can be forwarded directly to the reader class (usually ``bbox`` and
    ``origin``). Concrete subclasses that need to handle additional
    parameters should generally reimplement without delegating (the
    implementation is trivial).

    This Formatter supports write recipes, and assumes its in-memory type
    has ``writeFits`` and (for write recipes) ``writeFitsWithOptions``
    methods.

    Each ``StandardFitsImageFormatterBase`` recipe for FITS compression
    should define ``image``, ``mask`` and ``variance`` entries, each of
    which may contain ``compression`` and ``scaling`` entries. Defaults
    will be provided for any missing elements under ``compression`` and
    ``scaling``.

    The allowed entries under ``compression`` are:

    * ``algorithm`` (`str`): compression algorithm to use
    * ``rows`` (`int`): number of rows per tile (0 = entire dimension)
    * ``columns`` (`int`): number of columns per tile (0 = entire dimension)
    * ``quantizeLevel`` (`float`): cfitsio quantization level

    The allowed entries under ``scaling`` are:

    * ``algorithm`` (`str`): scaling algorithm to use
    * ``bitpix`` (`int`): bits per pixel (0, 8, 16, 32, 64, -32, -64)
    * ``fuzz`` (`bool`): fuzz the values when quantising floating-point
      values?
    * ``seed`` (`int`): seed for random number generator when fuzzing
    * ``maskPlanes`` (`list` of `str`): mask planes to ignore when doing
      statistics
    * ``quantizeLevel`` (`float`): divisor of the standard deviation for
      ``STDEV_*`` scaling
    * ``quantizePad`` (`float`): number of stdev to allow on the low side
      (for ``STDEV_POSITIVE``/``NEGATIVE``)
    * ``bscale`` (`float`): manually specified ``BSCALE``
      (for ``MANUAL`` scaling)
    * ``bzero`` (`float`): manually specified ``BZERO``
      (for ``MANUAL`` scaling)

    A very simple example YAML recipe (for the ``Exposure`` specialization):

    .. code-block:: yaml

        lsst.obs.base.fitsExposureFormatter.FitsExposureFormatter:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default

    """

    supportedWriteParameters = frozenset({"recipe"})
    ReaderClass: type  # must be set by concrete subclasses

    @property
    @cached_getter
    def reader(self):
        """The reader object that backs this formatter's read operations.

        This is computed on first use and then cached. It should never be
        accessed when writing.
        """
        return self.ReaderClass(self.fileDescriptor.location.path)

    def readComponent(self, component):
        # Docstring inherited.
        if component in ("bbox", "dimensions", "xy0"):
            bbox = self.reader.readBBox()
            if component == "dimensions":
                return bbox.getDimensions()
            elif component == "xy0":
                return bbox.getMin()
            else:
                return bbox
        else:
            raise KeyError(f"Unknown component requested: {component}")

    def readFull(self):
        # Docstring inherited.
        return self.reader.read(**self.checked_parameters)

    def write(self, inMemoryDataset):
        """Write a Python object to a file.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to store.
        """
        # Update the location with the formatter-preferred file extension
        self.fileDescriptor.location.updateExtension(self.extension)
        outputPath = self.fileDescriptor.location.path

        # check to see if we have a recipe requested
        recipeName = self.writeParameters.get("recipe")
        recipe = self.getImageCompressionSettings(recipeName)
        if recipe:
            # Can not construct a PropertySet from a hierarchical
            # dict but can update one.
            ps = PropertySet()
            ps.update(recipe)
            inMemoryDataset.writeFitsWithOptions(outputPath, options=ps)
        else:
            inMemoryDataset.writeFits(outputPath)

    def getImageCompressionSettings(self, recipeName):
        """Retrieve the relevant compression settings for this recipe.

        Parameters
        ----------
        recipeName : `str`
            Label associated with the collection of compression parameters
            to select.

        Returns
        -------
        settings : `dict`
            The selected settings.
        """
        # if no recipe has been provided and there is no default
        # return immediately
        if not recipeName:
            if "default" not in self.writeRecipes:
                return {}
            recipeName = "default"

        if recipeName not in self.writeRecipes:
            raise RuntimeError(f"Unrecognized recipe option given for compression: {recipeName}")

        recipe = self.writeRecipes[recipeName]

        # Set the seed based on dataId
        seed = hash(tuple(self.dataId.required.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if plane in recipe and "scaling" in recipe[plane]:
                scaling = recipe[plane]["scaling"]
                if "seed" in scaling and scaling["seed"] == 0:
                    scaling["seed"] = seed

        return recipe

    @classmethod
    def validateWriteRecipes(cls, recipes):
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        TODO: replace this custom validation code with Cerberus (DM-11846)

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate. Can be empty dict or `None`.

        Returns
        -------
        validated : `dict`
            Validated recipes. Returns what was given if there are no
            recipes listed.

        Raises
        ------
        RuntimeError
            Raised if validation fails.
        """
        # Schemas define what should be there, and the default values (and by
        # the default value, the expected type).
        compressionSchema = {
            "algorithm": "NONE",
            "rows": 1,
            "columns": 0,
            "quantizeLevel": 0.0,
        }
        scalingSchema = {
            "algorithm": "NONE",
            "bitpix": 0,
            "maskPlanes": ["NO_DATA"],
            "seed": 0,
            "quantizeLevel": 4.0,
            "quantizePad": 5.0,
            "fuzz": True,
            "bscale": 1.0,
            "bzero": 0.0,
        }

        if not recipes:
            # We can not insist on recipes being specified
            return recipes

        def checkUnrecognized(entry, allowed, description):
            """Check to see if the entry contains unrecognised keywords."""
            unrecognized = set(entry) - set(allowed)
            if unrecognized:
                raise RuntimeError(
                    f"Unrecognized entries when parsing image compression recipe {description}: "
                    f"{unrecognized}"
                )

        validated = {}
        for name in recipes:
            checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
            validated[name] = {}
            for plane in ("image", "mask", "variance"):
                checkUnrecognized(recipes[name][plane], ["compression", "scaling"], f"{name}->{plane}")

                np = {}
                validated[name][plane] = np
                for settings, schema in (("compression", compressionSchema), ("scaling", scalingSchema)):
                    np[settings] = {}
                    if settings not in recipes[name][plane]:
                        for key in schema:
                            np[settings][key] = schema[key]
                        continue
                    entry = recipes[name][plane][settings]
                    checkUnrecognized(entry, schema.keys(), f"{name}->{plane}->{settings}")
                    for key in schema:
                        value = type(schema[key])(entry[key]) if key in entry else schema[key]
                        np[settings][key] = value
        return validated
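
    # Illustrative sketch of recipe validation (the ``recipes`` dict below is
    # hypothetical, not shipped with this package): entries missing from
    # ``compression`` or ``scaling`` are filled in from the schema defaults
    # defined above.
    #
    #     recipes = {
    #         "lossless": {
    #             "image": {"compression": {"algorithm": "GZIP_SHUFFLE"}},
    #             "mask": {"compression": {"algorithm": "GZIP_SHUFFLE"}},
    #             "variance": {"compression": {"algorithm": "GZIP_SHUFFLE"}},
    #         }
    #     }
    #     validated = StandardFitsImageFormatterBase.validateWriteRecipes(recipes)
    #     assert validated["lossless"]["image"]["compression"]["rows"] == 1
    #     assert validated["lossless"]["image"]["scaling"]["algorithm"] == "NONE"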

class FitsImageFormatter(StandardFitsImageFormatterBase):
    """Concrete formatter for reading/writing `~lsst.afw.image.Image`
    from/to FITS.
    """

    ReaderClass = ImageFitsReader


class FitsMaskFormatter(StandardFitsImageFormatterBase):
    """Concrete formatter for reading/writing `~lsst.afw.image.Mask`
    from/to FITS.
    """

    ReaderClass = MaskFitsReader


class FitsMaskedImageFormatter(StandardFitsImageFormatterBase):
    """Concrete formatter for reading/writing `~lsst.afw.image.MaskedImage`
    from/to FITS.
    """

    ReaderClass = MaskedImageFitsReader

    def readComponent(self, component):
        # Docstring inherited.
        if component == "image":
            return self.reader.readImage(**self.checked_parameters)
        elif component == "mask":
            return self.reader.readMask(**self.checked_parameters)
        elif component == "variance":
            return self.reader.readVariance(**self.checked_parameters)
        else:
            # Delegate to base for bbox, dimensions, xy0.
            return super().readComponent(component)
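
    # Hedged usage sketch (``butler``, the "calexp" dataset type, and ``box``
    # are hypothetical, not defined in this module): parameters passed to
    # Butler.get arrive here via ``checked_parameters`` and are forwarded to
    # the reader, so a sub-region of one plane can be read on its own:
    #
    #     cutout = butler.get("calexp.image", dataId, parameters={"bbox": box})
    #
    # which resolves to ``self.reader.readImage(bbox=box)`` above.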


def standardizeAmplifierParameters(parameters, on_disk_detector):
    """Preprocess the Exposure storage class's "amp" and "detector"
    parameters.

    This checks the given objects for consistency with the on-disk geometry
    and converts amplifier IDs/names to Amplifier instances.

    Parameters
    ----------
    parameters : `dict`
        Dictionary of parameters passed to formatter. See the Exposure
        storage class definition in daf_butler for allowed keys and values.
    on_disk_detector : `lsst.afw.cameraGeom.Detector` or `None`
        Detector that represents the on-disk image being loaded, or `None`
        if this is unknown (and hence the user must provide one in
        ``parameters`` if "amp" is in ``parameters``).

    Returns
    -------
    amplifier : `lsst.afw.cameraGeom.Amplifier` or `None`
        An amplifier object that defines a subimage to load, or `None` if
        there was no "amp" parameter.
    detector : `lsst.afw.cameraGeom.Detector` or `None`
        A detector object whose amplifiers are in the same
        assembly/orientation state as the on-disk image. If there is no
        "amp" parameter, ``on_disk_detector`` is simply passed through.
    regions_differ : `bool`
        `True` if the on-disk detector and the detector given in the
        parameters had different bounding boxes for one or more regions.
        This can happen if the true overscan region sizes can only be
        determined when the image is actually read, but otherwise it should
        be considered user error.
    """
    if (amplifier := parameters.get("amp")) is None:
        return None, on_disk_detector, False
    if "bbox" in parameters or "origin" in parameters:
        raise ValueError("Cannot pass 'amp' with 'bbox' or 'origin'.")
    if isinstance(amplifier, int | str):
        amp_key = amplifier
        target_amplifier = None
    else:
        amp_key = amplifier.getName()
        target_amplifier = amplifier
    if (detector := parameters.get("detector")) is not None:
        if on_disk_detector is not None:
            # User passed a detector and we also found one on disk. Check
            # them for consistency. Note that we are checking the amps we'd
            # get from the two detectors against each other, not the
            # amplifier we got directly from the user, as the latter is
            # allowed to differ in assembly/orientation state.
            comparison = on_disk_detector[amp_key].compareGeometry(detector[amp_key])
            if comparison & comparison.ASSEMBLY_DIFFERS:
                raise ValueError(
                    "The given 'detector' has a different assembly state and/or orientation from "
                    f"the on-disk one for amp {amp_key}."
                )
    else:
        if on_disk_detector is None:
            raise ValueError(
                f"No on-disk detector and no detector given; cannot load amplifier from key {amp_key}. "
                "Please provide either a 'detector' parameter or an Amplifier instance in the "
                "'amp' parameter."
            )
        comparison = AmplifierGeometryComparison.EQUAL
        detector = on_disk_detector
    if target_amplifier is None:
        target_amplifier = detector[amp_key]
    return target_amplifier, detector, comparison & comparison.REGIONS_DIFFER
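
# Hedged usage sketch (``butler``, the "raw" dataset type, and the amplifier
# name are hypothetical, not defined in this module): an Exposure read can be
# restricted to a single amplifier by passing the "amp" parameter, which
# `FitsExposureFormatter.readFull` below routes through this function and
# `AmplifierIsolator`:
#
#     amp_exposure = butler.get("raw", dataId, parameters={"amp": "C00"})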

class FitsExposureFormatter(FitsMaskedImageFormatter):
    """Concrete formatter for reading/writing `~lsst.afw.image.Exposure`
    from/to FITS.

    Notes
    -----
    This class inherits from `FitsMaskedImageFormatter` even though
    `lsst.afw.image.Exposure` doesn't inherit from
    `lsst.afw.image.MaskedImage`; this is just an easy way to be able to
    delegate to `FitsMaskedImageFormatter.super()` for component-handling,
    and should be replaced with e.g. both calling a free function if that
    slight type covariance violation ever becomes a practical problem.
    """

    ReaderClass = ExposureFitsReader

    def readComponent(self, component):
        # Docstring inherited.
        # Generic components can be read via a string name; DM-27754 will
        # make this mapping larger at the expense of the following one.
        genericComponents = {
            "summaryStats": ExposureInfo.KEY_SUMMARY_STATS,
        }
        if (genericComponentName := genericComponents.get(component)) is not None:
            return self.reader.readComponent(genericComponentName)
        # Other components have hard-coded method names, but don't take
        # parameters.
        standardComponents = {
            "id": "readExposureId",
            "metadata": "readMetadata",
            "wcs": "readWcs",
            "coaddInputs": "readCoaddInputs",
            "psf": "readPsf",
            "photoCalib": "readPhotoCalib",
            "filter": "readFilter",
            "validPolygon": "readValidPolygon",
            "apCorrMap": "readApCorrMap",
            "visitInfo": "readVisitInfo",
            "transmissionCurve": "readTransmissionCurve",
            "detector": "readDetector",
            "exposureInfo": "readExposureInfo",
        }
        if (methodName := standardComponents.get(component)) is not None:
            result = getattr(self.reader, methodName)()
            if component == "filter":
                return self._fixFilterLabels(result)
            return result
        # Delegate to MaskedImage and ImageBase implementations for the rest.
        return super().readComponent(component)
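
    # Hedged examples of the component mapping above (``butler`` and the
    # "calexp" dataset type are hypothetical, not defined here):
    #
    #     psf = butler.get("calexp.psf", dataId)         # -> reader.readPsf()
    #     info = butler.get("calexp.visitInfo", dataId)  # -> reader.readVisitInfo()
    #
    # The "filter" component additionally passes through `_fixFilterLabels`
    # to reconcile the file's `FilterLabel` with the data ID.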

    def readFull(self):
        # Docstring inherited.
        amplifier, detector, _ = standardizeAmplifierParameters(
            self.checked_parameters,
            self.reader.readDetector(),
        )
        if amplifier is not None:
            amplifier_isolator = AmplifierIsolator(
                amplifier,
                self.reader.readBBox(),
                detector,
            )
            result = amplifier_isolator.transform_subimage(
                self.reader.read(bbox=amplifier_isolator.subimage_bbox)
            )
            result.setDetector(amplifier_isolator.make_detector())
        else:
            result = self.reader.read(**self.checked_parameters)
        result.getInfo().setFilter(self._fixFilterLabels(result.getInfo().getFilter()))
        return result

    def _fixFilterLabels(self, file_filter_label, should_be_standardized=None):
        """Compare the filter label read from the file with the one in the
        data ID.

        Parameters
        ----------
        file_filter_label : `lsst.afw.image.FilterLabel` or `None`
            Filter label read from the file, if there was one.
        should_be_standardized : `bool`, optional
            If `True`, expect ``file_filter_label`` to be consistent with the
            data ID and warn only if it is not. If `False`, expect it to be
            inconsistent and warn only if the data ID is incomplete and hence
            the `FilterLabel` cannot be fixed. If `None` (default), guess
            whether the file should be standardized by looking at the
            serialization version number in the file, which requires this
            method to have been run after `readFull` or `readComponent`.

        Returns
        -------
        filter_label : `lsst.afw.image.FilterLabel` or `None`
            The preferred filter label; may be the given one or one built
            from the data ID. `None` is returned if there should never be
            any filters associated with this dataset type.

        Notes
        -----
        Most test coverage for this method is in ci_hsc_gen3, where we have
        much easier access to test data that exhibits the problems it
        attempts to solve.
        """
        # Remember filter data ID keys that weren't in this particular data
        # ID, so we can warn about them later.
        missing = []
        band = None
        physical_filter = None
        if "band" in self.dataId.dimensions.names:
            band = self.dataId.get("band")
            # band isn't in the data ID; is that just because this data ID
            # hasn't been filled in with everything the Registry knows, or
            # because this dataset is never associated with a band?
            if band is None and not self.dataId.hasFull() and "band" in self.dataId.dimensions.implied:
                missing.append("band")
        if "physical_filter" in self.dataId.dimensions.names:
            physical_filter = self.dataId.get("physical_filter")
            # Same check as above for band, but for physical_filter.
            if (
                physical_filter is None
                and not self.dataId.hasFull()
                and "physical_filter" in self.dataId.dimensions.implied
            ):
                missing.append("physical_filter")
        if should_be_standardized is None:
            version = self.reader.readSerializationVersion()
            should_be_standardized = version >= 2
        if missing:
            # Data ID identifies a filter but the actual filter label values
            # haven't been fetched from the database; we have no choice but
            # to use the one in the file.
            # Warn if that's more likely than not to be bad, because the file
            # predates filter standardization.
            if not should_be_standardized:
                warnings.warn(
                    f"Data ID {self.dataId} is missing (implied) value(s) for {missing}; "
                    "the correctness of this Exposure's FilterLabel cannot be guaranteed. "
                    "Call Registry.expandDataId before Butler.get to avoid this.",
                    # Report the warning from outside of middleware or the
                    # relevant runQuantum method.
                    stacklevel=find_outside_stacklevel(
                        "lsst.obs.base", "lsst.pipe.base", "lsst.daf.butler", allow_methods={"runQuantum"}
                    ),
                )
            return file_filter_label
        if band is None and physical_filter is None:
            data_id_filter_label = None
        else:
            data_id_filter_label = FilterLabel(band=band, physical=physical_filter)
        if data_id_filter_label != file_filter_label and should_be_standardized:
            # File was written after FilterLabel and standardization, but its
            # FilterLabel doesn't agree with the data ID: this indicates a
            # bug in whatever code produced the Exposure (though it may be
            # one that has been fixed since the file was written).
            warnings.warn(
                f"Reading {self.fileDescriptor.location} with data ID {self.dataId}: "
                f"filter label mismatch (file is {file_filter_label}, data ID is "
                f"{data_id_filter_label}). This is probably a bug in the code that produced it.",
                stacklevel=find_outside_stacklevel(
                    "lsst.obs.base", "lsst.pipe.base", "lsst.daf.butler", allow_methods={"runQuantum"}
                ),
            )
        return data_id_filter_label