Coverage for python/lsst/cp/pipe/cpCombine.py: 21%

221 statements  

coverage.py v6.5.0, created at 2023-02-09 04:13 -0800

1# This file is part of cp_pipe. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21import numpy as np 

22import time 

23 

24import lsst.geom as geom 

25import lsst.pex.config as pexConfig 

26import lsst.pipe.base as pipeBase 

27import lsst.pipe.base.connectionTypes as cT 

28import lsst.afw.math as afwMath 

29import lsst.afw.image as afwImage 

30 

31from lsst.ip.isr.vignette import maskVignettedRegion 

32 

33from astro_metadata_translator import merge_headers, ObservationGroup 

34from astro_metadata_translator.serialize import dates_to_fits 

35 

36 

37__all__ = ["CalibStatsConfig", "CalibStatsTask", 

38 "CalibCombineConfig", "CalibCombineConnections", "CalibCombineTask", 

39 "CalibCombineByFilterConfig", "CalibCombineByFilterConnections", "CalibCombineByFilterTask"] 

40 

41 

42# CalibStatsConfig/CalibStatsTask from pipe_base/constructCalibs.py 

43class CalibStatsConfig(pexConfig.Config): 

44 """Parameters controlling the measurement of background 

45 statistics. 

46 """ 

47 

48 stat = pexConfig.Field( 

49 dtype=str, 

50 default="MEANCLIP", 

51 doc="Statistic name to use to estimate background (from `~lsst.afw.math.Property`)", 

52 ) 

53 clip = pexConfig.Field( 

54 dtype=float, 

55 default=3.0, 

56 doc="Clipping threshold for background", 

57 ) 

58 nIter = pexConfig.Field( 

59 dtype=int, 

60 default=3, 

61 doc="Clipping iterations for background", 

62 ) 

63 mask = pexConfig.ListField( 

64 dtype=str, 

65 default=["DETECTED", "BAD", "NO_DATA"], 

66 doc="Mask planes to reject", 

67 ) 

68 

69 

70class CalibStatsTask(pipeBase.Task): 

71 """Measure statistics on the background 

72 

73 This can be useful for scaling the background, e.g., for flats and 

74 fringe frames. 

75 """ 

76 

77 ConfigClass = CalibStatsConfig 

78 

79 def run(self, exposureOrImage): 

80 """Measure a particular statistic on an image (of some sort). 

81 

82 Parameters 

83 ---------- 

84 exposureOrImage : `lsst.afw.image.Exposure`, 

85 `lsst.afw.image.MaskedImage`, or 

86 `lsst.afw.image.Image` 

87 Exposure or image to calculate statistics on. 

88 

89 Returns 

90 ------- 

91 results : `float` 

92 Resulting statistic value. 

93 """ 

94 stats = afwMath.StatisticsControl(self.config.clip, self.config.nIter, 

95 afwImage.Mask.getPlaneBitMask(self.config.mask)) 

96 try: 

97 image = exposureOrImage.getMaskedImage() 

98 except Exception: 

99 try: 

100 image = exposureOrImage.getImage() 

101 except Exception: 

102 image = exposureOrImage 

103 statType = afwMath.stringToStatisticsProperty(self.config.stat) 

104 return afwMath.makeStatistics(image, statType, stats).getValue() 

105 

106 
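# Illustrative usage sketch (assumes a working LSST stack; the function name
# and image values are made up for this example, not part of this module):
# configure CalibStatsTask to return the clipped mean of a masked image.
def exampleCalibStats():
    config = CalibStatsConfig()
    config.stat = "MEANCLIP"
    config.clip = 3.0
    task = CalibStatsTask(config=config)
    image = afwImage.MaskedImageF(64, 64)
    image.image.array[:, :] = 100.0
    return task.run(image)  # -> 100.0 for this constant image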

107class CalibCombineConnections(pipeBase.PipelineTaskConnections, 

108 dimensions=("instrument", "detector")): 

109 inputExpHandles = cT.Input( 

110 name="cpInputs", 

111 doc="Input pre-processed exposures to combine.", 

112 storageClass="Exposure", 

113 dimensions=("instrument", "detector", "exposure"), 

114 multiple=True, 

115 deferLoad=True, 

116 ) 

117 inputScales = cT.Input( 

118 name="cpScales", 

119 doc="Input scale factors to use.", 

120 storageClass="StructuredDataDict", 

121 dimensions=("instrument", ), 

122 multiple=False, 

123 ) 

124 

125 outputData = cT.Output( 

126 name="cpProposal", 

127 doc="Output combined proposed calibration to be validated and certified..", 

128 storageClass="ExposureF", 

129 dimensions=("instrument", "detector"), 

130 isCalibration=True, 

131 ) 

132 

133 def __init__(self, *, config=None): 

134 super().__init__(config=config) 

135 

136 if config and config.exposureScaling != "InputList": 

137 self.inputs.discard("inputScales") 

138 

139 

140# CalibCombineConfig/CalibCombineTask from pipe_base/constructCalibs.py 

141class CalibCombineConfig(pipeBase.PipelineTaskConfig, 

142 pipelineConnections=CalibCombineConnections): 

143 """Configuration for combining calib exposures. 

144 """ 

145 

146 calibrationType = pexConfig.Field( 

147 dtype=str, 

148 default="calibration", 

149 doc="Name of calibration to be generated.", 

150 ) 

151 

152 exposureScaling = pexConfig.ChoiceField( 

153 dtype=str, 

154 allowed={ 

155 "Unity": "Do not scale inputs. Scale factor is 1.0.", 

156 "ExposureTime": "Scale inputs by their exposure time.", 

157 "DarkTime": "Scale inputs by their dark time.", 

158 "MeanStats": "Scale inputs based on their mean values.", 

159 "InputList": "Scale inputs based on a list of values.", 

160 }, 

161 default="Unity", 

162 doc="Scaling to be applied to each input exposure.", 

163 ) 

164 scalingLevel = pexConfig.ChoiceField( 

165 dtype=str, 

166 allowed={ 

167 "DETECTOR": "Scale by detector.", 

168 "AMP": "Scale by amplifier.", 

169 }, 

170 default="DETECTOR", 

171 doc="Region to scale.", 

172 ) 

173 maxVisitsToCalcErrorFromInputVariance = pexConfig.Field( 

174 dtype=int, 

175 default=5, 

176 doc="Maximum number of visits to estimate variance from input variance, not per-pixel spread", 

177 ) 

178 subregionSize = pexConfig.ListField( 

179 dtype=int, 

180 doc="Width, height of subregion size.", 

181 length=2, 

182 # This is 200 rows for all detectors smaller than 10k in width. 

183 default=(10000, 200), 

184 ) 

185 

186 doVignette = pexConfig.Field( 

187 dtype=bool, 

188 default=False, 

189 doc="Copy vignette polygon to output and censor vignetted pixels?" 

190 ) 

191 

192 mask = pexConfig.ListField( 

193 dtype=str, 

194 default=["SAT", "DETECTED", "INTRP"], 

195 doc="Mask planes to respect", 

196 ) 

197 combine = pexConfig.Field( 

198 dtype=str, 

199 default="MEANCLIP", 

200 doc="Statistic name to use for combination (from `~lsst.afw.math.Property`)", 

201 ) 

202 clip = pexConfig.Field( 

203 dtype=float, 

204 default=3.0, 

205 doc="Clipping threshold for combination", 

206 ) 

207 nIter = pexConfig.Field( 

208 dtype=int, 

209 default=3, 

210 doc="Clipping iterations for combination", 

211 ) 

212 stats = pexConfig.ConfigurableField( 

213 target=CalibStatsTask, 

214 doc="Background statistics configuration", 

215 ) 

216 

217 
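# A minimal configuration sketch (standard pex_config usage; the particular
# values are illustrative, not prescribed defaults) for combining darks:
# scale each input by its dark time and stack with a 3-sigma clipped mean.
exampleConfig = CalibCombineConfig()
exampleConfig.calibrationType = "dark"
exampleConfig.exposureScaling = "DarkTime"
exampleConfig.combine = "MEANCLIP"
exampleConfig.clip = 3.0
exampleConfig.nIter = 3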

218class CalibCombineTask(pipeBase.PipelineTask): 

219 """Task to combine calib exposures.""" 

220 

221 ConfigClass = CalibCombineConfig 

222 _DefaultName = "cpCombine" 

223 

224 def __init__(self, **kwargs): 

225 super().__init__(**kwargs) 

226 self.makeSubtask("stats") 

227 

228 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

229 inputs = butlerQC.get(inputRefs) 

230 

231 dimensions = [expHandle.dataId.byName() for expHandle in inputRefs.inputExpHandles] 

232 inputs["inputDims"] = dimensions 

233 

234 outputs = self.run(**inputs) 

235 butlerQC.put(outputs, outputRefs) 

236 

237 def run(self, inputExpHandles, inputScales=None, inputDims=None): 

238 """Combine calib exposures for a single detector. 

239 

240 Parameters 

241 ---------- 

242 inputExpHandles : `list` [`lsst.daf.butler.DeferredDatasetHandle`] 

243 Input list of exposure handles to combine. 

244 inputScales : `dict` [`dict` [`dict` [`float`]]], optional 

245 Dictionary of scales, indexed by detector id (`int`),

246 amplifier name (`str`), and exposure id (`int`). Only used

247 when ``config.exposureScaling == "InputList"``.

248 inputDims : `list` [`dict`], optional

249 List of dictionaries of input data dimensions/values. 

250 Each list entry should contain: 

251 

252 ``"exposure"`` 

253 exposure id value (`int`) 

254 ``"detector"`` 

255 detector id value (`int`) 

256 

257 Returns 

258 ------- 

259 results : `lsst.pipe.base.Struct` 

260 The results struct containing: 

261 

262 ``outputData`` 

263 Final combined exposure generated from the inputs 

264 (`lsst.afw.image.Exposure`). 

265 

266 Raises 

267 ------ 

268 RuntimeError 

269 Raised if no input data is found. Also raised if 

270 config.exposureScaling == InputList, and a necessary scale 

271 was not found. 

272 """ 

273 width, height = self.getDimensions(inputExpHandles) 

274 stats = afwMath.StatisticsControl(self.config.clip, self.config.nIter, 

275 afwImage.Mask.getPlaneBitMask(self.config.mask)) 

276 numExps = len(inputExpHandles) 

277 if numExps < 1: 

278 raise RuntimeError("No valid input data") 

279 if numExps < self.config.maxVisitsToCalcErrorFromInputVariance: 

280 stats.setCalcErrorFromInputVariance(True) 

281 

282 inputDetector = inputExpHandles[0].get(component="detector") 

283 

284 # Create output exposure for combined data. 

285 combined = afwImage.MaskedImageF(width, height) 

286 combinedExp = afwImage.makeExposure(combined) 

287 

288 # Apply scaling: 

289 expScales = [] 

290 if inputDims is None: 

291 inputDims = [dict() for i in inputExpHandles] 

292 

293 for index, (expHandle, dims) in enumerate(zip(inputExpHandles, inputDims)): 

294 scale = 1.0 

295 visitInfo = expHandle.get(component="visitInfo") 

296 if self.config.exposureScaling == "ExposureTime": 

297 scale = visitInfo.getExposureTime() 

298 elif self.config.exposureScaling == "DarkTime": 

299 scale = visitInfo.getDarkTime() 

300 elif self.config.exposureScaling == "MeanStats": 

301 # Note: there may be a bug freeing memory here. TBD.

302 exp = expHandle.get() 

303 scale = self.stats.run(exp) 

304 del exp 

305 elif self.config.exposureScaling == "InputList": 

306 visitId = dims.get("exposure", None) 

307 detectorId = dims.get("detector", None) 

308 if visitId is None or detectorId is None: 

309 raise RuntimeError(f"Could not identify scaling for input {index} ({dims})") 

310 if detectorId not in inputScales["expScale"]: 

311 raise RuntimeError(f"Could not identify a scaling for input {index}" 

312 f" detector {detectorId}") 

313 

314 if self.config.scalingLevel == "DETECTOR": 

315 if visitId not in inputScales["expScale"][detectorId]: 

316 raise RuntimeError(f"Could not identify a scaling for input {index}" 

317 f"detector {detectorId} visit {visitId}") 

318 scale = inputScales["expScale"][detectorId][visitId] 

319 elif self.config.scalingLevel == "AMP": 

320 scale = [inputScales["expScale"][detectorId][amp.getName()][visitId] 

321 for amp in inputDetector] 

322 else: 

323 raise RuntimeError(f"Unknown scaling level: {self.config.scalingLevel}") 

324 elif self.config.exposureScaling == "Unity": 

325 scale = 1.0 

326 else: 

327 raise RuntimeError(f"Unknown scaling type: {self.config.exposureScaling}.") 

328 

329 expScales.append(scale) 

330 self.log.info("Scaling input %d by %s", index, scale) 

331 

332 self.combine(combinedExp, inputExpHandles, expScales, stats) 

333 

334 self.interpolateNans(combined) 

335 

336 if self.config.doVignette: 

337 polygon = inputExpHandles[0].get(component="validPolygon") 

338 maskVignettedRegion(combined, polygon=polygon, vignetteValue=0.0) 

339 

340 # Combine headers 

341 self.combineHeaders(inputExpHandles, combinedExp, 

342 calibType=self.config.calibrationType, scales=expScales) 

343 

344 # Set the detector 

345 combinedExp.setDetector(inputDetector) 

346 

347 # Do we need to set a filter? 

348 filterLabel = inputExpHandles[0].get(component="filter") 

349 self.setFilter(combinedExp, filterLabel) 

350 

351 # Return 

352 return pipeBase.Struct( 

353 outputData=combinedExp, 

354 ) 

355 
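# Illustrative only: the nested layout run() expects for ``inputScales`` when
# ``exposureScaling == "InputList"``.  Detector id 0, visit id 2023020900001,
# and amplifier name "C00" are made-up example keys.
exampleScalesDetector = {
    "expScale": {0: {2023020900001: 1.02}},           # scalingLevel == "DETECTOR"
}
exampleScalesAmp = {
    "expScale": {0: {"C00": {2023020900001: 1.01}}},  # scalingLevel == "AMP"
}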

356 def getDimensions(self, expHandleList): 

357 """Get dimensions of the inputs. 

358 

359 Parameters 

360 ---------- 

361 expHandleList : `list` [`lsst.daf.butler.DeferredDatasetHandle`] 

362 Exposure handles to check the sizes of. 

363 

364 Returns 

365 ------- 

366 width, height : `int` 

367 Common width and height shared by all inputs.

368 """ 

369 dimList = [expHandle.get(component="bbox").getDimensions() for expHandle in expHandleList] 

370 

371 return self.getSize(dimList) 

372 

373 def getSize(self, dimList): 

374 """Determine a consistent size, given a list of image sizes. 

375 

376 Parameters 

377 ---------- 

378 dimList : `list` [`tuple` [`int`, `int`]] 

379 List of dimensions. 

380 

381 Raises 

382 ------ 

383 RuntimeError 

384 If input dimensions are inconsistent. 

385 

386 Returns 

387 ------- 

388 width, height : `int` 

389 Common dimensions. 

390 """ 

391 dim = set((w, h) for w, h in dimList) 

392 if len(dim) != 1: 

393 raise RuntimeError("Inconsistent dimensions: %s" % dim) 

394 return dim.pop() 

395 
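# Small sketch of the consistency check performed by getSize(): identical
# input dimensions collapse to a single (width, height) pair, while mixed
# sizes raise RuntimeError.  Assumes a configured LSST stack; the function
# name and dimensions are hypothetical.
def exampleGetSize():
    task = CalibCombineTask()
    return task.getSize([(4096, 4004), (4096, 4004)])  # -> (4096, 4004)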

396 def applyScale(self, exposure, bbox=None, scale=None): 

397 """Apply scale to input exposure. 

398 

399 This implementation applies a flux scaling: the input exposure is 

400 divided by the provided scale. 

401 

402 Parameters 

403 ---------- 

404 exposure : `lsst.afw.image.Exposure` 

405 Exposure to scale. 

406 bbox : `lsst.geom.Box2I`, optional

407 BBox matching the segment of the exposure passed in; only used when ``scale`` is a list.

408 scale : `float` or `list` [`float`], optional 

409 Constant scale to divide the exposure by. 

410 """ 

411 if scale is not None: 

412 mi = exposure.getMaskedImage() 

413 if isinstance(scale, list): 

414 # Create a realization of the per-amp scales as an 

415 # image we can take a subset of. This may be slightly 

416 # slower than only populating the region we care 

417 # about, but this avoids needing to do arbitrary 

418 # numbers of offsets, etc. 

419 scaleExp = afwImage.MaskedImageF(exposure.getDetector().getBBox()) 

420 for amp, ampScale in zip(exposure.getDetector(), scale): 

421 scaleExp.image[amp.getBBox()] = ampScale 

422 scale = scaleExp[bbox] 

423 mi /= scale 

424 
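# Sketch of the scalar branch of applyScale(): the whole masked image is
# divided by the constant scale.  Assumes a configured LSST stack; the
# function name and pixel values are made up for illustration.
def exampleApplyScale():
    exp = afwImage.ExposureF(8, 8)
    exp.image.array[:, :] = 4.0
    CalibCombineTask().applyScale(exp, bbox=exp.getBBox(), scale=2.0)
    return exp.image.array[0, 0]  # -> 2.0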

425 @staticmethod 

426 def _subBBoxIter(bbox, subregionSize): 

427 """Iterate over subregions of a bbox. 

428 

429 Parameters 

430 ---------- 

431 bbox : `lsst.geom.Box2I` 

432 Bounding box over which to iterate. 

433 subregionSize : `lsst.geom.Extent2I`

434 Size of sub-bboxes. 

435 

436 Yields 

437 ------ 

438 subBBox : `lsst.geom.Box2I` 

439 Next sub-bounding box of size ``subregionSize`` or 

440 smaller; each ``subBBox`` is contained within ``bbox``, so 

441 it may be smaller than ``subregionSize`` at the edges of 

442 ``bbox``, but it will never be empty. 

443 """ 

444 if bbox.isEmpty(): 

445 raise RuntimeError("bbox %s is empty" % (bbox,)) 

446 if subregionSize[0] < 1 or subregionSize[1] < 1: 

447 raise RuntimeError("subregionSize %s must be nonzero" % (subregionSize,)) 

448 

449 for rowShift in range(0, bbox.getHeight(), subregionSize[1]): 

450 for colShift in range(0, bbox.getWidth(), subregionSize[0]): 

451 subBBox = geom.Box2I(bbox.getMin() + geom.Extent2I(colShift, rowShift), subregionSize) 

452 subBBox.clip(bbox) 

453 if subBBox.isEmpty(): 

454 raise RuntimeError("Bug: empty bbox! bbox=%s, subregionSize=%s, " 

455 "colShift=%s, rowShift=%s" % 

456 (bbox, subregionSize, colShift, rowShift)) 

457 yield subBBox 

458 
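# Sketch of the tiling produced by _subBBoxIter(): a 100x50 parent box walked
# with 40x30 tiles yields edge tiles clipped to the parent box.  The box
# sizes here are arbitrary example values.
def exampleSubBBoxes():
    bbox = geom.Box2I(geom.Point2I(0, 0), geom.Extent2I(100, 50))
    sizes = [subBBox.getDimensions()
             for subBBox in CalibCombineTask._subBBoxIter(bbox, geom.Extent2I(40, 30))]
    # widths/heights: (40, 30), (40, 30), (20, 30), (40, 20), (40, 20), (20, 20)
    return sizes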

459 def combine(self, target, expHandleList, expScaleList, stats): 

460 """Combine multiple images. 

461 

462 Parameters 

463 ---------- 

464 target : `lsst.afw.image.Exposure` 

465 Output exposure to construct. 

466 expHandleList : `list` [`lsst.daf.butler.DeferredDatasetHandle`] 

467 Input exposure handles to combine. 

468 expScaleList : `list` [`float`] 

469 List of scales to apply to each input image. 

470 stats : `lsst.afw.math.StatisticsControl` 

471 Control explaining how to combine the input images. 

472 """ 

473 combineType = afwMath.stringToStatisticsProperty(self.config.combine) 

474 

475 subregionSizeArr = self.config.subregionSize 

476 subregionSize = geom.Extent2I(subregionSizeArr[0], subregionSizeArr[1]) 

477 for subBbox in self._subBBoxIter(target.getBBox(), subregionSize): 

478 images = [] 

479 for expHandle, expScale in zip(expHandleList, expScaleList): 

480 inputExp = expHandle.get(parameters={"bbox": subBbox}) 

481 self.applyScale(inputExp, subBbox, expScale) 

482 images.append(inputExp.getMaskedImage()) 

483 

484 combinedSubregion = afwMath.statisticsStack(images, combineType, stats) 

485 target.maskedImage.assign(combinedSubregion, subBbox) 

486 
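# Standalone sketch of the per-subregion stack used above: statisticsStack
# combines a list of MaskedImageF with the requested statistic.  A default
# StatisticsControl stands in for the configured one; assumes a working
# LSST stack.
def exampleStack():
    images = [afwImage.MaskedImageF(4, 4) for _ in range(3)]
    for value, maskedImage in zip((0.0, 1.0, 2.0), images):
        maskedImage.image.array[:, :] = value
    stacked = afwMath.statisticsStack(images, afwMath.MEANCLIP, afwMath.StatisticsControl())
    return stacked.image.array[0, 0]  # -> 1.0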

487 def combineHeaders(self, expHandleList, calib, calibType="CALIB", scales=None): 

488 """Combine input headers to determine the set of common headers, 

489 supplemented by calibration inputs. 

490 

491 Parameters 

492 ---------- 

493 expHandleList : `list` [`lsst.daf.butler.DeferredDatasetHandle`] 

494 Input list of exposure handles to combine. 

495 calib : `lsst.afw.image.Exposure` 

496 Output calibration to construct headers for. 

497 calibType : `str`, optional 

498 OBSTYPE the output should claim. 

499 scales : `list` [`float`], optional 

500 Scale values applied to each input to record. 

501 

502 Returns 

503 ------- 

504 header : `lsst.daf.base.PropertyList` 

505 Constructed header. 

506 """ 

507 # Header 

508 header = calib.getMetadata() 

509 header.set("OBSTYPE", calibType) 

510 

511 # Keywords we care about 

512 comments = {"TIMESYS": "Time scale for all dates", 

513 "DATE-OBS": "Start date of earliest input observation", 

514 "MJD-OBS": "[d] Start MJD of earliest input observation", 

515 "DATE-END": "End date of oldest input observation", 

516 "MJD-END": "[d] End MJD of oldest input observation", 

517 "MJD-AVG": "[d] MJD midpoint of all input observations", 

518 "DATE-AVG": "Midpoint date of all input observations"} 

519 

520 # Creation date 

521 now = time.localtime() 

522 calibDate = time.strftime("%Y-%m-%d", now) 

523 calibTime = time.strftime("%X %Z", now) 

524 header.set("CALIB_CREATION_DATE", calibDate) 

525 header.set("CALIB_CREATION_TIME", calibTime) 

526 

527 # Merge input headers 

528 inputHeaders = [expHandle.get(component="metadata") for expHandle in expHandleList] 

529 merged = merge_headers(inputHeaders, mode="drop") 

530 

531 # Scan the first header for items that were dropped due to 

532 # conflict, and replace them. 

533 for k, v in merged.items(): 

534 if k not in header: 

535 md = inputHeaders[0] 

536 comment = md.getComment(k) if k in md else None 

537 header.set(k, v, comment=comment) 

538 

539 # Construct list of visits 

540 visitInfoList = [expHandle.get(component="visitInfo") for expHandle in expHandleList] 

541 for i, visit in enumerate(visitInfoList): 

542 if visit is None: 

543 continue 

544 header.set("CPP_INPUT_%d" % (i,), visit.id) 

545 header.set("CPP_INPUT_DATE_%d" % (i,), str(visit.getDate())) 

546 header.set("CPP_INPUT_EXPT_%d" % (i,), visit.getExposureTime()) 

547 if scales is not None: 

548 header.set("CPP_INPUT_SCALE_%d" % (i,), scales[i]) 

549 

550 # Not yet working: DM-22302 

551 # Create an observation group so we can add some standard headers 

552 # independent of the form in the input files. 

553 # Use try block in case we are dealing with unexpected data headers 

554 try: 

555 group = ObservationGroup(visitInfoList, pedantic=False) 

556 except Exception: 

557 self.log.warning("Exception making an obs group for headers. Continuing.") 

558 # Fall back to setting a DATE-OBS from the calibDate 

559 dateCards = {"DATE-OBS": "{}T00:00:00.00".format(calibDate)} 

560 comments["DATE-OBS"] = "Date of start of day of calibration midpoint" 

561 else: 

562 oldest, newest = group.extremes() 

563 dateCards = dates_to_fits(oldest.datetime_begin, newest.datetime_end) 

564 

565 for k, v in dateCards.items(): 

566 header.set(k, v, comment=comments.get(k, None)) 

567 

568 return header 

569 
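# Hedged sketch of the merge step above: with mode="drop",
# astro_metadata_translator.merge_headers keeps keywords that agree across
# inputs and drops those that conflict.  Plain dicts stand in here for the
# real exposure metadata.
exampleMerged = merge_headers(
    [{"TELESCOP": "LSST", "EXPTIME": 30.0},
     {"TELESCOP": "LSST", "EXPTIME": 15.0}],
    mode="drop",
)
# exampleMerged == {"TELESCOP": "LSST"}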

570 def interpolateNans(self, exp): 

571 """Interpolate over NANs in the combined image. 

572 

573 NANs can result from masked areas on the CCD. We don't want 

574 them getting into our science images, so we replace them with 

575 the median of the image. 

576 

577 Parameters 

578 ---------- 

579 exp : `lsst.afw.image.Exposure` 

580 Exposure to check for NaNs.

581 """ 

582 array = exp.getImage().getArray() 

583 bad = np.isnan(array) 

584 if np.any(bad): 

585 median = np.median(array[np.logical_not(bad)]) 

586 count = np.sum(bad) 

587 array[bad] = median 

588 self.log.warning("Found and fixed %s NAN pixels", count) 

589 
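# NumPy-only sketch of the replacement performed above: NaN pixels take the
# median of the finite pixels.  The array values are illustrative.
exampleArray = np.array([[1.0, np.nan], [3.0, 5.0]])
exampleBad = np.isnan(exampleArray)
exampleArray[exampleBad] = np.median(exampleArray[~exampleBad])  # NaN -> 3.0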

590 @staticmethod 

591 def setFilter(exp, filterLabel): 

592 """Dummy function that will not assign a filter. 

593 

594 Parameters 

595 ---------- 

596 exp : `lsst.afw.image.Exposure` 

597 Exposure to assign filter to. 

598 filterLabel : `lsst.afw.image.FilterLabel` 

599 Filter to assign. 

600 """ 

601 pass 

602 

603 

604# Create versions of the Connections, Config, and Task that support 

605# filter constraints. 

606class CalibCombineByFilterConnections(CalibCombineConnections, 

607 dimensions=("instrument", "detector", "physical_filter")): 

608 inputScales = cT.Input( 

609 name="cpFilterScales", 

610 doc="Input scale factors to use.", 

611 storageClass="StructuredDataDict", 

612 dimensions=("instrument", "physical_filter"), 

613 multiple=False, 

614 ) 

615 

616 outputData = cT.Output( 

617 name="cpFilterProposal", 

618 doc="Output combined proposed calibration to be validated and certified.", 

619 storageClass="ExposureF", 

620 dimensions=("instrument", "detector", "physical_filter"), 

621 isCalibration=True, 

622 ) 

623 

624 def __init__(self, *, config=None): 

625 super().__init__(config=config) 

626 

627 if config and config.exposureScaling != "InputList": 

628 self.inputs.discard("inputScales") 

629 

630 

631class CalibCombineByFilterConfig(CalibCombineConfig, 

632 pipelineConnections=CalibCombineByFilterConnections): 

633 pass 

634 

635 

636class CalibCombineByFilterTask(CalibCombineTask): 

637 """Task to combine calib exposures.""" 

638 

639 ConfigClass = CalibCombineByFilterConfig 

640 _DefaultName = "cpFilterCombine" 

641 

642 @staticmethod 

643 def setFilter(exp, filterLabel): 

644 """Dummy function that will not assign a filter. 

645 

646 Parameters 

647 ---------- 

648 exp : `lsst.afw.image.Exposure` 

649 Exposure to assign filter to. 

650 filterLabel : `lsst.afw.image.FilterLabel` 

651 Filter to assign. 

652 """ 

653 if filterLabel: 

654 exp.setFilter(filterLabel)
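
# Sketch of the filter assignment done by CalibCombineByFilterTask.setFilter
# (band/physical names are made up; assumes a configured LSST stack).
def exampleSetFilter():
    exp = afwImage.ExposureF(1, 1)
    label = afwImage.FilterLabel(band="g", physical="g_example")
    CalibCombineByFilterTask.setFilter(exp, label)
    return exp.getFilter()  # -> the FilterLabel just assigned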