Coverage for python / lsst / pipe / base / _instrument.py: 39%

132 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-04-24 08:19 +0000

1# This file is part of pipe_base. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This software is dual licensed under the GNU General Public License and also 

10# under a 3-clause BSD license. Recipients may choose which of these licenses 

11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt, 

12# respectively. If you choose the GPL option then the following text applies 

13# (but note that there is still no warranty even if you opt for BSD instead): 

14# 

15# This program is free software: you can redistribute it and/or modify 

16# it under the terms of the GNU General Public License as published by 

17# the Free Software Foundation, either version 3 of the License, or 

18# (at your option) any later version. 

19# 

20# This program is distributed in the hope that it will be useful, 

21# but WITHOUT ANY WARRANTY; without even the implied warranty of 

22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

23# GNU General Public License for more details. 

24# 

25# You should have received a copy of the GNU General Public License 

26# along with this program. If not, see <http://www.gnu.org/licenses/>. 

27 

28from __future__ import annotations 

29 

30__all__ = ("Instrument",) 

31 

32import contextlib 

33import datetime 

34from abc import ABCMeta, abstractmethod 

35from collections.abc import Sequence 

36from typing import TYPE_CHECKING, Any, Self, cast, final 

37 

38from lsst.daf.butler import DataCoordinate, DataId, DimensionPacker, DimensionRecord, Formatter, FormatterV2 

39from lsst.daf.butler.registry import DataIdError 

40from lsst.pex.config import Config, RegistryField 

41from lsst.resources import ResourcePath, ResourcePathExpression 

42from lsst.utils import doImportType 

43from lsst.utils.introspection import get_full_type_name 

44 

45from ._observation_dimension_packer import observation_packer_registry 

46 

47if TYPE_CHECKING: 

48 from lsst.daf.butler import Registry 

49 

50 

class Instrument(metaclass=ABCMeta):
    """Base class for instrument-specific logic for the Gen3 Butler.

    Parameters
    ----------
    collection_prefix : `str`, optional
        Prefix for collection names to use instead of the instrument's own
        name. This is primarily for use in simulated-data repositories, where
        the instrument name may not be necessary and/or sufficient to
        distinguish between collections.

    Notes
    -----
    Concrete instrument subclasses must have the same construction signature as
    the base class.
    """

    # The class-level attributes below are defaults intended to be overridden
    # by concrete instrument subclasses.

    configPaths: Sequence[ResourcePathExpression] = ()
    """Paths to config files to read for specific Tasks.

    The paths in this list should contain files of the form ``task.py``, for
    each of the Tasks that requires special configuration.
    """

    policyName: str | None = None
    """Instrument specific name to use when locating a policy or configuration
    file in the file system."""

    raw_definition: tuple[str, tuple[str, ...], str] | None = None
    """Dataset type definition to use for "raw" datasets. This is a tuple
    of the dataset type name, a tuple of dimension names, and the storage class
    name. If `None` the ingest system will use its default definition."""

83 

84 def __init__(self, collection_prefix: str | None = None): 

85 if collection_prefix is None: 

86 collection_prefix = self.getName() 

87 self.collection_prefix = collection_prefix 

88 

    @classmethod
    @abstractmethod
    def getName(cls) -> str:
        """Return the short (dimension) name for this instrument.

        This is not (in general) the same as the class name - it's what is used
        as the value of the "instrument" field in data IDs, and is usually an
        abbreviation of the full name.

        Returns
        -------
        name : `str`
            Short name used as the ``instrument`` dimension value.
        """
        # Abstract: concrete instrument subclasses must override.
        raise NotImplementedError()

99 

    @abstractmethod
    def register(self, registry: Registry, *, update: bool = False) -> None:
        """Insert instrument, and other relevant records into a butler
        registry.

        Parameters
        ----------
        registry : `lsst.daf.butler.Registry`
            Registry client for the data repository to modify.
        update : `bool`, optional
            If `True` (`False` is default), update existing records if they
            differ from the new ones.

        Raises
        ------
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if any existing record has the same key but a different
            definition as one being registered.

        Notes
        -----
        New records can always be added by calling this method multiple times,
        as long as no existing records have changed (if existing records have
        changed, ``update=True`` must be used). Old records can never be
        removed by this method.

        Implementations should guarantee that registration is atomic (the
        registry should not be modified if any error occurs) and idempotent at
        the level of individual dimension entries; new detectors and filters
        should be added, but changes to any existing record should not be.
        This can generally be achieved by performing all inserts inside a
        single registry transaction.
        """
        # Abstract: concrete instrument subclasses must override.
        raise NotImplementedError()

137 

138 @classmethod 

139 def fromName(cls, name: str, registry: Registry, collection_prefix: str | None = None) -> Self: 

140 """Given an instrument name and a butler registry, retrieve a 

141 corresponding instantiated instrument object. 

142 

143 Parameters 

144 ---------- 

145 name : `str` 

146 Name of the instrument (must match the return value of `getName`). 

147 registry : `lsst.daf.butler.Registry` 

148 Butler registry to query to find the information. 

149 collection_prefix : `str`, optional 

150 Prefix for collection names to use instead of the instrument's own 

151 name. This is primarily for use in simulated-data repositories, 

152 where the instrument name may not be necessary and/or sufficient to 

153 distinguish between collections. 

154 

155 Returns 

156 ------- 

157 instrument : `Instrument` 

158 An instance of the relevant `Instrument`. 

159 

160 Notes 

161 ----- 

162 The instrument must be registered in the corresponding butler. 

163 

164 Raises 

165 ------ 

166 LookupError 

167 Raised if the instrument is not known to the supplied registry. 

168 ModuleNotFoundError 

169 Raised if the class could not be imported. This could mean 

170 that the relevant obs package has not been setup. 

171 TypeError 

172 Raised if the class name retrieved is not a string or the imported 

173 symbol is not an `Instrument` subclass. 

174 """ 

175 try: 

176 records = list(registry.queryDimensionRecords("instrument", instrument=name)) 

177 except DataIdError: 

178 records = None 

179 if not records: 

180 raise LookupError(f"No registered instrument with name '{name}'.") 

181 cls_name = records[0].class_name 

182 if not isinstance(cls_name, str): 

183 raise TypeError( 

184 f"Unexpected class name retrieved from {name} instrument dimension (got {cls_name})" 

185 ) 

186 return cls._from_cls_name(cls_name, collection_prefix) 

187 

188 @classmethod 

189 def from_string( 

190 cls, name: str, registry: Registry | None = None, collection_prefix: str | None = None 

191 ) -> Self: 

192 """Return an instance from the short name or class name. 

193 

194 If the instrument name is not qualified (does not contain a '.') and a 

195 butler registry is provided, this will attempt to load the instrument 

196 using `Instrument.fromName()`. Otherwise the instrument will be 

197 imported and instantiated. 

198 

199 Parameters 

200 ---------- 

201 name : `str` 

202 The name or fully-qualified class name of an instrument. 

203 registry : `lsst.daf.butler.Registry`, optional 

204 Butler registry to query to find information about the instrument, 

205 by default `None`. 

206 collection_prefix : `str`, optional 

207 Prefix for collection names to use instead of the instrument's own 

208 name. This is primarily for use in simulated-data repositories, 

209 where the instrument name may not be necessary and/or sufficient 

210 to distinguish between collections. 

211 

212 Returns 

213 ------- 

214 instrument : `Instrument` 

215 The instantiated instrument. 

216 

217 Raises 

218 ------ 

219 RuntimeError 

220 Raised if the instrument can not be imported, instantiated, or 

221 obtained from the registry. 

222 TypeError 

223 Raised if the instrument is not a subclass of 

224 `~lsst.pipe.base.Instrument`. 

225 

226 See Also 

227 -------- 

228 Instrument.fromName : Constructing Instrument from a name. 

229 """ 

230 if "." not in name and registry is not None: 

231 try: 

232 instr = cls.fromName(name, registry, collection_prefix=collection_prefix) 

233 except Exception as err: 

234 raise RuntimeError( 

235 f"Could not get instrument from name: {name}. Failed with exception: {err}" 

236 ) from err 

237 else: 

238 try: 

239 instr_class = doImportType(name) 

240 except Exception as err: 

241 raise RuntimeError( 

242 f"Could not import instrument: {name}. Failed with exception: {err}" 

243 ) from err 

244 instr = instr_class(collection_prefix=collection_prefix) 

245 if not isinstance(instr, cls): 

246 raise TypeError(f"{name} is not a {get_full_type_name(cls)} subclass.") 

247 return instr 

248 

249 @classmethod 

250 def from_data_id(cls, data_id: DataCoordinate, collection_prefix: str | None = None) -> Self: 

251 """Instantiate an `Instrument` object from a fully-expanded data ID. 

252 

253 Parameters 

254 ---------- 

255 data_id : `~lsst.daf.butler.DataCoordinate` 

256 Expanded data ID that includes the instrument dimension. 

257 collection_prefix : `str`, optional 

258 Prefix for collection names to use instead of the instrument's own 

259 name. This is primarily for use in simulated-data repositories, 

260 where the instrument name may not be necessary and/or sufficient to 

261 distinguish between collections. 

262 

263 Returns 

264 ------- 

265 instrument : `Instrument` 

266 An instance of the relevant `Instrument`. 

267 

268 Raises 

269 ------ 

270 TypeError 

271 Raised if the class name retrieved is not a string or the imported 

272 symbol is not an `Instrument` subclass. 

273 """ 

274 return cls._from_cls_name( 

275 cast(DimensionRecord, data_id.records["instrument"]).class_name, collection_prefix 

276 ) 

277 

278 @classmethod 

279 def _from_cls_name(cls, cls_name: str, collection_prefix: str | None = None) -> Self: 

280 """Instantiate an `Instrument` object type name. 

281 

282 This just provides common error-handling for `fromName` and 

283 `from_data_id` 

284 

285 Parameters 

286 ---------- 

287 cls_name : `str` 

288 Fully-qualified name of the type. 

289 collection_prefix : `str`, optional 

290 Prefix for collection names to use instead of the instrument's own 

291 name. This is primarily for use in simulated-data repositories, 

292 where the instrument name may not be necessary and/or sufficient to 

293 distinguish between collections. 

294 

295 Returns 

296 ------- 

297 instrument : `Instrument` 

298 An instance of the relevant `Instrument`. 

299 

300 Raises 

301 ------ 

302 TypeError 

303 Raised if the class name retrieved is not a string or the imported 

304 symbol is not an `Instrument` subclass. 

305 """ 

306 instrument_cls: type = doImportType(cls_name) 

307 if not issubclass(instrument_cls, cls): 

308 raise TypeError( 

309 f"{instrument_cls!r}, obtained from importing {cls_name}, is not a subclass " 

310 f"of {get_full_type_name(cls)}." 

311 ) 

312 return instrument_cls(collection_prefix=collection_prefix) 

313 

314 @staticmethod 

315 def importAll(registry: Registry) -> dict[str, type[Instrument]]: 

316 """Import all the instruments known to this registry. 

317 

318 This will ensure that all metadata translators have been registered. 

319 

320 Parameters 

321 ---------- 

322 registry : `lsst.daf.butler.Registry` 

323 Butler registry to query to find the information. 

324 

325 Returns 

326 ------- 

327 imported : `dict` [`str`, `type` [`Instrument`]] 

328 A mapping containing all the instrument classes that were loaded 

329 successfully, keyed by their butler names. 

330 

331 Notes 

332 ----- 

333 It is allowed for a particular instrument class to fail on import. 

334 This might simply indicate that a particular obs package has 

335 not been setup. 

336 """ 

337 imported: dict[str, type[Instrument]] = {} 

338 records = list(registry.queryDimensionRecords("instrument")) 

339 for record in records: 

340 cls = record.class_name 

341 instrument_name: str = cast(str, record.name) 

342 with contextlib.suppress(Exception): 

343 instr = doImportType(cls) 

344 assert issubclass(instr, Instrument) 

345 imported[instrument_name] = instr 

346 return imported 

347 

    @abstractmethod
    def getRawFormatter(self, dataId: DataId) -> type[Formatter | FormatterV2]:
        """Return the Formatter class that should be used to read a particular
        raw file.

        Parameters
        ----------
        dataId : `lsst.daf.butler.DataId`
            Dimension-based ID for the raw file or files being ingested.

        Returns
        -------
        formatter : `type` \
                [`lsst.daf.butler.Formatter` | `lsst.daf.butler.FormatterV2` ]
            Class to be used that reads the file into the correct
            Python object for the raw data.
        """
        # Abstract: concrete instrument subclasses must override.
        raise NotImplementedError()

366 

367 def applyConfigOverrides(self, name: str, config: Config) -> None: 

368 """Apply instrument-specific overrides for a task config. 

369 

370 Parameters 

371 ---------- 

372 name : `str` 

373 Name of the object being configured; typically the _DefaultName 

374 of a Task. 

375 config : `lsst.pex.config.Config` 

376 Config instance to which overrides should be applied. 

377 """ 

378 for root in self.configPaths: 

379 resource = ResourcePath(root, forceDirectory=True, forceAbsolute=True) 

380 uri = resource.join(f"{name}.py", forceDirectory=False) 

381 if uri.exists(): 

382 config.load(uri) 

383 

384 @staticmethod 

385 def formatCollectionTimestamp(timestamp: str | datetime.datetime) -> str: 

386 """Format a timestamp for use in a collection name. 

387 

388 Parameters 

389 ---------- 

390 timestamp : `str` or `datetime.datetime` 

391 Timestamp to format. May be a date or datetime string in extended 

392 ISO format (assumed UTC), with or without a timezone specifier, a 

393 datetime string in basic ISO format with a timezone specifier, a 

394 naive `datetime.datetime` instance (assumed UTC) or a 

395 timezone-aware `datetime.datetime` instance (converted to UTC). 

396 This is intended to cover all forms that string ``CALIBDATE`` 

397 metadata values have taken in the past, as well as the format this 

398 method itself writes out (to enable round-tripping). 

399 

400 Returns 

401 ------- 

402 formatted : `str` 

403 Standardized string form for the timestamp. 

404 """ 

405 if isinstance(timestamp, str): 

406 if "-" in timestamp: 

407 # extended ISO format, with - and : delimiters 

408 timestamp = datetime.datetime.fromisoformat(timestamp) 

409 else: 

410 # basic ISO format, with no delimiters (what this method 

411 # returns) 

412 timestamp = datetime.datetime.strptime(timestamp, "%Y%m%dT%H%M%S%z") 

413 if not isinstance(timestamp, datetime.datetime): 

414 raise TypeError(f"Unexpected date/time object: {timestamp!r}.") 

415 if timestamp.tzinfo is not None: 

416 timestamp = timestamp.astimezone(datetime.UTC) 

417 return f"{timestamp:%Y%m%dT%H%M%S}Z" 

418 

419 @staticmethod 

420 def makeCollectionTimestamp() -> str: 

421 """Create a timestamp string for use in a collection name from the 

422 current time. 

423 

424 Returns 

425 ------- 

426 formatted : `str` 

427 Standardized string form of the current time. 

428 """ 

429 return Instrument.formatCollectionTimestamp(datetime.datetime.now(tz=datetime.UTC)) 

430 

431 def makeDefaultRawIngestRunName(self) -> str: 

432 """Make the default instrument-specific run collection string for raw 

433 data ingest. 

434 

435 Returns 

436 ------- 

437 coll : `str` 

438 Run collection name to be used as the default for ingestion of 

439 raws. 

440 """ 

441 return self.makeCollectionName("raw", "all") 

442 

443 def makeUnboundedCalibrationRunName(self, *labels: str) -> str: 

444 """Make a RUN collection name appropriate for inserting calibration 

445 datasets whose validity ranges are unbounded. 

446 

447 Parameters 

448 ---------- 

449 *labels : `str` 

450 Extra strings to be included in the base name, using the default 

451 delimiter for collection names. Usually this is the name of the 

452 ticket on which the calibration collection is being created. 

453 

454 Returns 

455 ------- 

456 name : `str` 

457 Run collection name. 

458 """ 

459 return self.makeCollectionName("calib", *labels, "unbounded") 

460 

461 def makeCuratedCalibrationRunName(self, calibDate: str, *labels: str) -> str: 

462 """Make a RUN collection name appropriate for inserting curated 

463 calibration datasets with the given ``CALIBDATE`` metadata value. 

464 

465 Parameters 

466 ---------- 

467 calibDate : `str` 

468 The ``CALIBDATE`` metadata value. 

469 *labels : `str` 

470 Strings to be included in the collection name (before 

471 ``calibDate``, but after all other terms), using the default 

472 delimiter for collection names. Usually this is the name of the 

473 ticket on which the calibration collection is being created. 

474 

475 Returns 

476 ------- 

477 name : `str` 

478 Run collection name. 

479 """ 

480 return self.makeCollectionName("calib", *labels, "curated", self.formatCollectionTimestamp(calibDate)) 

481 

482 def makeCalibrationCollectionName(self, *labels: str) -> str: 

483 """Make a CALIBRATION collection name appropriate for associating 

484 calibration datasets with validity ranges. 

485 

486 Parameters 

487 ---------- 

488 *labels : `str` 

489 Strings to be appended to the base name, using the default 

490 delimiter for collection names. Usually this is the name of the 

491 ticket on which the calibration collection is being created. 

492 

493 Returns 

494 ------- 

495 name : `str` 

496 Calibration collection name. 

497 """ 

498 return self.makeCollectionName("calib", *labels) 

499 

500 @staticmethod 

501 def makeRefCatCollectionName(*labels: str) -> str: 

502 """Return a global (not instrument-specific) name for a collection that 

503 holds reference catalogs. 

504 

505 With no arguments, this returns the name of the collection that holds 

506 all reference catalogs (usually a ``CHAINED`` collection, at least in 

507 long-lived repos that may contain more than one reference catalog). 

508 

509 Parameters 

510 ---------- 

511 *labels : `str` 

512 Strings to be added to the global collection name, in order to 

513 define a collection name for one or more reference catalogs being 

514 ingested at the same time. 

515 

516 Returns 

517 ------- 

518 name : `str` 

519 Collection name. 

520 

521 Notes 

522 ----- 

523 This is a ``staticmethod``, not a ``classmethod``, because it should 

524 be the same for all instruments. 

525 """ 

526 return "/".join(("refcats",) + labels) 

527 

528 def makeUmbrellaCollectionName(self) -> str: 

529 """Return the name of the umbrella ``CHAINED`` collection for this 

530 instrument that combines all standard recommended input collections. 

531 

532 This method should almost never be overridden by derived classes. 

533 

534 Returns 

535 ------- 

536 name : `str` 

537 Name for the umbrella collection. 

538 """ 

539 return self.makeCollectionName("defaults") 

540 

541 def makeCollectionName(self, *labels: str) -> str: 

542 """Get the instrument-specific collection string to use as derived 

543 from the supplied labels. 

544 

545 Parameters 

546 ---------- 

547 *labels : `str` 

548 Strings to be combined with the instrument name to form a 

549 collection name. 

550 

551 Returns 

552 ------- 

553 name : `str` 

554 Collection name to use that includes the instrument's recommended 

555 prefix. 

556 """ 

557 return "/".join((self.collection_prefix,) + labels) 

558 

    @staticmethod
    def make_dimension_packer_config_field(
        doc: str = (
            "How to pack visit+detector or exposure+detector data IDs into integers. "
            "The default (None) is to delegate to the Instrument class for which "
            "registered implementation to use (but still use the nested configuration "
            "for that implementation)."
        ),
    ) -> RegistryField:
        """Make an `lsst.pex.config.Field` that can be used to configure how
        data IDs for this instrument are packed.

        Parameters
        ----------
        doc : `str`, optional
            Documentation for the config field.

        Returns
        -------
        field : `lsst.pex.config.RegistryField`
            A config field for which calling ``apply`` on the instance
            attribute constructs an `lsst.daf.butler.DimensionPacker` that
            defaults to the appropriate one for this instrument.  When the
            field is left at `None`, the ``on_none`` callback below routes
            packer selection through
            `_make_default_dimension_packer_dispatch`.

        Notes
        -----
        This method is expected to be used whenever code requires a single
        integer that represents the combination of a detector and either a
        visit or exposure, but in most cases the `lsst.meas.base.IdGenerator`
        class and its helper configs provide a simpler high-level interface
        that should be used instead of calling this method directly.

        This system is designed to work best when the configuration for the ID
        packer is not overridden at all, allowing the appropriate instrument
        class to determine the behavior for each data ID encountered. When the
        configuration does need to be modified (most often when the scheme for
        packing an instrument's data IDs is undergoing an upgrade), it is
        important to ensure the overrides are only applied to data IDs with the
        desired instrument value.

        Unit tests of code that use a field produced by this method will often
        want to explicitly set the packer to "observation" and manually set
        its ``n_detectors`` and ``n_observations`` fields; this will make it
        unnecessary for tests to provide expanded data IDs.
        """
        # The control flow here bounces around a bit when this RegistryField's
        # apply() method is called, so it merits a thorough walkthrough
        # somewhere, and that might as well be here:
        #
        # - If the config field's name is not `None`, that kind of packer is
        #   constructed and returned with the arguments to `apply`, in just the
        #   way it works with most RegistryFields or ConfigurableFields.  But
        #   this is expected to be rare.
        #
        # - If the config fields' name is `None`, the `apply` method (which
        #   actually lives on the `pex.config.RegistryInstanceDict` class,
        #   since `RegistryField` is a descriptor), calls
        #   `_make_default_dimension_packer_dispatch` (which is final, and
        #   hence the base class implementation just below is the only one).
        #
        # - `_make_default_dimension_packer_dispatch` instantiates an
        #   `Instrument` instance of the type pointed at by the data ID (i.e.
        #   calling `Instrument.from_data_id`), then calls
        #   `_make_default_dimension_packer` on that.
        #
        # - The default implementation of `_make_default_dimension_packer` here
        #   in the base class picks the "observation" dimension packer, so if
        #   it's not overridden by a derived class everything proceeds as if
        #   the config field's name was set to that.  Note that this sets which
        #   item in the registry is used, but it still pays attention to the
        #   configuration for that entry in the registry field.
        #
        # - A subclass implementation of `_make_default_dimension_packer` will
        #   take precedence over the base class, but it's expected that these
        #   will usually just delegate back to ``super()`` while changing the
        #   ``default`` argument to something other than "observation".  Once
        #   again, this will control which packer entry in the registry is used
        #   but the result will still reflect the configuration for that packer
        #   in the registry field.
        #
        return observation_packer_registry.makeField(
            doc, default=None, optional=True, on_none=Instrument._make_default_dimension_packer_dispatch
        )

642 

    @staticmethod
    @final
    def make_default_dimension_packer(
        data_id: DataCoordinate, is_exposure: bool | None = None
    ) -> DimensionPacker:
        """Return the default dimension packer for the given data ID.

        Parameters
        ----------
        data_id : `lsst.daf.butler.DataCoordinate`
            Data ID that identifies at least the ``instrument`` dimension. Must
            have dimension records attached.
        is_exposure : `bool`, optional
            If `False`, construct a packer for visit+detector data IDs. If
            `True`, construct a packer for exposure+detector data IDs. If
            `None`, this is determined based on whether ``visit`` or
            ``exposure`` is present in ``data_id``, with ``visit`` checked
            first and hence used if both are present.

        Returns
        -------
        packer : `lsst.daf.butler.DimensionPacker`
            Object that packs {visit, detector} or {exposure, detector} data
            IDs into integers.

        Notes
        -----
        When using a dimension packer in task code, using
        `make_dimension_packer_config_field` to make the packing algorithm
        configurable is preferred over this method.

        When obtaining a dimension packer to unpack IDs that were packed by
        task code, it is similarly preferable to load the configuration for
        that task and the existing packer configuration field there, to ensure
        any config overrides are respected. That is sometimes quite difficult,
        however, and since config overrides for dimension packers are expected
        to be exceedingly rare, using this simpler method will almost always
        work.
        """

        # Build a throwaway Config holding a single packer field so we can
        # reuse the field's own apply() dispatch logic (including the
        # delegation to the instrument class when no packer is selected).
        class _DummyConfig(Config):
            packer = Instrument.make_dimension_packer_config_field()

        config = _DummyConfig()

        return config.packer.apply(data_id, is_exposure=is_exposure)  # type: ignore

689 

690 @staticmethod 

691 @final 

692 def _make_default_dimension_packer_dispatch( 

693 config_dict: Any, data_id: DataCoordinate, is_exposure: bool | None = None 

694 ) -> DimensionPacker: 

695 """Dispatch method used to invoke `_make_dimension_packer`. 

696 

697 This method constructs the appropriate `Instrument` subclass from 

698 config and then calls its `_make_default_dimension_packer`. 

699 It is called when (as usual) the field returned by 

700 `make_dimension_packer_config_field` is left to its default selection 

701 of `None`. 

702 

703 All arguments and return values are the same as 

704 `_make_default_dimension_packer.` 

705 """ 

706 instrument = Instrument.from_data_id(data_id) 

707 return instrument._make_default_dimension_packer(config_dict, data_id, is_exposure=is_exposure) 

708 

709 def _make_default_dimension_packer( 

710 self, 

711 config_dict: Any, 

712 data_id: DataCoordinate, 

713 is_exposure: bool | None = None, 

714 default: str = "observation", 

715 ) -> DimensionPacker: 

716 """Construct return the default dimension packer for this instrument. 

717 

718 This method is a protected hook for subclasses to override the behavior 

719 of `make_dimension_packer_config_field` when the packer is not selected 

720 explicitly via configuration. 

721 

722 Parameters 

723 ---------- 

724 config_dict 

725 Mapping attribute of a `lsst.pex.config.Config` instance that 

726 corresponds to a field created by `make_dimension_packer_config` 

727 (the actual type of this object is a `lsst.pex.config` 

728 implementation detail). 

729 data_id : `lsst.daf.butler.DataCoordinate` 

730 Data ID that identifies at least the ``instrument`` dimension. For 

731 most configurations this must have dimension records attached. 

732 is_exposure : `bool`, optional 

733 If `False`, construct a packer for visit+detector data IDs. If 

734 `True`, construct a packer for exposure+detector data IDs. If 

735 `None`, this is determined based on whether ``visit`` or 

736 ``exposure`` is present in ``data_id``, with ``visit`` checked 

737 first and hence used if both are present. 

738 default : `str`, optional 

739 Registered name of the dimension packer to select when the 

740 configured packer is `None` (as is usually the case). This is 

741 intended primarily for derived classes delegating to `super` in 

742 reimplementations of this method. 

743 

744 Returns 

745 ------- 

746 packer : `lsst.daf.butler.DimensionPacker` 

747 Object that packs {visit, detector} or {exposure, detector} data 

748 IDs into integers. 

749 """ 

750 return config_dict.apply_with(default, data_id, is_exposure=is_exposure)