# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ("Formatter", "FormatterFactory", "FormatterParameter")

from abc import ABCMeta, abstractmethod
from collections.abc import Mapping
import contextlib
import logging
import copy
from typing import (
    AbstractSet,
    Any,
    ClassVar,
    Dict,
    Iterator,
    Optional,
    Set,
    Tuple,
    Type,
    TYPE_CHECKING,
    Union,
)

from .configSupport import processLookupConfigs, LookupKey
from .mappingFactory import MappingFactory
from .utils import getFullTypeName
from .fileDescriptor import FileDescriptor
from .location import Location
from .config import Config
from .dimensions import DimensionUniverse
from .storageClass import StorageClass
from .datasets import DatasetType, DatasetRef

log = logging.getLogger(__name__)

# Define a new special type for functions that take "entity"
Entity = Union[DatasetType, DatasetRef, StorageClass, str]

if TYPE_CHECKING:
    from .dimensions import DataCoordinate

class Formatter(metaclass=ABCMeta):
    """Interface for reading and writing Datasets with a particular
    `StorageClass`.

    Parameters
    ----------
    fileDescriptor : `FileDescriptor`, optional
        Identifies the file to read or write, and the associated storage
        classes and parameter information. Its value can be `None` if the
        caller will never call `Formatter.read` or `Formatter.write`.
    dataId : `DataCoordinate`, optional
        Data ID associated with this formatter.
    writeParameters : `dict`, optional
        Any parameters to be hard-coded into this instance to control how
        the dataset is serialized.
    writeRecipes : `dict`, optional
        Detailed write recipes indexed by recipe name. Validated by
        `validateWriteRecipes` when the formatter is constructed.
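
    Examples
    --------
    A minimal sketch of a concrete subclass; the pickle-based formatter
    shown here is purely illustrative and not part of this module:

    .. code-block:: python

        import pickle

        class PickleFormatter(Formatter):
            extension = ".pickle"

            def read(self, component=None):
                with open(self.fileDescriptor.location.path, "rb") as fd:
                    return pickle.load(fd)

            def write(self, inMemoryDataset):
                with open(self.fileDescriptor.location.path, "wb") as fd:
                    pickle.dump(inMemoryDataset, fd)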

    """

    unsupportedParameters: ClassVar[Optional[AbstractSet[str]]] = frozenset()
    """Set of read parameters not understood by this `Formatter`. An empty set
    means all parameters are supported. `None` indicates that no parameters
    are supported (`frozenset`).
    """

    supportedWriteParameters: ClassVar[Optional[AbstractSet[str]]] = None
    """Parameters understood by this formatter that can be used to control
    how a dataset is serialized. `None` indicates that no parameters are
    supported."""

    supportedExtensions: ClassVar[AbstractSet[str]] = frozenset()
    """Set of all extensions supported by this formatter.

    Only expected to be populated by Formatters that write files. Any extension
    assigned to the ``extension`` property will be automatically included in
    the list of supported extensions."""

    def __init__(self, fileDescriptor: FileDescriptor, dataId: Optional[DataCoordinate] = None,
                 writeParameters: Optional[Dict[str, Any]] = None,
                 writeRecipes: Optional[Dict[str, Any]] = None):
        if not isinstance(fileDescriptor, FileDescriptor):
            raise TypeError("File descriptor must be a FileDescriptor")
        self._fileDescriptor = fileDescriptor
        self._dataId = dataId

        # Check that the write parameters are allowed
        if writeParameters:
            if self.supportedWriteParameters is None:
                raise ValueError("This formatter does not accept any write parameters. "
                                 f"Got: {', '.join(writeParameters)}")
            else:
                given = set(writeParameters)
                unknown = given - self.supportedWriteParameters
                if unknown:
                    s = "s" if len(unknown) != 1 else ""
                    unknownStr = ", ".join(f"'{u}'" for u in unknown)
                    raise ValueError(f"This formatter does not accept parameter{s} {unknownStr}")

        self._writeParameters = writeParameters
        self._writeRecipes = self.validateWriteRecipes(writeRecipes)

    def __str__(self) -> str:
        return f"{self.name()}@{self.fileDescriptor.location.path}"

    def __repr__(self) -> str:
        return f"{self.name()}({self.fileDescriptor!r})"

    @property
    def fileDescriptor(self) -> FileDescriptor:
        """FileDescriptor associated with this formatter
        (`FileDescriptor`, read-only)"""
        return self._fileDescriptor

    @property
    def dataId(self) -> Optional[DataCoordinate]:
        """DataId associated with this formatter (`DataCoordinate`)"""
        return self._dataId

    @property
    def writeParameters(self) -> Mapping[str, Any]:
        """Parameters to use when writing out datasets."""
        if self._writeParameters is not None:
            return self._writeParameters
        return {}

    @property
    def writeRecipes(self) -> Mapping[str, Any]:
        """Detailed write Recipes indexed by recipe name."""
        if self._writeRecipes is not None:
            return self._writeRecipes
        return {}

    @classmethod
    def validateWriteRecipes(cls, recipes: Optional[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]:
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate.

        Returns
        -------
        validated : `dict`
            Validated recipes.

        Raises
        ------
        RuntimeError
            Raised if validation fails. The default implementation raises
            if any recipes are given.
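
        Examples
        --------
        A subclass that accepts recipes would typically override this method
        to merge defaults into the supplied dict rather than raise. A minimal
        sketch (the recipe name and contents are illustrative only):

        .. code-block:: python

            @classmethod
            def validateWriteRecipes(cls, recipes):
                recipes = dict(recipes) if recipes else {}
                recipes.setdefault("noCompression", {})
                return recipes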

        """
        if recipes:
            raise RuntimeError(f"This formatter does not understand these writeRecipes: {recipes}")
        return recipes

    @classmethod
    def name(cls) -> str:
        """Return the fully qualified name of the formatter.

        Returns
        -------
        name : `str`
            Fully-qualified name of formatter class.
        """
        return getFullTypeName(cls)

    @abstractmethod
    def read(self, component: Optional[str] = None) -> Any:
        """Read a Dataset.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested Dataset.
        """
        raise NotImplementedError("Type does not support reading")

    @abstractmethod
    def write(self, inMemoryDataset: Any) -> None:
        """Write a Dataset.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.
        """
        raise NotImplementedError("Type does not support writing")

    @classmethod
    def can_read_bytes(cls) -> bool:
        """Indicate if this formatter can format from bytes.

        Returns
        -------
        can : `bool`
            `True` if the `fromBytes` method is implemented.
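
        Examples
        --------
        A sketch of typical use; ``MyFormatter``, ``formatter`` and
        ``payload`` are placeholders:

        .. code-block:: python

            if MyFormatter.can_read_bytes():
                inMemoryDataset = formatter.fromBytes(payload)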

        """
        # We have no property to read so instead try to convert from bytes
        # and see what happens
        try:
            # We know the arguments are incompatible
            cls.fromBytes(cls, b"")  # type: ignore
        except NotImplementedError:
            return False
        except Exception:
            # There will be problems with the bytes we are supplying so ignore
            pass
        return True

    def fromBytes(self, serializedDataset: bytes,
                  component: Optional[str] = None) -> object:
        """Read serialized data into a Dataset or its component.

        Parameters
        ----------
        serializedDataset : `bytes`
            Bytes object to unserialize.
        component : `str`, optional
            Component to read from the Dataset. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.
        """
        raise NotImplementedError("Type does not support reading from bytes.")

    def toBytes(self, inMemoryDataset: Any) -> bytes:
        """Serialize the Dataset to bytes based on formatter.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to serialize.

        Returns
        -------
        serializedDataset : `bytes`
            Bytes representing the serialized dataset.
        """
        raise NotImplementedError("Type does not support writing to bytes.")

    @contextlib.contextmanager
    def _updateLocation(self, location: Optional[Location]) -> Iterator[Location]:
        """Temporarily replace the location associated with this formatter.

        Parameters
        ----------
        location : `Location`
            New location to use for this formatter. If `None` the
            location is not changed but the old location is still
            returned. This allows it to be used in a code path where
            the location may not need to be updated but the with
            block is still convenient.

        Yields
        ------
        old : `Location`
            The old location that will be restored.

        Notes
        -----
        This is an internal method that should be used with care.
        It may change in the future. Should be used as a context
        manager to restore the location when the temporary is no
        longer required.
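
        Examples
        --------
        Intended to be used as a context manager; ``tmpLocation`` below is a
        placeholder for a temporary `Location`:

        .. code-block:: python

            with self._updateLocation(tmpLocation):
                self.write(inMemoryDataset)
            # The original location is restored here.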

        """
        old = self._fileDescriptor.location
        try:
            if location is not None:
                self._fileDescriptor.location = location
            yield old
        finally:
            if location is not None:
                self._fileDescriptor.location = old

    def makeUpdatedLocation(self, location: Location) -> Location:
        """Return a new `Location` instance updated with this formatter's
        extension.

        Parameters
        ----------
        location : `Location`
            The location to update.

        Returns
        -------
        updated : `Location`
            A new `Location` with a new file extension applied.

        Raises
        ------
        NotImplementedError
            Raised if there is no ``extension`` attribute associated with
            this formatter.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` the supplied file will
        not be updated. Not all formatters write files so this is not
        defined in the base class.
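
        Examples
        --------
        A sketch; ``formatter`` is a placeholder instance of a subclass that
        declares an extension:

        .. code-block:: python

            # Assuming the subclass declares, e.g., ``extension = ".fits"``,
            # the returned location has that extension applied.
            updated = formatter.makeUpdatedLocation(formatter.fileDescriptor.location)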

        """
        location = copy.deepcopy(location)
        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            location.updateExtension(self.extension)  # type:ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None
        return location

    @classmethod
    def validateExtension(cls, location: Location) -> None:
        """Check that the provided location refers to a file extension that is
        understood by this formatter.

        Parameters
        ----------
        location : `Location`
            Location from which to extract a file extension.

        Raises
        ------
        NotImplementedError
            Raised if file extensions are a concept not understood by this
            formatter.
        ValueError
            Raised if the formatter does not understand this extension.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` only the set of supported
        extensions will be examined.
        """
        supported = set(cls.supportedExtensions)

        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            default = cls.extension  # type: ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None

        # If extension is implemented as an instance property it won't return
        # a string when called as a class property. Assume that
        # the supported extensions class property is complete.
        if default is not None and isinstance(default, str):
            supported.add(default)

        # Get the file name from the uri
        file = location.uri.basename()

        # Check that this file name ends with one of the supported extensions.
        # This is less prone to confusion than asking the location for
        # its extension and then doing a set comparison
        for ext in supported:
            if file.endswith(ext):
                return

        raise ValueError(f"Extension '{location.getExtension()}' on '{location}' "
                         f"is not supported by Formatter '{cls.__name__}' (supports: {supported})")

    def predictPath(self) -> str:
        """Return the path that would be returned by write, without actually
        writing.

        Uses the `FileDescriptor` associated with the instance.

        Returns
        -------
        path : `str`
            Path within datastore that would be associated with the location
            stored in this `Formatter`.
        """
        updated = self.makeUpdatedLocation(self.fileDescriptor.location)
        return updated.pathInStore.path

    def segregateParameters(self, parameters: Optional[Dict[str, Any]] = None) -> Tuple[Dict, Dict]:
        """Segregate the supplied parameters into those understood by the
        formatter and those not understood by the formatter.

        Any unsupported parameters are assumed to be usable by associated
        assemblers.

        Parameters
        ----------
        parameters : `dict`, optional
            Parameters with values that have been supplied by the caller
            and which might be relevant for the formatter. If `None`
            parameters will be read from the registered `FileDescriptor`.

        Returns
        -------
        supported : `dict`
            Those parameters supported by this formatter.
        unsupported : `dict`
            Those parameters not supported by this formatter.
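
        Examples
        --------
        A sketch for a formatter declaring
        ``unsupportedParameters = frozenset({"slice"})``; ``formatter`` is a
        placeholder instance and the parameter names are illustrative only:

        .. code-block:: python

            supported, unsupported = formatter.segregateParameters(
                {"slice": region, "checksum": True}
            )
            # supported == {"checksum": True}
            # unsupported == {"slice": region}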

        """

        if parameters is None:
            parameters = self.fileDescriptor.parameters

        if parameters is None:
            return {}, {}

        if self.unsupportedParameters is None:
            # Support none of the parameters
            return {}, parameters.copy()

        # Start by assuming all are supported
        supported = parameters.copy()
        unsupported = {}

        # And remove any we know are not supported
        for p in set(supported):
            if p in self.unsupportedParameters:
                unsupported[p] = supported.pop(p)

        return supported, unsupported


class FormatterFactory:
    """Factory for `Formatter` instances.
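
    Examples
    --------
    A minimal sketch of typical use; ``MyFormatter`` is a hypothetical
    `Formatter` subclass, ``"StructuredDataDict"`` an example storage class
    name, and ``fileDescriptor`` a pre-built `FileDescriptor`:

    .. code-block:: python

        factory = FormatterFactory()
        factory.registerFormatter("StructuredDataDict",
                                  "mypackage.formatters.MyFormatter")
        formatter = factory.getFormatter("StructuredDataDict", fileDescriptor)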

    """

    defaultKey = LookupKey("default")
    """Configuration key associated with default write parameter settings."""

    writeRecipesKey = LookupKey("write_recipes")
    """Configuration key associated with write recipes."""

    def __init__(self) -> None:
        self._mappingFactory = MappingFactory(Formatter)

    def __contains__(self, key: Union[LookupKey, str]) -> bool:
        """Indicate whether the supplied key is present in the factory.

        Parameters
        ----------
        key : `LookupKey`, `str` or objects with ``name`` attribute
            Key to use to look up in the factory whether a corresponding
            formatter is present.

        Returns
        -------
        in : `bool`
            `True` if the supplied key is present in the factory.
        """
        return key in self._mappingFactory

    def registerFormatters(self, config: Config, *, universe: DimensionUniverse) -> None:
        """Bulk register formatters from a config.

        Parameters
        ----------
        config : `Config`
            ``formatters`` section of a configuration.
        universe : `DimensionUniverse`
            Set of all known dimensions, used to expand and validate any used
            in lookup keys.

        Notes
        -----
        The configuration can include one level of hierarchy where an
        instrument-specific section can be defined to override more general
        template specifications. This is represented in YAML using a
        key of form ``instrument<name>`` which can then define templates
        that will be returned if a `DatasetRef` contains a matching instrument
        name in the data ID.

        The config is parsed using the function
        `~lsst.daf.butler.configSubset.processLookupConfigs`.

        The values for formatter entries can be either a simple string
        referring to a python type or a dict representing the formatter and
        parameters to be hard-coded into the formatter constructor. For
        the dict case the following keys are supported:

        - formatter: The python type to be used as the formatter class.
        - parameters: A further dict to be passed directly to the
          ``writeParameters`` Formatter constructor to seed it.
          These parameters are validated at instance creation and not at
          configuration.

        Additionally, a special ``default`` section can be defined that
        uses the formatter type (class) name as the keys and specifies
        default write parameters that should be used whenever an instance
        of that class is constructed.

        .. code-block:: yaml

           formatters:
             default:
               lsst.daf.butler.formatters.example.ExampleFormatter:
                 max: 10
                 min: 2
                 comment: Default comment
             calexp: lsst.daf.butler.formatters.example.ExampleFormatter
             coadd:
               formatter: lsst.daf.butler.formatters.example.ExampleFormatter
               parameters:
                 max: 5

        Any time an ``ExampleFormatter`` is constructed it will use those
        parameters. If an explicit entry later in the configuration specifies
        a different set of parameters, the two will be merged with the later
        entry taking priority. In the example above ``calexp`` will use
        the default parameters but ``coadd`` will override the value for
        ``max``.

        Formatter configuration can also include a special section describing
        collections of write parameters that can be accessed through a
        simple label. This allows common collections of options to be
        specified in one place in the configuration and reused later.
        The ``write_recipes`` section is indexed by Formatter class name
        and each key is the label to associate with the parameters.

        .. code-block:: yaml

           formatters:
             write_recipes:
               lsst.obs.base.formatters.fitsExposure.FitsExposureFormatter:
                 lossless:
                   ...
                 noCompression:
                   ...

        By convention a formatter that uses write recipes will support a
        ``recipe`` write parameter that will refer to a recipe name in
        the ``write_recipes`` component. The `Formatter` will be constructed
        in the `FormatterFactory` with all the relevant recipes and
        will not attempt to filter by looking at ``writeParameters`` in
        advance. See the specific formatter documentation for details on
        acceptable recipe options.
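
        Examples
        --------
        A sketch of bulk registration, where ``formattersConfig`` is the
        ``formatters`` section of a datastore configuration and ``universe``
        is the relevant `DimensionUniverse` (both assumed to be available):

        .. code-block:: python

            factory = FormatterFactory()
            factory.registerFormatters(formattersConfig, universe=universe)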

        """
        allowed_keys = {"formatter", "parameters"}

        contents = processLookupConfigs(config, allow_hierarchy=True, universe=universe)

        # Extract any default parameter settings
        defaultParameters = contents.get(self.defaultKey, {})
        if not isinstance(defaultParameters, Mapping):
            raise RuntimeError("Default formatter parameters in config can not be a single string"
                               f" (got: {type(defaultParameters)})")

        # Extract any global write recipes -- these are indexed by
        # Formatter class name.
        writeRecipes = contents.get(self.writeRecipesKey, {})
        if isinstance(writeRecipes, str):
            raise RuntimeError(f"The formatters.{self.writeRecipesKey} section must refer to a dict"
                               f" not '{writeRecipes}'")

        for key, f in contents.items():
            # default is handled in a special way
            if key == self.defaultKey:
                continue
            if key == self.writeRecipesKey:
                continue

            # Can be a str or a dict.
            specificWriteParameters = {}
            if isinstance(f, str):
                formatter = f
            elif isinstance(f, Mapping):
                all_keys = set(f)
                unexpected_keys = all_keys - allowed_keys
                if unexpected_keys:
                    raise ValueError(f"Formatter {key} uses unexpected keys {unexpected_keys} in config")
                if "formatter" not in f:
                    raise ValueError(f"Mandatory 'formatter' key missing for formatter key {key}")
                formatter = f["formatter"]
                if "parameters" in f:
                    specificWriteParameters = f["parameters"]
            else:
                raise ValueError(f"Formatter for key {key} has unexpected value: '{f}'")

            # Apply any default parameters for this formatter
            writeParameters = copy.deepcopy(defaultParameters.get(formatter, {}))
            writeParameters.update(specificWriteParameters)

            kwargs: Dict[str, Any] = {}
            if writeParameters:
                kwargs["writeParameters"] = writeParameters

            if formatter in writeRecipes:
                kwargs["writeRecipes"] = writeRecipes[formatter]

            self.registerFormatter(key, formatter, **kwargs)

    def getLookupKeys(self) -> Set[LookupKey]:
        """Retrieve the look up keys for all the registry entries.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys available for matching in the registry.
        """
        return self._mappingFactory.getLookupKeys()

    def getFormatterClassWithMatch(self, entity: Entity) -> Tuple[LookupKey, Type[Formatter],
                                                                  Dict[str, Any]]:
        """Get the matching formatter class along with the matching registry
        key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `type`
            The class of the registered formatter.
        formatter_kwargs : `dict`
            Keyword arguments that are associated with this formatter entry.
        """
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter, formatter_kwargs = self._mappingFactory.getClassFromRegistryWithMatch(names)
        log.debug("Retrieved formatter %s from key '%s' for entity '%s'", getFullTypeName(formatter),
                  matchKey, entity)

        return matchKey, formatter, formatter_kwargs

    def getFormatterClass(self, entity: Entity) -> Type:
        """Get the matching formatter class.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        formatter : `type`
            The class of the registered formatter.
        """
        _, formatter, _ = self.getFormatterClassWithMatch(entity)
        return formatter

    def getFormatterWithMatch(self, entity: Entity, *args: Any, **kwargs: Any) -> Tuple[LookupKey, Formatter]:
        """Get a new formatter instance along with the matching registry
        key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to pass to the object constructor.
        kwargs : `dict`
            Keyword arguments to pass to object constructor.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `Formatter`
            An instance of the registered formatter.
        """
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter = self._mappingFactory.getFromRegistryWithMatch(names, *args, **kwargs)
        log.debug("Retrieved formatter %s from key '%s' for entity '%s'", getFullTypeName(formatter),
                  matchKey, entity)

        return matchKey, formatter

    def getFormatter(self, entity: Entity, *args: Any, **kwargs: Any) -> Formatter:
        """Get a new formatter instance.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to pass to the object constructor.
        kwargs : `dict`
            Keyword arguments to pass to object constructor.

        Returns
        -------
        formatter : `Formatter`
            An instance of the registered formatter.
        """
        _, formatter = self.getFormatterWithMatch(entity, *args, **kwargs)
        return formatter

    def registerFormatter(self, type_: Union[LookupKey, str, StorageClass, DatasetType],
                          formatter: str, *, overwrite: bool = False,
                          **kwargs: Any) -> None:
        """Register a `Formatter`.

        Parameters
        ----------
        type_ : `LookupKey`, `str`, `StorageClass` or `DatasetType`
            Type for which this formatter is to be used. If a `LookupKey`
            is not provided, one will be constructed from the supplied string
            or by using the ``name`` property of the supplied entity.
        formatter : `str` or class of type `Formatter`
            Identifies a `Formatter` subclass to use for reading and writing
            Datasets of this type. Can be a `Formatter` class.
        overwrite : `bool`, optional
            If `True` an existing entry will be replaced by the new value.
            Default is `False`.
        kwargs : `dict`
            Keyword arguments to always pass to object constructor when
            retrieved.

        Raises
        ------
        ValueError
            Raised if the formatter does not name a valid formatter type and
            ``overwrite`` is `False`.
        """
        self._mappingFactory.placeInRegistry(type_, formatter, overwrite=overwrite, **kwargs)


# Type to use when allowing a Formatter or its class name
FormatterParameter = Union[str, Type[Formatter], Formatter]