# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ("Formatter", "FormatterFactory", "FormatterParameter")

from abc import ABCMeta, abstractmethod
from collections.abc import Mapping
import contextlib
import logging
import copy
from typing import (
    AbstractSet,
    Any,
    ClassVar,
    Dict,
    Iterator,
    Optional,
    Set,
    Tuple,
    Type,
    TYPE_CHECKING,
    Union,
)

from .configSupport import processLookupConfigs, LookupKey
from .mappingFactory import MappingFactory
from .utils import getFullTypeName
from .fileDescriptor import FileDescriptor
from .location import Location
from .config import Config
from .dimensions import DimensionUniverse
from .storageClass import StorageClass
from .datasets import DatasetType, DatasetRef

log = logging.getLogger(__name__)

# Define a new special type for functions that take "entity"
Entity = Union[DatasetType, DatasetRef, StorageClass, str]


if TYPE_CHECKING:
    from .dimensions import DataCoordinate


class Formatter(metaclass=ABCMeta):
    """Interface for reading and writing Datasets with a particular
    `StorageClass`.

    Parameters
    ----------
    fileDescriptor : `FileDescriptor`, optional
        Identifies the file to read or write, and the associated storage
        classes and parameter information. Its value can be `None` if the
        caller will never call `Formatter.read` or `Formatter.write`.
    dataId : `DataCoordinate`, optional
        Data ID associated with this formatter.
    writeParameters : `dict`, optional
        Any parameters to be hard-coded into this instance to control how
        the dataset is serialized.
    """

    unsupportedParameters: ClassVar[Optional[AbstractSet[str]]] = frozenset()
    """Set of read parameters not understood by this `Formatter`. An empty set
    means all parameters are supported. `None` indicates that no parameters
    are supported (`frozenset`).
    """

    supportedWriteParameters: ClassVar[Optional[AbstractSet[str]]] = None
    """Parameters understood by this formatter that can be used to control
    how a dataset is serialized. `None` indicates that no parameters are
    supported."""

    supportedExtensions: ClassVar[AbstractSet[str]] = frozenset()
    """Set of all extensions supported by this formatter.

    Only expected to be populated by Formatters that write files. Any extension
    assigned to the ``extension`` property will be automatically included in
    the list of supported extensions."""

    def __init__(self, fileDescriptor: FileDescriptor, dataId: Optional[DataCoordinate] = None,
                 writeParameters: Optional[Dict[str, Any]] = None,
                 writeRecipes: Optional[Dict[str, Any]] = None):
        if not isinstance(fileDescriptor, FileDescriptor):
            raise TypeError("File descriptor must be a FileDescriptor")
        self._fileDescriptor = fileDescriptor
        self._dataId = dataId

        # Check that the write parameters are allowed
        if writeParameters:
            if self.supportedWriteParameters is None:
                raise ValueError("This formatter does not accept any write parameters. "
                                 f"Got: {', '.join(writeParameters)}")
            else:
                given = set(writeParameters)
                unknown = given - self.supportedWriteParameters
                if unknown:
                    s = "s" if len(unknown) != 1 else ""
                    unknownStr = ", ".join(f"'{u}'" for u in unknown)
                    raise ValueError(f"This formatter does not accept parameter{s} {unknownStr}")

        self._writeParameters = writeParameters
        self._writeRecipes = self.validateWriteRecipes(writeRecipes)

    def __str__(self) -> str:
        return f"{self.name()}@{self.fileDescriptor.location.path}"

    def __repr__(self) -> str:
        return f"{self.name()}({self.fileDescriptor!r})"

    @property
    def fileDescriptor(self) -> FileDescriptor:
        """FileDescriptor associated with this formatter
        (`FileDescriptor`, read-only)"""
        return self._fileDescriptor

    @property
    def dataId(self) -> Optional[DataCoordinate]:
        """DataId associated with this formatter (`DataCoordinate`)"""
        return self._dataId

    @property
    def writeParameters(self) -> Mapping[str, Any]:
        """Parameters to use when writing out datasets."""
        if self._writeParameters is not None:
            return self._writeParameters
        return {}

    @property
    def writeRecipes(self) -> Mapping[str, Any]:
        """Detailed write Recipes indexed by recipe name."""
        if self._writeRecipes is not None:
            return self._writeRecipes
        return {}

    @classmethod
    def validateWriteRecipes(cls, recipes: Optional[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]:
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate.

        Returns
        -------
        validated : `dict`
            Validated recipes.

        Raises
        ------
        RuntimeError
            Raised if validation fails. The default implementation raises
            if any recipes are given.
        """
        if recipes:
            raise RuntimeError(f"This formatter does not understand these writeRecipes: {recipes}")
        return recipes
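
    # A formatter that accepts write recipes would typically override
    # validateWriteRecipes to fill in defaults and check the recipe contents.
    # Illustrative sketch only; the ``compression`` key is a hypothetical
    # recipe entry, not something defined by this base class:
    #
    #     @classmethod
    #     def validateWriteRecipes(cls, recipes):
    #         recipes = dict(recipes) if recipes else {}
    #         for name, recipe in recipes.items():
    #             recipe.setdefault("compression", "none")
    #         return recipes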

    @classmethod
    def name(cls) -> str:
        """Returns the fully qualified name of the formatter.

        Returns
        -------
        name : `str`
            Fully-qualified name of formatter class.
        """
        return getFullTypeName(cls)

    @abstractmethod
    def read(self, component: Optional[str] = None) -> Any:
        """Read a Dataset.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested Dataset.
        """
        raise NotImplementedError("Type does not support reading")

    @abstractmethod
    def write(self, inMemoryDataset: Any) -> str:
        """Write a Dataset.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.

        Returns
        -------
        path : `str`
            The path to where the Dataset was stored within the datastore.
        """
        raise NotImplementedError("Type does not support writing")

    @classmethod
    def can_read_bytes(cls) -> bool:
        """Indicate if this formatter can format from bytes.

        Returns
        -------
        can : `bool`
            `True` if the `fromBytes` method is implemented.
        """
        # We have no property to read so instead try to format from a byte
        # and see what happens
        try:
            # We know the arguments are incompatible
            cls.fromBytes(cls, b"")  # type: ignore
        except NotImplementedError:
            return False
        except Exception:
            # There will be problems with the bytes we are supplying so ignore
            pass
        return True

    def fromBytes(self, serializedDataset: bytes,
                  component: Optional[str] = None) -> object:
        """Reads serialized data into a Dataset or its component.

        Parameters
        ----------
        serializedDataset : `bytes`
            Bytes object to unserialize.
        component : `str`, optional
            Component to read from the Dataset. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.
        """
        raise NotImplementedError("Type does not support reading from bytes.")

    def toBytes(self, inMemoryDataset: Any) -> bytes:
        """Serialize the Dataset to bytes based on formatter.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to serialize.

        Returns
        -------
        serializedDataset : `bytes`
            Bytes representing the serialized dataset.
        """
        raise NotImplementedError("Type does not support writing to bytes.")

    @contextlib.contextmanager
    def _updateLocation(self, location: Optional[Location]) -> Iterator[Location]:
        """Temporarily replace the location associated with this formatter.

        Parameters
        ----------
        location : `Location`
            New location to use for this formatter. If `None` the
            formatter will not change but it will still return
            the old location. This allows it to be used in a code
            path where the location may not need to be updated
            but the with block is still convenient.

        Yields
        ------
        old : `Location`
            The old location that will be restored.

        Notes
        -----
        This is an internal method that should be used with care.
        It may change in the future. Should be used as a context
        manager to restore the location when the temporary is no
        longer required.
        """
        old = self._fileDescriptor.location
        try:
            if location is not None:
                self._fileDescriptor.location = location
            yield old
        finally:
            if location is not None:
                self._fileDescriptor.location = old
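
    # Illustrative sketch of how this context manager is used
    # (``tmpLocation`` is a hypothetical Location supplied by the caller):
    #
    #     with self._updateLocation(tmpLocation):
    #         path = self.write(inMemoryDataset)
    #
    # The original location is restored when the with block exits.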

    def makeUpdatedLocation(self, location: Location) -> Location:
        """Return a new `Location` instance updated with this formatter's
        extension.

        Parameters
        ----------
        location : `Location`
            The location to update.

        Returns
        -------
        updated : `Location`
            A new `Location` with a new file extension applied.

        Raises
        ------
        NotImplementedError
            Raised if there is no ``extension`` attribute associated with
            this formatter.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` the supplied file will
        not be updated. Not all formatters write files so this is not
        defined in the base class.
        """
        location = copy.deepcopy(location)
        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            location.updateExtension(self.extension)  # type: ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None
        return location

    @classmethod
    def validateExtension(cls, location: Location) -> None:
        """Check that the provided location refers to a file extension that is
        understood by this formatter.

        Parameters
        ----------
        location : `Location`
            Location from which to extract a file extension.

        Raises
        ------
        NotImplementedError
            Raised if file extensions are a concept not understood by this
            formatter.
        ValueError
            Raised if the formatter does not understand this extension.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` only the set of supported
        extensions will be examined.
        """
        supported = set(cls.supportedExtensions)

        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            default = cls.extension  # type: ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None

        # If extension is implemented as an instance property it won't return
        # a string when called as a class property. Assume that
        # the supported extensions class property is complete.
        if default is not None and isinstance(default, str):
            supported.add(default)

        # Get the file name from the uri
        file = location.uri.basename()

        # Check that this file name ends with one of the supported extensions.
        # This is less prone to confusion than asking the location for
        # its extension and then doing a set comparison
        for ext in supported:
            if file.endswith(ext):
                return

        raise ValueError(f"Extension '{location.getExtension()}' on '{location}' "
                         f"is not supported by Formatter '{cls.__name__}' (supports: {supported})")

    def predictPath(self) -> str:
        """Return the path that would be returned by write, without actually
        writing.

        Uses the `FileDescriptor` associated with the instance.

        Returns
        -------
        path : `str`
            Path within datastore that would be associated with the location
            stored in this `Formatter`.
        """
        updated = self.makeUpdatedLocation(self.fileDescriptor.location)
        return updated.pathInStore

    def segregateParameters(self, parameters: Optional[Dict[str, Any]] = None) -> Tuple[Dict, Dict]:
        """Segregate the supplied parameters into those understood by the
        formatter and those not understood by the formatter.

        Any unsupported parameters are assumed to be usable by associated
        assemblers.

        Parameters
        ----------
        parameters : `dict`, optional
            Parameters with values that have been supplied by the caller
            and which might be relevant for the formatter. If `None`
            parameters will be read from the registered `FileDescriptor`.

        Returns
        -------
        supported : `dict`
            Those parameters supported by this formatter.
        unsupported : `dict`
            Those parameters not supported by this formatter.
        """

        if parameters is None:
            parameters = self.fileDescriptor.parameters

        if parameters is None:
            return {}, {}

        if self.unsupportedParameters is None:
            # Support none of the parameters
            return {}, parameters.copy()

        # Start by assuming all are supported
        supported = parameters.copy()
        unsupported = {}

        # And remove any we know are not supported
        for p in set(supported):
            if p in self.unsupportedParameters:
                unsupported[p] = supported.pop(p)

        return supported, unsupported
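

# A minimal concrete Formatter, shown here as an illustrative sketch of the
# interface above. The class name, the ``.txt`` extension and the plain-text
# serialization are hypothetical examples, not part of daf_butler; a real
# formatter would normally live in its own module.
class _ExampleTextFormatter(Formatter):
    """Example formatter that reads and writes plain text files."""

    extension = ".txt"

    supportedExtensions = frozenset({".txt"})

    def read(self, component: Optional[str] = None) -> Any:
        # Read the dataset back from the location in the FileDescriptor.
        with open(self.fileDescriptor.location.path, "r") as fd:
            return fd.read()

    def write(self, inMemoryDataset: Any) -> str:
        # Apply this formatter's extension and write the dataset to that path.
        location = self.makeUpdatedLocation(self.fileDescriptor.location)
        with open(location.path, "w") as fd:
            fd.write(str(inMemoryDataset))
        return location.pathInStore
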

class FormatterFactory:
    """Factory for `Formatter` instances.
    """

    defaultKey = LookupKey("default")
    """Configuration key associated with default write parameter settings."""

    writeRecipesKey = LookupKey("write_recipes")
    """Configuration key associated with write recipes."""

    def __init__(self) -> None:
        self._mappingFactory = MappingFactory(Formatter)

    def __contains__(self, key: Union[LookupKey, str]) -> bool:
        """Indicates whether the supplied key is present in the factory.

        Parameters
        ----------
        key : `LookupKey`, `str` or objects with ``name`` attribute
            Key to use to lookup in the factory whether a corresponding
            formatter is present.

        Returns
        -------
        in : `bool`
            `True` if the supplied key is present in the factory.
        """
        return key in self._mappingFactory

    def registerFormatters(self, config: Config, *, universe: DimensionUniverse) -> None:
        """Bulk register formatters from a config.

        Parameters
        ----------
        config : `Config`
            ``formatters`` section of a configuration.
        universe : `DimensionUniverse`, optional
            Set of all known dimensions, used to expand and validate any used
            in lookup keys.

        Notes
        -----
        The configuration can include one level of hierarchy where an
        instrument-specific section can be defined to override more general
        template specifications. This is represented in YAML using a
        key of form ``instrument<name>`` which can then define templates
        that will be returned if a `DatasetRef` contains a matching instrument
        name in the data ID.

        The config is parsed using the function
        `~lsst.daf.butler.configSupport.processLookupConfigs`.

        The values for formatter entries can be either a simple string
        referring to a python type or a dict representing the formatter and
        parameters to be hard-coded into the formatter constructor. For
        the dict case the following keys are supported:

        - formatter: The python type to be used as the formatter class.
        - parameters: A further dict to be passed directly to the
          ``writeParameters`` Formatter constructor to seed it.
          These parameters are validated at instance creation and not at
          configuration.

        Additionally, a special ``default`` section can be defined that
        uses the formatter type (class) name as the keys and specifies
        default write parameters that should be used whenever an instance
        of that class is constructed.

        .. code-block:: yaml

           formatters:
             default:
               lsst.daf.butler.formatters.example.ExampleFormatter:
                 max: 10
                 min: 2
                 comment: Default comment
             calexp: lsst.daf.butler.formatters.example.ExampleFormatter
             coadd:
               formatter: lsst.daf.butler.formatters.example.ExampleFormatter
               parameters:
                 max: 5

        Any time an ``ExampleFormatter`` is constructed it will use those
        parameters. If an explicit entry later in the configuration specifies
        a different set of parameters, the two will be merged with the later
        entry taking priority. In the example above ``calexp`` will use
        the default parameters but ``coadd`` will override the value for
        ``max``.

        Formatter configuration can also include a special section describing
        collections of write parameters that can be accessed through a
        simple label. This allows common collections of options to be
        specified in one place in the configuration and reused later.
        The ``write_recipes`` section is indexed by Formatter class name
        and each key is the label to associate with the parameters.

        .. code-block:: yaml

           formatters:
             write_recipes:
               lsst.obs.base.formatters.fitsExposure.FitsExposureFormatter:
                 lossless:
                   ...
                 noCompression:
                   ...

        By convention a formatter that uses write recipes will support a
        ``recipe`` write parameter that will refer to a recipe name in
        the ``write_recipes`` component. The `Formatter` will be constructed
        in the `FormatterFactory` with all the relevant recipes and
        will not attempt to filter by looking at ``writeParameters`` in
        advance. See the specific formatter documentation for details on
        acceptable recipe options.
        """
        allowed_keys = {"formatter", "parameters"}

        contents = processLookupConfigs(config, allow_hierarchy=True, universe=universe)

        # Extract any default parameter settings
        defaultParameters = contents.get(self.defaultKey, {})
        if not isinstance(defaultParameters, Mapping):
            raise RuntimeError("Default formatter parameters in config can not be a single string"
                               f" (got: {type(defaultParameters)})")

        # Extract any global write recipes -- these are indexed by
        # Formatter class name.
        writeRecipes = contents.get(self.writeRecipesKey, {})
        if isinstance(writeRecipes, str):
            raise RuntimeError(f"The formatters.{self.writeRecipesKey} section must refer to a dict"
                               f" not '{writeRecipes}'")

        for key, f in contents.items():
            # default is handled in a special way
            if key == self.defaultKey:
                continue
            if key == self.writeRecipesKey:
                continue

            # Can be a str or a dict.
            specificWriteParameters = {}
            if isinstance(f, str):
                formatter = f
            elif isinstance(f, Mapping):
                all_keys = set(f)
                unexpected_keys = all_keys - allowed_keys
                if unexpected_keys:
                    raise ValueError(f"Formatter {key} uses unexpected keys {unexpected_keys} in config")
                if "formatter" not in f:
                    raise ValueError(f"Mandatory 'formatter' key missing for formatter key {key}")
                formatter = f["formatter"]
                if "parameters" in f:
                    specificWriteParameters = f["parameters"]
            else:
                raise ValueError(f"Formatter for key {key} has unexpected value: '{f}'")

            # Apply any default parameters for this formatter
            writeParameters = copy.deepcopy(defaultParameters.get(formatter, {}))
            writeParameters.update(specificWriteParameters)

            kwargs: Dict[str, Any] = {}
            if writeParameters:
                kwargs["writeParameters"] = writeParameters

            if formatter in writeRecipes:
                kwargs["writeRecipes"] = writeRecipes[formatter]

            self.registerFormatter(key, formatter, **kwargs)
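
        # Illustrative call (a sketch, not executed here): a datastore would
        # typically register everything from its configuration in one go,
        # for example
        #
        #     factory.registerFormatters(datastoreConfig["formatters"],
        #                                universe=butler.registry.dimensions)
        #
        # where ``datastoreConfig`` and ``butler`` are assumed to exist in the
        # calling code.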

    def getLookupKeys(self) -> Set[LookupKey]:
        """Retrieve the look up keys for all the registry entries.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys available for matching in the registry.
        """
        return self._mappingFactory.getLookupKeys()

    def getFormatterClassWithMatch(self, entity: Entity) -> Tuple[LookupKey, Type[Formatter],
                                                                  Dict[str, Any]]:
        """Get the matching formatter class along with the matching registry
        key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `type`
            The class of the registered formatter.
        formatter_kwargs : `dict`
            Keyword arguments that are associated with this formatter entry.
        """
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter, formatter_kwargs = self._mappingFactory.getClassFromRegistryWithMatch(names)
        log.debug("Retrieved formatter %s from key '%s' for entity '%s'", getFullTypeName(formatter),
                  matchKey, entity)

        return matchKey, formatter, formatter_kwargs

    def getFormatterClass(self, entity: Entity) -> Type:
        """Get the matching formatter class.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        formatter : `type`
            The class of the registered formatter.
        """
        _, formatter, _ = self.getFormatterClassWithMatch(entity)
        return formatter

    def getFormatterWithMatch(self, entity: Entity, *args: Any, **kwargs: Any) -> Tuple[LookupKey, Formatter]:
        """Get a new formatter instance along with the matching registry
        key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to pass to the object constructor.
        kwargs : `dict`
            Keyword arguments to pass to object constructor.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `Formatter`
            An instance of the registered formatter.
        """
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter = self._mappingFactory.getFromRegistryWithMatch(names, *args, **kwargs)
        log.debug("Retrieved formatter %s from key '%s' for entity '%s'", getFullTypeName(formatter),
                  matchKey, entity)

        return matchKey, formatter

    def getFormatter(self, entity: Entity, *args: Any, **kwargs: Any) -> Formatter:
        """Get a new formatter instance.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to pass to the object constructor.
        kwargs : `dict`
            Keyword arguments to pass to object constructor.

        Returns
        -------
        formatter : `Formatter`
            An instance of the registered formatter.
        """
        _, formatter = self.getFormatterWithMatch(entity, *args, **kwargs)
        return formatter

    def registerFormatter(self, type_: Union[LookupKey, str, StorageClass, DatasetType],
                          formatter: str, *, overwrite: bool = False,
                          **kwargs: Any) -> None:
        """Register a `Formatter`.

        Parameters
        ----------
        type_ : `LookupKey`, `str`, `StorageClass` or `DatasetType`
            Type for which this formatter is to be used. If a `LookupKey`
            is not provided, one will be constructed from the supplied string
            or by using the ``name`` property of the supplied entity.
        formatter : `str` or class of type `Formatter`
            Identifies a `Formatter` subclass to use for reading and writing
            Datasets of this type. Can be a `Formatter` class.
        overwrite : `bool`, optional
            If `True` an existing entry will be replaced by the new value.
            Default is `False`.
        kwargs : `dict`
            Keyword arguments to always pass to object constructor when
            retrieved.

        Raises
        ------
        ValueError
            Raised if the formatter does not name a valid formatter type and
            ``overwrite`` is `False`.
        """
        self._mappingFactory.placeInRegistry(type_, formatter, overwrite=overwrite, **kwargs)


# Type to use when allowing a Formatter or its class name
FormatterParameter = Union[str, Type[Formatter], Formatter]
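

# Illustrative sketch of driving the factory directly; normally a datastore
# does this from its configuration. The lookup key "ExampleText" and the use
# of _ExampleTextFormatter defined above are hypothetical choices made for
# this sketch, and the function is not called anywhere in this module.
def _exampleFactoryUsage(fileDescriptor: FileDescriptor) -> Formatter:
    factory = FormatterFactory()
    # Map a lookup key to a formatter class; a dotted string class name would
    # also be accepted here.
    factory.registerFormatter("ExampleText", _ExampleTextFormatter)
    # Construct a formatter instance for the supplied FileDescriptor.
    return factory.getFormatter("ExampleText", fileDescriptor)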