Coverage for python/lsst/daf/butler/core/formatter.py: 27%
204 statements
« prev ^ index » next coverage.py v6.5.0, created at 2022-10-21 02:03 -0700
« prev ^ index » next coverage.py v6.5.0, created at 2022-10-21 02:03 -0700
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22from __future__ import annotations
24__all__ = ("Formatter", "FormatterFactory", "FormatterParameter")
26import contextlib
27import copy
28import logging
29from abc import ABCMeta, abstractmethod
30from collections.abc import Mapping
31from typing import (
32 TYPE_CHECKING,
33 AbstractSet,
34 Any,
35 ClassVar,
36 Dict,
37 Iterator,
38 Optional,
39 Set,
40 Tuple,
41 Type,
42 Union,
43)
45from lsst.utils.introspection import get_full_type_name
47from .config import Config
48from .configSupport import LookupKey, processLookupConfigs
49from .datasets import DatasetRef, DatasetType
50from .dimensions import DimensionUniverse
51from .fileDescriptor import FileDescriptor
52from .location import Location
53from .mappingFactory import MappingFactory
54from .storageClass import StorageClass
log = logging.getLogger(__name__)

# Define a new special type for functions that take "entity": anything a
# formatter lookup can be keyed on (a dataset type, a concrete ref, a
# storage class, or a plain string name).
Entity = Union[DatasetType, DatasetRef, StorageClass, str]

if TYPE_CHECKING:
    # Imported only for type annotations to avoid an import cycle at runtime.
    from .dimensions import DataCoordinate
class Formatter(metaclass=ABCMeta):
    """Interface for reading and writing Datasets.

    The formatters are associated with a particular `StorageClass`.

    Parameters
    ----------
    fileDescriptor : `FileDescriptor`, optional
        Identifies the file to read or write, and the associated storage
        classes and parameter information. Its value can be `None` if the
        caller will never call `Formatter.read` or `Formatter.write`.
    dataId : `DataCoordinate`
        Data ID associated with this formatter.
    writeParameters : `dict`, optional
        Any parameters to be hard-coded into this instance to control how
        the dataset is serialized.
    writeRecipes : `dict`, optional
        Detailed write Recipes indexed by recipe name.

    Notes
    -----
    All Formatter subclasses should share the base class's constructor
    signature.
    """

    unsupportedParameters: ClassVar[Optional[AbstractSet[str]]] = frozenset()
    """Set of read parameters not understood by this `Formatter`. An empty set
    means all parameters are supported. `None` indicates that no parameters
    are supported (`frozenset`).
    """

    supportedWriteParameters: ClassVar[Optional[AbstractSet[str]]] = None
    """Parameters understood by this formatter that can be used to control
    how a dataset is serialized. `None` indicates that no parameters are
    supported."""

    supportedExtensions: ClassVar[AbstractSet[str]] = frozenset()
    """Set of all extensions supported by this formatter.

    Only expected to be populated by Formatters that write files. Any extension
    assigned to the ``extension`` property will be automatically included in
    the list of supported extensions."""

    def __init__(
        self,
        fileDescriptor: FileDescriptor,
        dataId: DataCoordinate,
        writeParameters: Optional[Dict[str, Any]] = None,
        writeRecipes: Optional[Dict[str, Any]] = None,
    ):
        if not isinstance(fileDescriptor, FileDescriptor):
            raise TypeError("File descriptor must be a FileDescriptor")
        assert dataId is not None, "dataId is now required for formatter initialization"
        self._fileDescriptor = fileDescriptor
        self._dataId = dataId

        # Check that the write parameters are allowed.  Unknown parameters
        # are rejected eagerly here rather than at write() time so that
        # configuration errors surface as early as possible.
        if writeParameters:
            if self.supportedWriteParameters is None:
                raise ValueError(
                    "This formatter does not accept any write parameters. "
                    f"Got: {', '.join(writeParameters)}"
                )
            else:
                given = set(writeParameters)
                unknown = given - self.supportedWriteParameters
                if unknown:
                    # Pluralize the message only when more than one
                    # parameter is unknown.
                    s = "s" if len(unknown) != 1 else ""
                    unknownStr = ", ".join(f"'{u}'" for u in unknown)
                    raise ValueError(f"This formatter does not accept parameter{s} {unknownStr}")

        self._writeParameters = writeParameters
        # Subclasses may override validateWriteRecipes to accept recipes;
        # the base implementation rejects any that are supplied.
        self._writeRecipes = self.validateWriteRecipes(writeRecipes)

    def __str__(self) -> str:
        return f"{self.name()}@{self.fileDescriptor.location.path}"

    def __repr__(self) -> str:
        return f"{self.name()}({self.fileDescriptor!r})"

    @property
    def fileDescriptor(self) -> FileDescriptor:
        """File descriptor associated with this formatter (`FileDescriptor`).

        Read-only property.
        """
        return self._fileDescriptor

    @property
    def dataId(self) -> DataCoordinate:
        """Return Data ID associated with this formatter (`DataCoordinate`)."""
        return self._dataId

    @property
    def writeParameters(self) -> Mapping[str, Any]:
        """Parameters to use when writing out datasets (`Mapping`).

        Empty mapping if no parameters were supplied at construction.
        """
        if self._writeParameters is not None:
            return self._writeParameters
        return {}

    @property
    def writeRecipes(self) -> Mapping[str, Any]:
        """Detailed write Recipes indexed by recipe name (`Mapping`).

        Empty mapping if no recipes were supplied at construction.
        """
        if self._writeRecipes is not None:
            return self._writeRecipes
        return {}

    @classmethod
    def validateWriteRecipes(cls, recipes: Optional[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]:
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate.

        Returns
        -------
        validated : `dict`
            Validated recipes.

        Raises
        ------
        RuntimeError
            Raised if validation fails. The default implementation raises
            if any recipes are given.
        """
        if recipes:
            raise RuntimeError(f"This formatter does not understand these writeRecipes: {recipes}")
        return recipes

    @classmethod
    def name(cls) -> str:
        """Return the fully qualified name of the formatter.

        Returns
        -------
        name : `str`
            Fully-qualified name of formatter class.
        """
        return get_full_type_name(cls)

    @abstractmethod
    def read(self, component: Optional[str] = None) -> Any:
        """Read a Dataset.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested Dataset.
        """
        raise NotImplementedError("Type does not support reading")

    @abstractmethod
    def write(self, inMemoryDataset: Any) -> None:
        """Write a Dataset.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.
        """
        raise NotImplementedError("Type does not support writing")

    @classmethod
    def can_read_bytes(cls) -> bool:
        """Indicate if this formatter can format from bytes.

        Returns
        -------
        can : `bool`
            `True` if the `fromBytes` method is implemented.
        """
        # We have no property to read so instead try to format from a byte
        # and see what happens.  Only NotImplementedError counts as "not
        # supported"; any other exception means the method exists but the
        # deliberately bogus arguments upset it.
        try:
            # We know the arguments are incompatible
            cls.fromBytes(cls, b"")  # type: ignore
        except NotImplementedError:
            return False
        except Exception:
            # There will be problems with the bytes we are supplying so ignore
            pass
        return True

    def fromBytes(self, serializedDataset: bytes, component: Optional[str] = None) -> object:
        """Read serialized data into a Dataset or its component.

        Parameters
        ----------
        serializedDataset : `bytes`
            Bytes object to unserialize.
        component : `str`, optional
            Component to read from the Dataset. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.

        Raises
        ------
        NotImplementedError
            Raised by the default implementation; subclasses that support
            reading from bytes must override this method.
        """
        raise NotImplementedError("Type does not support reading from bytes.")

    def toBytes(self, inMemoryDataset: Any) -> bytes:
        """Serialize the Dataset to bytes based on formatter.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to serialize.

        Returns
        -------
        serializedDataset : `bytes`
            Bytes representing the serialized dataset.

        Raises
        ------
        NotImplementedError
            Raised by the default implementation; subclasses that support
            writing to bytes must override this method.
        """
        raise NotImplementedError("Type does not support writing to bytes.")

    @contextlib.contextmanager
    def _updateLocation(self, location: Optional[Location]) -> Iterator[Location]:
        """Temporarily replace the location associated with this formatter.

        Parameters
        ----------
        location : `Location`
            New location to use for this formatter. If `None` the
            formatter will not change but it will still return
            the old location. This allows it to be used in a code
            path where the location may not need to be updated
            but the with block is still convenient.

        Yields
        ------
        old : `Location`
            The old location that will be restored.

        Notes
        -----
        This is an internal method that should be used with care.
        It may change in the future. Should be used as a context
        manager to restore the location when the temporary is no
        longer required.
        """
        old = self._fileDescriptor.location
        try:
            if location is not None:
                self._fileDescriptor.location = location
            yield old
        finally:
            # Restore the original location even if the with-body raised,
            # but only if we actually replaced it above.
            if location is not None:
                self._fileDescriptor.location = old

    def makeUpdatedLocation(self, location: Location) -> Location:
        """Return a new `Location` updated with this formatter's extension.

        Parameters
        ----------
        location : `Location`
            The location to update.

        Returns
        -------
        updated : `Location`
            A new `Location` with a new file extension applied.

        Raises
        ------
        NotImplementedError
            Raised if there is no ``extension`` attribute associated with
            this formatter.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` the supplied file will
        not be updated. Not all formatters write files so this is not
        defined in the base class.
        """
        # Deep copy so the caller's Location is never mutated.
        location = copy.deepcopy(location)
        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            location.updateExtension(self.extension)  # type:ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None
        return location

    @classmethod
    def validateExtension(cls, location: Location) -> None:
        """Check the extension of the provided location for compatibility.

        Parameters
        ----------
        location : `Location`
            Location from which to extract a file extension.

        Raises
        ------
        NotImplementedError
            Raised if file extensions are a concept not understood by this
            formatter.
        ValueError
            Raised if the formatter does not understand this extension.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` only the set of supported
        extensions will be examined.
        """
        supported = set(cls.supportedExtensions)

        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            default = cls.extension  # type: ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None

        # If extension is implemented as an instance property it won't return
        # a string when called as a class property. Assume that
        # the supported extensions class property is complete.
        if default is not None and isinstance(default, str):
            supported.add(default)

        # Get the file name from the uri
        file = location.uri.basename()

        # Check that this file name ends with one of the supported extensions.
        # This is less prone to confusion than asking the location for
        # its extension and then doing a set comparison
        for ext in supported:
            if file.endswith(ext):
                return

        raise ValueError(
            f"Extension '{location.getExtension()}' on '{location}' "
            f"is not supported by Formatter '{cls.__name__}' (supports: {supported})"
        )

    def predictPath(self) -> str:
        """Return the path that would be returned by write.

        Does not write any data file.

        Uses the `FileDescriptor` associated with the instance.

        Returns
        -------
        path : `str`
            Path within datastore that would be associated with the location
            stored in this `Formatter`.
        """
        updated = self.makeUpdatedLocation(self.fileDescriptor.location)
        return updated.pathInStore.path

    def segregateParameters(self, parameters: Optional[Dict[str, Any]] = None) -> Tuple[Dict, Dict]:
        """Segregate the supplied parameters.

        This splits the parameters into those understood by the
        formatter and those not understood by the formatter.

        Any unsupported parameters are assumed to be usable by associated
        assemblers.

        Parameters
        ----------
        parameters : `dict`, optional
            Parameters with values that have been supplied by the caller
            and which might be relevant for the formatter. If `None`
            parameters will be read from the registered `FileDescriptor`.

        Returns
        -------
        supported : `dict`
            Those parameters supported by this formatter.
        unsupported : `dict`
            Those parameters not supported by this formatter.
        """
        if parameters is None:
            parameters = self.fileDescriptor.parameters

        if parameters is None:
            return {}, {}

        if self.unsupportedParameters is None:
            # Support none of the parameters
            return {}, parameters.copy()

        # Start by assuming all are supported
        supported = parameters.copy()
        unsupported = {}

        # And remove any we know are not supported.  Iterate over a copy of
        # the keys since we pop from ``supported`` while looping.
        for p in set(supported):
            if p in self.unsupportedParameters:
                unsupported[p] = supported.pop(p)

        return supported, unsupported
class FormatterFactory:
    """Factory for `Formatter` instances.

    Maintains a mapping from lookup keys (dataset type names, storage class
    names, etc.) to `Formatter` classes and the keyword arguments used when
    instantiating them.
    """

    defaultKey = LookupKey("default")
    """Configuration key associated with default write parameter settings."""

    writeRecipesKey = LookupKey("write_recipes")
    """Configuration key associated with write recipes."""

    def __init__(self) -> None:
        # Delegate registration and lookup to the generic mapping factory,
        # constrained to Formatter subclasses.
        self._mappingFactory = MappingFactory(Formatter)

    def __contains__(self, key: Union[LookupKey, str]) -> bool:
        """Indicate whether the supplied key is present in the factory.

        Parameters
        ----------
        key : `LookupKey`, `str` or objects with ``name`` attribute
            Key to use to lookup in the factory whether a corresponding
            formatter is present.

        Returns
        -------
        in : `bool`
            `True` if the supplied key is present in the factory.
        """
        return key in self._mappingFactory

    def registerFormatters(self, config: Config, *, universe: DimensionUniverse) -> None:
        """Bulk register formatters from a config.

        Parameters
        ----------
        config : `Config`
            ``formatters`` section of a configuration.
        universe : `DimensionUniverse`, optional
            Set of all known dimensions, used to expand and validate any used
            in lookup keys.

        Notes
        -----
        The configuration can include one level of hierarchy where an
        instrument-specific section can be defined to override more general
        template specifications. This is represented in YAML using a
        key of form ``instrument<name>`` which can then define templates
        that will be returned if a `DatasetRef` contains a matching instrument
        name in the data ID.

        The config is parsed using the function
        `~lsst.daf.butler.configSupport.processLookupConfigs`.

        The values for formatter entries can be either a simple string
        referring to a python type or a dict representing the formatter and
        parameters to be hard-coded into the formatter constructor. For
        the dict case the following keys are supported:

        - formatter: The python type to be used as the formatter class.
        - parameters: A further dict to be passed directly to the
          ``writeParameters`` Formatter constructor to seed it.
          These parameters are validated at instance creation and not at
          configuration.

        Additionally, a special ``default`` section can be defined that
        uses the formatter type (class) name as the keys and specifies
        default write parameters that should be used whenever an instance
        of that class is constructed.

        .. code-block:: yaml

           formatters:
             default:
               lsst.daf.butler.formatters.example.ExampleFormatter:
                 max: 10
                 min: 2
                 comment: Default comment
             calexp: lsst.daf.butler.formatters.example.ExampleFormatter
             coadd:
               formatter: lsst.daf.butler.formatters.example.ExampleFormatter
               parameters:
                 max: 5

        Any time an ``ExampleFormatter`` is constructed it will use those
        parameters. If an explicit entry later in the configuration specifies
        a different set of parameters, the two will be merged with the later
        entry taking priority. In the example above ``calexp`` will use
        the default parameters but ``coadd`` will override the value for
        ``max``.

        Formatter configuration can also include a special section describing
        collections of write parameters that can be accessed through a
        simple label. This allows common collections of options to be
        specified in one place in the configuration and reused later.
        The ``write_recipes`` section is indexed by Formatter class name
        and each key is the label to associate with the parameters.

        .. code-block:: yaml

           formatters:
             write_recipes:
               lsst.obs.base.formatters.fitsExposure.FitsExposureFormatter:
                 lossless:
                   ...
                 noCompression:
                   ...

        By convention a formatter that uses write recipes will support a
        ``recipe`` write parameter that will refer to a recipe name in
        the ``write_recipes`` component. The `Formatter` will be constructed
        in the `FormatterFactory` with all the relevant recipes and
        will not attempt to filter by looking at ``writeParameters`` in
        advance. See the specific formatter documentation for details on
        acceptable recipe options.
        """
        allowed_keys = {"formatter", "parameters"}

        contents = processLookupConfigs(config, allow_hierarchy=True, universe=universe)

        # Extract any default parameter settings
        defaultParameters = contents.get(self.defaultKey, {})
        if not isinstance(defaultParameters, Mapping):
            raise RuntimeError(
                "Default formatter parameters in config can not be a single string"
                f" (got: {type(defaultParameters)})"
            )

        # Extract any global write recipes -- these are indexed by
        # Formatter class name.
        writeRecipes = contents.get(self.writeRecipesKey, {})
        if isinstance(writeRecipes, str):
            raise RuntimeError(
                f"The formatters.{self.writeRecipesKey} section must refer to a dict not '{writeRecipes}'"
            )

        for key, f in contents.items():
            # default is handled in a special way
            if key == self.defaultKey:
                continue
            if key == self.writeRecipesKey:
                continue

            # Can be a str or a dict.
            specificWriteParameters = {}
            if isinstance(f, str):
                formatter = f
            elif isinstance(f, Mapping):
                all_keys = set(f)
                unexpected_keys = all_keys - allowed_keys
                if unexpected_keys:
                    raise ValueError(f"Formatter {key} uses unexpected keys {unexpected_keys} in config")
                if "formatter" not in f:
                    raise ValueError(f"Mandatory 'formatter' key missing for formatter key {key}")
                formatter = f["formatter"]
                if "parameters" in f:
                    specificWriteParameters = f["parameters"]
            else:
                raise ValueError(f"Formatter for key {key} has unexpected value: '{f}'")

            # Apply any default parameters for this formatter.  Deep copy so
            # the shared defaults dict is never mutated by the update below.
            writeParameters = copy.deepcopy(defaultParameters.get(formatter, {}))
            writeParameters.update(specificWriteParameters)

            kwargs: Dict[str, Any] = {}
            if writeParameters:
                kwargs["writeParameters"] = writeParameters

            if formatter in writeRecipes:
                kwargs["writeRecipes"] = writeRecipes[formatter]

            self.registerFormatter(key, formatter, **kwargs)

    def getLookupKeys(self) -> Set[LookupKey]:
        """Retrieve the look up keys for all the registry entries.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys available for matching in the registry.
        """
        return self._mappingFactory.getLookupKeys()

    def getFormatterClassWithMatch(self, entity: Entity) -> Tuple[LookupKey, Type[Formatter], Dict[str, Any]]:
        """Get the matching formatter class along with the registry key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `type`
            The class of the registered formatter.
        formatter_kwargs : `dict`
            Keyword arguments that are associated with this formatter entry.
        """
        # A plain string gets a single lookup name; richer entities supply
        # their own prioritized list of candidate names.
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter, formatter_kwargs = self._mappingFactory.getClassFromRegistryWithMatch(names)
        log.debug(
            "Retrieved formatter %s from key '%s' for entity '%s'",
            get_full_type_name(formatter),
            matchKey,
            entity,
        )

        return matchKey, formatter, formatter_kwargs

    def getFormatterClass(self, entity: Entity) -> Type:
        """Get the matching formatter class.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        formatter : `type`
            The class of the registered formatter.
        """
        _, formatter, _ = self.getFormatterClassWithMatch(entity)
        return formatter

    def getFormatterWithMatch(self, entity: Entity, *args: Any, **kwargs: Any) -> Tuple[LookupKey, Formatter]:
        """Get a new formatter instance along with the matching registry key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to pass to the object constructor.
        **kwargs
            Keyword arguments to pass to object constructor.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `Formatter`
            An instance of the registered formatter.
        """
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter = self._mappingFactory.getFromRegistryWithMatch(names, *args, **kwargs)
        log.debug(
            "Retrieved formatter %s from key '%s' for entity '%s'",
            get_full_type_name(formatter),
            matchKey,
            entity,
        )

        return matchKey, formatter

    def getFormatter(self, entity: Entity, *args: Any, **kwargs: Any) -> Formatter:
        """Get a new formatter instance.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to pass to the object constructor.
        **kwargs
            Keyword arguments to pass to object constructor.

        Returns
        -------
        formatter : `Formatter`
            An instance of the registered formatter.
        """
        _, formatter = self.getFormatterWithMatch(entity, *args, **kwargs)
        return formatter

    def registerFormatter(
        self,
        type_: Union[LookupKey, str, StorageClass, DatasetType],
        formatter: str,
        *,
        overwrite: bool = False,
        **kwargs: Any,
    ) -> None:
        """Register a `Formatter`.

        Parameters
        ----------
        type_ : `LookupKey`, `str`, `StorageClass` or `DatasetType`
            Type for which this formatter is to be used. If a `LookupKey`
            is not provided, one will be constructed from the supplied string
            or by using the ``name`` property of the supplied entity.
        formatter : `str` or class of type `Formatter`
            Identifies a `Formatter` subclass to use for reading and writing
            Datasets of this type. Can be a `Formatter` class.
        overwrite : `bool`, optional
            If `True` an existing entry will be replaced by the new value.
            Default is `False`.
        **kwargs
            Keyword arguments to always pass to object constructor when
            retrieved.

        Raises
        ------
        ValueError
            Raised if the formatter does not name a valid formatter type and
            ``overwrite`` is `False`.
        """
        self._mappingFactory.placeInRegistry(type_, formatter, overwrite=overwrite, **kwargs)
# Type to use when allowing a Formatter to be specified either as a class
# name string, a Formatter class, or an existing Formatter instance.
FormatterParameter = Union[str, Type[Formatter], Formatter]