# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ("Formatter", "FormatterFactory", "FormatterParameter")

from abc import ABCMeta, abstractmethod
from collections.abc import Mapping
import contextlib
import logging
import copy
from typing import (
    AbstractSet,
    Any,
    ClassVar,
    Dict,
    Iterator,
    Optional,
    Set,
    Tuple,
    Type,
    TYPE_CHECKING,
    Union,
)

from .configSupport import processLookupConfigs, LookupKey
from .mappingFactory import MappingFactory
from .utils import getFullTypeName
from .fileDescriptor import FileDescriptor
from .location import Location
from .config import Config
from .dimensions import DimensionUniverse
from .storageClass import StorageClass
from .datasets import DatasetType, DatasetRef

log = logging.getLogger(__name__)

# Define a new special type for functions that take "entity"
Entity = Union[DatasetType, DatasetRef, StorageClass, str]


if TYPE_CHECKING:
    from .dimensions import DataCoordinate


class Formatter(metaclass=ABCMeta):
    """Interface for reading and writing Datasets.

    The formatters are associated with a particular `StorageClass`.

    Parameters
    ----------
    fileDescriptor : `FileDescriptor`, optional
        Identifies the file to read or write, and the associated storage
        classes and parameter information. Its value can be `None` if the
        caller will never call `Formatter.read` or `Formatter.write`.
    dataId : `DataCoordinate`
        Data ID associated with this formatter.
    writeParameters : `dict`, optional
        Any parameters to be hard-coded into this instance to control how
        the dataset is serialized.
    writeRecipes : `dict`, optional
        Detailed write Recipes indexed by recipe name.

    Notes
    -----
    All Formatter subclasses should share the base class's constructor
    signature.
    """

    unsupportedParameters: ClassVar[Optional[AbstractSet[str]]] = frozenset()
    """Set of read parameters not understood by this `Formatter`. An empty set
    means all parameters are supported. `None` indicates that no parameters
    are supported (`frozenset`).
    """

    supportedWriteParameters: ClassVar[Optional[AbstractSet[str]]] = None
    """Parameters understood by this formatter that can be used to control
    how a dataset is serialized. `None` indicates that no parameters are
    supported."""

    supportedExtensions: ClassVar[AbstractSet[str]] = frozenset()
    """Set of all extensions supported by this formatter.

    Only expected to be populated by Formatters that write files. Any
    extension assigned to the ``extension`` property will be automatically
    included in the list of supported extensions."""

    def __init__(self, fileDescriptor: FileDescriptor, dataId: DataCoordinate,
                 writeParameters: Optional[Dict[str, Any]] = None,
                 writeRecipes: Optional[Dict[str, Any]] = None):
        if not isinstance(fileDescriptor, FileDescriptor):
            raise TypeError("File descriptor must be a FileDescriptor")
        assert dataId is not None, "dataId is now required for formatter initialization"
        self._fileDescriptor = fileDescriptor
        self._dataId = dataId

        # Check that the write parameters are allowed
        if writeParameters:
            if self.supportedWriteParameters is None:
                raise ValueError("This formatter does not accept any write parameters. "
                                 f"Got: {', '.join(writeParameters)}")
            else:
                given = set(writeParameters)
                unknown = given - self.supportedWriteParameters
                if unknown:
                    s = "s" if len(unknown) != 1 else ""
                    unknownStr = ", ".join(f"'{u}'" for u in unknown)
                    raise ValueError(f"This formatter does not accept parameter{s} {unknownStr}")

        self._writeParameters = writeParameters
        self._writeRecipes = self.validateWriteRecipes(writeRecipes)

    def __str__(self) -> str:
        return f"{self.name()}@{self.fileDescriptor.location.path}"

    def __repr__(self) -> str:
        return f"{self.name()}({self.fileDescriptor!r})"

    @property
    def fileDescriptor(self) -> FileDescriptor:
        """File descriptor associated with this formatter (`FileDescriptor`).

        Read-only property.
        """
        return self._fileDescriptor

    @property
    def dataId(self) -> DataCoordinate:
        """Return Data ID associated with this formatter (`DataCoordinate`)."""
        return self._dataId

    @property
    def writeParameters(self) -> Mapping[str, Any]:
        """Parameters to use when writing out datasets."""
        if self._writeParameters is not None:
            return self._writeParameters
        return {}

    @property
    def writeRecipes(self) -> Mapping[str, Any]:
        """Detailed write Recipes indexed by recipe name."""
        if self._writeRecipes is not None:
            return self._writeRecipes
        return {}

    @classmethod
    def validateWriteRecipes(cls, recipes: Optional[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]:
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate.

        Returns
        -------
        validated : `dict`
            Validated recipes.

        Raises
        ------
        RuntimeError
            Raised if validation fails. The default implementation raises
            if any recipes are given.
        """
        if recipes:
            raise RuntimeError(f"This formatter does not understand these writeRecipes: {recipes}")
        return recipes

    @classmethod
    def name(cls) -> str:
        """Return the fully qualified name of the formatter.

        Returns
        -------
        name : `str`
            Fully-qualified name of formatter class.
        """
        return getFullTypeName(cls)

    @abstractmethod
    def read(self, component: Optional[str] = None) -> Any:
        """Read a Dataset.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested Dataset.
        """
        raise NotImplementedError("Type does not support reading")

    @abstractmethod
    def write(self, inMemoryDataset: Any) -> None:
        """Write a Dataset.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.
        """
        raise NotImplementedError("Type does not support writing")

    @classmethod
    def can_read_bytes(cls) -> bool:
        """Indicate if this formatter can format from bytes.

        Returns
        -------
        can : `bool`
            `True` if the `fromBytes` method is implemented.
        """
        # We have no property to read, so instead try to convert from bytes
        # and see what happens.
        try:
            # We know the arguments are incompatible
            cls.fromBytes(cls, b"")  # type: ignore
        except NotImplementedError:
            return False
        except Exception:
            # There will be problems with the bytes we are supplying so ignore
            pass
        return True

    def fromBytes(self, serializedDataset: bytes,
                  component: Optional[str] = None) -> object:
        """Read serialized data into a Dataset or its component.

        Parameters
        ----------
        serializedDataset : `bytes`
            Bytes object to deserialize.
        component : `str`, optional
            Component to read from the Dataset. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.
        """
        raise NotImplementedError("Type does not support reading from bytes.")

    def toBytes(self, inMemoryDataset: Any) -> bytes:
        """Serialize the Dataset to bytes based on formatter.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to serialize.

        Returns
        -------
        serializedDataset : `bytes`
            Bytes representing the serialized dataset.
        """
        raise NotImplementedError("Type does not support writing to bytes.")

    @contextlib.contextmanager
    def _updateLocation(self, location: Optional[Location]) -> Iterator[Location]:
        """Temporarily replace the location associated with this formatter.

        Parameters
        ----------
        location : `Location`
            New location to use for this formatter. If `None` the
            formatter will not change but it will still return
            the old location. This allows it to be used in a code
            path where the location may not need to be updated
            but the with block is still convenient.

        Yields
        ------
        old : `Location`
            The old location that will be restored.

        Notes
        -----
        This is an internal method that should be used with care.
        It may change in the future. Should be used as a context
        manager to restore the location when the temporary is no
        longer required.
        """
        old = self._fileDescriptor.location
        try:
            if location is not None:
                self._fileDescriptor.location = location
            yield old
        finally:
            if location is not None:
                self._fileDescriptor.location = old
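
    # Illustrative usage sketch (not from the original source); the names
    # ``formatter``, ``tempLocation`` and ``inMemoryDataset`` are hypothetical:
    #
    #     with formatter._updateLocation(tempLocation):
    #         formatter.write(inMemoryDataset)
    #
    # On exit the previous location is restored in the ``finally`` block,
    # even if ``write`` raises.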

    def makeUpdatedLocation(self, location: Location) -> Location:
        """Return a new `Location` updated with this formatter's extension.

        Parameters
        ----------
        location : `Location`
            The location to update.

        Returns
        -------
        updated : `Location`
            A new `Location` with a new file extension applied.

        Raises
        ------
        NotImplementedError
            Raised if there is no ``extension`` attribute associated with
            this formatter.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` the supplied file will
        not be updated. Not all formatters write files so this is not
        defined in the base class.
        """
        location = copy.deepcopy(location)
        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            location.updateExtension(self.extension)  # type: ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None
        return location

    @classmethod
    def validateExtension(cls, location: Location) -> None:
        """Check the extension of the provided location for compatibility.

        Parameters
        ----------
        location : `Location`
            Location from which to extract a file extension.

        Raises
        ------
        NotImplementedError
            Raised if file extensions are a concept not understood by this
            formatter.
        ValueError
            Raised if the formatter does not understand this extension.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` only the set of supported
        extensions will be examined.
        """
        supported = set(cls.supportedExtensions)

        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            default = cls.extension  # type: ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None

        # If extension is implemented as an instance property it won't return
        # a string when called as a class property. Assume that
        # the supported extensions class property is complete.
        if default is not None and isinstance(default, str):
            supported.add(default)

        # Get the file name from the uri
        file = location.uri.basename()

        # Check that this file name ends with one of the supported extensions.
        # This is less prone to confusion than asking the location for
        # its extension and then doing a set comparison
        for ext in supported:
            if file.endswith(ext):
                return

        raise ValueError(f"Extension '{location.getExtension()}' on '{location}' "
                         f"is not supported by Formatter '{cls.__name__}' (supports: {supported})")

    def predictPath(self) -> str:
        """Return the path that would be returned by write.

        Does not write any data file.

        Uses the `FileDescriptor` associated with the instance.

        Returns
        -------
        path : `str`
            Path within datastore that would be associated with the location
            stored in this `Formatter`.
        """
        updated = self.makeUpdatedLocation(self.fileDescriptor.location)
        return updated.pathInStore.path

    def segregateParameters(self, parameters: Optional[Dict[str, Any]] = None) -> Tuple[Dict, Dict]:
        """Segregate the supplied parameters.

        This splits the parameters into those understood by the
        formatter and those not understood by the formatter.

        Any unsupported parameters are assumed to be usable by associated
        assemblers.

        Parameters
        ----------
        parameters : `dict`, optional
            Parameters with values that have been supplied by the caller
            and which might be relevant for the formatter. If `None`
            parameters will be read from the registered `FileDescriptor`.

        Returns
        -------
        supported : `dict`
            Those parameters supported by this formatter.
        unsupported : `dict`
            Those parameters not supported by this formatter.
        """
        if parameters is None:
            parameters = self.fileDescriptor.parameters

        if parameters is None:
            return {}, {}

        if self.unsupportedParameters is None:
            # Support none of the parameters
            return {}, parameters.copy()

        # Start by assuming all are supported
        supported = parameters.copy()
        unsupported = {}

        # And remove any we know are not supported
        for p in set(supported):
            if p in self.unsupportedParameters:
                unsupported[p] = supported.pop(p)

        return supported, unsupported
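

# Illustrative sketch only (not part of the original module): a minimal
# Formatter subclass showing the interface described above. The payload
# format (plain text), the class name, and the ``encoding`` write parameter
# are assumptions made for this example.
class _ExampleTextFormatter(Formatter):
    """Hypothetical formatter that reads and writes plain text files."""

    extension = ".txt"
    supportedWriteParameters = frozenset({"encoding"})

    def read(self, component: Optional[str] = None) -> Any:
        # The FileDescriptor records where to read from.
        with open(self.fileDescriptor.location.path, "r") as fd:
            return fd.read()

    def write(self, inMemoryDataset: Any) -> None:
        # makeUpdatedLocation applies this formatter's ``extension``.
        location = self.makeUpdatedLocation(self.fileDescriptor.location)
        encoding = self.writeParameters.get("encoding", "utf-8")
        with open(location.path, "w", encoding=encoding) as fd:
            fd.write(str(inMemoryDataset))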


class FormatterFactory:
    """Factory for `Formatter` instances."""

    defaultKey = LookupKey("default")
    """Configuration key associated with default write parameter settings."""

    writeRecipesKey = LookupKey("write_recipes")
    """Configuration key associated with write recipes."""

    def __init__(self) -> None:
        self._mappingFactory = MappingFactory(Formatter)

    def __contains__(self, key: Union[LookupKey, str]) -> bool:
        """Indicate whether the supplied key is present in the factory.

        Parameters
        ----------
        key : `LookupKey`, `str` or an object with a ``name`` attribute
            Key to use to look up in the factory whether a corresponding
            formatter is present.

        Returns
        -------
        in : `bool`
            `True` if the supplied key is present in the factory.
        """
        return key in self._mappingFactory

    def registerFormatters(self, config: Config, *, universe: DimensionUniverse) -> None:
        """Bulk register formatters from a config.

        Parameters
        ----------
        config : `Config`
            ``formatters`` section of a configuration.
        universe : `DimensionUniverse`
            Set of all known dimensions, used to expand and validate any used
            in lookup keys.

        Notes
        -----
        The configuration can include one level of hierarchy where an
        instrument-specific section can be defined to override more general
        template specifications. This is represented in YAML using a
        key of form ``instrument<name>`` which can then define templates
        that will be returned if a `DatasetRef` contains a matching instrument
        name in the data ID.

        The config is parsed using the function
        `~lsst.daf.butler.configSupport.processLookupConfigs`.

        The values for formatter entries can be either a simple string
        referring to a python type or a dict representing the formatter and
        parameters to be hard-coded into the formatter constructor. For
        the dict case the following keys are supported:

        - formatter: The python type to be used as the formatter class.
        - parameters: A further dict to be passed directly to the
          ``writeParameters`` Formatter constructor to seed it.
          These parameters are validated at instance creation and not at
          configuration.

        Additionally, a special ``default`` section can be defined that
        uses the formatter type (class) name as the keys and specifies
        default write parameters that should be used whenever an instance
        of that class is constructed.

        .. code-block:: yaml

            formatters:
              default:
                lsst.daf.butler.formatters.example.ExampleFormatter:
                  max: 10
                  min: 2
                  comment: Default comment
              calexp: lsst.daf.butler.formatters.example.ExampleFormatter
              coadd:
                formatter: lsst.daf.butler.formatters.example.ExampleFormatter
                parameters:
                  max: 5

        Any time an ``ExampleFormatter`` is constructed it will use those
        parameters. If an explicit entry later in the configuration specifies
        a different set of parameters, the two will be merged with the later
        entry taking priority. In the example above ``calexp`` will use
        the default parameters but ``coadd`` will override the value for
        ``max``.

        Formatter configuration can also include a special section describing
        collections of write parameters that can be accessed through a
        simple label. This allows common collections of options to be
        specified in one place in the configuration and reused later.
        The ``write_recipes`` section is indexed by Formatter class name
        and each key is the label to associate with the parameters.

        .. code-block:: yaml

            formatters:
              write_recipes:
                lsst.obs.base.formatters.fitsExposure.FitsExposureFormatter:
                  lossless:
                    ...
                  noCompression:
                    ...

        By convention a formatter that uses write recipes will support a
        ``recipe`` write parameter that will refer to a recipe name in
        the ``write_recipes`` component. The `Formatter` will be constructed
        in the `FormatterFactory` with all the relevant recipes and
        will not attempt to filter by looking at ``writeParameters`` in
        advance. See the specific formatter documentation for details on
        acceptable recipe options.
        """
        allowed_keys = {"formatter", "parameters"}

        contents = processLookupConfigs(config, allow_hierarchy=True, universe=universe)

        # Extract any default parameter settings
        defaultParameters = contents.get(self.defaultKey, {})
        if not isinstance(defaultParameters, Mapping):
            raise RuntimeError("Default formatter parameters in config can not be a single string"
                               f" (got: {type(defaultParameters)})")

        # Extract any global write recipes -- these are indexed by
        # Formatter class name.
        writeRecipes = contents.get(self.writeRecipesKey, {})
        if isinstance(writeRecipes, str):
            raise RuntimeError(f"The formatters.{self.writeRecipesKey} section must refer to a dict"
                               f" not '{writeRecipes}'")

        for key, f in contents.items():
            # default is handled in a special way
            if key == self.defaultKey:
                continue
            if key == self.writeRecipesKey:
                continue

            # Can be a str or a dict.
            specificWriteParameters = {}
            if isinstance(f, str):
                formatter = f
            elif isinstance(f, Mapping):
                all_keys = set(f)
                unexpected_keys = all_keys - allowed_keys
                if unexpected_keys:
                    raise ValueError(f"Formatter {key} uses unexpected keys {unexpected_keys} in config")
                if "formatter" not in f:
                    raise ValueError(f"Mandatory 'formatter' key missing for formatter key {key}")
                formatter = f["formatter"]
                if "parameters" in f:
                    specificWriteParameters = f["parameters"]
            else:
                raise ValueError(f"Formatter for key {key} has unexpected value: '{f}'")

            # Apply any default parameters for this formatter
            writeParameters = copy.deepcopy(defaultParameters.get(formatter, {}))
            writeParameters.update(specificWriteParameters)

            kwargs: Dict[str, Any] = {}
            if writeParameters:
                kwargs["writeParameters"] = writeParameters

            if formatter in writeRecipes:
                kwargs["writeRecipes"] = writeRecipes[formatter]

            self.registerFormatter(key, formatter, **kwargs)
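
    # Worked example of the parameter merge above (added for illustration,
    # matching the YAML sample in the docstring): with ExampleFormatter
    # defaults of {"max": 10, "min": 2, "comment": "Default comment"}, the
    # "coadd" entry's {"max": 5} yields
    # writeParameters == {"max": 5, "min": 2, "comment": "Default comment"},
    # while "calexp" gets the defaults unchanged.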

    def getLookupKeys(self) -> Set[LookupKey]:
        """Retrieve the look up keys for all the registry entries.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys available for matching in the registry.
        """
        return self._mappingFactory.getLookupKeys()

    def getFormatterClassWithMatch(self, entity: Entity) -> Tuple[LookupKey, Type[Formatter],
                                                                  Dict[str, Any]]:
        """Get the matching formatter class along with the registry key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if a `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `type`
            The class of the registered formatter.
        formatter_kwargs : `dict`
            Keyword arguments that are associated with this formatter entry.
        """
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter, formatter_kwargs = self._mappingFactory.getClassFromRegistryWithMatch(names)
        log.debug("Retrieved formatter %s from key '%s' for entity '%s'", getFullTypeName(formatter),
                  matchKey, entity)

        return matchKey, formatter, formatter_kwargs

    def getFormatterClass(self, entity: Entity) -> Type:
        """Get the matching formatter class.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if a `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        formatter : `type`
            The class of the registered formatter.
        """
        _, formatter, _ = self.getFormatterClassWithMatch(entity)
        return formatter

    def getFormatterWithMatch(self, entity: Entity, *args: Any, **kwargs: Any) -> Tuple[LookupKey, Formatter]:
        """Get a new formatter instance along with the matching registry key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if a `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to pass to the object constructor.
        kwargs : `dict`
            Keyword arguments to pass to the object constructor.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `Formatter`
            An instance of the registered formatter.
        """
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter = self._mappingFactory.getFromRegistryWithMatch(names, *args, **kwargs)
        log.debug("Retrieved formatter %s from key '%s' for entity '%s'", getFullTypeName(formatter),
                  matchKey, entity)

        return matchKey, formatter

    def getFormatter(self, entity: Entity, *args: Any, **kwargs: Any) -> Formatter:
        """Get a new formatter instance.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if a `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to pass to the object constructor.
        kwargs : `dict`
            Keyword arguments to pass to the object constructor.

        Returns
        -------
        formatter : `Formatter`
            An instance of the registered formatter.
        """
        _, formatter = self.getFormatterWithMatch(entity, *args, **kwargs)
        return formatter

    def registerFormatter(self, type_: Union[LookupKey, str, StorageClass, DatasetType],
                          formatter: str, *, overwrite: bool = False,
                          **kwargs: Any) -> None:
        """Register a `Formatter`.

        Parameters
        ----------
        type_ : `LookupKey`, `str`, `StorageClass` or `DatasetType`
            Type for which this formatter is to be used. If a `LookupKey`
            is not provided, one will be constructed from the supplied string
            or by using the ``name`` property of the supplied entity.
        formatter : `str` or class of type `Formatter`
            Identifies a `Formatter` subclass to use for reading and writing
            Datasets of this type. Can be a `Formatter` class.
        overwrite : `bool`, optional
            If `True` an existing entry will be replaced by the new value.
            Default is `False`.
        kwargs : `dict`
            Keyword arguments to always pass to object constructor when
            retrieved.

        Raises
        ------
        ValueError
            Raised if the formatter does not name a valid formatter type and
            ``overwrite`` is `False`.
        """
        self._mappingFactory.placeInRegistry(type_, formatter, overwrite=overwrite, **kwargs)


# Type to use when allowing a Formatter or its class name
FormatterParameter = Union[str, Type[Formatter], Formatter]
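

# Illustrative usage sketch (added for this write-up, not part of the
# original module). It registers the hypothetical _ExampleTextFormatter
# defined above under an arbitrary example key and looks it up again;
# the guard keeps it from running on import.
if __name__ == "__main__":
    factory = FormatterFactory()
    # registerFormatter accepts a Formatter class as well as a fully
    # qualified class name string.
    factory.registerFormatter("calexp", _ExampleTextFormatter)
    formatterClass = factory.getFormatterClass("calexp")
    print(f"Resolved formatter for 'calexp': {getFullTypeName(formatterClass)}")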