Coverage for python/lsst/daf/butler/core/formatter.py: 26%
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ("Formatter", "FormatterFactory", "FormatterParameter")

from abc import ABCMeta, abstractmethod
from collections.abc import Mapping
import contextlib
import logging
import copy
from typing import (
    AbstractSet,
    Any,
    ClassVar,
    Dict,
    Iterator,
    Optional,
    Set,
    Tuple,
    Type,
    TYPE_CHECKING,
    Union,
)

from lsst.utils.introspection import get_full_type_name

from .configSupport import processLookupConfigs, LookupKey
from .mappingFactory import MappingFactory
from .fileDescriptor import FileDescriptor
from .location import Location
from .config import Config
from .dimensions import DimensionUniverse
from .storageClass import StorageClass
from .datasets import DatasetType, DatasetRef

log = logging.getLogger(__name__)

# Define a new special type for functions that take "entity"
Entity = Union[DatasetType, DatasetRef, StorageClass, str]


if TYPE_CHECKING:
    from .dimensions import DataCoordinate
class Formatter(metaclass=ABCMeta):
    """Interface for reading and writing Datasets.

    The formatters are associated with a particular `StorageClass`.

    Parameters
    ----------
    fileDescriptor : `FileDescriptor`, optional
        Identifies the file to read or write, and the associated storage
        classes and parameter information.  Its value can be `None` if the
        caller will never call `Formatter.read` or `Formatter.write`.
    dataId : `DataCoordinate`
        Data ID associated with this formatter.
    writeParameters : `dict`, optional
        Any parameters to be hard-coded into this instance to control how
        the dataset is serialized.
    writeRecipes : `dict`, optional
        Detailed write Recipes indexed by recipe name.

    Notes
    -----
    All Formatter subclasses should share the base class's constructor
    signature.
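
    Examples
    --------
    A minimal concrete subclass only needs to implement `read` and `write`.
    The following sketch is illustrative only (``JsonFormatter`` here is a
    hypothetical example, not a formatter shipped with this package)::

        import json

        class JsonFormatter(Formatter):
            extension = ".json"

            def read(self, component=None):
                with open(self.fileDescriptor.location.path) as fd:
                    return json.load(fd)

            def write(self, inMemoryDataset):
                with open(self.fileDescriptor.location.path, "w") as fd:
                    json.dump(inMemoryDataset, fd)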
    """

    unsupportedParameters: ClassVar[Optional[AbstractSet[str]]] = frozenset()
    """Set of read parameters not understood by this `Formatter`. An empty set
    means all parameters are supported.  `None` indicates that no parameters
    are supported (`frozenset`).
    """

    supportedWriteParameters: ClassVar[Optional[AbstractSet[str]]] = None
    """Parameters understood by this formatter that can be used to control
    how a dataset is serialized. `None` indicates that no parameters are
    supported."""

    supportedExtensions: ClassVar[AbstractSet[str]] = frozenset()
    """Set of all extensions supported by this formatter.

    Only expected to be populated by Formatters that write files. Any
    extension assigned to the ``extension`` property will be automatically
    included in the list of supported extensions."""

    def __init__(self, fileDescriptor: FileDescriptor, dataId: DataCoordinate,
                 writeParameters: Optional[Dict[str, Any]] = None,
                 writeRecipes: Optional[Dict[str, Any]] = None):
        if not isinstance(fileDescriptor, FileDescriptor):
            raise TypeError("File descriptor must be a FileDescriptor")
        assert dataId is not None, "dataId is now required for formatter initialization"
        self._fileDescriptor = fileDescriptor
        self._dataId = dataId

        # Check that the write parameters are allowed
        if writeParameters:
            if self.supportedWriteParameters is None:
                raise ValueError("This formatter does not accept any write parameters. "
                                 f"Got: {', '.join(writeParameters)}")
            else:
                given = set(writeParameters)
                unknown = given - self.supportedWriteParameters
                if unknown:
                    s = "s" if len(unknown) != 1 else ""
                    unknownStr = ", ".join(f"'{u}'" for u in unknown)
                    raise ValueError(f"This formatter does not accept parameter{s} {unknownStr}")

        self._writeParameters = writeParameters
        self._writeRecipes = self.validateWriteRecipes(writeRecipes)
    def __str__(self) -> str:
        return f"{self.name()}@{self.fileDescriptor.location.path}"

    def __repr__(self) -> str:
        return f"{self.name()}({self.fileDescriptor!r})"

    @property
    def fileDescriptor(self) -> FileDescriptor:
        """File descriptor associated with this formatter (`FileDescriptor`).

        Read-only property.
        """
        return self._fileDescriptor

    @property
    def dataId(self) -> DataCoordinate:
        """Return Data ID associated with this formatter (`DataCoordinate`)."""
        return self._dataId

    @property
    def writeParameters(self) -> Mapping[str, Any]:
        """Parameters to use when writing out datasets."""
        if self._writeParameters is not None:
            return self._writeParameters
        return {}

    @property
    def writeRecipes(self) -> Mapping[str, Any]:
        """Detailed write Recipes indexed by recipe name."""
        if self._writeRecipes is not None:
            return self._writeRecipes
        return {}

    @classmethod
    def validateWriteRecipes(cls, recipes: Optional[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]:
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate.

        Returns
        -------
        validated : `dict`
            Validated recipes.

        Raises
        ------
        RuntimeError
            Raised if validation fails. The default implementation raises
            if any recipes are given.
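
        Notes
        -----
        Subclasses that support write recipes are expected to override this
        method to validate the recipe contents and fill in any defaults.
        A purely illustrative override (the ``compression`` default is a
        hypothetical example) might look like::

            @classmethod
            def validateWriteRecipes(cls, recipes):
                if not recipes:
                    return recipes
                validated = {}
                for name, recipe in recipes.items():
                    # Overlay the supplied values on assumed defaults.
                    validated[name] = {"compression": "none", **recipe}
                return validated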
        """
        if recipes:
            raise RuntimeError(f"This formatter does not understand these writeRecipes: {recipes}")
        return recipes

    @classmethod
    def name(cls) -> str:
        """Return the fully qualified name of the formatter.

        Returns
        -------
        name : `str`
            Fully-qualified name of formatter class.
        """
        return get_full_type_name(cls)

    @abstractmethod
    def read(self, component: Optional[str] = None) -> Any:
        """Read a Dataset.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested Dataset.
        """
        raise NotImplementedError("Type does not support reading")

    @abstractmethod
    def write(self, inMemoryDataset: Any) -> None:
        """Write a Dataset.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.
        """
        raise NotImplementedError("Type does not support writing")

    @classmethod
    def can_read_bytes(cls) -> bool:
        """Indicate if this formatter can read from bytes.

        Returns
        -------
        can : `bool`
            `True` if the `fromBytes` method is implemented.
        """
        # We have no property to read so instead try to read from an empty
        # byte string and see what happens
        try:
            # We know the arguments are incompatible
            cls.fromBytes(cls, b"")  # type: ignore
        except NotImplementedError:
            return False
        except Exception:
            # There will be problems with the bytes we are supplying so ignore
            pass
        return True

    def fromBytes(self, serializedDataset: bytes,
                  component: Optional[str] = None) -> object:
        """Read serialized data into a Dataset or its component.

        Parameters
        ----------
        serializedDataset : `bytes`
            Bytes object to deserialize.
        component : `str`, optional
            Component to read from the Dataset. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.
        """
        raise NotImplementedError("Type does not support reading from bytes.")

    def toBytes(self, inMemoryDataset: Any) -> bytes:
        """Serialize the Dataset to bytes based on formatter.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to serialize.

        Returns
        -------
        serializedDataset : `bytes`
            Bytes representing the serialized dataset.
        """
        raise NotImplementedError("Type does not support writing to bytes.")

    @contextlib.contextmanager
    def _updateLocation(self, location: Optional[Location]) -> Iterator[Location]:
        """Temporarily replace the location associated with this formatter.

        Parameters
        ----------
        location : `Location`
            New location to use for this formatter. If `None` the
            formatter will not change but it will still return
            the old location. This allows it to be used in a code
            path where the location may not need to be updated
            but the with block is still convenient.

        Yields
        ------
        old : `Location`
            The old location that will be restored.

        Notes
        -----
        This is an internal method that should be used with care.
        It may change in the future. Should be used as a context
        manager to restore the location when the temporary is no
        longer required.
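
        A typical (illustrative) use, assuming ``tmpLocation`` is a `Location`
        pointing at a temporary file::

            with formatter._updateLocation(tmpLocation):
                formatter.write(inMemoryDataset)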
        """
        old = self._fileDescriptor.location
        try:
            if location is not None:
                self._fileDescriptor.location = location
            yield old
        finally:
            if location is not None:
                self._fileDescriptor.location = old

    def makeUpdatedLocation(self, location: Location) -> Location:
        """Return a new `Location` updated with this formatter's extension.

        Parameters
        ----------
        location : `Location`
            The location to update.

        Returns
        -------
        updated : `Location`
            A new `Location` with a new file extension applied.

        Raises
        ------
        NotImplementedError
            Raised if there is no ``extension`` attribute associated with
            this formatter.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` the supplied file will
        not be updated. Not all formatters write files so this is not
        defined in the base class.
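
        For example (illustrative only; assume a subclass that sets
        ``extension = ".json"``)::

            location = formatter.fileDescriptor.location
            updated = formatter.makeUpdatedLocation(location)

        Here ``updated`` carries the ``.json`` extension while ``location``
        itself is unchanged, because a deep copy is returned.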
        """
        location = copy.deepcopy(location)
        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            location.updateExtension(self.extension)  # type: ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None
        return location

    @classmethod
    def validateExtension(cls, location: Location) -> None:
        """Check the extension of the provided location for compatibility.

        Parameters
        ----------
        location : `Location`
            Location from which to extract a file extension.

        Raises
        ------
        NotImplementedError
            Raised if file extensions are a concept not understood by this
            formatter.
        ValueError
            Raised if the formatter does not understand this extension.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` only the set of supported
        extensions will be examined.
        """
        supported = set(cls.supportedExtensions)

        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            default = cls.extension  # type: ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None

        # If extension is implemented as an instance property it won't return
        # a string when accessed as a class property. Assume that
        # the supported extensions class property is complete.
        if default is not None and isinstance(default, str):
            supported.add(default)

        # Get the file name from the uri
        file = location.uri.basename()

        # Check that this file name ends with one of the supported extensions.
        # This is less prone to confusion than asking the location for
        # its extension and then doing a set comparison
        for ext in supported:
            if file.endswith(ext):
                return

        raise ValueError(f"Extension '{location.getExtension()}' on '{location}' "
                         f"is not supported by Formatter '{cls.__name__}' (supports: {supported})")

    def predictPath(self) -> str:
        """Return the path that would be returned by write.

        Does not write any data file.

        Uses the `FileDescriptor` associated with the instance.

        Returns
        -------
        path : `str`
            Path within datastore that would be associated with the location
            stored in this `Formatter`.
        """
        updated = self.makeUpdatedLocation(self.fileDescriptor.location)
        return updated.pathInStore.path

    def segregateParameters(self, parameters: Optional[Dict[str, Any]] = None) -> Tuple[Dict, Dict]:
        """Segregate the supplied parameters.

        This splits the parameters into those understood by the
        formatter and those not understood by the formatter.

        Any unsupported parameters are assumed to be usable by associated
        assemblers.

        Parameters
        ----------
        parameters : `dict`, optional
            Parameters with values that have been supplied by the caller
            and which might be relevant for the formatter. If `None`
            parameters will be read from the registered `FileDescriptor`.

        Returns
        -------
        supported : `dict`
            Those parameters supported by this formatter.
        unsupported : `dict`
            Those parameters not supported by this formatter.
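
        Examples
        --------
        Illustrative only; the parameter names are hypothetical::

            supported, unsupported = formatter.segregateParameters(
                {"columns": ["a", "b"], "checksum": True}
            )

        Any name listed in ``unsupportedParameters`` ends up in
        ``unsupported``; everything else is returned in ``supported``.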
        """
        if parameters is None:
            parameters = self.fileDescriptor.parameters

        if parameters is None:
            return {}, {}

        if self.unsupportedParameters is None:
            # Support none of the parameters
            return {}, parameters.copy()

        # Start by assuming all are supported
        supported = parameters.copy()
        unsupported = {}

        # And remove any we know are not supported
        for p in set(supported):
            if p in self.unsupportedParameters:
                unsupported[p] = supported.pop(p)

        return supported, unsupported


class FormatterFactory:
    """Factory for `Formatter` instances."""

    defaultKey = LookupKey("default")
    """Configuration key associated with default write parameter settings."""

    writeRecipesKey = LookupKey("write_recipes")
    """Configuration key associated with write recipes."""

    def __init__(self) -> None:
        self._mappingFactory = MappingFactory(Formatter)

    def __contains__(self, key: Union[LookupKey, str]) -> bool:
        """Indicate whether the supplied key is present in the factory.

        Parameters
        ----------
        key : `LookupKey`, `str` or an object with a ``name`` attribute
            Key to use to look up in the factory whether a corresponding
            formatter is present.

        Returns
        -------
        in : `bool`
            `True` if the supplied key is present in the factory.
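
        Examples
        --------
        Illustrative only, assuming ``factory`` is a populated
        `FormatterFactory`::

            if "calexp" in factory:
                ...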
        """
        return key in self._mappingFactory

    def registerFormatters(self, config: Config, *, universe: DimensionUniverse) -> None:
        """Bulk register formatters from a config.

        Parameters
        ----------
        config : `Config`
            ``formatters`` section of a configuration.
        universe : `DimensionUniverse`
            Set of all known dimensions, used to expand and validate any used
            in lookup keys.

        Notes
        -----
        The configuration can include one level of hierarchy where an
        instrument-specific section can be defined to override more general
        template specifications. This is represented in YAML using a
        key of form ``instrument<name>`` which can then define templates
        that will be returned if a `DatasetRef` contains a matching instrument
        name in the data ID.

        The config is parsed using the function
        `~lsst.daf.butler.configSupport.processLookupConfigs`.

        The values for formatter entries can be either a simple string
        referring to a python type or a dict representing the formatter and
        parameters to be hard-coded into the formatter constructor. For
        the dict case the following keys are supported:

        - formatter: The python type to be used as the formatter class.
        - parameters: A further dict to be passed directly to the
          ``writeParameters`` Formatter constructor to seed it.
          These parameters are validated at instance creation and not at
          configuration.

        Additionally, a special ``default`` section can be defined that
        uses the formatter type (class) name as the keys and specifies
        default write parameters that should be used whenever an instance
        of that class is constructed.

        .. code-block:: yaml

           formatters:
             default:
               lsst.daf.butler.formatters.example.ExampleFormatter:
                 max: 10
                 min: 2
                 comment: Default comment
             calexp: lsst.daf.butler.formatters.example.ExampleFormatter
             coadd:
               formatter: lsst.daf.butler.formatters.example.ExampleFormatter
               parameters:
                 max: 5

        Any time an ``ExampleFormatter`` is constructed it will use those
        parameters. If an explicit entry later in the configuration specifies
        a different set of parameters, the two will be merged with the later
        entry taking priority. In the example above ``calexp`` will use
        the default parameters but ``coadd`` will override the value for
        ``max``.

        Formatter configuration can also include a special section describing
        collections of write parameters that can be accessed through a
        simple label. This allows common collections of options to be
        specified in one place in the configuration and reused later.
        The ``write_recipes`` section is indexed by Formatter class name
        and each key is the label to associate with the parameters.

        .. code-block:: yaml

           formatters:
             write_recipes:
               lsst.obs.base.formatters.fitsExposure.FitsExposureFormatter:
                 lossless:
                   ...
                 noCompression:
                   ...

        By convention a formatter that uses write recipes will support a
        ``recipe`` write parameter that will refer to a recipe name in
        the ``write_recipes`` component. The `Formatter` will be constructed
        in the `FormatterFactory` with all the relevant recipes and
        will not attempt to filter by looking at ``writeParameters`` in
        advance. See the specific formatter documentation for details on
        acceptable recipe options.
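
        A typical (illustrative) call, assuming ``formattersConfig`` is a
        `Config` holding the ``formatters`` section and ``universe`` is a
        `DimensionUniverse`::

            factory = FormatterFactory()
            factory.registerFormatters(formattersConfig, universe=universe)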
        """
        allowed_keys = {"formatter", "parameters"}

        contents = processLookupConfigs(config, allow_hierarchy=True, universe=universe)

        # Extract any default parameter settings
        defaultParameters = contents.get(self.defaultKey, {})
        if not isinstance(defaultParameters, Mapping):
            raise RuntimeError("Default formatter parameters in config can not be a single string"
                               f" (got: {type(defaultParameters)})")

        # Extract any global write recipes -- these are indexed by
        # Formatter class name.
        writeRecipes = contents.get(self.writeRecipesKey, {})
        if isinstance(writeRecipes, str):
            raise RuntimeError(f"The formatters.{self.writeRecipesKey} section must refer to a dict"
                               f" not '{writeRecipes}'")

        for key, f in contents.items():
            # default is handled in a special way
            if key == self.defaultKey:
                continue
            if key == self.writeRecipesKey:
                continue

            # Can be a str or a dict.
            specificWriteParameters = {}
            if isinstance(f, str):
                formatter = f
            elif isinstance(f, Mapping):
                all_keys = set(f)
                unexpected_keys = all_keys - allowed_keys
                if unexpected_keys:
                    raise ValueError(f"Formatter {key} uses unexpected keys {unexpected_keys} in config")
                if "formatter" not in f:
                    raise ValueError(f"Mandatory 'formatter' key missing for formatter key {key}")
                formatter = f["formatter"]
                if "parameters" in f:
                    specificWriteParameters = f["parameters"]
            else:
                raise ValueError(f"Formatter for key {key} has unexpected value: '{f}'")

            # Apply any default parameters for this formatter
            writeParameters = copy.deepcopy(defaultParameters.get(formatter, {}))
            writeParameters.update(specificWriteParameters)

            kwargs: Dict[str, Any] = {}
            if writeParameters:
                kwargs["writeParameters"] = writeParameters

            if formatter in writeRecipes:
                kwargs["writeRecipes"] = writeRecipes[formatter]

            self.registerFormatter(key, formatter, **kwargs)

    def getLookupKeys(self) -> Set[LookupKey]:
        """Retrieve the look up keys for all the registry entries.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys available for matching in the registry.
        """
        return self._mappingFactory.getLookupKeys()

    def getFormatterClassWithMatch(self, entity: Entity) -> Tuple[LookupKey, Type[Formatter],
                                                                  Dict[str, Any]]:
        """Get the matching formatter class along with the registry key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `type`
            The class of the registered formatter.
        formatter_kwargs : `dict`
            Keyword arguments that are associated with this formatter entry.
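
        Examples
        --------
        Illustrative only, assuming ``ref`` is a `DatasetRef` whose dataset
        type is known to this factory::

            key, FormatterClass, kwargs = factory.getFormatterClassWithMatch(ref)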
        """
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter, formatter_kwargs = self._mappingFactory.getClassFromRegistryWithMatch(names)
        log.debug("Retrieved formatter %s from key '%s' for entity '%s'", get_full_type_name(formatter),
                  matchKey, entity)

        return matchKey, formatter, formatter_kwargs

    def getFormatterClass(self, entity: Entity) -> Type:
        """Get the matching formatter class.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        formatter : `type`
            The class of the registered formatter.
        """
        _, formatter, _ = self.getFormatterClassWithMatch(entity)
        return formatter

    def getFormatterWithMatch(self, entity: Entity, *args: Any, **kwargs: Any) -> Tuple[LookupKey, Formatter]:
        """Get a new formatter instance along with the matching registry key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to pass to the object constructor.
        **kwargs
            Keyword arguments to pass to object constructor.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `Formatter`
            An instance of the registered formatter.
        """
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter = self._mappingFactory.getFromRegistryWithMatch(names, *args, **kwargs)
        log.debug("Retrieved formatter %s from key '%s' for entity '%s'", get_full_type_name(formatter),
                  matchKey, entity)

        return matchKey, formatter

    def getFormatter(self, entity: Entity, *args: Any, **kwargs: Any) -> Formatter:
        """Get a new formatter instance.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to pass to the object constructor.
        **kwargs
            Keyword arguments to pass to object constructor.

        Returns
        -------
        formatter : `Formatter`
            An instance of the registered formatter.
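
        Examples
        --------
        Illustrative only, assuming ``ref`` is a `DatasetRef` known to the
        factory and ``fileDescriptor`` and ``dataId`` are suitable arguments
        for the matched formatter's constructor::

            formatter = factory.getFormatter(ref, fileDescriptor, dataId)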
        """
        _, formatter = self.getFormatterWithMatch(entity, *args, **kwargs)
        return formatter

    def registerFormatter(self, type_: Union[LookupKey, str, StorageClass, DatasetType],
                          formatter: str, *, overwrite: bool = False,
                          **kwargs: Any) -> None:
        """Register a `Formatter`.

        Parameters
        ----------
        type_ : `LookupKey`, `str`, `StorageClass` or `DatasetType`
            Type for which this formatter is to be used. If a `LookupKey`
            is not provided, one will be constructed from the supplied string
            or by using the ``name`` property of the supplied entity.
        formatter : `str` or class of type `Formatter`
            Identifies a `Formatter` subclass to use for reading and writing
            Datasets of this type. Can be a `Formatter` class.
        overwrite : `bool`, optional
            If `True` an existing entry will be replaced by the new value.
            Default is `False`.
        **kwargs
            Keyword arguments to always pass to object constructor when
            retrieved.

        Raises
        ------
        ValueError
            Raised if the formatter does not name a valid formatter type and
            ``overwrite`` is `False`.
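
        Examples
        --------
        Illustrative only, reusing the example formatter name from
        `registerFormatters`::

            factory.registerFormatter(
                "calexp", "lsst.daf.butler.formatters.example.ExampleFormatter"
            )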
        """
        self._mappingFactory.placeInRegistry(type_, formatter, overwrite=overwrite, **kwargs)


# Type to use when allowing a Formatter or its class name
FormatterParameter = Union[str, Type[Formatter], Formatter]