Coverage for python/lsst/daf/butler/core/formatter.py: 28%
196 statements
« prev ^ index » next — coverage.py v6.5.0, created at 2023-04-01 02:05 -0700
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22from __future__ import annotations
24__all__ = ("Formatter", "FormatterFactory", "FormatterParameter")
26import contextlib
27import copy
28import logging
29from abc import ABCMeta, abstractmethod
30from collections.abc import Mapping
31from typing import (
32 TYPE_CHECKING,
33 AbstractSet,
34 Any,
35 ClassVar,
36 Dict,
37 Iterator,
38 Optional,
39 Set,
40 Tuple,
41 Type,
42 Union,
43)
45from lsst.utils.introspection import get_full_type_name
47from .config import Config
48from .configSupport import LookupKey, processLookupConfigs
49from .datasets import DatasetRef, DatasetType
50from .dimensions import DimensionUniverse
51from .fileDescriptor import FileDescriptor
52from .location import Location
53from .mappingFactory import MappingFactory
54from .storageClass import StorageClass
56log = logging.getLogger(__name__)
58# Define a new special type for functions that take "entity"
59Entity = Union[DatasetType, DatasetRef, StorageClass, str]
62if TYPE_CHECKING:
63 from .dimensions import DataCoordinate
class Formatter(metaclass=ABCMeta):
    """Interface for reading and writing Datasets.

    The formatters are associated with a particular `StorageClass`.

    Parameters
    ----------
    fileDescriptor : `FileDescriptor`, optional
        Identifies the file to read or write, and the associated storage
        classes and parameter information.  Its value can be `None` if the
        caller will never call `Formatter.read` or `Formatter.write`.
    dataId : `DataCoordinate`
        Data ID associated with this formatter.
    writeParameters : `dict`, optional
        Any parameters to be hard-coded into this instance to control how
        the dataset is serialized.
    writeRecipes : `dict`, optional
        Detailed write Recipes indexed by recipe name.

    Notes
    -----
    All Formatter subclasses should share the base class's constructor
    signature.
    """

    unsupportedParameters: ClassVar[Optional[AbstractSet[str]]] = frozenset()
    """Set of read parameters not understood by this `Formatter`. An empty set
    means all parameters are supported.  `None` indicates that no parameters
    are supported (`frozenset`).
    """

    supportedWriteParameters: ClassVar[Optional[AbstractSet[str]]] = None
    """Parameters understood by this formatter that can be used to control
    how a dataset is serialized. `None` indicates that no parameters are
    supported."""

    supportedExtensions: ClassVar[AbstractSet[str]] = frozenset()
    """Set of all extensions supported by this formatter.

    Only expected to be populated by Formatters that write files. Any
    extension assigned to the ``extension`` property will be automatically
    included in the list of supported extensions."""

    def __init__(
        self,
        fileDescriptor: FileDescriptor,
        dataId: DataCoordinate,
        writeParameters: Optional[Dict[str, Any]] = None,
        writeRecipes: Optional[Dict[str, Any]] = None,
    ):
        if not isinstance(fileDescriptor, FileDescriptor):
            raise TypeError("File descriptor must be a FileDescriptor")
        if dataId is None:
            # Explicit check rather than ``assert`` so that the validation
            # survives running python with optimization (-O) enabled.
            raise TypeError("dataId is now required for formatter initialization")
        self._fileDescriptor = fileDescriptor
        self._dataId = dataId

        # Check that the write parameters are allowed by this formatter.
        if writeParameters:
            if self.supportedWriteParameters is None:
                raise ValueError(
                    f"This formatter does not accept any write parameters. Got: {', '.join(writeParameters)}"
                )
            else:
                given = set(writeParameters)
                unknown = given - self.supportedWriteParameters
                if unknown:
                    s = "s" if len(unknown) != 1 else ""
                    unknownStr = ", ".join(f"'{u}'" for u in unknown)
                    raise ValueError(f"This formatter does not accept parameter{s} {unknownStr}")

        self._writeParameters = writeParameters
        # Subclasses validate (and may supplement) their own recipes.
        self._writeRecipes = self.validateWriteRecipes(writeRecipes)

    def __str__(self) -> str:
        return f"{self.name()}@{self.fileDescriptor.location.path}"

    def __repr__(self) -> str:
        return f"{self.name()}({self.fileDescriptor!r})"

    @property
    def fileDescriptor(self) -> FileDescriptor:
        """File descriptor associated with this formatter (`FileDescriptor`).

        Read-only property.
        """
        return self._fileDescriptor

    @property
    def dataId(self) -> DataCoordinate:
        """Return Data ID associated with this formatter (`DataCoordinate`)."""
        return self._dataId

    @property
    def writeParameters(self) -> Mapping[str, Any]:
        """Parameters to use when writing out datasets."""
        if self._writeParameters is not None:
            return self._writeParameters
        return {}

    @property
    def writeRecipes(self) -> Mapping[str, Any]:
        """Detailed write Recipes indexed by recipe name."""
        if self._writeRecipes is not None:
            return self._writeRecipes
        return {}

    @classmethod
    def validateWriteRecipes(cls, recipes: Optional[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]:
        """Validate supplied recipes for this formatter.

        The recipes are supplemented with default values where appropriate.

        Parameters
        ----------
        recipes : `dict`
            Recipes to validate.

        Returns
        -------
        validated : `dict`
            Validated recipes.

        Raises
        ------
        RuntimeError
            Raised if validation fails.  The default implementation raises
            if any recipes are given.
        """
        if recipes:
            raise RuntimeError(f"This formatter does not understand these writeRecipes: {recipes}")
        return recipes

    @classmethod
    def name(cls) -> str:
        """Return the fully qualified name of the formatter.

        Returns
        -------
        name : `str`
            Fully-qualified name of formatter class.
        """
        return get_full_type_name(cls)

    @abstractmethod
    def read(self, component: Optional[str] = None) -> Any:
        """Read a Dataset.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested Dataset.
        """
        raise NotImplementedError("Type does not support reading")

    @abstractmethod
    def write(self, inMemoryDataset: Any) -> None:
        """Write a Dataset.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.
        """
        raise NotImplementedError("Type does not support writing")

    @classmethod
    def can_read_bytes(cls) -> bool:
        """Indicate if this formatter can format from bytes.

        Returns
        -------
        can : `bool`
            `True` if the `fromBytes` method is implemented.
        """
        # We have no property to read so instead try to format from a byte
        # and see what happens.
        try:
            # We know the arguments are incompatible; we only care whether
            # the subclass has overridden fromBytes at all.
            cls.fromBytes(cls, b"")  # type: ignore
        except NotImplementedError:
            return False
        except Exception:
            # There will be problems with the bytes we are supplying so
            # ignore any other failure: it means fromBytes is implemented.
            pass
        return True

    def fromBytes(self, serializedDataset: bytes, component: Optional[str] = None) -> object:
        """Read serialized data into a Dataset or its component.

        Parameters
        ----------
        serializedDataset : `bytes`
            Bytes object to unserialize.
        component : `str`, optional
            Component to read from the Dataset. Only used if the `StorageClass`
            for reading differed from the `StorageClass` used to write the
            file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.
        """
        raise NotImplementedError("Type does not support reading from bytes.")

    def toBytes(self, inMemoryDataset: Any) -> bytes:
        """Serialize the Dataset to bytes based on formatter.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Python object to serialize.

        Returns
        -------
        serializedDataset : `bytes`
            Bytes representing the serialized dataset.
        """
        raise NotImplementedError("Type does not support writing to bytes.")

    @contextlib.contextmanager
    def _updateLocation(self, location: Optional[Location]) -> Iterator[Location]:
        """Temporarily replace the location associated with this formatter.

        Parameters
        ----------
        location : `Location`
            New location to use for this formatter. If `None` the
            formatter will not change but it will still return
            the old location. This allows it to be used in a code
            path where the location may not need to be updated
            but the with block is still convenient.

        Yields
        ------
        old : `Location`
            The old location that will be restored.

        Notes
        -----
        This is an internal method that should be used with care.
        It may change in the future. Should be used as a context
        manager to restore the location when the temporary is no
        longer required.
        """
        old = self._fileDescriptor.location
        try:
            if location is not None:
                self._fileDescriptor.location = location
            yield old
        finally:
            # Restore the original location even if the caller raised.
            if location is not None:
                self._fileDescriptor.location = old

    def makeUpdatedLocation(self, location: Location) -> Location:
        """Return a new `Location` updated with this formatter's extension.

        Parameters
        ----------
        location : `Location`
            The location to update.

        Returns
        -------
        updated : `Location`
            A new `Location` with a new file extension applied.

        Raises
        ------
        NotImplementedError
            Raised if there is no ``extension`` attribute associated with
            this formatter.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` the supplied file will
        not be updated. Not all formatters write files so this is not
        defined in the base class.
        """
        # Deep copy so the caller's Location is never mutated.
        location = copy.deepcopy(location)
        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            location.updateExtension(self.extension)  # type:ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None
        return location

    @classmethod
    def validateExtension(cls, location: Location) -> None:
        """Check the extension of the provided location for compatibility.

        Parameters
        ----------
        location : `Location`
            Location from which to extract a file extension.

        Raises
        ------
        NotImplementedError
            Raised if file extensions are a concept not understood by this
            formatter.
        ValueError
            Raised if the formatter does not understand this extension.

        Notes
        -----
        This method is available to all Formatters but might not be
        implemented by all formatters. It requires that a formatter set
        an ``extension`` attribute containing the file extension used when
        writing files. If ``extension`` is `None` only the set of supported
        extensions will be examined.
        """
        supported = set(cls.supportedExtensions)

        try:
            # We are deliberately allowing extension to be undefined by
            # default in the base class and mypy complains.
            default = cls.extension  # type: ignore
        except AttributeError:
            raise NotImplementedError("No file extension registered with this formatter") from None

        # If extension is implemented as an instance property it won't return
        # a string when called as a class property. Assume that
        # the supported extensions class property is complete.
        if default is not None and isinstance(default, str):
            supported.add(default)

        # Get the file name from the uri.
        file = location.uri.basename()

        # Check that this file name ends with one of the supported extensions.
        # This is less prone to confusion than asking the location for
        # its extension and then doing a set comparison.
        for ext in supported:
            if file.endswith(ext):
                return

        raise ValueError(
            f"Extension '{location.getExtension()}' on '{location}' "
            f"is not supported by Formatter '{cls.__name__}' (supports: {supported})"
        )

    def predictPath(self) -> str:
        """Return the path that would be returned by write.

        Does not write any data file.

        Uses the `FileDescriptor` associated with the instance.

        Returns
        -------
        path : `str`
            Path within datastore that would be associated with the location
            stored in this `Formatter`.
        """
        updated = self.makeUpdatedLocation(self.fileDescriptor.location)
        return updated.pathInStore.path

    def segregateParameters(self, parameters: Optional[Dict[str, Any]] = None) -> Tuple[Dict, Dict]:
        """Segregate the supplied parameters.

        This splits the parameters into those understood by the
        formatter and those not understood by the formatter.

        Any unsupported parameters are assumed to be usable by associated
        assemblers.

        Parameters
        ----------
        parameters : `dict`, optional
            Parameters with values that have been supplied by the caller
            and which might be relevant for the formatter. If `None`
            parameters will be read from the registered `FileDescriptor`.

        Returns
        -------
        supported : `dict`
            Those parameters supported by this formatter.
        unsupported : `dict`
            Those parameters not supported by this formatter.
        """
        if parameters is None:
            parameters = self.fileDescriptor.parameters

        if parameters is None:
            return {}, {}

        if self.unsupportedParameters is None:
            # Support none of the parameters.
            return {}, parameters.copy()

        # Start by assuming all are supported.
        supported = parameters.copy()
        unsupported = {}

        # And remove any we know are not supported.
        for p in set(supported):
            if p in self.unsupportedParameters:
                unsupported[p] = supported.pop(p)

        return supported, unsupported
class FormatterFactory:
    """Factory for `Formatter` instances."""

    defaultKey = LookupKey("default")
    """Configuration key associated with default write parameter settings."""

    writeRecipesKey = LookupKey("write_recipes")
    """Configuration key associated with write recipes."""

    def __init__(self) -> None:
        # All lookups are delegated to a generic mapping factory constrained
        # to Formatter subclasses.
        self._mappingFactory = MappingFactory(Formatter)

    def __contains__(self, key: Union[LookupKey, str]) -> bool:
        """Indicate whether the supplied key is present in the factory.

        Parameters
        ----------
        key : `LookupKey`, `str` or objects with ``name`` attribute
            Key to use to lookup in the factory whether a corresponding
            formatter is present.

        Returns
        -------
        in : `bool`
            `True` if the supplied key is present in the factory.
        """
        return key in self._mappingFactory

    def registerFormatters(self, config: Config, *, universe: DimensionUniverse) -> None:
        """Bulk register formatters from a config.

        Parameters
        ----------
        config : `Config`
            ``formatters`` section of a configuration.
        universe : `DimensionUniverse`, optional
            Set of all known dimensions, used to expand and validate any used
            in lookup keys.

        Notes
        -----
        The configuration can include one level of hierarchy where an
        instrument-specific section can be defined to override more general
        template specifications. This is represented in YAML using a
        key of form ``instrument<name>`` which can then define templates
        that will be returned if a `DatasetRef` contains a matching instrument
        name in the data ID.

        The config is parsed using the function
        `~lsst.daf.butler.configSubset.processLookupConfigs`.

        The values for formatter entries can be either a simple string
        referring to a python type or a dict representing the formatter and
        parameters to be hard-coded into the formatter constructor. For
        the dict case the following keys are supported:

        - formatter: The python type to be used as the formatter class.
        - parameters: A further dict to be passed directly to the
          ``writeParameters`` Formatter constructor to seed it.
          These parameters are validated at instance creation and not at
          configuration.

        Additionally, a special ``default`` section can be defined that
        uses the formatter type (class) name as the keys and specifies
        default write parameters that should be used whenever an instance
        of that class is constructed.

        .. code-block:: yaml

           formatters:
             default:
               lsst.daf.butler.formatters.example.ExampleFormatter:
                 max: 10
                 min: 2
                 comment: Default comment
             calexp: lsst.daf.butler.formatters.example.ExampleFormatter
             coadd:
               formatter: lsst.daf.butler.formatters.example.ExampleFormatter
               parameters:
                 max: 5

        Any time an ``ExampleFormatter`` is constructed it will use those
        parameters. If an explicit entry later in the configuration specifies
        a different set of parameters, the two will be merged with the later
        entry taking priority. In the example above ``calexp`` will use
        the default parameters but ``coadd`` will override the value for
        ``max``.

        Formatter configuration can also include a special section describing
        collections of write parameters that can be accessed through a
        simple label. This allows common collections of options to be
        specified in one place in the configuration and reused later.
        The ``write_recipes`` section is indexed by Formatter class name
        and each key is the label to associate with the parameters.

        .. code-block:: yaml

           formatters:
             write_recipes:
               lsst.obs.base.formatters.fitsExposure.FixExposureFormatter:
                 lossless:
                   ...
                 noCompression:
                   ...

        By convention a formatter that uses write recipes will support a
        ``recipe`` write parameter that will refer to a recipe name in
        the ``write_recipes`` component. The `Formatter` will be constructed
        in the `FormatterFactory` with all the relevant recipes and
        will not attempt to filter by looking at ``writeParameters`` in
        advance. See the specific formatter documentation for details on
        acceptable recipe options.
        """
        allowed_keys = {"formatter", "parameters"}

        contents = processLookupConfigs(config, allow_hierarchy=True, universe=universe)

        # Extract any default parameter settings.
        defaultParameters = contents.get(self.defaultKey, {})
        if not isinstance(defaultParameters, Mapping):
            # Message matches the actual check: any non-mapping value
            # (not only a single string) is rejected.
            raise RuntimeError(
                "Default formatter parameters in config must be a mapping"
                f" (got: {type(defaultParameters)})"
            )

        # Extract any global write recipes -- these are indexed by
        # Formatter class name.
        writeRecipes = contents.get(self.writeRecipesKey, {})
        if isinstance(writeRecipes, str):
            raise RuntimeError(
                f"The formatters.{self.writeRecipesKey} section must refer to a dict not '{writeRecipes}'"
            )

        for key, f in contents.items():
            # The default and write_recipes sections are handled in a
            # special way above, not as formatter entries.
            if key == self.defaultKey:
                continue
            if key == self.writeRecipesKey:
                continue

            # Can be a str or a dict.
            specificWriteParameters = {}
            if isinstance(f, str):
                formatter = f
            elif isinstance(f, Mapping):
                all_keys = set(f)
                unexpected_keys = all_keys - allowed_keys
                if unexpected_keys:
                    raise ValueError(f"Formatter {key} uses unexpected keys {unexpected_keys} in config")
                if "formatter" not in f:
                    raise ValueError(f"Mandatory 'formatter' key missing for formatter key {key}")
                formatter = f["formatter"]
                if "parameters" in f:
                    specificWriteParameters = f["parameters"]
            else:
                raise ValueError(f"Formatter for key {key} has unexpected value: '{f}'")

            # Apply any default parameters for this formatter; entry-specific
            # parameters take priority over the defaults. Deep copy so the
            # shared defaults are never mutated by the update.
            writeParameters = copy.deepcopy(defaultParameters.get(formatter, {}))
            writeParameters.update(specificWriteParameters)

            kwargs: Dict[str, Any] = {}
            if writeParameters:
                kwargs["writeParameters"] = writeParameters

            if formatter in writeRecipes:
                kwargs["writeRecipes"] = writeRecipes[formatter]

            self.registerFormatter(key, formatter, **kwargs)

    def getLookupKeys(self) -> Set[LookupKey]:
        """Retrieve the look up keys for all the registry entries.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys available for matching in the registry.
        """
        return self._mappingFactory.getLookupKeys()

    def getFormatterClassWithMatch(self, entity: Entity) -> Tuple[LookupKey, Type[Formatter], Dict[str, Any]]:
        """Get the matching formatter class along with the registry key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `type`
            The class of the registered formatter.
        formatter_kwargs : `dict`
            Keyword arguments that are associated with this formatter entry.
        """
        # Plain strings have no _lookupNames; wrap them in a LookupKey.
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter, formatter_kwargs = self._mappingFactory.getClassFromRegistryWithMatch(names)
        log.debug(
            "Retrieved formatter %s from key '%s' for entity '%s'",
            get_full_type_name(formatter),
            matchKey,
            entity,
        )

        return matchKey, formatter, formatter_kwargs

    def getFormatterClass(self, entity: Entity) -> Type:
        """Get the matching formatter class.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.

        Returns
        -------
        formatter : `type`
            The class of the registered formatter.
        """
        _, formatter, _ = self.getFormatterClassWithMatch(entity)
        return formatter

    def getFormatterWithMatch(self, entity: Entity, *args: Any, **kwargs: Any) -> Tuple[LookupKey, Formatter]:
        """Get a new formatter instance along with the matching registry key.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to use pass to the object constructor.
        **kwargs
            Keyword arguments to pass to object constructor.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        formatter : `Formatter`
            An instance of the registered formatter.
        """
        names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
        matchKey, formatter = self._mappingFactory.getFromRegistryWithMatch(names, *args, **kwargs)
        log.debug(
            "Retrieved formatter %s from key '%s' for entity '%s'",
            get_full_type_name(formatter),
            matchKey,
            entity,
        )

        return matchKey, formatter

    def getFormatter(self, entity: Entity, *args: Any, **kwargs: Any) -> Formatter:
        """Get a new formatter instance.

        Parameters
        ----------
        entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
            Entity to use to determine the formatter to return.
            `StorageClass` will be used as a last resort if `DatasetRef`
            or `DatasetType` instance is provided. Supports instrument
            override if a `DatasetRef` is provided configured with an
            ``instrument`` value for the data ID.
        args : `tuple`
            Positional arguments to use pass to the object constructor.
        **kwargs
            Keyword arguments to pass to object constructor.

        Returns
        -------
        formatter : `Formatter`
            An instance of the registered formatter.
        """
        _, formatter = self.getFormatterWithMatch(entity, *args, **kwargs)
        return formatter

    def registerFormatter(
        self,
        type_: Union[LookupKey, str, StorageClass, DatasetType],
        formatter: str,
        *,
        overwrite: bool = False,
        **kwargs: Any,
    ) -> None:
        """Register a `Formatter`.

        Parameters
        ----------
        type_ : `LookupKey`, `str`, `StorageClass` or `DatasetType`
            Type for which this formatter is to be used. If a `LookupKey`
            is not provided, one will be constructed from the supplied string
            or by using the ``name`` property of the supplied entity.
        formatter : `str` or class of type `Formatter`
            Identifies a `Formatter` subclass to use for reading and writing
            Datasets of this type. Can be a `Formatter` class.
        overwrite : `bool`, optional
            If `True` an existing entry will be replaced by the new value.
            Default is `False`.
        **kwargs
            Keyword arguments to always pass to object constructor when
            retrieved.

        Raises
        ------
        ValueError
            Raised if the formatter does not name a valid formatter type and
            ``overwrite`` is `False`.
        """
        self._mappingFactory.placeInRegistry(type_, formatter, overwrite=overwrite, **kwargs)
# Type to use when allowing a Formatter or its class name (as a string).
# Accepted anywhere a caller may specify a formatter by fully-qualified
# name, by class, or by an already-constructed instance.
FormatterParameter = Union[str, Type[Formatter], Formatter]