Coverage for python/lsst/daf/butler/_formatter.py: 33%
197 statements
coverage.py v7.3.2, created at 2023-10-12 09:44 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28from __future__ import annotations
30__all__ = ("Formatter", "FormatterFactory", "FormatterParameter")
32import contextlib
33import copy
34import logging
35from abc import ABCMeta, abstractmethod
36from collections.abc import Iterator, Mapping, Set
37from typing import TYPE_CHECKING, Any, ClassVar
39from lsst.utils.introspection import get_full_type_name
41from ._config import Config
42from ._config_support import LookupKey, processLookupConfigs
43from ._dataset_ref import DatasetRef
44from ._dataset_type import DatasetType
45from ._file_descriptor import FileDescriptor
46from ._location import Location
47from ._storage_class import StorageClass
48from .dimensions import DimensionUniverse
49from .mapping_factory import MappingFactory
51log = logging.getLogger(__name__)
53# Type alias for the "entity" argument accepted by formatter lookup methods
54Entity = DatasetType | DatasetRef | StorageClass | str
57if TYPE_CHECKING:
58 from .dimensions import DataCoordinate
61class Formatter(metaclass=ABCMeta):
62 """Interface for reading and writing Datasets.
64 The formatters are associated with a particular `StorageClass`.
66 Parameters
67 ----------
68 fileDescriptor : `FileDescriptor`, optional
69 Identifies the file to read or write, and the associated storage
70 classes and parameter information. Its value can be `None` if the
71 caller will never call `Formatter.read` or `Formatter.write`.
72 dataId : `DataCoordinate`
73 Data ID associated with this formatter.
74 writeParameters : `dict`, optional
75 Any parameters to be hard-coded into this instance to control how
76 the dataset is serialized.
77 writeRecipes : `dict`, optional
78 Detailed write Recipes indexed by recipe name.
80 Notes
81 -----
82 All Formatter subclasses should share the base class's constructor
83 signature.
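Examples
--------
For illustration, a minimal subclass might look like the sketch below.
``ExampleTextFormatter`` is a hypothetical name used only for this example
and is not part of this package:

.. code-block:: python

    class ExampleTextFormatter(Formatter):
        extension = ".txt"
        supportedWriteParameters = frozenset({"encoding"})

        def read(self, component=None):
            # Read the whole file identified by the file descriptor.
            with open(self.fileDescriptor.location.path, "r") as fd:
                return fd.read()

        def write(self, inMemoryDataset):
            # Ensure the location carries this formatter's extension.
            location = self.makeUpdatedLocation(self.fileDescriptor.location)
            encoding = self.writeParameters.get("encoding", "utf-8")
            with open(location.path, "w", encoding=encoding) as fd:
                fd.write(str(inMemoryDataset))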
84 """
86 unsupportedParameters: ClassVar[Set[str] | None] = frozenset()
87 """Set of read parameters not understood by this `Formatter`. An empty set
88 means all parameters are supported. `None` indicates that no parameters
89 are supported (`frozenset` or `None`).
90 """
92 supportedWriteParameters: ClassVar[Set[str] | None] = None
93 """Parameters understood by this formatter that can be used to control
94 how a dataset is serialized. `None` indicates that no parameters are
95 supported."""
97 supportedExtensions: ClassVar[Set[str]] = frozenset()
98 """Set of all extensions supported by this formatter.
100 Only expected to be populated by Formatters that write files. Any extension
101 assigned to the ``extension`` property will be automatically included in
102 the list of supported extensions."""
104 def __init__(
105 self,
106 fileDescriptor: FileDescriptor,
107 dataId: DataCoordinate,
108 writeParameters: dict[str, Any] | None = None,
109 writeRecipes: dict[str, Any] | None = None,
110 ):
111 if not isinstance(fileDescriptor, FileDescriptor):
112 raise TypeError("File descriptor must be a FileDescriptor")
113 assert dataId is not None, "dataId is now required for formatter initialization"
114 self._fileDescriptor = fileDescriptor
115 self._dataId = dataId
117 # Check that the write parameters are allowed
118 if writeParameters:
119 if self.supportedWriteParameters is None:
120 raise ValueError(
121 f"This formatter does not accept any write parameters. Got: {', '.join(writeParameters)}"
122 )
123 else:
124 given = set(writeParameters)
125 unknown = given - self.supportedWriteParameters
126 if unknown:
127 s = "s" if len(unknown) != 1 else ""
128 unknownStr = ", ".join(f"'{u}'" for u in unknown)
129 raise ValueError(f"This formatter does not accept parameter{s} {unknownStr}")
131 self._writeParameters = writeParameters
132 self._writeRecipes = self.validateWriteRecipes(writeRecipes)
134 def __str__(self) -> str:
135 return f"{self.name()}@{self.fileDescriptor.location.path}"
137 def __repr__(self) -> str:
138 return f"{self.name()}({self.fileDescriptor!r})"
140 @property
141 def fileDescriptor(self) -> FileDescriptor:
142 """File descriptor associated with this formatter (`FileDescriptor`).
144 Read-only property.
145 """
146 return self._fileDescriptor
148 @property
149 def dataId(self) -> DataCoordinate:
150 """Return Data ID associated with this formatter (`DataCoordinate`)."""
151 return self._dataId
153 @property
154 def writeParameters(self) -> Mapping[str, Any]:
155 """Parameters to use when writing out datasets."""
156 if self._writeParameters is not None:
157 return self._writeParameters
158 return {}
160 @property
161 def writeRecipes(self) -> Mapping[str, Any]:
162 """Detailed write Recipes indexed by recipe name."""
163 if self._writeRecipes is not None:
164 return self._writeRecipes
165 return {}
167 @classmethod
168 def validateWriteRecipes(cls, recipes: Mapping[str, Any] | None) -> Mapping[str, Any] | None:
169 """Validate supplied recipes for this formatter.
171 The recipes are supplemented with default values where appropriate.
173 Parameters
174 ----------
175 recipes : `dict`
176 Recipes to validate.
178 Returns
179 -------
180 validated : `dict`
181 Validated recipes.
183 Raises
184 ------
185 RuntimeError
186 Raised if validation fails. The default implementation raises
187 if any recipes are given.
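Examples
--------
A sketch of how a subclass that understands recipes might override this
hook (hypothetical code; the base class itself rejects all recipes):

.. code-block:: python

    @classmethod
    def validateWriteRecipes(cls, recipes):
        if not recipes:
            return recipes
        validated = {}
        for name, recipe in recipes.items():
            # Supplement each recipe with formatter defaults.
            validated[name] = {"compression": "none", **recipe}
        return validated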
188 """
189 if recipes:
190 raise RuntimeError(f"This formatter does not understand these writeRecipes: {recipes}")
191 return recipes
193 @classmethod
194 def name(cls) -> str:
195 """Return the fully qualified name of the formatter.
197 Returns
198 -------
199 name : `str`
200 Fully-qualified name of formatter class.
201 """
202 return get_full_type_name(cls)
204 @abstractmethod
205 def read(self, component: str | None = None) -> Any:
206 """Read a Dataset.
208 Parameters
209 ----------
210 component : `str`, optional
211 Component to read from the file. Only used if the `StorageClass`
212 for reading differed from the `StorageClass` used to write the
213 file.
215 Returns
216 -------
217 inMemoryDataset : `object`
218 The requested Dataset.
219 """
220 raise NotImplementedError("Type does not support reading")
222 @abstractmethod
223 def write(self, inMemoryDataset: Any) -> None:
224 """Write a Dataset.
226 Parameters
227 ----------
228 inMemoryDataset : `object`
229 The Dataset to store.
230 """
231 raise NotImplementedError("Type does not support writing")
233 @classmethod
234 def can_read_bytes(cls) -> bool:
235 """Indicate if this formatter can format from bytes.
237 Returns
238 -------
239 can : `bool`
240 `True` if the `fromBytes` method is implemented.
241 """
242 # There is no property to check, so instead try to read from an empty
243 # byte string and see what happens.
244 try:
245 # We know the arguments are incompatible
246 cls.fromBytes(cls, b"") # type: ignore
247 except NotImplementedError:
248 return False
249 except Exception:
250 # There will be problems with the bytes we are supplying so ignore
251 pass
252 return True
254 def fromBytes(self, serializedDataset: bytes, component: str | None = None) -> object:
255 """Read serialized data into a Dataset or its component.
257 Parameters
258 ----------
259 serializedDataset : `bytes`
260 Bytes object to deserialize.
261 component : `str`, optional
262 Component to read from the Dataset. Only used if the `StorageClass`
263 for reading differed from the `StorageClass` used to write the
264 file.
266 Returns
267 -------
268 inMemoryDataset : `object`
269 The requested data as a Python object. The type of object
270 is controlled by the specific formatter.
271 """
272 raise NotImplementedError("Type does not support reading from bytes.")
274 def toBytes(self, inMemoryDataset: Any) -> bytes:
275 """Serialize the Dataset to bytes based on formatter.
277 Parameters
278 ----------
279 inMemoryDataset : `object`
280 The Python object to serialize.
282 Returns
283 -------
284 serializedDataset : `bytes`
285 Bytes representing the serialized dataset.
286 """
287 raise NotImplementedError("Type does not support writing to bytes.")
289 @contextlib.contextmanager
290 def _updateLocation(self, location: Location | None) -> Iterator[Location]:
291 """Temporarily replace the location associated with this formatter.
293 Parameters
294 ----------
295 location : `Location`
296 New location to use for this formatter. If `None` the
297 location is left unchanged, but the old location is still
298 returned. This allows the method to be used in a code
299 path where the location may not need to be updated
300 but the ``with`` block is still convenient.
302 Yields
303 ------
304 old : `Location`
305 The old location that will be restored.
307 Notes
308 -----
309 This is an internal method that should be used with care.
310 It may change in the future. Should be used as a context
311 manager to restore the location when the temporary is no
312 longer required.
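A usage sketch (``formatter`` is assumed to be a concrete subclass
instance, ``temporary`` an existing `Location`, and ``in_memory_dataset``
the object being written):

.. code-block:: python

    with formatter._updateLocation(temporary) as original:
        # Writes inside the block go to ``temporary``.
        formatter.write(in_memory_dataset)
    # On exit the location reverts to ``original``.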
313 """
314 old = self._fileDescriptor.location
315 try:
316 if location is not None:
317 self._fileDescriptor.location = location
318 yield old
319 finally:
320 if location is not None:
321 self._fileDescriptor.location = old
323 def makeUpdatedLocation(self, location: Location) -> Location:
324 """Return a new `Location` updated with this formatter's extension.
326 Parameters
327 ----------
328 location : `Location`
329 The location to update.
331 Returns
332 -------
333 updated : `Location`
334 A new `Location` with a new file extension applied.
336 Raises
337 ------
338 NotImplementedError
339 Raised if there is no ``extension`` attribute associated with
340 this formatter.
342 Notes
343 -----
344 This method is available to all Formatters but might not be
345 implemented by all formatters. It requires that a formatter set
346 an ``extension`` attribute containing the file extension used when
347 writing files. If ``extension`` is `None` the supplied file will
348 not be updated. Not all formatters write files so this is not
349 defined in the base class.
350 """
351 location = copy.deepcopy(location)
352 try:
353 # We are deliberately allowing extension to be undefined by
354 # default in the base class and mypy complains.
355 location.updateExtension(self.extension) # type:ignore
356 except AttributeError:
357 raise NotImplementedError("No file extension registered with this formatter") from None
358 return location
360 @classmethod
361 def validateExtension(cls, location: Location) -> None:
362 """Check the extension of the provided location for compatibility.
364 Parameters
365 ----------
366 location : `Location`
367 Location from which to extract a file extension.
369 Raises
370 ------
371 NotImplementedError
372 Raised if file extensions are a concept not understood by this
373 formatter.
374 ValueError
375 Raised if the formatter does not understand this extension.
377 Notes
378 -----
379 This method is available to all Formatters but might not be
380 implemented by all formatters. It requires that a formatter set
381 an ``extension`` attribute containing the file extension used when
382 writing files. If ``extension`` is `None` only the set of supported
383 extensions will be examined.
384 """
385 supported = set(cls.supportedExtensions)
387 try:
388 # We are deliberately allowing extension to be undefined by
389 # default in the base class and mypy complains.
390 default = cls.extension # type: ignore
391 except AttributeError:
392 raise NotImplementedError("No file extension registered with this formatter") from None
394 # If extension is implemented as an instance property it won't return
395 # a string when accessed on the class. In that case assume that
396 # the supportedExtensions class attribute is complete.
397 if default is not None and isinstance(default, str):
398 supported.add(default)
400 # Get the file name from the uri
401 file = location.uri.basename()
403 # Check that this file name ends with one of the supported extensions.
404 # This is less prone to confusion than asking the location for
405 # its extension and then doing a set comparison
406 for ext in supported:
407 if file.endswith(ext):
408 return
410 raise ValueError(
411 f"Extension '{location.getExtension()}' on '{location}' "
412 f"is not supported by Formatter '{cls.__name__}' (supports: {supported})"
413 )
415 def predictPath(self) -> str:
416 """Return the path that would be returned by write.
418 Does not write any data file.
420 Uses the `FileDescriptor` associated with the instance.
422 Returns
423 -------
424 path : `str`
425 Path within datastore that would be associated with the location
426 stored in this `Formatter`.
427 """
428 updated = self.makeUpdatedLocation(self.fileDescriptor.location)
429 return updated.pathInStore.path
431 def segregateParameters(self, parameters: dict[str, Any] | None = None) -> tuple[dict, dict]:
432 """Segregate the supplied parameters.
434 This splits the parameters into those understood by the
435 formatter and those not understood by the formatter.
437 Any unsupported parameters are assumed to be usable by associated
438 assemblers.
440 Parameters
441 ----------
442 parameters : `dict`, optional
443 Parameters with values that have been supplied by the caller
444 and which might be relevant for the formatter. If `None`
445 parameters will be read from the registered `FileDescriptor`.
447 Returns
448 -------
449 supported : `dict`
450 Those parameters supported by this formatter.
451 unsupported : `dict`
452 Those parameters not supported by this formatter.
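Examples
--------
A sketch of the split for a hypothetical formatter declaring
``unsupportedParameters = frozenset({"slice"})``:

.. code-block:: python

    supported, unsupported = formatter.segregateParameters(
        {"columns": ["a", "b"], "slice": slice(0, 10)}
    )
    # supported   -> {"columns": ["a", "b"]}
    # unsupported -> {"slice": slice(0, 10)}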
453 """
454 if parameters is None:
455 parameters = self.fileDescriptor.parameters
457 if parameters is None:
458 return {}, {}
460 if self.unsupportedParameters is None:
461 # Support none of the parameters
462 return {}, parameters.copy()
464 # Start by assuming all are supported
465 supported = parameters.copy()
466 unsupported = {}
468 # And remove any we know are not supported
469 for p in set(supported):
470 if p in self.unsupportedParameters:
471 unsupported[p] = supported.pop(p)
473 return supported, unsupported
476class FormatterFactory:
477 """Factory for `Formatter` instances."""
479 defaultKey = LookupKey("default")
480 """Configuration key associated with default write parameter settings."""
482 writeRecipesKey = LookupKey("write_recipes")
483 """Configuration key associated with write recipes."""
485 def __init__(self) -> None:
486 self._mappingFactory = MappingFactory(Formatter)
488 def __contains__(self, key: LookupKey | str) -> bool:
489 """Indicate whether the supplied key is present in the factory.
491 Parameters
492 ----------
493 key : `LookupKey`, `str` or objects with ``name`` attribute
494 Key to use to lookup in the factory whether a corresponding
495 formatter is present.
497 Returns
498 -------
499 in : `bool`
500 `True` if the supplied key is present in the factory.
501 """
502 return key in self._mappingFactory
504 def registerFormatters(self, config: Config, *, universe: DimensionUniverse) -> None:
505 """Bulk register formatters from a config.
507 Parameters
508 ----------
509 config : `Config`
510 ``formatters`` section of a configuration.
511 universe : `DimensionUniverse`
512 Set of all known dimensions, used to expand and validate any used
513 in lookup keys.
515 Notes
516 -----
517 The configuration can include one level of hierarchy where an
518 instrument-specific section can be defined to override more general
519 formatter specifications. This is represented in YAML using a
520 key of the form ``instrument<name>`` which can then define formatters
521 that will be returned if a `DatasetRef` contains a matching instrument
522 name in the data ID.
524 The config is parsed using the function
525 `~lsst.daf.butler._config_support.processLookupConfigs`.
527 The values for formatter entries can be either a simple string
528 referring to a python type or a dict representing the formatter and
529 parameters to be hard-coded into the formatter constructor. For
530 the dict case the following keys are supported:
532 - formatter: The python type to be used as the formatter class.
533 - parameters: A further dict passed directly to the formatter
534 constructor as ``writeParameters`` to seed it.
535 These parameters are validated at instance creation and not at
536 configuration.
538 Additionally, a special ``default`` section can be defined that
539 uses the formatter type (class) name as the keys and specifies
540 default write parameters that should be used whenever an instance
541 of that class is constructed.
543 .. code-block:: yaml
545 formatters:
546 default:
547 lsst.daf.butler.formatters.example.ExampleFormatter:
548 max: 10
549 min: 2
550 comment: Default comment
551 calexp: lsst.daf.butler.formatters.example.ExampleFormatter
552 coadd:
553 formatter: lsst.daf.butler.formatters.example.ExampleFormatter
554 parameters:
555 max: 5
557 Any time an ``ExampleFormatter`` is constructed it will use those
558 parameters. If an explicit entry later in the configuration specifies
559 a different set of parameters, the two will be merged with the later
560 entry taking priority. In the example above ``calexp`` will use
561 the default parameters but ``coadd`` will override the value for
562 ``max``.
564 Formatter configuration can also include a special section describing
565 collections of write parameters that can be accessed through a
566 simple label. This allows common collections of options to be
567 specified in one place in the configuration and reused later.
568 The ``write_recipes`` section is indexed by Formatter class name
569 and each key is the label to associate with the parameters.
571 .. code-block:: yaml
573 formatters:
574 write_recipes:
575 lsst.obs.base.formatters.fitsExposure.FitsExposureFormatter:
576 lossless:
577 ...
578 noCompression:
579 ...
581 By convention a formatter that uses write recipes will support a
582 ``recipe`` write parameter that will refer to a recipe name in
583 the ``write_recipes`` component. The `Formatter` will be constructed
584 in the `FormatterFactory` with all the relevant recipes and
585 will not attempt to filter by looking at ``writeParameters`` in
586 advance. See the specific formatter documentation for details on
587 acceptable recipe options.
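As a quick illustration, bulk registration from an in-memory configuration
might look like the sketch below (it assumes a default `DimensionUniverse`
and reuses the ``ExampleFormatter`` placeholder from the YAML above):

.. code-block:: python

    from lsst.daf.butler import Config, DimensionUniverse

    factory = FormatterFactory()
    config = Config(
        {"calexp": "lsst.daf.butler.formatters.example.ExampleFormatter"}
    )
    factory.registerFormatters(config, universe=DimensionUniverse())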
588 """
589 allowed_keys = {"formatter", "parameters"}
591 contents = processLookupConfigs(config, allow_hierarchy=True, universe=universe)
593 # Extract any default parameter settings
594 defaultParameters = contents.get(self.defaultKey, {})
595 if not isinstance(defaultParameters, Mapping):
596 raise RuntimeError(
597 "Default formatter parameters in config can not be a single string"
598 f" (got: {type(defaultParameters)})"
599 )
601 # Extract any global write recipes -- these are indexed by
602 # Formatter class name.
603 writeRecipes = contents.get(self.writeRecipesKey, {})
604 if isinstance(writeRecipes, str):
605 raise RuntimeError(
606 f"The formatters.{self.writeRecipesKey} section must refer to a dict not '{writeRecipes}'"
607 )
609 for key, f in contents.items():
610 # default is handled in a special way
611 if key == self.defaultKey:
612 continue
613 if key == self.writeRecipesKey:
614 continue
616 # Can be a str or a dict.
617 specificWriteParameters = {}
618 if isinstance(f, str):
619 formatter = f
620 elif isinstance(f, Mapping):
621 all_keys = set(f)
622 unexpected_keys = all_keys - allowed_keys
623 if unexpected_keys:
624 raise ValueError(f"Formatter {key} uses unexpected keys {unexpected_keys} in config")
625 if "formatter" not in f:
626 raise ValueError(f"Mandatory 'formatter' key missing for formatter key {key}")
627 formatter = f["formatter"]
628 if "parameters" in f:
629 specificWriteParameters = f["parameters"]
630 else:
631 raise ValueError(f"Formatter for key {key} has unexpected value: '{f}'")
633 # Apply any default parameters for this formatter
634 writeParameters = copy.deepcopy(defaultParameters.get(formatter, {}))
635 writeParameters.update(specificWriteParameters)
637 kwargs: dict[str, Any] = {}
638 if writeParameters:
639 kwargs["writeParameters"] = writeParameters
641 if formatter in writeRecipes:
642 kwargs["writeRecipes"] = writeRecipes[formatter]
644 self.registerFormatter(key, formatter, **kwargs)
646 def getLookupKeys(self) -> set[LookupKey]:
647 """Retrieve the look up keys for all the registry entries.
649 Returns
650 -------
651 keys : `set` of `LookupKey`
652 The keys available for matching in the registry.
653 """
654 return self._mappingFactory.getLookupKeys()
656 def getFormatterClassWithMatch(self, entity: Entity) -> tuple[LookupKey, type[Formatter], dict[str, Any]]:
657 """Get the matching formatter class along with the registry key.
659 Parameters
660 ----------
661 entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
662 Entity to use to determine the formatter to return.
663 `StorageClass` will be used as a last resort if a `DatasetRef`
664 or `DatasetType` instance is provided. Instrument overrides are
665 supported if a `DatasetRef` is provided whose data ID includes
666 an ``instrument`` value.
668 Returns
669 -------
670 matchKey : `LookupKey`
671 The key that resulted in the successful match.
672 formatter : `type`
673 The class of the registered formatter.
674 formatter_kwargs : `dict`
675 Keyword arguments that are associated with this formatter entry.
676 """
677 names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
678 matchKey, formatter, formatter_kwargs = self._mappingFactory.getClassFromRegistryWithMatch(names)
679 log.debug(
680 "Retrieved formatter %s from key '%s' for entity '%s'",
681 get_full_type_name(formatter),
682 matchKey,
683 entity,
684 )
686 return matchKey, formatter, formatter_kwargs
688 def getFormatterClass(self, entity: Entity) -> type:
689 """Get the matching formatter class.
691 Parameters
692 ----------
693 entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
694 Entity to use to determine the formatter to return.
695 `StorageClass` will be used as a last resort if a `DatasetRef`
696 or `DatasetType` instance is provided. Instrument overrides are
697 supported if a `DatasetRef` is provided whose data ID includes
698 an ``instrument`` value.
700 Returns
701 -------
702 formatter : `type`
703 The class of the registered formatter.
704 """
705 _, formatter, _ = self.getFormatterClassWithMatch(entity)
706 return formatter
708 def getFormatterWithMatch(self, entity: Entity, *args: Any, **kwargs: Any) -> tuple[LookupKey, Formatter]:
709 """Get a new formatter instance along with the matching registry key.
711 Parameters
712 ----------
713 entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
714 Entity to use to determine the formatter to return.
715 `StorageClass` will be used as a last resort if a `DatasetRef`
716 or `DatasetType` instance is provided. Instrument overrides are
717 supported if a `DatasetRef` is provided whose data ID includes
718 an ``instrument`` value.
719 args : `tuple`
720 Positional arguments to pass to the object constructor.
721 **kwargs
722 Keyword arguments to pass to object constructor.
724 Returns
725 -------
726 matchKey : `LookupKey`
727 The key that resulted in the successful match.
728 formatter : `Formatter`
729 An instance of the registered formatter.
730 """
731 names = (LookupKey(name=entity),) if isinstance(entity, str) else entity._lookupNames()
732 matchKey, formatter = self._mappingFactory.getFromRegistryWithMatch(names, *args, **kwargs)
733 log.debug(
734 "Retrieved formatter %s from key '%s' for entity '%s'",
735 get_full_type_name(formatter),
736 matchKey,
737 entity,
738 )
740 return matchKey, formatter
742 def getFormatter(self, entity: Entity, *args: Any, **kwargs: Any) -> Formatter:
743 """Get a new formatter instance.
745 Parameters
746 ----------
747 entity : `DatasetRef`, `DatasetType`, `StorageClass`, or `str`
748 Entity to use to determine the formatter to return.
749 `StorageClass` will be used as a last resort if a `DatasetRef`
750 or `DatasetType` instance is provided. Instrument overrides are
751 supported if a `DatasetRef` is provided whose data ID includes
752 an ``instrument`` value.
753 args : `tuple`
754 Positional arguments to pass to the object constructor.
755 **kwargs
756 Keyword arguments to pass to object constructor.
758 Returns
759 -------
760 formatter : `Formatter`
761 An instance of the registered formatter.
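Examples
--------
A retrieval sketch (``ref``, ``file_descriptor`` and ``data_id`` are
assumed to be a pre-built `DatasetRef`, `FileDescriptor` and
`DataCoordinate`; the positional arguments are forwarded to the
formatter constructor):

.. code-block:: python

    formatter = factory.getFormatter(ref, file_descriptor, data_id)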
762 """
763 _, formatter = self.getFormatterWithMatch(entity, *args, **kwargs)
764 return formatter
766 def registerFormatter(
767 self,
768 type_: LookupKey | str | StorageClass | DatasetType,
769 formatter: str,
770 *,
771 overwrite: bool = False,
772 **kwargs: Any,
773 ) -> None:
774 """Register a `Formatter`.
776 Parameters
777 ----------
778 type_ : `LookupKey`, `str`, `StorageClass` or `DatasetType`
779 Type for which this formatter is to be used. If a `LookupKey`
780 is not provided, one will be constructed from the supplied string
781 or by using the ``name`` property of the supplied entity.
782 formatter : `str` or class of type `Formatter`
783 Identifies a `Formatter` subclass to use for reading and writing
784 Datasets of this type. Can be a `Formatter` class.
785 overwrite : `bool`, optional
786 If `True` an existing entry will be replaced by the new value.
787 Default is `False`.
788 **kwargs
789 Keyword arguments to always pass to object constructor when
790 retrieved.
792 Raises
793 ------
794 ValueError
795 Raised if the formatter does not name a valid formatter type and
796 ``overwrite`` is `False`.
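Examples
--------
A minimal sketch, reusing the ``ExampleFormatter`` placeholder from
`registerFormatters`:

.. code-block:: python

    factory = FormatterFactory()
    factory.registerFormatter(
        "calexp",
        "lsst.daf.butler.formatters.example.ExampleFormatter",
        writeParameters={"max": 5},
    )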
797 """
798 self._mappingFactory.placeInRegistry(type_, formatter, overwrite=overwrite, **kwargs)
801# Type to use when allowing a Formatter or its class name
802FormatterParameter = str | type[Formatter] | Formatter