Coverage for python / lsst / daf / butler / _dataset_type.py: 19%
244 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-26 08:49 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28from __future__ import annotations
30__all__ = ["DatasetType", "SerializedDatasetType"]
32import re
33from collections.abc import Callable, Iterable, Mapping
34from copy import deepcopy
35from types import MappingProxyType
36from typing import TYPE_CHECKING, Any, ClassVar, Self, cast
38from pydantic import BaseModel, StrictBool, StrictStr
40from ._config_support import LookupKey
41from ._exceptions import UnknownComponentError
42from ._storage_class import StorageClass, StorageClassFactory
43from .dimensions import DimensionGroup
44from .json import from_json_pydantic, to_json_pydantic
45from .persistence_context import PersistenceContextVars
47if TYPE_CHECKING:
48 from .dimensions import DimensionUniverse
49 from .registry import Registry
52def _safeMakeMappingProxyType(data: Mapping | None) -> Mapping:
53 if data is None:
54 data = {}
55 return MappingProxyType(data)
class SerializedDatasetType(BaseModel):
    """Simplified model of a `DatasetType` suitable for serialization."""

    name: StrictStr
    storageClass: StrictStr | None = None
    dimensions: list[StrictStr] | None = None
    parentStorageClass: StrictStr | None = None
    isCalibration: StrictBool = False

    @classmethod
    def direct(
        cls,
        *,
        name: str,
        storageClass: str | None = None,
        dimensions: list | None = None,
        parentStorageClass: str | None = None,
        isCalibration: bool = False,
    ) -> SerializedDatasetType:
        """Construct a `SerializedDatasetType` directly without validators.

        This differs from Pydantic's model_construct method in that the
        arguments are explicitly what the model requires, and it will recurse
        through members, constructing them from their corresponding `direct`
        methods.

        This method should only be called when the inputs are trusted.

        Parameters
        ----------
        name : `str`
            The name of the dataset type.
        storageClass : `str` or `None`
            The name of the storage class.
        dimensions : `list` or `None`
            The dimensions associated with this dataset type.
        parentStorageClass : `str` or `None`
            The parent storage class name if this is a component.
        isCalibration : `bool`
            Whether this dataset type represents calibrations.

        Returns
        -------
        `SerializedDatasetType`
            A Pydantic model representing a dataset type.
        """
        cache = PersistenceContextVars.serializedDatasetTypeMapping.get()
        # NOTE(review): the cache key ignores dimensions and the other fields,
        # so entries are assumed unique per (name, storageClass) pair.
        key = (name, storageClass or "")
        if cache is not None and (type_ := cache.get(key, None)) is not None:
            return type_
        # FIX: the previous code assigned
        # ``dimensions if dimensions is not None else None`` — a no-op
        # conditional; pass the value straight through instead.
        node = cls.model_construct(
            name=name,
            storageClass=storageClass,
            dimensions=dimensions,
            parentStorageClass=parentStorageClass,
            isCalibration=isCalibration,
        )

        if cache is not None:
            cache[key] = node
        return node
class DatasetType:
    """A named category of Datasets.

    Defines how they are organized, related, and stored.

    A concrete, final class whose instances represent a `DatasetType`.
    `DatasetType` instances may be constructed without a `Registry`,
    but they must be registered
    via `Registry.registerDatasetType()` before corresponding Datasets
    may be added.
    `DatasetType` instances are immutable.

    Parameters
    ----------
    name : `str`
        A string name for the Dataset; must correspond to the same
        `DatasetType` across all Registries. Names must start with an
        upper or lowercase letter, and may contain only letters, numbers,
        and underscores. Component dataset types should contain a single
        period separating the base dataset type name from the component name
        (and may be recursive).
    dimensions : `DimensionGroup` or `~collections.abc.Iterable` [ `str` ]
        Dimensions used to label and relate instances of this `DatasetType`.
        If not a `DimensionGroup`, ``universe`` must be provided as well.
    storageClass : `StorageClass` or `str`
        Instance of a `StorageClass` or name of `StorageClass` that defines
        how this `DatasetType` is persisted.
    parentStorageClass : `StorageClass` or `str`, optional
        Instance of a `StorageClass` or name of `StorageClass` that defines
        how the composite parent is persisted. Must be `None` if this
        is not a component.
    universe : `DimensionUniverse`, optional
        Set of all known dimensions, used to normalize ``dimensions`` if it
        is not already a `DimensionGroup`.
    isCalibration : `bool`, optional
        If `True`, this dataset type may be included in
        `~CollectionType.CALIBRATION` collections.

    Notes
    -----
    See also :ref:`daf_butler_organizing_datasets`.
    """

    # Fixed attribute set: keeps instances compact and (with the lazy
    # storage-class caching) effectively immutable.
    __slots__ = (
        "_name",
        "_dimensions",
        "_storageClass",
        "_storageClassName",
        "_parentStorageClass",
        "_parentStorageClassName",
        "_isCalibration",
    )

    # Pydantic model used by to_simple()/from_simple() serialization.
    _serializedType: ClassVar[type[BaseModel]] = SerializedDatasetType

    # A name is a word starting with a letter/underscore, optionally followed
    # by "."-separated component names of the same form.
    VALID_NAME_REGEX = re.compile("^[a-zA-Z_][a-zA-Z0-9_]*(\\.[a-zA-Z_][a-zA-Z0-9_]*)*$")
179 @staticmethod
180 def nameWithComponent(datasetTypeName: str, componentName: str) -> str:
181 """Form a valid DatasetTypeName from a parent and component.
183 No validation is performed.
185 Parameters
186 ----------
187 datasetTypeName : `str`
188 Base type name.
189 componentName : `str`
190 Name of component.
192 Returns
193 -------
194 compTypeName : `str`
195 Name to use for component DatasetType.
196 """
197 return f"{datasetTypeName}.{componentName}"
199 def __init__(
200 self,
201 name: str,
202 dimensions: DimensionGroup | Iterable[str],
203 storageClass: StorageClass | str,
204 parentStorageClass: StorageClass | str | None = None,
205 *,
206 universe: DimensionUniverse | None = None,
207 isCalibration: bool = False,
208 ):
209 if self.VALID_NAME_REGEX.match(name) is None:
210 raise ValueError(f"DatasetType name '{name}' is invalid.")
211 self._name = name
212 universe = universe or getattr(dimensions, "universe", None)
213 if universe is None:
214 raise ValueError("If dimensions is not a DimensionGroup, a universe must be provided.")
215 self._dimensions = universe.conform(dimensions)
216 if name in self._dimensions.universe.governor_dimensions:
217 raise ValueError(f"Governor dimension name {name} cannot be used as a dataset type name.")
218 if not isinstance(storageClass, StorageClass | str):
219 raise ValueError(f"StorageClass argument must be StorageClass or str. Got {storageClass}")
220 self._storageClass: StorageClass | None
221 if isinstance(storageClass, StorageClass):
222 self._storageClass = storageClass
223 self._storageClassName = storageClass.name
224 else:
225 self._storageClass = None
226 self._storageClassName = storageClass
228 self._parentStorageClass: StorageClass | None = None
229 self._parentStorageClassName: str | None = None
230 if parentStorageClass is not None:
231 if not isinstance(storageClass, StorageClass | str):
232 raise ValueError(
233 f"Parent StorageClass argument must be StorageClass or str. Got {parentStorageClass}"
234 )
236 # Only allowed for a component dataset type
237 _, componentName = self.splitDatasetTypeName(self._name)
238 if componentName is None:
239 raise ValueError(
240 f"Can not specify a parent storage class if this is not a component ({self._name})"
241 )
242 if isinstance(parentStorageClass, StorageClass):
243 self._parentStorageClass = parentStorageClass
244 self._parentStorageClassName = parentStorageClass.name
245 else:
246 self._parentStorageClassName = parentStorageClass
248 # Ensure that parent storage class is specified when we have
249 # a component and is not specified when we don't
250 _, componentName = self.splitDatasetTypeName(self._name)
251 if parentStorageClass is None and componentName is not None:
252 raise ValueError(
253 f"Component dataset type '{self._name}' constructed without parent storage class"
254 )
255 if parentStorageClass is not None and componentName is None:
256 raise ValueError(f"Parent storage class specified by {self._name} is not a composite")
257 self._isCalibration = isCalibration
259 def __repr__(self) -> str:
260 extra = ""
261 if self._parentStorageClassName:
262 extra = f", parentStorageClass={self._parentStorageClassName}"
263 if self._isCalibration:
264 extra += ", isCalibration=True"
265 return f"DatasetType({self.name!r}, {self._dimensions}, {self._storageClassName}{extra})"
267 def _equal_ignoring_storage_class(self, other: Any) -> bool:
268 """Check everything is equal except the storage class.
270 Parameters
271 ----------
272 other : Any
273 Object to check against this one.
275 Returns
276 -------
277 mostly : `bool`
278 Returns `True` if everything except the storage class is equal.
279 """
280 if not isinstance(other, type(self)):
281 return False
282 if self._name != other._name:
283 return False
284 if self._dimensions != other._dimensions:
285 return False
286 if self._isCalibration != other._isCalibration:
287 return False
288 return True
290 def __eq__(self, other: Any) -> bool:
291 mostly_equal = self._equal_ignoring_storage_class(other)
292 if not mostly_equal:
293 return False
295 # Be careful not to force a storage class to import the corresponding
296 # python code.
297 if self._parentStorageClass is not None and other._parentStorageClass is not None:
298 if self._parentStorageClass != other._parentStorageClass:
299 return False
300 else:
301 if self._parentStorageClassName != other._parentStorageClassName:
302 return False
304 if self._storageClass is not None and other._storageClass is not None:
305 if self._storageClass != other._storageClass:
306 return False
307 else:
308 if self._storageClassName != other._storageClassName:
309 return False
310 return True
312 def is_compatible_with(self, other: DatasetType) -> bool:
313 """Determine if the given `DatasetType` is compatible with this one.
315 Compatibility requires a matching name and dimensions and a storage
316 class for this dataset type that can convert the python type associated
317 with the other storage class to this python type. Parent storage class
318 compatibility is not checked at all for components.
320 Parameters
321 ----------
322 other : `DatasetType`
323 Dataset type to check.
325 Returns
326 -------
327 is_compatible : `bool`
328 Returns `True` if the other dataset type is either the same as this
329 or the storage class associated with the other can be converted to
330 this.
331 """
332 mostly_equal = self._equal_ignoring_storage_class(other)
333 if not mostly_equal:
334 return False
336 # If the storage class names match then they are compatible.
337 if self._storageClassName == other._storageClassName:
338 return True
340 # Now required to check the full storage class.
341 self_sc = self.storageClass
342 other_sc = other.storageClass
344 return self_sc.can_convert(other_sc)
346 def __hash__(self) -> int:
347 """Hash DatasetType instance.
349 This only uses StorageClass name which is it consistent with the
350 implementation of StorageClass hash method.
351 """
352 return hash((self._name, self._dimensions, self._storageClassName, self._parentStorageClassName))
354 def __lt__(self, other: Any) -> bool:
355 """Sort using the dataset type name."""
356 if not isinstance(other, type(self)):
357 return NotImplemented
358 return self.name < other.name
    @property
    def name(self) -> str:
        """Return a string name for the Dataset.

        Must correspond to the same `DatasetType` across all Registries.
        """
        return self._name
    @property
    def dimensions(self) -> DimensionGroup:
        """Return the dimensions of this dataset type (`DimensionGroup`).

        The dimensions of a dataset type define the keys of its datasets'
        data IDs.
        """
        return self._dimensions
376 @property
377 def storageClass(self) -> StorageClass:
378 """Return `StorageClass` instance associated with this dataset type.
380 The `StorageClass` defines how this `DatasetType`
381 is persisted. Note that if DatasetType was constructed with a name
382 of a StorageClass then Butler has to be initialized before using
383 this property.
384 """
385 if self._storageClass is None:
386 self._storageClass = StorageClassFactory().getStorageClass(self._storageClassName)
387 return self._storageClass
    @property
    def storageClass_name(self) -> str:
        """Return the storage class name.

        This will never force the storage class to be imported.
        """
        return self._storageClassName
397 @property
398 def parentStorageClass(self) -> StorageClass | None:
399 """Return the storage class of the composite containing this component.
401 Note that if DatasetType was constructed with a name of a
402 StorageClass then Butler has to be initialized before using this
403 property. Can be `None` if this is not a component of a composite.
404 Must be defined if this is a component.
405 """
406 if self._parentStorageClass is None and self._parentStorageClassName is None:
407 return None
408 if self._parentStorageClass is None and self._parentStorageClassName is not None:
409 self._parentStorageClass = StorageClassFactory().getStorageClass(self._parentStorageClassName)
410 return self._parentStorageClass
    def isCalibration(self) -> bool:
        """Return if datasets of this type can be in calibration collections.

        Returns
        -------
        flag : `bool`
            `True` if datasets of this type may be included in calibration
            collections.
        """
        return self._isCalibration
423 @staticmethod
424 def splitDatasetTypeName(datasetTypeName: str) -> tuple[str, str | None]:
425 """Return the root name and the component from a composite name.
427 Parameters
428 ----------
429 datasetTypeName : `str`
430 The name of the dataset type, can include a component using
431 a "."-separator.
433 Returns
434 -------
435 rootName : `str`
436 Root name without any components.
437 componentName : `str`
438 The component if it has been specified, else `None`.
440 Notes
441 -----
442 If the dataset type name is ``a.b.c`` this method will return a
443 root name of ``a`` and a component name of ``b.c``.
444 """
445 comp = None
446 root = datasetTypeName
447 if "." in root:
448 # If there is doubt, the component is after the first "."
449 root, comp = root.split(".", maxsplit=1)
450 return root, comp
    def nameAndComponent(self) -> tuple[str, str | None]:
        """Return the root name of this dataset type and any component.

        Returns
        -------
        rootName : `str`
            Root name for this `DatasetType` without any components.
        componentName : `str`
            The component if it has been specified, else `None`.
        """
        return self.splitDatasetTypeName(self.name)
464 def component(self) -> str | None:
465 """Return the component name (if defined).
467 Returns
468 -------
469 comp : `str`
470 Name of component part of DatasetType name. `None` if this
471 `DatasetType` is not associated with a component.
472 """
473 _, comp = self.nameAndComponent()
474 return comp
476 def componentTypeName(self, component: str) -> str:
477 """Derive a component dataset type from a composite.
479 Parameters
480 ----------
481 component : `str`
482 Name of component.
484 Returns
485 -------
486 derived : `str`
487 Compound name of this `DatasetType` and the component.
489 Raises
490 ------
491 KeyError
492 Requested component is not supported by this `DatasetType`.
493 """
494 if component in self.storageClass.allComponents():
495 return self.nameWithComponent(self.name, component)
496 raise UnknownComponentError(
497 f"Requested component ({component}) not understood by this DatasetType ({self})"
498 )
500 def makeCompositeDatasetType(self) -> DatasetType:
501 """Return a composite dataset type from the component.
503 Returns
504 -------
505 composite : `DatasetType`
506 The composite dataset type.
508 Raises
509 ------
510 RuntimeError
511 Raised if this dataset type is not a component dataset type.
512 """
513 if not self.isComponent():
514 raise RuntimeError(f"DatasetType {self.name} must be a component to form the composite")
515 composite_name, _ = self.nameAndComponent()
516 if self.parentStorageClass is None:
517 raise ValueError(
518 f"Parent storage class is not set. Unable to create composite type from {self.name}"
519 )
520 return DatasetType(
521 composite_name,
522 dimensions=self._dimensions,
523 storageClass=self.parentStorageClass,
524 isCalibration=self.isCalibration(),
525 )
527 def makeComponentDatasetType(self, component: str) -> DatasetType:
528 """Return a component dataset type from a composite.
530 Assumes the same dimensions as the parent.
532 Parameters
533 ----------
534 component : `str`
535 Name of component.
537 Returns
538 -------
539 datasetType : `DatasetType`
540 A new DatasetType instance.
541 """
542 # The component could be a read/write or read component
543 return DatasetType(
544 self.componentTypeName(component),
545 dimensions=self._dimensions,
546 storageClass=self.storageClass.allComponents()[component],
547 parentStorageClass=self.storageClass,
548 isCalibration=self.isCalibration(),
549 )
551 def makeAllComponentDatasetTypes(self) -> list[DatasetType]:
552 """Return all component dataset types for this composite.
554 Returns
555 -------
556 all : `list` of `DatasetType`
557 All the component dataset types. If this is not a composite
558 then returns an empty list.
559 """
560 return [
561 self.makeComponentDatasetType(componentName)
562 for componentName in self.storageClass.allComponents()
563 ]
565 def overrideStorageClass(self, storageClass: str | StorageClass) -> DatasetType:
566 """Create a new `DatasetType` from this one but with an updated
567 `StorageClass`.
569 Parameters
570 ----------
571 storageClass : `str` or `StorageClass`
572 The new storage class.
574 Returns
575 -------
576 modified : `DatasetType`
577 A dataset type that is the same as the current one but with a
578 different storage class. Will be ``self`` if the given storage
579 class is the current one.
581 Notes
582 -----
583 If this is a component dataset type, the parent storage class will be
584 retained.
585 """
586 if storageClass == self._storageClassName or storageClass == self._storageClass:
587 return self
588 parent = self._parentStorageClass if self._parentStorageClass else self._parentStorageClassName
589 new = DatasetType(
590 self.name,
591 dimensions=self._dimensions,
592 storageClass=storageClass,
593 parentStorageClass=parent,
594 isCalibration=self.isCalibration(),
595 )
596 # Check validity.
597 if new.is_compatible_with(self) or self.is_compatible_with(new):
598 return new
599 raise ValueError(
600 f"The new storage class ({new.storageClass}) is not compatible with the "
601 f"existing storage class ({self.storageClass})."
602 )
604 def isComponent(self) -> bool:
605 """Return whether this `DatasetType` refers to a component.
607 Returns
608 -------
609 isComponent : `bool`
610 `True` if this `DatasetType` is a component, `False` otherwise.
611 """
612 if self.component():
613 return True
614 return False
    def isComposite(self) -> bool:
        """Return whether this `DatasetType` is a composite.

        Returns
        -------
        isComposite : `bool`
            `True` if this `DatasetType` is a composite type, `False`
            otherwise.

        Notes
        -----
        Accessing the ``storageClass`` property may resolve the storage
        class via `StorageClassFactory` on first use.
        """
        return self.storageClass.isComposite()
627 def _lookupNames(self) -> tuple[LookupKey, ...]:
628 """Return name keys to use for lookups in configurations.
630 The names are returned in order of priority.
632 Returns
633 -------
634 names : `tuple` of `LookupKey`
635 Tuple of the `DatasetType` name and the `StorageClass` name.
636 If the name includes a component the name with the component
637 is first, then the name without the component and finally
638 the storage class name and the storage class name of the
639 composite.
640 """
641 rootName, componentName = self.nameAndComponent()
642 lookups: tuple[LookupKey, ...] = (LookupKey(name=self.name),)
643 if componentName is not None:
644 lookups = lookups + (LookupKey(name=rootName),)
646 if self._dimensions:
647 # Dimensions are a lower priority than dataset type name
648 lookups = lookups + (LookupKey(dimensions=self._dimensions),)
650 storageClasses = self.storageClass._lookupNames()
651 if componentName is not None and self.parentStorageClass is not None:
652 storageClasses += self.parentStorageClass._lookupNames()
654 return lookups + storageClasses
656 def to_simple(self, minimal: bool = False) -> SerializedDatasetType:
657 """Convert this class to a simple python type.
659 This makes it suitable for serialization.
661 Parameters
662 ----------
663 minimal : `bool`, optional
664 Use minimal serialization. Requires Registry to convert
665 back to a full type.
667 Returns
668 -------
669 simple : `SerializedDatasetType`
670 The object converted to a class suitable for serialization.
671 """
672 as_dict: dict[str, Any]
673 if minimal:
674 # Only needs the name.
675 as_dict = {"name": self.name}
676 else:
677 # Convert to a dict form
678 as_dict = {
679 "name": self.name,
680 "storageClass": self._storageClassName,
681 "isCalibration": self._isCalibration,
682 "dimensions": list(self._dimensions.required),
683 }
685 if self._parentStorageClassName is not None:
686 as_dict["parentStorageClass"] = self._parentStorageClassName
687 return SerializedDatasetType(**as_dict)
    @classmethod
    def from_simple(
        cls,
        simple: SerializedDatasetType,
        universe: DimensionUniverse | None = None,
        registry: Registry | None = None,
    ) -> DatasetType:
        """Construct a new object from the simplified form.

        This is usually data returned from the `to_simple` method.

        Parameters
        ----------
        simple : `SerializedDatasetType`
            The value returned by `to_simple()`.
        universe : `DimensionUniverse`
            The special graph of all known dimensions of which this graph will
            be a subset. Can be `None` if a registry is provided.
        registry : `lsst.daf.butler.Registry`, optional
            Registry to use to convert simple name of a DatasetType to
            a full `DatasetType`. Can be `None` if a full description of
            the type is provided along with a universe.

        Returns
        -------
        datasetType : `DatasetType`
            Newly-constructed object.
        """
        # check to see if there is a cache, and if there is, if there is a
        # cached dataset type
        cache = PersistenceContextVars.loadedTypes.get()
        # NOTE(review): the cache key ignores dimensions and parent storage
        # class; entries are assumed unique per (name, storageClass).
        key = (simple.name, simple.storageClass or "")
        if cache is not None and (type_ := cache.get(key, None)) is not None:
            return type_

        if simple.storageClass is None:
            # Treat this as minimalist representation
            if registry is None:
                raise ValueError(
                    f"Unable to convert a DatasetType name '{simple}' to DatasetType without a Registry"
                )
            return registry.getDatasetType(simple.name)

        if universe is None and registry is None:
            raise ValueError("One of universe or registry must be provided.")

        if universe is None and registry is not None:
            # registry should not be none by now but test helps mypy
            universe = registry.dimensions

        if universe is None:
            # this is for mypy
            raise ValueError("Unable to determine a usable universe")
        if simple.dimensions is None:
            # The fully-specified (non-minimal) form must carry dimensions.
            raise ValueError(f"Dimensions must be specified in {simple}")
        dimensions = universe.conform(simple.dimensions)

        newType = cls(
            name=simple.name,
            dimensions=dimensions,
            storageClass=simple.storageClass,
            isCalibration=simple.isCalibration,
            parentStorageClass=simple.parentStorageClass,
            universe=universe,
        )
        if cache is not None:
            cache[key] = newType
        return newType
    # JSON (de)serialization delegates to the shared pydantic helpers; the
    # cast gives from_json a classmethod signature mypy can follow.
    to_json = to_json_pydantic
    from_json: ClassVar[Callable[..., Self]] = cast(Callable[..., Self], classmethod(from_json_pydantic))
    def __reduce__(
        self,
    ) -> tuple[
        Callable, tuple[type[DatasetType], tuple[str, DimensionGroup, str, str | None], dict[str, bool]]
    ]:
        """Support pickling.

        StorageClass instances can not normally be pickled, so we pickle
        StorageClass name instead of instance.
        """
        # _unpickle_via_factory calls self.__class__(*args, **kwargs); only the
        # storage class *names* are serialized, so they are re-resolved lazily
        # after unpickling.
        return _unpickle_via_factory, (
            self.__class__,
            (self.name, self._dimensions, self._storageClassName, self._parentStorageClassName),
            {"isCalibration": self._isCalibration},
        )
777 def __deepcopy__(self, memo: Any) -> DatasetType:
778 """Support for deep copy method.
780 Normally ``deepcopy`` will use pickle mechanism to make copies.
781 We want to avoid that to support (possibly degenerate) use case when
782 DatasetType is constructed with StorageClass instance which is not
783 registered with StorageClassFactory (this happens in unit tests).
784 Instead we re-implement ``__deepcopy__`` method.
785 """
786 return DatasetType(
787 name=deepcopy(self.name, memo),
788 dimensions=deepcopy(self._dimensions, memo),
789 storageClass=deepcopy(self._storageClass or self._storageClassName, memo),
790 parentStorageClass=deepcopy(self._parentStorageClass or self._parentStorageClassName, memo),
791 isCalibration=deepcopy(self._isCalibration, memo),
792 )
795def _unpickle_via_factory(factory: Callable, args: Any, kwargs: Any) -> DatasetType:
796 """Unpickle something by calling a factory.
798 Allows subclasses to unpickle using `__reduce__` with keyword
799 arguments as well as positional arguments.
800 """
801 return factory(*args, **kwargs)
def get_dataset_type_name(datasetTypeOrName: DatasetType | str) -> str:
    """Given a `DatasetType` object or a dataset type name, return a dataset
    type name.

    Parameters
    ----------
    datasetTypeOrName : `DatasetType` | `str`
        A DatasetType, or the name of a DatasetType.

    Returns
    -------
    name
        The name associated with the given DatasetType, or the given string.
    """
    # The two accepted types are disjoint, so the order of checks does not
    # affect behavior.
    if isinstance(datasetTypeOrName, str):
        return datasetTypeOrName
    if isinstance(datasetTypeOrName, DatasetType):
        return datasetTypeOrName.name
    raise TypeError(f"Expected DatasetType or str, got unexpected object: {datasetTypeOrName}")