Coverage for python/lsst/daf/butler/core/datasets/type.py: 21%

217 statements

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ["DatasetType", "SerializedDatasetType"]

import re
from copy import deepcopy
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Mapping, Optional, Tuple, Type, Union

from pydantic import BaseModel, StrictBool, StrictStr

from ..configSupport import LookupKey
from ..dimensions import DimensionGraph, SerializedDimensionGraph
from ..json import from_json_pydantic, to_json_pydantic
from ..storageClass import StorageClass, StorageClassFactory

if TYPE_CHECKING:
    from ...registry import Registry
    from ..dimensions import Dimension, DimensionUniverse


def _safeMakeMappingProxyType(data: Optional[Mapping]) -> Mapping:
    if data is None:
        data = {}
    return MappingProxyType(data)


class SerializedDatasetType(BaseModel):
    """Simplified model of a `DatasetType` suitable for serialization."""

    name: StrictStr
    storageClass: Optional[StrictStr] = None
    dimensions: Optional[SerializedDimensionGraph] = None
    parentStorageClass: Optional[StrictStr] = None
    isCalibration: StrictBool = False

    @classmethod
    def direct(
        cls,
        *,
        name: str,
        storageClass: Optional[str] = None,
        dimensions: Optional[Dict] = None,
        parentStorageClass: Optional[str] = None,
        isCalibration: bool = False,
    ) -> SerializedDatasetType:
        """Construct a `SerializedDatasetType` directly without validators.

        This differs from Pydantic's ``construct`` method in that the
        arguments are explicitly what the model requires, and it will
        recurse through members, constructing them from their corresponding
        `direct` methods.

        This method should only be called when the inputs are trusted.
        """
        node = SerializedDatasetType.__new__(cls)
        setter = object.__setattr__
        setter(node, "name", name)
        setter(node, "storageClass", storageClass)
        setter(
            node,
            "dimensions",
            dimensions if dimensions is None else SerializedDimensionGraph.direct(**dimensions),
        )
        setter(node, "parentStorageClass", parentStorageClass)
        setter(node, "isCalibration", isCalibration)
        setter(
            node,
            "__fields_set__",
            {"name", "storageClass", "dimensions", "parentStorageClass", "isCalibration"},
        )
        return node
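As a minimal sketch of the two construction paths (illustrative only; the dataset type and storage class names here are hypothetical, and the top-level ``lsst.daf.butler`` export is assumed):

    from lsst.daf.butler import SerializedDatasetType

    # Normal construction runs pydantic validation on every field.
    validated = SerializedDatasetType(name="calexp", storageClass="ExposureF")

    # direct() bypasses validation, so it should only be fed data that this
    # code serialized itself (for example, when reading back trusted JSON).
    trusted = SerializedDatasetType.direct(
        name="calexp",
        storageClass="ExposureF",
        dimensions=None,
        parentStorageClass=None,
        isCalibration=False,
    )
    assert trusted.name == validated.name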

class DatasetType:
    r"""A named category of Datasets.

    Defines how they are organized, related, and stored.

    A concrete, final class whose instances represent `DatasetType`\ s.
    `DatasetType` instances may be constructed without a `Registry`, but
    they must be registered via `Registry.registerDatasetType()` before
    corresponding Datasets may be added. `DatasetType` instances are
    immutable.

    Parameters
    ----------
    name : `str`
        A string name for the Dataset; must correspond to the same
        `DatasetType` across all Registries. Names must start with an
        upper or lowercase letter, and may contain only letters, numbers,
        and underscores. Component dataset types should contain a single
        period separating the base dataset type name from the component
        name (and may be recursive).
    dimensions : `DimensionGraph` or iterable of `Dimension`
        Dimensions used to label and relate instances of this `DatasetType`.
        If not a `DimensionGraph`, ``universe`` must be provided as well.
    storageClass : `StorageClass` or `str`
        Instance of a `StorageClass` or name of `StorageClass` that defines
        how this `DatasetType` is persisted.
    parentStorageClass : `StorageClass` or `str`, optional
        Instance of a `StorageClass` or name of `StorageClass` that defines
        how the composite parent is persisted. Must be `None` if this
        is not a component.
    universe : `DimensionUniverse`, optional
        Set of all known dimensions, used to normalize ``dimensions`` if it
        is not already a `DimensionGraph`.
    isCalibration : `bool`, optional
        If `True`, this dataset type may be included in
        `~CollectionType.CALIBRATION` collections.

    See Also
    --------
    :ref:`daf_butler_organizing_datasets`
    """

    __slots__ = (
        "_name",
        "_dimensions",
        "_storageClass",
        "_storageClassName",
        "_parentStorageClass",
        "_parentStorageClassName",
        "_isCalibration",
    )

    _serializedType = SerializedDatasetType

    VALID_NAME_REGEX = re.compile("^[a-zA-Z_][a-zA-Z0-9_]*(\\.[a-zA-Z_][a-zA-Z0-9_]*)*$")

    @staticmethod
    def nameWithComponent(datasetTypeName: str, componentName: str) -> str:
        """Form a valid DatasetTypeName from a parent and component.

        No validation is performed.

        Parameters
        ----------
        datasetTypeName : `str`
            Base type name.
        componentName : `str`
            Name of component.

        Returns
        -------
        compTypeName : `str`
            Name to use for component DatasetType.
        """
        return "{}.{}".format(datasetTypeName, componentName)
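A quick sketch of what the name rule and the helper above accept (the specific dataset type names are made up for illustration):

    from lsst.daf.butler import DatasetType

    assert DatasetType.VALID_NAME_REGEX.match("deepCoadd_calexp") is not None
    assert DatasetType.VALID_NAME_REGEX.match("calexp.wcs.cd") is not None  # dotted components allowed
    assert DatasetType.VALID_NAME_REGEX.match("2fast") is None  # may not start with a digit

    # nameWithComponent simply joins the two parts with a period.
    assert DatasetType.nameWithComponent("calexp", "wcs") == "calexp.wcs"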

    def __init__(
        self,
        name: str,
        dimensions: Union[DimensionGraph, Iterable[Dimension]],
        storageClass: Union[StorageClass, str],
        parentStorageClass: Optional[Union[StorageClass, str]] = None,
        *,
        universe: Optional[DimensionUniverse] = None,
        isCalibration: bool = False,
    ):
        if self.VALID_NAME_REGEX.match(name) is None:
            raise ValueError(f"DatasetType name '{name}' is invalid.")
        self._name = name
        if not isinstance(dimensions, DimensionGraph):
            if universe is None:
                raise ValueError(
                    "If dimensions is not a normalized DimensionGraph, a universe must be provided."
                )
            dimensions = universe.extract(dimensions)
        self._dimensions = dimensions
        if name in self._dimensions.universe.getGovernorDimensions().names:
            raise ValueError(f"Governor dimension name {name} cannot be used as a dataset type name.")
        if not isinstance(storageClass, (StorageClass, str)):
            raise ValueError(f"StorageClass argument must be StorageClass or str. Got {storageClass}")
        self._storageClass: Optional[StorageClass]
        if isinstance(storageClass, StorageClass):
            self._storageClass = storageClass
            self._storageClassName = storageClass.name
        else:
            self._storageClass = None
            self._storageClassName = storageClass

        self._parentStorageClass: Optional[StorageClass] = None
        self._parentStorageClassName: Optional[str] = None
        if parentStorageClass is not None:
            if not isinstance(parentStorageClass, (StorageClass, str)):
                raise ValueError(
                    f"Parent StorageClass argument must be StorageClass or str. Got {parentStorageClass}"
                )

            # Only allowed for a component dataset type.
            _, componentName = self.splitDatasetTypeName(self._name)
            if componentName is None:
                raise ValueError(
                    f"Can not specify a parent storage class if this is not a component ({self._name})"
                )
            if isinstance(parentStorageClass, StorageClass):
                self._parentStorageClass = parentStorageClass
                self._parentStorageClassName = parentStorageClass.name
            else:
                self._parentStorageClassName = parentStorageClass

        # Ensure that a parent storage class is specified when we have
        # a component and is not specified when we don't.
        _, componentName = self.splitDatasetTypeName(self._name)
        if parentStorageClass is None and componentName is not None:
            raise ValueError(
                f"Component dataset type '{self._name}' constructed without parent storage class"
            )
        if parentStorageClass is not None and componentName is None:
            raise ValueError(f"Parent storage class specified by {self._name} is not a composite")
        self._isCalibration = isCalibration

    def __repr__(self) -> str:
        extra = ""
        if self._parentStorageClassName:
            extra = f", parentStorageClass={self._parentStorageClassName}"
        if self._isCalibration:
            extra += ", isCalibration=True"
        return f"DatasetType({self.name!r}, {self.dimensions}, {self._storageClassName}{extra})"

    def _equal_ignoring_storage_class(self, other: Any) -> bool:
        """Check everything is equal except the storage class.

        Parameters
        ----------
        other : `Any`
            Object to check against this one.

        Returns
        -------
        mostly : `bool`
            Returns `True` if everything except the storage class is equal.
        """
        if not isinstance(other, type(self)):
            return False
        if self._name != other._name:
            return False
        if self._dimensions != other._dimensions:
            return False
        if self._isCalibration != other._isCalibration:
            return False
        if self._parentStorageClass is not None and other._parentStorageClass is not None:
            return self._parentStorageClass == other._parentStorageClass
        else:
            return self._parentStorageClassName == other._parentStorageClassName

    def __eq__(self, other: Any) -> bool:
        mostly_equal = self._equal_ignoring_storage_class(other)
        if not mostly_equal:
            return False

        # Be careful not to force a storage class to import the corresponding
        # python code.
        if self._storageClass is not None and other._storageClass is not None:
            if self._storageClass != other._storageClass:
                return False
        else:
            if self._storageClassName != other._storageClassName:
                return False
        return True

    def is_compatible_with(self, other: DatasetType) -> bool:
        """Determine if the given `DatasetType` is compatible with this one.

        Compatibility requires a matching name and dimensions and a storage
        class for this dataset type that can convert the python type
        associated with the other storage class to this python type.

        Parameters
        ----------
        other : `DatasetType`
            Dataset type to check.

        Returns
        -------
        is_compatible : `bool`
            Returns `True` if the other dataset type is either the same as
            this or the storage class associated with the other can be
            converted to this.
        """
        mostly_equal = self._equal_ignoring_storage_class(other)
        if not mostly_equal:
            return False

        # If the storage class names match then they are compatible.
        if self._storageClassName == other._storageClassName:
            return True

        # Now required to check the full storage class.
        self_sc = self.storageClass
        other_sc = other.storageClass

        return self_sc.can_convert(other_sc)
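A short sketch of the difference between strict equality and compatibility (the dataset type name "catalog" and the storage class names are hypothetical; with bare, unregistered StorageClass instances the compatibility check reduces to the name comparison above):

    from lsst.daf.butler import DatasetType, DimensionUniverse, StorageClass

    universe = DimensionUniverse()  # default dimension configuration
    empty = universe.empty

    a = DatasetType("catalog", empty, StorageClass("TableA"))
    a2 = DatasetType("catalog", empty, StorageClass("TableA"))
    b = DatasetType("catalog", empty, StorageClass("TableB"))

    assert a == a2 and a.is_compatible_with(a2)  # identical storage class names
    assert a != b
    # a.is_compatible_with(b) additionally asks whether TableA can convert
    # TableB's python type, via StorageClass.can_convert.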

    def __hash__(self) -> int:
        """Hash DatasetType instance.

        This only uses the StorageClass name, which is consistent with the
        implementation of the StorageClass hash method.
        """
        return hash((self._name, self._dimensions, self._storageClassName, self._parentStorageClassName))

    def __lt__(self, other: Any) -> bool:
        """Sort using the dataset type name."""
        if not isinstance(other, type(self)):
            return NotImplemented
        return self.name < other.name

    @property
    def name(self) -> str:
        """Return a string name for the Dataset.

        Must correspond to the same `DatasetType` across all Registries.
        """
        return self._name

    @property
    def dimensions(self) -> DimensionGraph:
        r"""Return the `Dimension`\ s for this dataset type.

        The dimensions label and relate instances of this
        `DatasetType` (`DimensionGraph`).
        """
        return self._dimensions

    @property
    def storageClass(self) -> StorageClass:
        """Return the `StorageClass` instance associated with this dataset
        type.

        The `StorageClass` defines how this `DatasetType` is persisted. Note
        that if the DatasetType was constructed with the name of a
        StorageClass then Butler has to be initialized before using this
        property.
        """
        if self._storageClass is None:
            self._storageClass = StorageClassFactory().getStorageClass(self._storageClassName)
        return self._storageClass

    @property
    def parentStorageClass(self) -> Optional[StorageClass]:
        """Return the storage class of the composite containing this
        component.

        Note that if the DatasetType was constructed with the name of a
        StorageClass then Butler has to be initialized before using this
        property. Can be `None` if this is not a component of a composite.
        Must be defined if this is a component.
        """
        if self._parentStorageClass is None and self._parentStorageClassName is None:
            return None
        if self._parentStorageClass is None and self._parentStorageClassName is not None:
            self._parentStorageClass = StorageClassFactory().getStorageClass(self._parentStorageClassName)
        return self._parentStorageClass
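The two properties above resolve storage classes lazily. A sketch, assuming a hypothetical storage class name that is only looked up on first access:

    from lsst.daf.butler import DatasetType, DimensionUniverse

    universe = DimensionUniverse()
    dt = DatasetType("catalog", universe.empty, "SourceCatalog")

    # Only the storage class *name* is stored so far; no StorageClass
    # instance exists yet. The first access of dt.storageClass asks the
    # StorageClassFactory singleton for "SourceCatalog", which will fail
    # unless the storage class configuration has been loaded (normally a
    # side effect of initializing a Butler).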

    def isCalibration(self) -> bool:
        """Return if datasets of this type can be in calibration collections.

        Returns
        -------
        flag : `bool`
            `True` if datasets of this type may be included in calibration
            collections.
        """
        return self._isCalibration

    @staticmethod
    def splitDatasetTypeName(datasetTypeName: str) -> Tuple[str, Optional[str]]:
        """Return the root name and the component from a composite name.

        Parameters
        ----------
        datasetTypeName : `str`
            The name of the dataset type; can include a component using
            a "."-separator.

        Returns
        -------
        rootName : `str`
            Root name without any components.
        componentName : `str`
            The component if it has been specified, else `None`.

        Notes
        -----
        If the dataset type name is ``a.b.c`` this method will return a
        root name of ``a`` and a component name of ``b.c``.
        """
        comp = None
        root = datasetTypeName
        if "." in root:
            # If there is doubt, the component is after the first "."
            root, comp = root.split(".", maxsplit=1)
        return root, comp
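The Notes above can be seen directly; this sketch simply mirrors the documented behaviour:

    from lsst.daf.butler import DatasetType

    assert DatasetType.splitDatasetTypeName("calexp") == ("calexp", None)
    assert DatasetType.splitDatasetTypeName("calexp.wcs") == ("calexp", "wcs")
    # Everything after the first "." is treated as the (possibly dotted)
    # component name.
    assert DatasetType.splitDatasetTypeName("a.b.c") == ("a", "b.c")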

    def nameAndComponent(self) -> Tuple[str, Optional[str]]:
        """Return the root name of this dataset type and any component.

        Returns
        -------
        rootName : `str`
            Root name for this `DatasetType` without any components.
        componentName : `str`
            The component if it has been specified, else `None`.
        """
        return self.splitDatasetTypeName(self.name)

    def component(self) -> Optional[str]:
        """Return the component name (if defined).

        Returns
        -------
        comp : `str`
            Name of component part of DatasetType name. `None` if this
            `DatasetType` is not associated with a component.
        """
        _, comp = self.nameAndComponent()
        return comp

    def componentTypeName(self, component: str) -> str:
        """Derive a component dataset type from a composite.

        Parameters
        ----------
        component : `str`
            Name of component.

        Returns
        -------
        derived : `str`
            Compound name of this `DatasetType` and the component.

        Raises
        ------
        KeyError
            Requested component is not supported by this `DatasetType`.
        """
        if component in self.storageClass.allComponents():
            return self.nameWithComponent(self.name, component)
        raise KeyError(f"Requested component ({component}) not understood by this DatasetType ({self})")

    def makeCompositeDatasetType(self) -> DatasetType:
        """Return a composite dataset type from the component.

        Returns
        -------
        composite : `DatasetType`
            The composite dataset type.

        Raises
        ------
        RuntimeError
            Raised if this dataset type is not a component dataset type.
        """
        if not self.isComponent():
            raise RuntimeError(f"DatasetType {self.name} must be a component to form the composite")
        composite_name, _ = self.nameAndComponent()
        if self.parentStorageClass is None:
            raise ValueError(
                f"Parent storage class is not set. Unable to create composite type from {self.name}"
            )
        return DatasetType(composite_name, dimensions=self.dimensions, storageClass=self.parentStorageClass)

    def makeComponentDatasetType(self, component: str) -> DatasetType:
        """Return a component dataset type from a composite.

        Assumes the same dimensions as the parent.

        Parameters
        ----------
        component : `str`
            Name of component.

        Returns
        -------
        datasetType : `DatasetType`
            A new DatasetType instance.
        """
        # The component could be a read/write or read component.
        return DatasetType(
            self.componentTypeName(component),
            dimensions=self.dimensions,
            storageClass=self.storageClass.allComponents()[component],
            parentStorageClass=self.storageClass,
        )

    def makeAllComponentDatasetTypes(self) -> List[DatasetType]:
        """Return all component dataset types for this composite.

        Returns
        -------
        all : `list` of `DatasetType`
            All the component dataset types. If this is not a composite
            then returns an empty list.
        """
        return [
            self.makeComponentDatasetType(componentName)
            for componentName in self.storageClass.allComponents()
        ]
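A sketch tying the component methods above together (the storage class structure here is fabricated for illustration; real component definitions come from the storage class configuration):

    from lsst.daf.butler import DatasetType, DimensionUniverse, StorageClass

    universe = DimensionUniverse()
    wcs_class = StorageClass("Wcs")
    exposure_class = StorageClass("Exposure", components={"wcs": wcs_class})

    parent = DatasetType("calexp", universe.empty, exposure_class)
    comp = parent.makeComponentDatasetType("wcs")
    assert comp.name == "calexp.wcs" and comp.isComponent()

    # The component records its parent's storage class, so the composite
    # type can be reconstructed from it.
    assert comp.makeCompositeDatasetType() == parent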

    def isComponent(self) -> bool:
        """Return whether this `DatasetType` refers to a component.

        Returns
        -------
        isComponent : `bool`
            `True` if this `DatasetType` is a component, `False` otherwise.
        """
        if self.component():
            return True
        return False

    def isComposite(self) -> bool:
        """Return whether this `DatasetType` is a composite.

        Returns
        -------
        isComposite : `bool`
            `True` if this `DatasetType` is a composite type, `False`
            otherwise.
        """
        return self.storageClass.isComposite()

    def _lookupNames(self) -> Tuple[LookupKey, ...]:
        """Return name keys to use for lookups in configurations.

        The names are returned in order of priority.

        Returns
        -------
        names : `tuple` of `LookupKey`
            Tuple of the `DatasetType` name and the `StorageClass` name.
            If the name includes a component the name with the component
            is first, then the name without the component and finally
            the storage class name and the storage class name of the
            composite.
        """
        rootName, componentName = self.nameAndComponent()
        lookups: Tuple[LookupKey, ...] = (LookupKey(name=self.name),)
        if componentName is not None:
            lookups = lookups + (LookupKey(name=rootName),)

        if self.dimensions:
            # Dimensions are a lower priority than dataset type name.
            lookups = lookups + (LookupKey(dimensions=self.dimensions),)

        storageClasses = self.storageClass._lookupNames()
        if componentName is not None and self.parentStorageClass is not None:
            storageClasses += self.parentStorageClass._lookupNames()

        return lookups + storageClasses

    def to_simple(self, minimal: bool = False) -> SerializedDatasetType:
        """Convert this class to a simple python type.

        This makes it suitable for serialization.

        Parameters
        ----------
        minimal : `bool`, optional
            Use minimal serialization. Requires Registry to convert
            back to a full type.

        Returns
        -------
        simple : `SerializedDatasetType`
            The object converted to a class suitable for serialization.
        """
        as_dict: Dict[str, Any]
        if minimal:
            # Only needs the name.
            as_dict = {"name": self.name}
        else:
            # Convert to a dict form.
            as_dict = {
                "name": self.name,
                "storageClass": self._storageClassName,
                "isCalibration": self._isCalibration,
                "dimensions": self.dimensions.to_simple(),
            }

            if self._parentStorageClassName is not None:
                as_dict["parentStorageClass"] = self._parentStorageClassName
        return SerializedDatasetType(**as_dict)

    @classmethod
    def from_simple(
        cls,
        simple: SerializedDatasetType,
        universe: Optional[DimensionUniverse] = None,
        registry: Optional[Registry] = None,
    ) -> DatasetType:
        """Construct a new object from the simplified form.

        This is usually data returned from the `to_simple` method.

        Parameters
        ----------
        simple : `SerializedDatasetType`
            The value returned by `to_simple()`.
        universe : `DimensionUniverse`
            The special graph of all known dimensions of which this graph
            will be a subset. Can be `None` if a registry is provided.
        registry : `lsst.daf.butler.Registry`, optional
            Registry to use to convert the simple name of a DatasetType to
            a full `DatasetType`. Can be `None` if a full description of
            the type is provided along with a universe.

        Returns
        -------
        datasetType : `DatasetType`
            Newly-constructed object.
        """
        if simple.storageClass is None:
            # Treat this as a minimalist representation.
            if registry is None:
                raise ValueError(
                    f"Unable to convert a DatasetType name '{simple}' to DatasetType without a Registry"
                )
            return registry.getDatasetType(simple.name)

        if universe is None and registry is None:
            raise ValueError("One of universe or registry must be provided.")

        if universe is None and registry is not None:
            # Registry should not be None by now but the check helps mypy.
            universe = registry.dimensions

        if universe is None:
            # This is for mypy.
            raise ValueError("Unable to determine a usable universe")

        if simple.dimensions is None:
            # mypy hint.
            raise ValueError(f"Dimensions must be specified in {simple}")

        return cls(
            name=simple.name,
            dimensions=DimensionGraph.from_simple(simple.dimensions, universe=universe),
            storageClass=simple.storageClass,
            isCalibration=simple.isCalibration,
            parentStorageClass=simple.parentStorageClass,
            universe=universe,
        )
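A round-trip sketch through the serialization layer (names hypothetical; note that the restored instance only stores the storage class *name*, which is why `__eq__` above is careful not to force a factory lookup):

    from lsst.daf.butler import DatasetType, DimensionUniverse, StorageClass

    universe = DimensionUniverse()
    dt = DatasetType("catalog", universe.empty, StorageClass("TableA"))

    simple = dt.to_simple()  # SerializedDatasetType model
    restored = DatasetType.from_simple(simple, universe=universe)
    assert restored == dt

    # to_json/from_json (attached just below) go through the same model,
    # but as a JSON string.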

    to_json = to_json_pydantic
    from_json = classmethod(from_json_pydantic)

    def __reduce__(
        self,
    ) -> Tuple[
        Callable, Tuple[Type[DatasetType], Tuple[str, DimensionGraph, str, Optional[str]], Dict[str, bool]]
    ]:
        """Support pickling.

        StorageClass instances can not normally be pickled, so we pickle
        the StorageClass name instead of the instance.
        """
        return _unpickle_via_factory, (
            self.__class__,
            (self.name, self.dimensions, self._storageClassName, self._parentStorageClassName),
            {"isCalibration": self._isCalibration},
        )

    def __deepcopy__(self, memo: Any) -> DatasetType:
        """Support for deep copy method.

        Normally ``deepcopy`` will use the pickle mechanism to make copies.
        We want to avoid that to support the (possibly degenerate) use case
        where a DatasetType is constructed with a StorageClass instance that
        is not registered with the StorageClassFactory (this happens in unit
        tests). Instead we re-implement the ``__deepcopy__`` method.
        """
        return DatasetType(
            name=deepcopy(self.name, memo),
            dimensions=deepcopy(self.dimensions, memo),
            storageClass=deepcopy(self._storageClass or self._storageClassName, memo),
            parentStorageClass=deepcopy(self._parentStorageClass or self._parentStorageClassName, memo),
            isCalibration=deepcopy(self._isCalibration, memo),
        )


def _unpickle_via_factory(factory: Callable, args: Any, kwargs: Any) -> DatasetType:
    """Unpickle something by calling a factory.

    Allows subclasses to unpickle using `__reduce__` with keyword
    arguments as well as positional arguments.
    """
    return factory(*args, **kwargs)
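Finally, a sketch of the pickling behaviour described in ``__reduce__`` (hypothetical names again; because only the storage class name travels through the pickle, equality on the unpickled copy holds via the name comparison):

    import pickle

    from lsst.daf.butler import DatasetType, DimensionUniverse, StorageClass

    universe = DimensionUniverse()
    dt = DatasetType("catalog", universe.empty, StorageClass("TableA"))

    copy = pickle.loads(pickle.dumps(dt))
    assert copy == dt  # compared by storage class *name*, not instance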