Coverage for tests/test_datasets.py: 8%
298 statements
« prev ^ index » next coverage.py v7.2.7, created at 2023-07-21 09:55 +0000
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import copy
import pickle
import unittest
import uuid

from lsst.daf.butler import (
    DataCoordinate,
    DatasetRef,
    DatasetType,
    DimensionUniverse,
    FileDataset,
    StorageClass,
    StorageClassFactory,
)
"""Tests for datasets module."""
class DatasetTypeTestCase(unittest.TestCase):
    """Test for DatasetType."""

    def setUp(self) -> None:
        self.universe = DimensionUniverse()

    def testConstructor(self) -> None:
        """Test construction preserves values.

        Note that construction doesn't check for valid storageClass.
        This can only be verified for a particular schema.
        """
        datasetTypeName = "test"
        storageClass = StorageClass("test_StructuredData")
        dimensions = self.universe.extract(("visit", "instrument"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

        # A parent storage class is only valid for component dataset types
        # (names containing a "."), and required for them.
        with self.assertRaises(ValueError, msg="Construct component without parent storage class"):
            DatasetType(DatasetType.nameWithComponent(datasetTypeName, "comp"), dimensions, storageClass)
        with self.assertRaises(ValueError, msg="Construct non-component with parent storage class"):
            DatasetType(datasetTypeName, dimensions, storageClass, parentStorageClass="NotAllowed")

    def testConstructor2(self) -> None:
        """Test construction from StorageClass name."""
        datasetTypeName = "test"
        storageClass = StorageClass("test_constructor2")
        # Name-based construction requires the class to be registered with
        # the (singleton) factory so it can be resolved later.
        StorageClassFactory().registerStorageClass(storageClass)
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, "test_constructor2")
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

    def testNameValidation(self) -> None:
        """Test that dataset type names only contain certain characters
        in certain positions.
        """
        dimensions = self.universe.extract(("instrument", "visit"))
        goodNames = ("a", "A", "z1", "Z1", "a_1B", "A_1b", "_a")
        badNames = ("1", "a%b", "B+Z", "T[0]")

        # Construct storage class with all the good names included as
        # components so that we can test internal consistency
        storageClass = StorageClass(
            "test_StructuredData", components={n: StorageClass("component") for n in goodNames}
        )

        for name in goodNames:
            composite = DatasetType(name, dimensions, storageClass)
            self.assertEqual(composite.name, name)
            for suffix in goodNames:
                full = DatasetType.nameWithComponent(name, suffix)
                component = composite.makeComponentDatasetType(suffix)
                self.assertEqual(component.name, full)
                assert component.parentStorageClass is not None
                self.assertEqual(component.parentStorageClass.name, "test_StructuredData")
            for suffix in badNames:
                full = DatasetType.nameWithComponent(name, suffix)
                with self.subTest(full=full):
                    with self.assertRaises(ValueError):
                        DatasetType(full, dimensions, storageClass)
        for name in badNames:
            with self.subTest(name=name):
                with self.assertRaises(ValueError):
                    DatasetType(name, dimensions, storageClass)

    def testEquality(self) -> None:
        """Test that equality depends on name, dimensions, storage class
        (given by instance or by name), parent storage class, and the
        calibration flag.
        """
        storageA = StorageClass("test_a")
        storageB = StorageClass("test_b")
        parent = StorageClass("test")
        dimensionsA = self.universe.extract(["instrument"])
        dimensionsB = self.universe.extract(["skymap"])
        # Identical definitions compare equal, whether the storage class is
        # supplied as an instance or by name (in any combination).
        self.assertEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, storageA),
        )
        self.assertEqual(
            DatasetType("a", dimensionsA, "test_a"),
            DatasetType("a", dimensionsA, storageA),
        )
        self.assertEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, "test_a"),
        )
        self.assertEqual(
            DatasetType("a", dimensionsA, "test_a"),
            DatasetType("a", dimensionsA, "test_a"),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
        )
        # Any single differing property breaks equality.
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=True),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=False),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("b", dimensionsA, storageA),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("b", dimensionsA, "test_a"),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, storageB),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, "test_b"),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsB, storageA),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsB, "test_a"),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageA),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageB),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageA"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageB"),
        )

    def testCompatibility(self) -> None:
        """Test storage-class compatibility between otherwise-identical
        dataset types.
        """
        # storageA declares it can be converted from list, so a DatasetType
        # using storageA is compatible with one using storageB (but not
        # vice versa, and not with an unrelated class).
        storageA = StorageClass("test_a", pytype=set, converters={"list": "builtins.set"})
        storageB = StorageClass("test_b", pytype=list)
        storageC = StorageClass("test_c", pytype=dict)
        self.assertTrue(storageA.can_convert(storageB))
        dimensionsA = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensionsA, storageA)
        dA2 = DatasetType("a", dimensionsA, storageB)
        self.assertNotEqual(dA, dA2)
        self.assertTrue(dA.is_compatible_with(dA))
        self.assertTrue(dA.is_compatible_with(dA2))
        self.assertFalse(dA2.is_compatible_with(dA))

        dA3 = DatasetType("a", dimensionsA, storageC)
        self.assertFalse(dA.is_compatible_with(dA3))

    def testOverrideStorageClass(self) -> None:
        """Test overriding the storage class of a dataset type."""
        storageA = StorageClass("test_a", pytype=list, converters={"dict": "builtins.list"})
        storageB = StorageClass("test_b", pytype=dict)
        dimensions = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensions, storageA)
        dB = dA.overrideStorageClass(storageB)
        self.assertNotEqual(dA, dB)
        self.assertEqual(dB.storageClass, storageB)

        # Overriding back should recover the original dataset type.
        round_trip = dB.overrideStorageClass(storageA)
        self.assertEqual(round_trip, dA)

        # Check that parents move over.
        parent = StorageClass("composite", components={"a": storageA, "c": storageA})
        dP = DatasetType("comp", dimensions, parent)
        dP_A = dP.makeComponentDatasetType("a")
        dp_B = dP_A.overrideStorageClass(storageB)
        self.assertEqual(dp_B.storageClass, storageB)
        self.assertEqual(dp_B.parentStorageClass, parent)

    def testJson(self) -> None:
        """Test that a DatasetType round-trips through JSON."""
        storageA = StorageClass("test_a")
        dimensionsA = self.universe.extract(["instrument"])
        self.assertEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType.from_json(
                DatasetType("a", dimensionsA, storageA).to_json(),
                self.universe,
            ),
        )
        # Components with a named parent storage class must survive too.
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType.from_json(
                DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent").to_json(),
                self.universe,
            ),
        )

    def testSorting(self) -> None:
        """Can we sort a DatasetType"""
        storage = StorageClass("test_a")
        dimensions = self.universe.extract(["instrument"])

        d_a = DatasetType("a", dimensions, storage)
        d_f = DatasetType("f", dimensions, storage)
        d_p = DatasetType("p", dimensions, storage)

        sort = sorted([d_p, d_f, d_a])
        self.assertEqual(sort, [d_a, d_f, d_p])

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", d_p, "c", d_f, d_a, "d"])  # type: ignore [list-item]

    def testHashability(self) -> None:
        """Test `DatasetType.__hash__`.

        This test is performed by checking that `DatasetType` entries can
        be inserted into a `set` and that unique values of its
        (`name`, `storageClass`, `dimensions`) parameters result in separate
        entries (and equal ones don't).

        This does not check for uniformity of hashing or the actual values
        of the hash function.
        """
        types: list[DatasetType] = []
        unique = 0
        storageC = StorageClass("test_c")
        storageD = StorageClass("test_d")
        for name in ["a", "b"]:
            for storageClass in [storageC, storageD]:
                for dims in [("instrument",), ("skymap",)]:
                    datasetType = DatasetType(name, self.universe.extract(dims), storageClass)
                    datasetTypeCopy = DatasetType(name, self.universe.extract(dims), storageClass)
                    types.extend((datasetType, datasetTypeCopy))
                    unique += 1  # datasetType should always equal its copy
        self.assertEqual(len(set(types)), unique)  # all other combinations are unique

        # also check that hashes of instances constructed with StorageClass
        # name matches hashes of instances constructed with instances
        dimensions = self.universe.extract(["instrument"])
        self.assertEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_d"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageD)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_d"))
        )

    def testDeepCopy(self) -> None:
        """Test that we can copy a dataset type."""
        storageClass = StorageClass("test_copy")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)

        # Now with calibration flag set
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)
        self.assertTrue(dcopy.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("copy_component")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        dcopy = copy.deepcopy(componentDatasetType)
        self.assertEqual(dcopy, componentDatasetType)

    def testPickle(self) -> None:
        """Test pickle support."""
        storageClass = StorageClass("test_pickle")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        # Un-pickling requires that storage class is registered with factory.
        StorageClassFactory().registerStorageClass(storageClass)
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(datasetType.name, datasetTypeOut.name)
        self.assertEqual(datasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(datasetType.storageClass, datasetTypeOut.storageClass)
        self.assertIsNone(datasetTypeOut.parentStorageClass)
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertFalse(datasetTypeOut.isCalibration())

        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertTrue(datasetTypeOut.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("pickle_component")
        StorageClassFactory().registerStorageClass(componentStorageClass)
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(componentDatasetType.name, datasetTypeOut.name)
        self.assertEqual(componentDatasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(componentDatasetType.storageClass, datasetTypeOut.storageClass)
        self.assertEqual(componentDatasetType.parentStorageClass, datasetTypeOut.parentStorageClass)
        self.assertEqual(datasetTypeOut.parentStorageClass.name, storageClass.name)
        self.assertEqual(datasetTypeOut, componentDatasetType)

        # Now with a string and not a real storage class to test that
        # pickling doesn't force the StorageClass to be resolved
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            "StrangeComponent",
            parentStorageClass="UnknownParent",
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

        # Now with a storage class that is created by the factory
        factoryStorageClassClass = StorageClassFactory.makeNewStorageClass("ParentClass")
        factoryComponentStorageClassClass = StorageClassFactory.makeNewStorageClass("ComponentClass")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            factoryComponentStorageClassClass(),
            parentStorageClass=factoryStorageClassClass(),
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

    def test_composites(self) -> None:
        """Test components within composite DatasetTypes."""
        storageClassA = StorageClass("compA")
        storageClassB = StorageClass("compB")
        storageClass = StorageClass(
            "test_composite", components={"compA": storageClassA, "compB": storageClassB}
        )
        self.assertTrue(storageClass.isComposite())
        self.assertFalse(storageClassA.isComposite())
        self.assertFalse(storageClassB.isComposite())

        dimensions = self.universe.extract(("instrument", "visit"))

        datasetTypeComposite = DatasetType("composite", dimensions, storageClass)
        datasetTypeComponentA = datasetTypeComposite.makeComponentDatasetType("compA")
        datasetTypeComponentB = datasetTypeComposite.makeComponentDatasetType("compB")

        self.assertTrue(datasetTypeComposite.isComposite())
        self.assertFalse(datasetTypeComponentA.isComposite())
        self.assertTrue(datasetTypeComponentB.isComponent())
        self.assertFalse(datasetTypeComposite.isComponent())

        self.assertEqual(datasetTypeComposite.name, "composite")
        self.assertEqual(datasetTypeComponentA.name, "composite.compA")
        self.assertEqual(datasetTypeComponentB.component(), "compB")
        self.assertEqual(datasetTypeComposite.nameAndComponent(), ("composite", None))
        self.assertEqual(datasetTypeComponentA.nameAndComponent(), ("composite", "compA"))

        self.assertEqual(datasetTypeComponentA.parentStorageClass, storageClass)
        self.assertEqual(datasetTypeComponentB.parentStorageClass, storageClass)
        self.assertIsNone(datasetTypeComposite.parentStorageClass)

        # Asking for a component the composite does not define must fail.
        with self.assertRaises(KeyError):
            datasetTypeComposite.makeComponentDatasetType("compF")
class DatasetRefTestCase(unittest.TestCase):
    """Test for DatasetRef."""

    def setUp(self) -> None:
        self.universe = DimensionUniverse()
        datasetTypeName = "test"
        self.componentStorageClass1 = StorageClass("Component1")
        self.componentStorageClass2 = StorageClass("Component2")
        self.parentStorageClass = StorageClass(
            "Parent", components={"a": self.componentStorageClass1, "b": self.componentStorageClass2}
        )
        dimensions = self.universe.extract(("instrument", "visit"))
        self.dataId = DataCoordinate.standardize(
            dict(instrument="DummyCam", visit=42), universe=self.universe
        )
        self.datasetType = DatasetType(datasetTypeName, dimensions, self.parentStorageClass)

    def testConstructor(self) -> None:
        """Test that construction preserves and validates values."""
        # Constructing a ref requires a run.
        with self.assertRaises(TypeError):
            DatasetRef(self.datasetType, self.dataId, id=uuid.uuid4())  # type: ignore [call-arg]

        # Constructing an unresolved ref with run and/or components should
        # issue a ref with an id.
        run = "somerun"
        ref = DatasetRef(self.datasetType, self.dataId, run=run)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsNotNone(ref.id)

        # Passing a data ID that is missing dimensions should fail.
        # Create a full DataCoordinate to ensure that we are testing the
        # right thing.
        dimensions = self.universe.extract(("instrument",))
        dataId = DataCoordinate.standardize(instrument="DummyCam", graph=dimensions)
        with self.assertRaises(KeyError):
            DatasetRef(self.datasetType, dataId, run="run")
        # Constructing a resolved ref should preserve run as well as everything
        # else.
        id_ = uuid.uuid4()
        ref = DatasetRef(self.datasetType, self.dataId, id=id_, run=run)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsInstance(ref.dataId, DataCoordinate)
        self.assertEqual(ref.id, id_)
        self.assertEqual(ref.run, run)

        # An invalid id-generation mode must be rejected.
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, run=run, id_generation_mode=42)  # type: ignore

    def testSorting(self) -> None:
        """Can we sort a DatasetRef"""
        # All refs have the same run.
        dimensions = self.universe.extract(("instrument", "visit"))
        ref1 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=1, graph=dimensions),
            run="run",
        )
        ref2 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=10, graph=dimensions),
            run="run",
        )
        ref3 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=22, graph=dimensions),
            run="run",
        )

        # Enable detailed diff report
        self.maxDiff = None

        # This will sort them on visit number
        sort = sorted([ref3, ref1, ref2])
        self.assertEqual(sort, [ref1, ref2, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now include different runs.
        ref1 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=43, graph=dimensions),
            run="b",
        )
        self.assertEqual(ref1.run, "b")
        ref4 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=10, graph=dimensions),
            run="b",
        )
        ref2 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=4, graph=dimensions),
            run="a",
        )
        ref3 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=104, graph=dimensions),
            run="c",
        )

        # This will sort them on run before visit
        sort = sorted([ref3, ref1, ref2, ref4])
        self.assertEqual(sort, [ref2, ref4, ref1, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", ref1, "c"])  # type: ignore [list-item]

    def testOverrideStorageClass(self) -> None:
        """Test overriding the storage class of a ref and the related
        compatibility checks.
        """
        storageA = StorageClass("test_a", pytype=list)

        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")

        ref_new = ref.overrideStorageClass(storageA)
        self.assertNotEqual(ref, ref_new)
        self.assertEqual(ref_new.datasetType.storageClass, storageA)
        self.assertEqual(ref_new.overrideStorageClass(ref.datasetType.storageClass), ref)
        self.assertTrue(ref.is_compatible_with(ref_new))
        with self.assertRaises(AttributeError):
            ref_new.is_compatible_with(None)  # type: ignore

        # Check different code paths of incompatibility.
        ref_incompat = DatasetRef(ref.datasetType, ref.dataId, run="somerun2", id=ref.id)
        self.assertFalse(ref.is_compatible_with(ref_incompat))  # bad run
        ref_incompat = DatasetRef(ref.datasetType, ref.dataId, run="somerun")
        self.assertFalse(ref.is_compatible_with(ref_incompat))  # bad ID

        incompatible_sc = StorageClass("my_int", pytype=int)
        with self.assertRaises(ValueError):
            # Do not test against "ref" because it has a default storage class
            # of "object" which is compatible with everything.
            ref_new.overrideStorageClass(incompatible_sc)

    def testPickle(self) -> None:
        """Test that a DatasetRef round-trips through pickle."""
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")
        s = pickle.dumps(ref)
        self.assertEqual(pickle.loads(s), ref)

    def testJson(self) -> None:
        """Test that a DatasetRef round-trips through JSON."""
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")
        s = ref.to_json()
        self.assertEqual(DatasetRef.from_json(s, universe=self.universe), ref)

    def testFileDataset(self) -> None:
        """Test FileDataset ref normalization and run consistency."""
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")
        # A single ref is normalized to a one-element list.
        file_dataset = FileDataset(path="something.yaml", refs=ref)
        self.assertEqual(file_dataset.refs, [ref])

        # Mixing refs from different runs in one FileDataset must fail.
        ref2 = DatasetRef(self.datasetType, self.dataId, run="somerun2")
        with self.assertRaises(ValueError):
            FileDataset(path="other.yaml", refs=[ref, ref2])
# Allow the test module to be run directly.
if __name__ == "__main__":
    unittest.main()