Coverage for tests/test_datasets.py: 8%
313 statements
« prev ^ index » next coverage.py v7.3.1, created at 2023-10-02 08:00 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28import copy
29import pickle
30import unittest
31import uuid
33from lsst.daf.butler import (
34 DataCoordinate,
35 DatasetRef,
36 DatasetType,
37 DimensionUniverse,
38 FileDataset,
39 StorageClass,
40 StorageClassFactory,
41)
43"""Tests for datasets module.
44"""
class DatasetTypeTestCase(unittest.TestCase):
    """Test for DatasetType."""

    def setUp(self) -> None:
        # A fresh default DimensionUniverse for each test; all dimension
        # graphs below are extracted from it.
        self.universe = DimensionUniverse()

    def testConstructor(self) -> None:
        """Test construction preserves values.

        Note that construction doesn't check for valid storageClass.
        This can only be verified for a particular schema.
        """
        datasetTypeName = "test"
        storageClass = StorageClass("test_StructuredData")
        dimensions = self.universe.extract(("visit", "instrument"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

        # parentStorageClass is only valid for component dataset types
        # (names of the form "parent.component"): required there, forbidden
        # for a non-component name.
        with self.assertRaises(ValueError, msg="Construct component without parent storage class"):
            DatasetType(DatasetType.nameWithComponent(datasetTypeName, "comp"), dimensions, storageClass)
        with self.assertRaises(ValueError, msg="Construct non-component with parent storage class"):
            DatasetType(datasetTypeName, dimensions, storageClass, parentStorageClass="NotAllowed")

    def testConstructor2(self) -> None:
        """Test construction from StorageClass name."""
        datasetTypeName = "test"
        storageClass = StorageClass("test_constructor2")
        # Registering the class lets DatasetType resolve it by name.
        StorageClassFactory().registerStorageClass(storageClass)
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, "test_constructor2")
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

    def testNameValidation(self) -> None:
        """Test that dataset type names only contain certain characters
        in certain positions.
        """
        dimensions = self.universe.extract(("instrument", "visit"))
        goodNames = ("a", "A", "z1", "Z1", "a_1B", "A_1b", "_a")
        badNames = ("1", "a%b", "B+Z", "T[0]")

        # Construct storage class with all the good names included as
        # components so that we can test internal consistency
        storageClass = StorageClass(
            "test_StructuredData", components={n: StorageClass("component") for n in goodNames}
        )

        for name in goodNames:
            composite = DatasetType(name, dimensions, storageClass)
            self.assertEqual(composite.name, name)
            for suffix in goodNames:
                full = DatasetType.nameWithComponent(name, suffix)
                component = composite.makeComponentDatasetType(suffix)
                self.assertEqual(component.name, full)
                # Narrow Optional for mypy before dereferencing.
                assert component.parentStorageClass is not None
                self.assertEqual(component.parentStorageClass.name, "test_StructuredData")
            for suffix in badNames:
                full = DatasetType.nameWithComponent(name, suffix)
                with self.subTest(full=full):
                    with self.assertRaises(ValueError):
                        DatasetType(full, dimensions, storageClass)
        for name in badNames:
            with self.subTest(name=name):
                with self.assertRaises(ValueError):
                    DatasetType(name, dimensions, storageClass)

    def testEquality(self) -> None:
        # Equality must hold whether the storage class is given as an
        # instance or as its name, in every combination; inequality must
        # trigger on any differing field (name, storage class, dimensions,
        # parent storage class, calibration flag).
        storageA = StorageClass("test_a")
        storageB = StorageClass("test_b")
        parent = StorageClass("test")
        dimensionsA = self.universe.extract(["instrument"])
        dimensionsB = self.universe.extract(["skymap"])
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
        )
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
        )
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
        )
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=True),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=False),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "b",
                dimensionsA,
                storageA,
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "b",
                dimensionsA,
                "test_a",
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                storageB,
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                "test_b",
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsB,
                storageA,
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsB,
                "test_a",
            ),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageA),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageB),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageA"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageB"),
        )

    def testCompatibility(self) -> None:
        # is_compatible_with is directional: A accepts B because A declares
        # a converter from list ("test_b") to set, but not the reverse.
        storageA = StorageClass("test_a", pytype=set, converters={"list": "builtins.set"})
        storageB = StorageClass("test_b", pytype=list)
        storageC = StorageClass("test_c", pytype=dict)
        self.assertTrue(storageA.can_convert(storageB))
        dimensionsA = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensionsA, storageA)
        dA2 = DatasetType("a", dimensionsA, storageB)
        self.assertNotEqual(dA, dA2)
        self.assertTrue(dA.is_compatible_with(dA))
        self.assertTrue(dA.is_compatible_with(dA2))
        self.assertFalse(dA2.is_compatible_with(dA))

        # No converter between dict and set in either direction.
        dA3 = DatasetType("a", dimensionsA, storageC)
        self.assertFalse(dA.is_compatible_with(dA3))

    def testOverrideStorageClass(self) -> None:
        storageA = StorageClass("test_a", pytype=list, converters={"dict": "builtins.list"})
        storageB = StorageClass("test_b", pytype=dict)
        dimensions = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensions, storageA)
        dB = dA.overrideStorageClass(storageB)
        self.assertNotEqual(dA, dB)
        self.assertEqual(dB.storageClass, storageB)

        # Overriding back should reproduce the original exactly.
        round_trip = dB.overrideStorageClass(storageA)
        self.assertEqual(round_trip, dA)

        # Check that parents move over.
        parent = StorageClass("composite", components={"a": storageA, "c": storageA})
        dP = DatasetType("comp", dimensions, parent)
        dP_A = dP.makeComponentDatasetType("a")
        # NOTE(review): leftover debug print — consider removing.
        print(dP_A)
        dp_B = dP_A.overrideStorageClass(storageB)
        self.assertEqual(dp_B.storageClass, storageB)
        self.assertEqual(dp_B.parentStorageClass, parent)

    def testJson(self) -> None:
        # Round-trip through to_json/from_json must reproduce an equal
        # DatasetType, both for a plain type and for a component type
        # carrying a parent storage class name.
        storageA = StorageClass("test_a")
        dimensionsA = self.universe.extract(["instrument"])
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType.from_json(
                DatasetType(
                    "a",
                    dimensionsA,
                    storageA,
                ).to_json(),
                self.universe,
            ),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType.from_json(
                DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent").to_json(),
                self.universe,
            ),
        )

    def testSorting(self) -> None:
        """Can we sort a DatasetType"""
        storage = StorageClass("test_a")
        dimensions = self.universe.extract(["instrument"])

        d_a = DatasetType("a", dimensions, storage)
        d_f = DatasetType("f", dimensions, storage)
        d_p = DatasetType("p", dimensions, storage)

        # Ordering follows the dataset type name.
        sort = sorted([d_p, d_f, d_a])
        self.assertEqual(sort, [d_a, d_f, d_p])

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", d_p, "c", d_f, d_a, "d"])  # type: ignore [list-item]

    def testHashability(self) -> None:
        """Test `DatasetType.__hash__`.

        This test is performed by checking that `DatasetType` entries can
        be inserted into a `set` and that unique values of its
        (`name`, `storageClass`, `dimensions`) parameters result in separate
        entries (and equal ones don't).

        This does not check for uniformity of hashing or the actual values
        of the hash function.
        """
        types: list[DatasetType] = []
        unique = 0
        storageC = StorageClass("test_c")
        storageD = StorageClass("test_d")
        for name in ["a", "b"]:
            for storageClass in [storageC, storageD]:
                for dims in [("instrument",), ("skymap",)]:
                    datasetType = DatasetType(name, self.universe.extract(dims), storageClass)
                    datasetTypeCopy = DatasetType(name, self.universe.extract(dims), storageClass)
                    types.extend((datasetType, datasetTypeCopy))
                    unique += 1  # datasetType should always equal its copy
        self.assertEqual(len(set(types)), unique)  # all other combinations are unique

        # also check that hashes of instances constructed with StorageClass
        # name matches hashes of instances constructed with instances
        dimensions = self.universe.extract(["instrument"])
        self.assertEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_d"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageD)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_d"))
        )

    def testDeepCopy(self) -> None:
        """Test that we can copy a dataset type."""
        storageClass = StorageClass("test_copy")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)

        # Now with calibration flag set
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)
        self.assertTrue(dcopy.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("copy_component")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        dcopy = copy.deepcopy(componentDatasetType)
        self.assertEqual(dcopy, componentDatasetType)

    def testPickle(self) -> None:
        """Test pickle support."""
        storageClass = StorageClass("test_pickle")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        # Un-pickling requires that storage class is registered with factory.
        StorageClassFactory().registerStorageClass(storageClass)
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(datasetType.name, datasetTypeOut.name)
        self.assertEqual(datasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(datasetType.storageClass, datasetTypeOut.storageClass)
        self.assertIsNone(datasetTypeOut.parentStorageClass)
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertFalse(datasetTypeOut.isCalibration())

        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertTrue(datasetTypeOut.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("pickle_component")
        StorageClassFactory().registerStorageClass(componentStorageClass)
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(componentDatasetType.name, datasetTypeOut.name)
        self.assertEqual(componentDatasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(componentDatasetType.storageClass, datasetTypeOut.storageClass)
        self.assertEqual(componentDatasetType.parentStorageClass, datasetTypeOut.parentStorageClass)
        self.assertEqual(datasetTypeOut.parentStorageClass.name, storageClass.name)
        self.assertEqual(datasetTypeOut, componentDatasetType)

        # Now with a string and not a real storage class to test that
        # pickling doesn't force the StorageClass to be resolved
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            "StrangeComponent",
            parentStorageClass="UnknownParent",
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        # Compare the private name attribute directly so the unregistered
        # storage class is never resolved.
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

        # Now with a storage class that is created by the factory
        factoryStorageClassClass = StorageClassFactory.makeNewStorageClass("ParentClass")
        factoryComponentStorageClassClass = StorageClassFactory.makeNewStorageClass("ComponentClass")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            factoryComponentStorageClassClass(),
            parentStorageClass=factoryStorageClassClass(),
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

    def test_composites(self) -> None:
        """Test components within composite DatasetTypes."""
        storageClassA = StorageClass("compA")
        storageClassB = StorageClass("compB")
        storageClass = StorageClass(
            "test_composite", components={"compA": storageClassA, "compB": storageClassB}
        )
        self.assertTrue(storageClass.isComposite())
        self.assertFalse(storageClassA.isComposite())
        self.assertFalse(storageClassB.isComposite())

        dimensions = self.universe.extract(("instrument", "visit"))

        datasetTypeComposite = DatasetType("composite", dimensions, storageClass)
        datasetTypeComponentA = datasetTypeComposite.makeComponentDatasetType("compA")
        datasetTypeComponentB = datasetTypeComposite.makeComponentDatasetType("compB")

        self.assertTrue(datasetTypeComposite.isComposite())
        self.assertFalse(datasetTypeComponentA.isComposite())
        self.assertTrue(datasetTypeComponentB.isComponent())
        self.assertFalse(datasetTypeComposite.isComponent())

        self.assertEqual(datasetTypeComposite.name, "composite")
        self.assertEqual(datasetTypeComponentA.name, "composite.compA")
        self.assertEqual(datasetTypeComponentB.component(), "compB")
        self.assertEqual(datasetTypeComposite.nameAndComponent(), ("composite", None))
        self.assertEqual(datasetTypeComponentA.nameAndComponent(), ("composite", "compA"))

        self.assertEqual(datasetTypeComponentA.parentStorageClass, storageClass)
        self.assertEqual(datasetTypeComponentB.parentStorageClass, storageClass)
        self.assertIsNone(datasetTypeComposite.parentStorageClass)

        # Requesting a component the storage class does not define fails.
        with self.assertRaises(KeyError):
            datasetTypeComposite.makeComponentDatasetType("compF")
class DatasetRefTestCase(unittest.TestCase):
    """Test for DatasetRef."""

    def setUp(self) -> None:
        # Shared fixtures: a composite parent storage class with two
        # components, a (instrument, visit) dataset type, and a standardized
        # data ID used by most tests below.
        self.universe = DimensionUniverse()
        datasetTypeName = "test"
        self.componentStorageClass1 = StorageClass("Component1")
        self.componentStorageClass2 = StorageClass("Component2")
        self.parentStorageClass = StorageClass(
            "Parent", components={"a": self.componentStorageClass1, "b": self.componentStorageClass2}
        )
        dimensions = self.universe.extract(("instrument", "visit"))
        self.dataId = DataCoordinate.standardize(
            dict(instrument="DummyCam", visit=42), universe=self.universe
        )
        self.datasetType = DatasetType(datasetTypeName, dimensions, self.parentStorageClass)

    def testConstructor(self) -> None:
        """Test that construction preserves and validates values."""
        # Constructing a ref requires a run.
        with self.assertRaises(TypeError):
            DatasetRef(self.datasetType, self.dataId, id=uuid.uuid4())  # type: ignore [call-arg]

        # Constructing an unresolved ref with run and/or components should
        # issue a ref with an id.
        run = "somerun"
        ref = DatasetRef(self.datasetType, self.dataId, run=run)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsNotNone(ref.id)

        # Passing a data ID that is missing dimensions should fail.
        # Create a full DataCoordinate to ensure that we are testing the
        # right thing.
        dimensions = self.universe.extract(("instrument",))
        dataId = DataCoordinate.standardize(instrument="DummyCam", graph=dimensions)
        with self.assertRaises(KeyError):
            DatasetRef(self.datasetType, dataId, run="run")
        # Constructing a resolved ref should preserve run as well as everything
        # else.
        id_ = uuid.uuid4()
        ref = DatasetRef(self.datasetType, self.dataId, id=id_, run=run)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsInstance(ref.dataId, DataCoordinate)
        self.assertEqual(ref.id, id_)
        self.assertEqual(ref.run, run)

        # An invalid id_generation_mode value is rejected.
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, run=run, id_generation_mode=42)  # type: ignore

    def testSorting(self) -> None:
        """Can we sort a DatasetRef"""
        # All refs have the same run.
        dimensions = self.universe.extract(("instrument", "visit"))
        ref1 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=1, graph=dimensions),
            run="run",
        )
        ref2 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=10, graph=dimensions),
            run="run",
        )
        ref3 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=22, graph=dimensions),
            run="run",
        )

        # Enable detailed diff report
        self.maxDiff = None

        # This will sort them on visit number
        sort = sorted([ref3, ref1, ref2])
        self.assertEqual(sort, [ref1, ref2, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now include different runs.
        ref1 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=43, graph=dimensions),
            run="b",
        )
        self.assertEqual(ref1.run, "b")
        ref4 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=10, graph=dimensions),
            run="b",
        )
        ref2 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=4, graph=dimensions),
            run="a",
        )
        ref3 = DatasetRef(
            self.datasetType,
            DataCoordinate.standardize(instrument="DummyCam", visit=104, graph=dimensions),
            run="c",
        )

        # This will sort them on run before visit
        sort = sorted([ref3, ref1, ref2, ref4])
        self.assertEqual(sort, [ref2, ref4, ref1, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", ref1, "c"])  # type: ignore [list-item]

    def testOverrideStorageClass(self) -> None:
        storageA = StorageClass("test_a", pytype=list)

        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")

        # Overriding yields a distinct but compatible ref; overriding back
        # restores the original.
        ref_new = ref.overrideStorageClass(storageA)
        self.assertNotEqual(ref, ref_new)
        self.assertEqual(ref_new.datasetType.storageClass, storageA)
        self.assertEqual(ref_new.overrideStorageClass(ref.datasetType.storageClass), ref)
        self.assertTrue(ref.is_compatible_with(ref_new))
        with self.assertRaises(AttributeError):
            ref_new.is_compatible_with(None)  # type: ignore

        # Check different code paths of incompatibility.
        ref_incompat = DatasetRef(ref.datasetType, ref.dataId, run="somerun2", id=ref.id)
        self.assertFalse(ref.is_compatible_with(ref_incompat))  # bad run
        ref_incompat = DatasetRef(ref.datasetType, ref.dataId, run="somerun")
        self.assertFalse(ref.is_compatible_with(ref_incompat))  # bad ID

        incompatible_sc = StorageClass("my_int", pytype=int)
        with self.assertRaises(ValueError):
            # Do not test against "ref" because it has a default storage class
            # of "object" which is compatible with everything.
            ref_new.overrideStorageClass(incompatible_sc)

    def testReplace(self) -> None:
        """Test for `DatasetRef.replace` method."""
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")

        # Changing the run re-generates the dataset ID.
        ref2 = ref.replace(run="somerun2")
        self.assertEqual(ref2.run, "somerun2")
        self.assertIsNotNone(ref2.id)
        self.assertNotEqual(ref2.id, ref.id)

        # An explicitly supplied ID is preserved.
        ref3 = ref.replace(run="somerun3", id=ref2.id)
        self.assertEqual(ref3.run, "somerun3")
        self.assertEqual(ref3.id, ref2.id)

        ref4 = ref.replace(id=ref2.id)
        self.assertEqual(ref4.run, "somerun")
        self.assertEqual(ref4.id, ref2.id)

        # replace() with no arguments is an equality-preserving copy.
        ref5 = ref.replace()
        self.assertEqual(ref5.run, "somerun")
        self.assertEqual(ref5, ref)

    def testPickle(self) -> None:
        # Pickle round-trip must compare equal.
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")
        s = pickle.dumps(ref)
        self.assertEqual(pickle.loads(s), ref)

    def testJson(self) -> None:
        # JSON round-trip must compare equal.
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")
        s = ref.to_json()
        self.assertEqual(DatasetRef.from_json(s, universe=self.universe), ref)

    def testFileDataset(self) -> None:
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")
        # A single ref is normalized to a one-element list.
        file_dataset = FileDataset(path="something.yaml", refs=ref)
        self.assertEqual(file_dataset.refs, [ref])

        # Refs from different runs cannot share one FileDataset.
        ref2 = DatasetRef(self.datasetType, self.dataId, run="somerun2")
        with self.assertRaises(ValueError):
            FileDataset(path="other.yaml", refs=[ref, ref2])
# Allow running this test module directly (``python test_datasets.py``) in
# addition to discovery via pytest/unittest.
if __name__ == "__main__":
    unittest.main()