Coverage for tests/test_datasets.py: 8% (293 statements)
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import copy
import pickle
import unittest
import uuid

from lsst.daf.butler import (
    DataCoordinate,
    DatasetRef,
    DatasetType,
    DimensionUniverse,
    StorageClass,
    StorageClassFactory,
)

"""Tests for datasets module."""


class DatasetTypeTestCase(unittest.TestCase):
    """Test for DatasetType."""

    def setUp(self):
        self.universe = DimensionUniverse()

    def testConstructor(self):
        """Test construction preserves values.

        Note that construction doesn't check for valid storageClass.
        This can only be verified for a particular schema.
        """
        datasetTypeName = "test"
        storageClass = StorageClass("test_StructuredData")
        dimensions = self.universe.extract(("visit", "instrument"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

        with self.assertRaises(ValueError, msg="Construct component without parent storage class"):
            DatasetType(DatasetType.nameWithComponent(datasetTypeName, "comp"), dimensions, storageClass)
        with self.assertRaises(ValueError, msg="Construct non-component with parent storage class"):
            DatasetType(datasetTypeName, dimensions, storageClass, parentStorageClass="NotAllowed")

    def testConstructor2(self):
        """Test construction from StorageClass name."""
        datasetTypeName = "test"
        storageClass = StorageClass("test_constructor2")
        StorageClassFactory().registerStorageClass(storageClass)
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, "test_constructor2")
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

    def testNameValidation(self):
        """Test that dataset type names only contain certain characters
        in certain positions.
        """
        dimensions = self.universe.extract(("instrument", "visit"))
        goodNames = ("a", "A", "z1", "Z1", "a_1B", "A_1b", "_a")
        badNames = ("1", "a%b", "B+Z", "T[0]")

        # Construct storage class with all the good names included as
        # components so that we can test internal consistency
        storageClass = StorageClass(
            "test_StructuredData", components={n: StorageClass("component") for n in goodNames}
        )

        for name in goodNames:
            composite = DatasetType(name, dimensions, storageClass)
            self.assertEqual(composite.name, name)
            for suffix in goodNames:
                full = DatasetType.nameWithComponent(name, suffix)
                component = composite.makeComponentDatasetType(suffix)
                self.assertEqual(component.name, full)
                self.assertEqual(component.parentStorageClass.name, "test_StructuredData")
            for suffix in badNames:
                full = DatasetType.nameWithComponent(name, suffix)
                with self.subTest(full=full):
                    with self.assertRaises(ValueError):
                        DatasetType(full, dimensions, storageClass)
        for name in badNames:
            with self.subTest(name=name):
                with self.assertRaises(ValueError):
                    DatasetType(name, dimensions, storageClass)

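    # The equality checks below cover the dataset type name, dimensions,
    # storage class (given as an instance or by name), parent storage class,
    # and the isCalibration flag.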
    def testEquality(self):
        storageA = StorageClass("test_a")
        storageB = StorageClass("test_b")
        parent = StorageClass("test")
        dimensionsA = self.universe.extract(["instrument"])
        dimensionsB = self.universe.extract(["skymap"])
        self.assertEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, storageA),
        )
        self.assertEqual(
            DatasetType("a", dimensionsA, "test_a"),
            DatasetType("a", dimensionsA, storageA),
        )
        self.assertEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, "test_a"),
        )
        self.assertEqual(
            DatasetType("a", dimensionsA, "test_a"),
            DatasetType("a", dimensionsA, "test_a"),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=True),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=False),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("b", dimensionsA, storageA),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("b", dimensionsA, "test_a"),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, storageB),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, "test_b"),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsB, storageA),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsB, "test_a"),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageA),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageB),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageA"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageB"),
        )

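    # Compatibility is directional: below, the dataset type using "test_a"
    # (which declares a converter from list) is compatible with the one using
    # "test_b", but not the other way around.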
    def testCompatibility(self):
        storageA = StorageClass("test_a", pytype=set, converters={"list": "builtins.set"})
        storageB = StorageClass("test_b", pytype=list)
        storageC = StorageClass("test_c", pytype=dict)
        self.assertTrue(storageA.can_convert(storageB))
        dimensionsA = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensionsA, storageA)
        dA2 = DatasetType("a", dimensionsA, storageB)
        self.assertNotEqual(dA, dA2)
        self.assertTrue(dA.is_compatible_with(dA))
        self.assertTrue(dA.is_compatible_with(dA2))
        self.assertFalse(dA2.is_compatible_with(dA))

        dA3 = DatasetType("a", dimensionsA, storageC)
        self.assertFalse(dA.is_compatible_with(dA3))

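    # overrideStorageClass returns a new DatasetType rather than modifying the
    # original; the override round-trips, and the parent storage class is
    # carried over when a component's storage class is overridden.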
    def testOverrideStorageClass(self):
        storageA = StorageClass("test_a", pytype=list, converters={"dict": "builtins.list"})
        storageB = StorageClass("test_b", pytype=dict)
        dimensions = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensions, storageA)
        dB = dA.overrideStorageClass(storageB)
        self.assertNotEqual(dA, dB)
        self.assertEqual(dB.storageClass, storageB)

        round_trip = dB.overrideStorageClass(storageA)
        self.assertEqual(round_trip, dA)

        # Check that parents move over.
        parent = StorageClass("composite", components={"a": storageA, "c": storageA})
        dP = DatasetType("comp", dimensions, parent)
        dP_A = dP.makeComponentDatasetType("a")
        print(dP_A)
        dp_B = dP_A.overrideStorageClass(storageB)
        self.assertEqual(dp_B.storageClass, storageB)
        self.assertEqual(dp_B.parentStorageClass, parent)

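    # JSON serialization should round-trip both plain and component dataset
    # types; DatasetType.from_json is passed the dimension universe as its
    # second argument.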
    def testJson(self):
        storageA = StorageClass("test_a")
        dimensionsA = self.universe.extract(["instrument"])
        self.assertEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType.from_json(DatasetType("a", dimensionsA, storageA).to_json(), self.universe),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType.from_json(
                DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent").to_json(),
                self.universe,
            ),
        )

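    # Sorting relies on DatasetType ordering by name; mixing dataset types
    # with plain strings is expected to raise TypeError.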
    def testSorting(self):
        """Can we sort a DatasetType?"""
        storage = StorageClass("test_a")
        dimensions = self.universe.extract(["instrument"])

        d_a = DatasetType("a", dimensions, storage)
        d_f = DatasetType("f", dimensions, storage)
        d_p = DatasetType("p", dimensions, storage)

        sort = sorted([d_p, d_f, d_a])
        self.assertEqual(sort, [d_a, d_f, d_p])

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", d_p, "c", d_f, d_a, "d"])

    def testHashability(self):
        """Test `DatasetType.__hash__`.

        This test is performed by checking that `DatasetType` entries can
        be inserted into a `set` and that unique values of its
        (`name`, `storageClass`, `dimensions`) parameters result in separate
        entries (and equal ones don't).

        This does not check for uniformity of hashing or the actual values
        of the hash function.
        """
        types = []
        unique = 0
        storageC = StorageClass("test_c")
        storageD = StorageClass("test_d")
        for name in ["a", "b"]:
            for storageClass in [storageC, storageD]:
                for dimensions in [("instrument",), ("skymap",)]:
                    datasetType = DatasetType(name, self.universe.extract(dimensions), storageClass)
                    datasetTypeCopy = DatasetType(name, self.universe.extract(dimensions), storageClass)
                    types.extend((datasetType, datasetTypeCopy))
                    unique += 1  # datasetType should always equal its copy
        self.assertEqual(len(set(types)), unique)  # all other combinations are unique

        # Also check that hashes of instances constructed with a StorageClass
        # name match hashes of instances constructed with instances.
        dimensions = self.universe.extract(["instrument"])
        self.assertEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_d"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageD)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_d"))
        )

    def testDeepCopy(self):
        """Test that we can copy a dataset type."""
        storageClass = StorageClass("test_copy")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)

        # Now with calibration flag set
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)
        self.assertTrue(dcopy.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("copy_component")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        dcopy = copy.deepcopy(componentDatasetType)
        self.assertEqual(dcopy, componentDatasetType)

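    # Pickling is exercised for plain, calibration, and component dataset
    # types, as well as for storage classes given only by name and ones
    # created through StorageClassFactory.makeNewStorageClass.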
    def testPickle(self):
        """Test pickle support."""
        storageClass = StorageClass("test_pickle")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        # Un-pickling requires that storage class is registered with factory.
        StorageClassFactory().registerStorageClass(storageClass)
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(datasetType.name, datasetTypeOut.name)
        self.assertEqual(datasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(datasetType.storageClass, datasetTypeOut.storageClass)
        self.assertIsNone(datasetTypeOut.parentStorageClass)
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertFalse(datasetTypeOut.isCalibration())

        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertTrue(datasetTypeOut.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("pickle_component")
        StorageClassFactory().registerStorageClass(componentStorageClass)
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(componentDatasetType.name, datasetTypeOut.name)
        self.assertEqual(componentDatasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(componentDatasetType.storageClass, datasetTypeOut.storageClass)
        self.assertEqual(componentDatasetType.parentStorageClass, datasetTypeOut.parentStorageClass)
        self.assertEqual(datasetTypeOut.parentStorageClass.name, storageClass.name)
        self.assertEqual(datasetTypeOut, componentDatasetType)

        # Now with a string and not a real storage class to test that
        # pickling doesn't force the StorageClass to be resolved.
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            "StrangeComponent",
            parentStorageClass="UnknownParent",
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

        # Now with a storage class that is created by the factory.
        factoryStorageClassClass = StorageClassFactory.makeNewStorageClass("ParentClass")
        factoryComponentStorageClassClass = StorageClassFactory.makeNewStorageClass("ComponentClass")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            factoryComponentStorageClassClass(),
            parentStorageClass=factoryStorageClassClass(),
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

    def test_composites(self):
        """Test components within composite DatasetTypes."""
        storageClassA = StorageClass("compA")
        storageClassB = StorageClass("compB")
        storageClass = StorageClass(
            "test_composite", components={"compA": storageClassA, "compB": storageClassB}
        )
        self.assertTrue(storageClass.isComposite())
        self.assertFalse(storageClassA.isComposite())
        self.assertFalse(storageClassB.isComposite())

        dimensions = self.universe.extract(("instrument", "visit"))

        datasetTypeComposite = DatasetType("composite", dimensions, storageClass)
        datasetTypeComponentA = datasetTypeComposite.makeComponentDatasetType("compA")
        datasetTypeComponentB = datasetTypeComposite.makeComponentDatasetType("compB")

        self.assertTrue(datasetTypeComposite.isComposite())
        self.assertFalse(datasetTypeComponentA.isComposite())
        self.assertTrue(datasetTypeComponentB.isComponent())
        self.assertFalse(datasetTypeComposite.isComponent())

        self.assertEqual(datasetTypeComposite.name, "composite")
        self.assertEqual(datasetTypeComponentA.name, "composite.compA")
        self.assertEqual(datasetTypeComponentB.component(), "compB")
        self.assertEqual(datasetTypeComposite.nameAndComponent(), ("composite", None))
        self.assertEqual(datasetTypeComponentA.nameAndComponent(), ("composite", "compA"))

        self.assertEqual(datasetTypeComponentA.parentStorageClass, storageClass)
        self.assertEqual(datasetTypeComponentB.parentStorageClass, storageClass)
        self.assertIsNone(datasetTypeComposite.parentStorageClass)

        with self.assertRaises(KeyError):
            datasetTypeComposite.makeComponentDatasetType("compF")


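# The DatasetRef tests below share a fixture: a "test" dataset type whose
# storage class is a composite ("Parent", with components "a" and "b") and a
# data ID for instrument "DummyCam" with visit 42.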
class DatasetRefTestCase(unittest.TestCase):
    """Test for DatasetRef."""

    def setUp(self):
        self.universe = DimensionUniverse()
        datasetTypeName = "test"
        self.componentStorageClass1 = StorageClass("Component1")
        self.componentStorageClass2 = StorageClass("Component2")
        self.parentStorageClass = StorageClass(
            "Parent", components={"a": self.componentStorageClass1, "b": self.componentStorageClass2}
        )
        dimensions = self.universe.extract(("instrument", "visit"))
        self.dataId = dict(instrument="DummyCam", visit=42)
        self.datasetType = DatasetType(datasetTypeName, dimensions, self.parentStorageClass)

    def testConstructor(self):
        """Test that construction preserves and validates values."""
        # Construct an unresolved ref.
        ref = DatasetRef(self.datasetType, self.dataId)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsInstance(ref.dataId, DataCoordinate)
        # Constructing an unresolved ref with run and/or components should
        # fail.
        run = "somerun"
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, run=run)
        # Passing a data ID that is missing dimensions should fail.
        with self.assertRaises(KeyError):
            DatasetRef(self.datasetType, {"instrument": "DummyCam"})
        # Constructing a resolved ref should preserve run as well as
        # everything else.
        id_ = uuid.uuid4()
        ref = DatasetRef(self.datasetType, self.dataId, id=id_, run=run)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsInstance(ref.dataId, DataCoordinate)
        self.assertEqual(ref.id, id_)
        self.assertEqual(ref.run, run)

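    # Unresolved refs sort on their data IDs; once a run is attached, refs
    # sort on run before visit.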
    def testSorting(self):
        """Can we sort a DatasetRef?"""
        ref1 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=1))
        ref2 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=10))
        ref3 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=22))

        # Enable detailed diff report
        self.maxDiff = None

        # This will sort them on visit number
        sort = sorted([ref3, ref1, ref2])
        self.assertEqual(sort, [ref1, ref2, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now include a run
        ref1 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=43), run="b", id=uuid.uuid4())
        self.assertEqual(ref1.run, "b")
        ref4 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=10), run="b", id=uuid.uuid4())
        ref2 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=4), run="a", id=uuid.uuid4())
        ref3 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=104), run="c", id=uuid.uuid4())

        # This will sort them on run before visit
        sort = sorted([ref3, ref1, ref2, ref4])
        self.assertEqual(sort, [ref2, ref4, ref1, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", ref1, "c"])

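    # unresolved() strips the id and run from a resolved ref; resolved()
    # restores them, round-tripping back to an equal ref.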
    def testResolving(self):
        id_ = uuid.uuid4()
        ref = DatasetRef(self.datasetType, self.dataId, id=id_, run="somerun")
        unresolvedRef = ref.unresolved()
        self.assertIsNotNone(ref.id)
        self.assertIsNone(unresolvedRef.id)
        self.assertIsNone(unresolvedRef.run)
        self.assertNotEqual(ref, unresolvedRef)
        self.assertEqual(ref.unresolved(), unresolvedRef)
        self.assertEqual(ref.datasetType, unresolvedRef.datasetType)
        self.assertEqual(ref.dataId, unresolvedRef.dataId)
        reresolvedRef = unresolvedRef.resolved(id=id_, run="somerun")
        self.assertEqual(ref, reresolvedRef)
        self.assertEqual(reresolvedRef.unresolved(), unresolvedRef)
        self.assertIsNotNone(reresolvedRef.run)

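    # Overriding a ref's storage class produces a new, unequal ref whose
    # dataset type carries the new storage class; an incompatible storage
    # class is rejected with ValueError.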
    def testOverrideStorageClass(self):
        storageA = StorageClass("test_a", pytype=list)

        ref = DatasetRef(self.datasetType, self.dataId, id=uuid.uuid4(), run="somerun")

        ref_new = ref.overrideStorageClass(storageA)
        self.assertNotEqual(ref, ref_new)
        self.assertEqual(ref_new.datasetType.storageClass, storageA)
        self.assertEqual(ref_new.overrideStorageClass(ref.datasetType.storageClass), ref)

        incompatible_sc = StorageClass("my_int", pytype=int)
        with self.assertRaises(ValueError):
            # Do not test against "ref" because it has a default storage class
            # of "object" which is compatible with everything.
            ref_new.overrideStorageClass(incompatible_sc)

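    # Resolved refs are expected to round-trip unchanged through both pickle
    # and JSON serialization.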
    def testPickle(self):
        ref = DatasetRef(self.datasetType, self.dataId, id=uuid.uuid4(), run="somerun")
        s = pickle.dumps(ref)
        self.assertEqual(pickle.loads(s), ref)

    def testJson(self):
        ref = DatasetRef(self.datasetType, self.dataId, id=uuid.uuid4(), run="somerun")
        s = ref.to_json()
        self.assertEqual(DatasetRef.from_json(s, universe=self.universe), ref)


if __name__ == "__main__":
    unittest.main()