Coverage for python/lsst/pipe/base/graph/_versionDeserializers.py: 27%
# This file is part of pipe_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ("DESERIALIZER_MAP",)

import json
import lzma
import pickle
import struct
import uuid
from abc import ABC, abstractmethod
from collections import defaultdict
from collections.abc import Callable
from dataclasses import dataclass
from types import SimpleNamespace
from typing import TYPE_CHECKING, ClassVar, cast

import networkx as nx
from lsst.daf.butler import (
    DatasetRef,
    DatasetType,
    DimensionConfig,
    DimensionRecord,
    DimensionUniverse,
    Quantum,
    SerializedDimensionRecord,
)
from lsst.utils import doImportType

from ..config import PipelineTaskConfig
from ..pipeline import TaskDef
from ..pipelineTask import PipelineTask
from ._implDetails import DatasetTypeName, _DatasetTracker
from .quantumNode import QuantumNode, SerializedQuantumNode

if TYPE_CHECKING:
    from .graph import QuantumGraph


class StructSizeDescriptor:
    """A class-level property that reports the size (number of bytes) of
    whatever the format string is for a given deserializer.
    """

    def __get__(self, inst: DeserializerBase | None, owner: type[DeserializerBase]) -> int:
        return struct.calcsize(owner.FMT_STRING())
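

# For example, ``DeserializerV1.FMT_STRING()`` below returns ">QQ" (two big
# endian unsigned 64 bit integers), so ``DeserializerV1.structSize``
# evaluates to struct.calcsize(">QQ"), i.e. 16 bytes.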


@dataclass
class DeserializerBase(ABC):
    @classmethod
    @abstractmethod
    def FMT_STRING(cls) -> str:  # noqa: N805 # flake8 wants self
        raise NotImplementedError("Base class does not implement this method")

    structSize: ClassVar[StructSizeDescriptor]

    preambleSize: int
    sizeBytes: bytes

    def __init_subclass__(cls) -> None:
        # attach the size descriptor
        cls.structSize = StructSizeDescriptor()
        super().__init_subclass__()

    def unpackHeader(self, rawHeader: bytes) -> str | None:
        """Transform the raw bytes corresponding to the header of a save
        into a string of the header information.

        Parameters
        ----------
        rawHeader : `bytes`
            The bytes that are to be parsed into the header information.
            These are the bytes after the preamble and the ``structSize``
            bytes, and before the ``headerSize`` boundary.

        Returns
        -------
        header : `str` or `None`
            Header information as a string. Returns `None` if the save
            format has no header string implementation (such as save
            format 1, which is entirely pickle).
        """
        raise NotImplementedError("Base class does not implement this method")

    @property
    def headerSize(self) -> int:
        """Returns the number of bytes from the beginning of the file to the
        end of the metadata.
        """
        raise NotImplementedError("Base class does not implement this method")

    def readHeaderInfo(self, rawHeader: bytes) -> SimpleNamespace:
        """Parse the supplied raw bytes into the header information and
        the byte ranges of specific TaskDefs and QuantumNodes.

        Parameters
        ----------
        rawHeader : `bytes`
            The bytes that are to be parsed into the header information.
            These are the bytes after the preamble and the ``structSize``
            bytes, and before the ``headerSize`` boundary.
        """
        raise NotImplementedError("Base class does not implement this method")

    def constructGraph(
        self,
        nodes: set[uuid.UUID],
        _readBytes: Callable[[int, int], bytes],
        universe: DimensionUniverse | None = None,
    ) -> QuantumGraph:
        """Construct a graph from the deserialized information.

        Parameters
        ----------
        nodes : `set` of `uuid.UUID`
            The nodes to include in the graph.
        _readBytes : callable
            A callable that can be used to read bytes from the file handle.
            The callable takes two ints, start and stop, to use as the
            numerical bounds to read, and returns a byte stream.
        universe : `~lsst.daf.butler.DimensionUniverse`, optional
            The singleton of all dimensions known to the middleware
            registry.
        """
        raise NotImplementedError("Base class does not implement this method")

    def description(self) -> str:
        """Return the description of the serialized data format."""
        raise NotImplementedError("Base class does not implement this method")
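

# A minimal sketch (illustrative only, not part of this module's API) of how
# a ``_readBytes`` callable matching the contract documented in
# ``DeserializerBase.constructGraph`` could be built over an open binary file
# handle. The real callable is assumed to be supplied by the graph loader
# (see ``graph.py``).
def _exampleMakeReadBytes(fileHandle) -> Callable[[int, int], bytes]:
    def _readBytes(start: int, stop: int) -> bytes:
        # Seek to the absolute start offset and return the bytes up to stop.
        fileHandle.seek(start)
        return fileHandle.read(stop - start)

    return _readBytes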


Version1Description = """
The save file starts with the first few bytes corresponding to the magic bytes
in the QuantumGraph: `qgraph4\xf6\xe8\xa9`.

The next few bytes are 2 big endian unsigned 64 bit integers.

The first unsigned 64 bit integer corresponds to the number of bytes of a
python mapping of TaskDef labels to the byte ranges in the save file where the
definition can be loaded.

The second unsigned 64 bit integer corresponds to the number of bytes of a
python mapping of QuantumGraph Node number to the byte ranges in the save file
where the node can be loaded. The byte ranges are indexed starting after
the `header` bytes, that is, after the magic bytes, the size bytes, and the
bytes of the two mappings.

Each of the above mappings is pickled and then lzma compressed, so to
deserialize the bytes, lzma decompression must be performed first and the
result passed to the python pickle loader.

As stated above, each map contains byte ranges of the corresponding
data structure. These bytes are also lzma compressed pickles, and should
be deserialized in a similar manner.

In addition to the TaskDef byte locations, the TaskDef map also contains
an additional key '__GraphBuildID'. The value associated with this key is the
unique id assigned to the graph at its creation time.
"""
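

# A minimal sketch (illustrative only, not part of this module's API) of
# recovering one payload from a version 1 save, given a (start, stop) byte
# range from one of the header mappings described above. ``data`` and
# ``headerSize`` are assumed stand-ins for the real file access and offset
# bookkeeping performed by ``DeserializerV1`` below.
def _exampleLoadV1Payload(data: bytes, headerSize: int, start: int, stop: int) -> object:
    # Byte ranges are indexed after the header, and each payload is an lzma
    # compressed pickle, so shift the range, decompress, then unpickle.
    return pickle.loads(lzma.decompress(data[headerSize + start : headerSize + stop]))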


@dataclass
class DeserializerV1(DeserializerBase):
    @classmethod
    def FMT_STRING(cls) -> str:
        return ">QQ"

    def __post_init__(self) -> None:
        self.taskDefMapSize, self.nodeMapSize = struct.unpack(self.FMT_STRING(), self.sizeBytes)

    @property
    def headerSize(self) -> int:
        return self.preambleSize + self.structSize + self.taskDefMapSize + self.nodeMapSize

    def readHeaderInfo(self, rawHeader: bytes) -> SimpleNamespace:
        returnValue = SimpleNamespace()
        returnValue.taskDefMap = pickle.loads(rawHeader[: self.taskDefMapSize])
        returnValue._buildId = returnValue.taskDefMap["__GraphBuildID"]
        returnValue.map = pickle.loads(rawHeader[self.taskDefMapSize :])
        returnValue.metadata = None
        self.returnValue = returnValue
        return returnValue

    def unpackHeader(self, rawHeader: bytes) -> str | None:
        return None

    def constructGraph(
        self,
        nodes: set[uuid.UUID],
        _readBytes: Callable[[int, int], bytes],
        universe: DimensionUniverse | None = None,
    ) -> QuantumGraph:
        # need to import here to avoid cyclic imports
        from . import QuantumGraph

        quanta: defaultdict[TaskDef, set[Quantum]] = defaultdict(set)
        quantumToNodeId: dict[Quantum, uuid.UUID] = {}
        loadedTaskDef = {}
        # loop over the nodes specified above
        for node in nodes:
            # Get the byte range to read from the node map
            start, stop = self.returnValue.map[node]
            start += self.headerSize
            stop += self.headerSize

            # Read the specified bytes; they are compressed, so decompress
            # them
            dump = lzma.decompress(_readBytes(start, stop))

            # reconstruct node
            qNode = pickle.loads(dump)
            object.__setattr__(qNode, "nodeId", uuid.uuid4())

            # Get the taskDef associated with the saved node. If it has
            # already been loaded, attach it; if not, read it in first and
            # then attach it.
            nodeTask = qNode.taskDef
            if nodeTask not in loadedTaskDef:
                # Get the byte ranges corresponding to this taskDef
                start, stop = self.returnValue.taskDefMap[nodeTask]
                start += self.headerSize
                stop += self.headerSize

                # Load the taskDef; the bytes are compressed, so decompress
                # them first
                taskDef = pickle.loads(lzma.decompress(_readBytes(start, stop)))
                loadedTaskDef[nodeTask] = taskDef
            # Explicitly override the "frozen-ness" of nodes to attach the
            # taskDef back onto the un-persisted node
            object.__setattr__(qNode, "taskDef", loadedTaskDef[nodeTask])
            quanta[qNode.taskDef].add(qNode.quantum)

            # record the node for later processing
            quantumToNodeId[qNode.quantum] = qNode.nodeId

        # construct an empty new QuantumGraph object, and run the associated
        # creation method with the un-persisted data
        qGraph = object.__new__(QuantumGraph)
        qGraph._buildGraphs(
            quanta,
            _quantumToNodeId=quantumToNodeId,
            _buildId=self.returnValue._buildId,
            metadata=self.returnValue.metadata,
            universe=universe,
        )
        return qGraph

    def description(self) -> str:
        return Version1Description


Version2Description = """
The save file starts with the first few bytes corresponding to the magic bytes
in the QuantumGraph: `qgraph4\xf6\xe8\xa9`.

The next few bytes are a big endian unsigned long long.

The unsigned long long corresponds to the number of bytes of a python mapping
of header information. This mapping is encoded into json and then lzma
compressed, meaning the operations must be performed in the opposite order to
deserialize.

The json encoded header mapping contains 4 fields: TaskDefs, GraphBuildID,
Nodes, and Metadata.

The `TaskDefs` key corresponds to a value which is a mapping of Task label to
task data. The task data is a mapping of key to value, where the only key is
`bytes`, and it corresponds to a tuple of the start, stop byte range (indexed
after all the header bytes).

The `GraphBuildID` key corresponds to a string that is the unique id assigned
to this graph when it was created.

The `Nodes` key is like the `TaskDefs` key except it corresponds to
QuantumNodes instead of TaskDefs. Another important difference is that JSON
formatting does not allow using numbers as keys, and this mapping is keyed by
the node number. Thus it is stored in JSON as a list of key, value pairs from
which the python mapping can be rebuilt.

The `Metadata` key is a mapping of strings to associated values. This metadata
may be anything that is important to be transported alongside the graph.

As stated above, each map contains byte ranges of the corresponding
data structure. These bytes are also lzma compressed pickles, and should
be deserialized in a similar manner.
"""
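

# A minimal sketch (illustrative only, not part of this module's API) of
# decoding a version 2 header per the description above: the header is json
# that was lzma compressed, so the operations are reversed on read. The real
# parsing is ``DeserializerV2.readHeaderInfo`` below.
def _exampleReadV2Header(rawHeader: bytes) -> dict:
    header = json.loads(lzma.decompress(rawHeader).decode())
    # ``Nodes`` arrives as key, value pairs; rebuild the python mapping.
    header["Nodes"] = dict(header["Nodes"])
    return header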


@dataclass
class DeserializerV2(DeserializerBase):
    @classmethod
    def FMT_STRING(cls) -> str:
        return ">Q"

    def __post_init__(self) -> None:
        (self.mapSize,) = struct.unpack(self.FMT_STRING(), self.sizeBytes)

    @property
    def headerSize(self) -> int:
        return self.preambleSize + self.structSize + self.mapSize

    def readHeaderInfo(self, rawHeader: bytes) -> SimpleNamespace:
        uncompressedHeaderMap = self.unpackHeader(rawHeader)
        if uncompressedHeaderMap is None:
            raise ValueError(
                "This error should not be possible: DeserializerV2.unpackHeader never returns None."
                " This check exists only to satisfy type checkers."
            )
        header = json.loads(uncompressedHeaderMap)
        returnValue = SimpleNamespace()
        returnValue.taskDefMap = header["TaskDefs"]
        returnValue._buildId = header["GraphBuildID"]
        returnValue.map = dict(header["Nodes"])
        returnValue.metadata = header["Metadata"]
        self.returnValue = returnValue
        return returnValue

    def unpackHeader(self, rawHeader: bytes) -> str | None:
        return lzma.decompress(rawHeader).decode()

    def constructGraph(
        self,
        nodes: set[uuid.UUID],
        _readBytes: Callable[[int, int], bytes],
        universe: DimensionUniverse | None = None,
    ) -> QuantumGraph:
        # need to import here to avoid cyclic imports
        from . import QuantumGraph

        quanta: defaultdict[TaskDef, set[Quantum]] = defaultdict(set)
        quantumToNodeId: dict[Quantum, uuid.UUID] = {}
        loadedTaskDef = {}
        # loop over the nodes specified above
        for node in nodes:
            # Get the byte range to read from the node map
            start, stop = self.returnValue.map[node]["bytes"]
            start += self.headerSize
            stop += self.headerSize

            # Read the specified bytes; they are compressed, so decompress
            # them
            dump = lzma.decompress(_readBytes(start, stop))

            # reconstruct node
            qNode = pickle.loads(dump)
            object.__setattr__(qNode, "nodeId", uuid.uuid4())

            # Get the taskDef associated with the saved node. If it has
            # already been loaded, attach it; if not, read it in first and
            # then attach it.
            nodeTask = qNode.taskDef
            if nodeTask not in loadedTaskDef:
                # Get the byte ranges corresponding to this taskDef
                start, stop = self.returnValue.taskDefMap[nodeTask]["bytes"]
                start += self.headerSize
                stop += self.headerSize

                # Load the taskDef; the bytes are compressed, so decompress
                # them first
                taskDef = pickle.loads(lzma.decompress(_readBytes(start, stop)))
                loadedTaskDef[nodeTask] = taskDef
            # Explicitly override the "frozen-ness" of nodes to attach the
            # taskDef back onto the un-persisted node
            object.__setattr__(qNode, "taskDef", loadedTaskDef[nodeTask])
            quanta[qNode.taskDef].add(qNode.quantum)

            # record the node for later processing
            quantumToNodeId[qNode.quantum] = qNode.nodeId

        # construct an empty new QuantumGraph object, and run the associated
        # creation method with the un-persisted data
        qGraph = object.__new__(QuantumGraph)
        qGraph._buildGraphs(
            quanta,
            _quantumToNodeId=quantumToNodeId,
            _buildId=self.returnValue._buildId,
            metadata=self.returnValue.metadata,
            universe=universe,
        )
        return qGraph

    def description(self) -> str:
        return Version2Description


Version3Description = """
The save file starts with the first few bytes corresponding to the magic bytes
in the QuantumGraph: `qgraph4\xf6\xe8\xa9`.

The next few bytes are a big endian unsigned long long.

The unsigned long long corresponds to the number of bytes of a mapping
of header information. This mapping is encoded into json and then lzma
compressed, meaning the operations must be performed in the opposite order to
deserialize.

The json encoded header mapping contains 5 fields: GraphBuildID, TaskDefs,
Nodes, Metadata, and DimensionRecords. A version 3 header may also contain
the optional keys universe, GlobalInitOutputRefs, and RegistryDatasetTypes,
which are handled in `readHeaderInfo` below when present.

The `GraphBuildID` key corresponds to a string that is the unique id assigned
to this graph when it was created.

The `TaskDefs` key corresponds to a value which is a mapping of Task label to
task data. The task data is a mapping of key to value. The keys of this mapping
are `bytes`, `inputs`, and `outputs`.

The `TaskDefs` `bytes` key corresponds to a tuple of the start, stop byte
range (indexed after all the header bytes). This byte range corresponds to
an lzma compressed json mapping. This mapping has keys of `taskName`,
corresponding to a fully qualified python class, `config`, a pex_config
string that is used to configure the class, and `label`, which corresponds
to a string that uniquely identifies the task within a given execution
pipeline.

The `TaskDefs` `inputs` key is associated with a list of tuples where each
tuple is the label of a task that is considered as coming before a given
task, and the name of the dataset that is shared between the tasks (think
node and edge in a graph sense).

The `TaskDefs` `outputs` key is like `inputs` except the values in the list
correspond to all the output connections of a task.

The `Nodes` key is also a json mapping, like the `TaskDefs` mapping except
that it corresponds to QuantumNodes instead of TaskDefs; its keys are string
representations of the UUIDs of the QuantumNodes. The values associated with
these keys are another mapping with the keys `bytes`, `inputs`, and
`outputs`.

The `Nodes` `bytes` key corresponds to a tuple of the start, stop byte range
(indexed after all the header bytes). These bytes are an lzma compressed json
mapping which contains many sub elements; this mapping will be referred to
as a SerializedQuantumNode (related to the python class it corresponds to).

SerializedQuantumNodes have 3 keys: `quantum`, corresponding to a json
mapping (described below) referred to as a SerializedQuantum; `taskLabel`, a
string which corresponds to a label in the `TaskDefs` mapping; and `nodeId`.

A SerializedQuantum has many keys: taskName, dataId, datasetTypeMapping,
initInputs, inputs, outputs, and dimensionRecords.

The `Metadata` key is a mapping of strings to associated values. This metadata
may be anything that is important to be transported alongside the graph.

As stated above, each map contains byte ranges of the corresponding
data structure. These bytes are lzma compressed json mappings, and should
be deserialized in a similar manner.
"""
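

# A minimal sketch (illustrative only, not part of this module's API) of
# decoding a version 3 header per the description above and rebuilding the
# node mapping with real UUID keys, as ``DeserializerV3.readHeaderInfo``
# below does.
def _exampleReadV3NodeMap(rawHeader: bytes) -> dict[uuid.UUID, dict]:
    infoMap = json.loads(lzma.decompress(rawHeader).decode())
    # Node UUIDs are stored as strings (json cannot key on UUIDs), so
    # convert each key back into a `uuid.UUID`.
    return {uuid.UUID(k): v for k, v in infoMap["Nodes"]}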


@dataclass
class DeserializerV3(DeserializerBase):
    @classmethod
    def FMT_STRING(cls) -> str:
        return ">Q"

    def __post_init__(self) -> None:
        self.infoSize: int
        (self.infoSize,) = struct.unpack(self.FMT_STRING(), self.sizeBytes)

    @property
    def headerSize(self) -> int:
        return self.preambleSize + self.structSize + self.infoSize

    def readHeaderInfo(self, rawHeader: bytes) -> SimpleNamespace:
        uncompressedInfoMap = self.unpackHeader(rawHeader)
        assert uncompressedInfoMap is not None  # for python typing; this value can't be None
        infoMap = json.loads(uncompressedInfoMap)
        infoMappings = SimpleNamespace()
        infoMappings.taskDefMap = infoMap["TaskDefs"]
        infoMappings._buildId = infoMap["GraphBuildID"]
        infoMappings.map = {uuid.UUID(k): v for k, v in infoMap["Nodes"]}
        infoMappings.metadata = infoMap["Metadata"]
        infoMappings.dimensionRecords = {}
        for k, v in infoMap["DimensionRecords"].items():
            infoMappings.dimensionRecords[int(k)] = SerializedDimensionRecord(**v)
        # It is important that this is a `get` call, so that saved quantum
        # graphs that do not include a saved universe are still supported
        # without changing the save format
        if (universeConfig := infoMap.get("universe")) is not None:
            universe = DimensionUniverse(config=DimensionConfig(universeConfig))
        else:
            universe = DimensionUniverse()
        infoMappings.universe = universe
        infoMappings.globalInitOutputRefs = []
        if (json_refs := infoMap.get("GlobalInitOutputRefs")) is not None:
            infoMappings.globalInitOutputRefs = [
                DatasetRef.from_json(json_ref, universe=universe) for json_ref in json_refs
            ]
        infoMappings.registryDatasetTypes = []
        if (json_refs := infoMap.get("RegistryDatasetTypes")) is not None:
            infoMappings.registryDatasetTypes = [
                DatasetType.from_json(json_ref, universe=universe) for json_ref in json_refs
            ]
        self.infoMappings = infoMappings
        return infoMappings

    def unpackHeader(self, rawHeader: bytes) -> str | None:
        return lzma.decompress(rawHeader).decode()

    def constructGraph(
        self,
        nodes: set[uuid.UUID],
        _readBytes: Callable[[int, int], bytes],
        universe: DimensionUniverse | None = None,
    ) -> QuantumGraph:
        # need to import here to avoid cyclic imports
        from . import QuantumGraph

        graph = nx.DiGraph()
        loadedTaskDef: dict[str, TaskDef] = {}
        container = {}
        datasetDict = _DatasetTracker[DatasetTypeName, TaskDef](createInverse=True)
        taskToQuantumNode: defaultdict[TaskDef, set[QuantumNode]] = defaultdict(set)
        reconstitutedDimensions: dict[int, tuple[str, DimensionRecord]] = {}
        initInputRefs: dict[TaskDef, list[DatasetRef]] = {}
        initOutputRefs: dict[TaskDef, list[DatasetRef]] = {}

        if universe is not None:
            if not universe.isCompatibleWith(self.infoMappings.universe):
                saved = self.infoMappings.universe
                raise RuntimeError(
                    f"The saved dimension universe ({saved.namespace}@v{saved.version}) is not "
                    f"compatible with the supplied universe ({universe.namespace}@v{universe.version})."
                )
        else:
            universe = self.infoMappings.universe

        for node in nodes:
            start, stop = self.infoMappings.map[node]["bytes"]
            start, stop = start + self.headerSize, stop + self.headerSize
            # Read in the bytes corresponding to the node to load and
            # decompress it
            dump = json.loads(lzma.decompress(_readBytes(start, stop)))

            # Turn the json back into the pydantic model
            nodeDeserialized = SerializedQuantumNode.direct(**dump)
            # Attach the dictionary of dimension records to the pydantic
            # model; these are stored separately because they are repeated
            # over and over, and storing them once saves a lot of space and
            # time.
            nodeDeserialized.quantum.dimensionRecords = self.infoMappings.dimensionRecords
            # get the label for the current task
            nodeTaskLabel = nodeDeserialized.taskLabel

            if nodeTaskLabel not in loadedTaskDef:
                # Get the byte ranges corresponding to this taskDef
                start, stop = self.infoMappings.taskDefMap[nodeTaskLabel]["bytes"]
                start, stop = start + self.headerSize, stop + self.headerSize

                # bytes are compressed, so decompress them
                taskDefDump = json.loads(lzma.decompress(_readBytes(start, stop)))
                taskClass: type[PipelineTask] = doImportType(taskDefDump["taskName"])
                config: PipelineTaskConfig = taskClass.ConfigClass()
                config.loadFromStream(taskDefDump["config"])
                # Rebuild TaskDef
                recreatedTaskDef = TaskDef(
                    taskName=taskDefDump["taskName"],
                    taskClass=taskClass,
                    config=config,
                    label=taskDefDump["label"],
                )
                loadedTaskDef[nodeTaskLabel] = recreatedTaskDef

                # initInputRefs and initOutputRefs are optional
                if (refs := taskDefDump.get("initInputRefs")) is not None:
                    initInputRefs[recreatedTaskDef] = [
                        cast(DatasetRef, DatasetRef.from_json(ref, universe=universe)) for ref in refs
                    ]
                if (refs := taskDefDump.get("initOutputRefs")) is not None:
                    initOutputRefs[recreatedTaskDef] = [
                        cast(DatasetRef, DatasetRef.from_json(ref, universe=universe)) for ref in refs
                    ]

                # rebuild the mappings that associate dataset type names with
                # TaskDefs
                for _, input in self.infoMappings.taskDefMap[nodeTaskLabel]["inputs"]:
                    datasetDict.addConsumer(DatasetTypeName(input), recreatedTaskDef)

                added = set()
                for outputConnection in self.infoMappings.taskDefMap[nodeTaskLabel]["outputs"]:
                    typeName = outputConnection[1]
                    if typeName not in added:
                        added.add(typeName)
                        datasetDict.addProducer(DatasetTypeName(typeName), recreatedTaskDef)

            # reconstitute the node, passing in the dictionaries for the
            # loaded TaskDefs and dimension records. These are used to ensure
            # that each unique record is only loaded once
            qnode = QuantumNode.from_simple(nodeDeserialized, loadedTaskDef, universe, reconstitutedDimensions)
            container[qnode.nodeId] = qnode
            taskToQuantumNode[loadedTaskDef[nodeTaskLabel]].add(qnode)

            # recreate the relations between each node from stored info
            graph.add_node(qnode)
            for id in self.infoMappings.map[qnode.nodeId]["inputs"]:
                # uuid is stored as a string; turn it back into a uuid
                id = uuid.UUID(id)
                # If the id is not yet in the container, don't make a
                # connection; this is not an issue, because once it is, that
                # id will add the reverse connection
                if id in container:
                    graph.add_edge(container[id], qnode)
            for id in self.infoMappings.map[qnode.nodeId]["outputs"]:
                # uuid is stored as a string; turn it back into a uuid
                id = uuid.UUID(id)
                # If the id is not yet in the container, don't make a
                # connection; this is not an issue, because once it is, that
                # id will add the reverse connection
                if id in container:
                    graph.add_edge(qnode, container[id])

        newGraph = object.__new__(QuantumGraph)
        newGraph._metadata = self.infoMappings.metadata
        newGraph._buildId = self.infoMappings._buildId
        newGraph._datasetDict = datasetDict
        newGraph._nodeIdMap = container
        newGraph._count = len(nodes)
        newGraph._taskToQuantumNode = dict(taskToQuantumNode.items())
        newGraph._taskGraph = datasetDict.makeNetworkXGraph()
        newGraph._connectedQuanta = graph
        newGraph._initInputRefs = initInputRefs
        newGraph._initOutputRefs = initOutputRefs
        newGraph._globalInitOutputRefs = self.infoMappings.globalInitOutputRefs
        newGraph._registryDatasetTypes = self.infoMappings.registryDatasetTypes
        newGraph._universe = universe
        return newGraph

    def description(self) -> str:
        return Version3Description


DESERIALIZER_MAP: dict[int, type[DeserializerBase]] = {
    1: DeserializerV1,
    2: DeserializerV2,
    3: DeserializerV3,
}
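

# A minimal usage sketch (illustrative only, not part of this module's API):
# the graph loader is assumed to have already read the preamble, the
# save-format version, and the raw size bytes (that logic lives in
# ``graph.py``); given those, the matching deserializer is looked up in
# DESERIALIZER_MAP and constructed.
def _exampleMakeDeserializer(version: int, preambleSize: int, sizeBytes: bytes) -> DeserializerBase:
    deserializerClass = DESERIALIZER_MAP[version]
    # Each deserializer is a dataclass taking (preambleSize, sizeBytes);
    # __post_init__ unpacks sizeBytes per the class's FMT_STRING().
    return deserializerClass(preambleSize, sizeBytes)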