Coverage for python/lsst/daf/butler/formatters/json.py: 75%
45 statements
« prev ^ index » next coverage.py v7.2.5, created at 2023-05-15 00:09 +0000
« prev ^ index » next coverage.py v7.2.5, created at 2023-05-15 00:09 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22from __future__ import annotations
24__all__ = ("JsonFormatter",)
26import builtins
27import dataclasses
28import json
29from typing import TYPE_CHECKING, Any, Optional, Type
31from .file import FileFormatter
if TYPE_CHECKING:
34 from lsst.daf.butler import StorageClass
class JsonFormatter(FileFormatter):
    """Formatter implementation for JSON files."""

    extension = ".json"

    unsupportedParameters = None
    """This formatter does not support any parameters (`frozenset`)"""

    def _readFile(self, path: str, pytype: Optional[Type[Any]] = None) -> Any:
        """Read a file from the path in JSON format.

        Parameters
        ----------
        path : `str`
            Path to use to open JSON format file.
        pytype : `class`, optional
            Not used by this implementation.

        Returns
        -------
        data : `object`
            Data as Python object read from JSON file, or `None` if the
            content could not be parsed as JSON.
        """
        # Read raw bytes and delegate parsing to _fromBytes so that the
        # file-based and bytes-based read paths share one implementation.
        with open(path, "rb") as fd:
            data = self._fromBytes(fd.read(), pytype)

        return data

    def _writeFile(self, inMemoryDataset: Any) -> None:
        """Write the in memory dataset to file on disk.

        Will look for `_asdict()` method to aid JSON serialization, following
        the approach of the simplejson module.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to serialize.

        Raises
        ------
        Exception
            The file could not be written.
        """
        self.fileDescriptor.location.uri.write(self._toBytes(inMemoryDataset))

    def _fromBytes(self, serializedDataset: bytes, pytype: Optional[Type[Any]] = None) -> Any:
        """Read the bytes object as a python object.

        Parameters
        ----------
        serializedDataset : `bytes`
            Bytes object to unserialize.
        pytype : `class`, optional
            Not used by this implementation.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object or None if the string could
            not be read.
        """
        try:
            data = json.loads(serializedDataset)
        except json.JSONDecodeError:
            # Callers treat None as "could not be read"; do not propagate.
            data = None

        return data

    def _toBytes(self, inMemoryDataset: Any) -> bytes:
        """Write the in memory dataset to a bytestring.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to serialize.

        Returns
        -------
        serializedDataset : `bytes`
            Bytes representing the serialized dataset.

        Raises
        ------
        Exception
            The object could not be serialized.
        """
        # For example, Pydantic models have a .json method so use it.
        try:
            return inMemoryDataset.json().encode()
        except AttributeError:
            pass

        # Dataclass instances are not natively JSON-serializable, yet
        # _coerceType reconstructs dataclasses on read; convert to a plain
        # dict here so dataclasses round-trip through this formatter.
        # (Guard against dataclass *types*, for which asdict would raise.)
        if dataclasses.is_dataclass(inMemoryDataset) and not isinstance(inMemoryDataset, type):
            inMemoryDataset = dataclasses.asdict(inMemoryDataset)
        elif hasattr(inMemoryDataset, "_asdict"):
            # namedtuple-style conversion, following simplejson.
            inMemoryDataset = inMemoryDataset._asdict()
        return json.dumps(inMemoryDataset, ensure_ascii=False).encode()

    def _coerceType(
        self, inMemoryDataset: Any, writeStorageClass: StorageClass, readStorageClass: StorageClass
    ) -> Any:
        """Coerce the supplied inMemoryDataset to the correct python type.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to coerce to expected type.
        writeStorageClass : `StorageClass`
            Storage class used to serialize this data.
        readStorageClass : `StorageClass`
            Storage class requested as the outcome.

        Returns
        -------
        inMemoryDataset : `object`
            Object of expected type ``readStorageClass.pytype``.
        """
        # Builtin read types (dict, list, str, ...) need no conversion;
        # json.loads already produced them.
        if inMemoryDataset is not None and not hasattr(builtins, readStorageClass.pytype.__name__):
            if writeStorageClass.isComposite():
                # We know we must be able to assemble the written
                # storage class. Coerce later to the read type.
                inMemoryDataset = writeStorageClass.delegate().assemble(
                    inMemoryDataset, pytype=writeStorageClass.pytype
                )
            elif not isinstance(inMemoryDataset, readStorageClass.pytype):
                # JSON data are returned as simple python types.
                # The content will match the written storage class.
                # Pydantic models have their own scheme.
                try:
                    inMemoryDataset = writeStorageClass.pytype.parse_obj(inMemoryDataset)
                except AttributeError:
                    if dataclasses.is_dataclass(writeStorageClass.pytype):
                        # dataclasses accept key/value parameters.
                        inMemoryDataset = writeStorageClass.pytype(**inMemoryDataset)
                    else:
                        # Hope that we can pass the arguments in directly.
                        inMemoryDataset = writeStorageClass.pytype(inMemoryDataset)
        # Coerce to the read storage class if necessary.
        return readStorageClass.coerce_type(inMemoryDataset)