Coverage for python/lsst/daf/butler/formatters/json.py: 81%
43 statements
« prev ^ index » next coverage.py v6.4.2, created at 2022-07-23 02:26 -0700
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ("JsonFormatter",)

import builtins
import json
from typing import TYPE_CHECKING, Any, Optional, Type

from .file import FileFormatter

if TYPE_CHECKING:
    from lsst.daf.butler import StorageClass
class JsonFormatter(FileFormatter):
    """Formatter implementation for JSON files."""

    extension = ".json"

    unsupportedParameters = None
    """This formatter does not support any parameters (`frozenset`)"""

    def _readFile(self, path: str, pytype: Optional[Type[Any]] = None) -> Any:
        """Read a file from the path in JSON format.

        Parameters
        ----------
        path : `str`
            Path to use to open JSON format file.
        pytype : `class`, optional
            Not used by this implementation.

        Returns
        -------
        data : `object`
            Data as Python object read from JSON file, or `None` if the
            contents could not be parsed as JSON.
        """
        # Read raw bytes and delegate parsing so that file-based and
        # bytes-based reads share a single code path.
        with open(path, "rb") as fd:
            data = self._fromBytes(fd.read(), pytype)

        return data

    def _writeFile(self, inMemoryDataset: Any) -> None:
        """Write the in memory dataset to file on disk.

        Will look for `_asdict()` method to aid JSON serialization, following
        the approach of the simplejson module.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to serialize.

        Raises
        ------
        Exception
            The file could not be written.
        """
        self.fileDescriptor.location.uri.write(self._toBytes(inMemoryDataset))

    def _fromBytes(self, serializedDataset: bytes, pytype: Optional[Type[Any]] = None) -> Any:
        """Read the bytes object as a python object.

        Parameters
        ----------
        serializedDataset : `bytes`
            Bytes object to unserialize.
        pytype : `class`, optional
            Not used by this implementation.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object or None if the string could
            not be read.
        """
        # Malformed JSON is deliberately reported as `None` rather than
        # raising; callers are expected to check for it.
        try:
            data = json.loads(serializedDataset)
        except json.JSONDecodeError:
            data = None

        return data

    def _toBytes(self, inMemoryDataset: Any) -> bytes:
        """Write the in memory dataset to a bytestring.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to serialize.

        Returns
        -------
        serializedDataset : `bytes`
            bytes representing the serialized dataset.

        Raises
        ------
        Exception
            The object could not be serialized.
        """
        # For example, Pydantic models have a .json method so use it.
        # Look the method up explicitly rather than wrapping the call in
        # try/except AttributeError: the latter silently masks any
        # AttributeError raised *inside* a .json() implementation and
        # would fall through to json.dumps with a confusing failure.
        json_method = getattr(inMemoryDataset, "json", None)
        if callable(json_method):
            return json_method().encode()

        # Named tuples provide _asdict(); this follows the convention of
        # the simplejson module.
        if hasattr(inMemoryDataset, "_asdict"):
            inMemoryDataset = inMemoryDataset._asdict()
        return json.dumps(inMemoryDataset, ensure_ascii=False).encode()

    def _coerceType(
        self, inMemoryDataset: Any, writeStorageClass: StorageClass, readStorageClass: StorageClass
    ) -> Any:
        """Coerce the supplied inMemoryDataset to the correct python type.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to coerce to expected type.
        writeStorageClass : `StorageClass`
            Storage class used to serialize this data.
        readStorageClass : `StorageClass`
            Storage class requested as the outcome.

        Returns
        -------
        inMemoryDataset : `object`
            Object of expected type ``readStorageClass.pytype``.
        """
        # Builtin types (dict, list, str, ...) come straight back from the
        # JSON parser and need no conversion.
        if inMemoryDataset is not None and not hasattr(builtins, readStorageClass.pytype.__name__):
            if readStorageClass.isComposite():
                inMemoryDataset = readStorageClass.delegate().assemble(
                    inMemoryDataset, pytype=readStorageClass.pytype
                )
            elif not isinstance(inMemoryDataset, readStorageClass.pytype):
                # JSON data are returned as simple python types.
                # The content will match the written storage class.
                # Pydantic models have their own scheme.
                try:
                    inMemoryDataset = writeStorageClass.pytype.parse_obj(inMemoryDataset)
                except AttributeError:
                    # Hope that we can pass the arguments in directly
                    inMemoryDataset = writeStorageClass.pytype(inMemoryDataset)
            # Let the read storage class perform any final type coercion.
            inMemoryDataset = readStorageClass.coerce_type(inMemoryDataset)
        return inMemoryDataset