Coverage for python/lsst/daf/butler/formatters/json.py: 84%

33 statements  

coverage.py v7.2.7, created at 2023-08-12 09:19 +0000

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ("JsonFormatter",)

import contextlib
import dataclasses
import json
from typing import Any

from .file import FileFormatter


class JsonFormatter(FileFormatter):
    """Formatter implementation for JSON files."""

    extension = ".json"

    unsupportedParameters = None
    """This formatter does not support any parameters (`frozenset`)"""

    def _readFile(self, path: str, pytype: type[Any] | None = None) -> Any:
        """Read a file from the path in JSON format.

        Parameters
        ----------
        path : `str`
            Path to use to open JSON format file.
        pytype : `class`, optional
            Not used by this implementation.

        Returns
        -------
        data : `object`
            Data as Python object read from JSON file.
        """
        with open(path, "rb") as fd:
            data = self._fromBytes(fd.read(), pytype)

        return data

    def _writeFile(self, inMemoryDataset: Any) -> None:
        """Write the in memory dataset to file on disk.

        Will look for `_asdict()` method to aid JSON serialization, following
        the approach of the simplejson module.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to serialize.

        Raises
        ------
        Exception
            The file could not be written.
        """
        self.fileDescriptor.location.uri.write(self._toBytes(inMemoryDataset))

    def _fromBytes(self, serializedDataset: bytes, pytype: type[Any] | None = None) -> Any:
        """Read the bytes object as a Python object.

        Parameters
        ----------
        serializedDataset : `bytes`
            Bytes object to deserialize.
        pytype : `class`, optional
            Not used by this implementation.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object, or `None` if the bytes
            could not be read as JSON.
        """
        try:
            data = json.loads(serializedDataset)
        except json.JSONDecodeError:
            data = None

        return data

    def _toBytes(self, inMemoryDataset: Any) -> bytes:
        """Write the in memory dataset to a bytestring.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to serialize.

        Returns
        -------
        serializedDataset : `bytes`
            Bytes representing the serialized dataset.

        Raises
        ------
        Exception
            The object could not be serialized.
        """
        # Try the standardized methods for native JSON serialization first.
        # For example, Pydantic v2 models provide a .model_dump_json() method;
        # v1 models without the compatibility layer need .json() instead.
        with contextlib.suppress(AttributeError):
            return inMemoryDataset.model_dump_json().encode()
        with contextlib.suppress(AttributeError):
            return inMemoryDataset.json().encode()

        if dataclasses.is_dataclass(inMemoryDataset):
            inMemoryDataset = dataclasses.asdict(inMemoryDataset)
        elif hasattr(inMemoryDataset, "_asdict"):
            inMemoryDataset = inMemoryDataset._asdict()
        return json.dumps(inMemoryDataset, ensure_ascii=False).encode()
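

# --- Illustrative sketch (not part of json.py): how the _toBytes fallback
# chain handles common in-memory dataset types, and how _fromBytes-style
# decoding treats invalid input. The names `ExposureSummary`, `Point`, and
# `to_json_bytes` below are hypothetical; the function mirrors the method's
# logic rather than calling the formatter, since constructing a JsonFormatter
# requires a FileDescriptor and related butler machinery.
import contextlib
import dataclasses
import json
from collections import namedtuple
from typing import Any


@dataclasses.dataclass
class ExposureSummary:
    """Hypothetical dataclass; serialized via dataclasses.asdict()."""

    detector: int
    seeing: float


# Named tuples provide _asdict(), so they serialize as JSON objects, not arrays.
Point = namedtuple("Point", ["x", "y"])


def to_json_bytes(inMemoryDataset: Any) -> bytes:
    """Stand-in that mirrors JsonFormatter._toBytes for demonstration."""
    # Pydantic v2 models return here via .model_dump_json(); v1 models via
    # .json(). Everything else falls through to the stdlib json module.
    with contextlib.suppress(AttributeError):
        return inMemoryDataset.model_dump_json().encode()
    with contextlib.suppress(AttributeError):
        return inMemoryDataset.json().encode()

    if dataclasses.is_dataclass(inMemoryDataset):
        inMemoryDataset = dataclasses.asdict(inMemoryDataset)
    elif hasattr(inMemoryDataset, "_asdict"):
        inMemoryDataset = inMemoryDataset._asdict()
    return json.dumps(inMemoryDataset, ensure_ascii=False).encode()


print(to_json_bytes(ExposureSummary(detector=42, seeing=0.7)))  # b'{"detector": 42, "seeing": 0.7}'
print(to_json_bytes(Point(1, 2)))                               # b'{"x": 1, "y": 2}'
print(to_json_bytes({"filter": "r", "visit": 1228}))            # b'{"filter": "r", "visit": 1228}'

# _fromBytes-style decoding: invalid JSON yields None rather than raising.
try:
    decoded = json.loads(b"not json")
except json.JSONDecodeError:
    decoded = None
print(decoded)  # None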