Coverage for python/lsst/daf/butler/formatters/logs.py: 52%

19 statements  

coverage.py v6.5.0, created at 2022-11-06 12:40 -0800

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ("ButlerLogRecordsFormatter",)

from typing import Any, Optional, Type

from lsst.daf.butler.core.logging import ButlerLogRecords

from .json import JsonFormatter


class ButlerLogRecordsFormatter(JsonFormatter):
32 """Read and write log records in JSON format. 

33 

34 This is a naive implementation that treats everything as a pydantic. 

35 model. In the future this may be changed to be able to read 

36 `ButlerLogRecord` one at time from the file and return a subset 

37 of records given some filtering parameters. 

38 """ 


    def _readFile(self, path: str, pytype: Optional[Type[Any]] = None) -> Any:
        """Read a file from the path in JSON format.

        Parameters
        ----------
        path : `str`
            Path to use to open JSON format file.
        pytype : `class`, optional
            Python type being read. Should be a `ButlerLogRecords` or
            subclass.

        Returns
        -------
        data : `object`
            Data as Python object read from JSON file.

        Notes
        -----
        Can read two forms of JSON log file. It can read a full JSON
        document created from `ButlerLogRecords`, or a stream of standalone
        JSON documents with a log record per line. Both forms are sketched
        in the comment following this method.
        """
        if pytype is None:
            pytype = ButlerLogRecords
        elif not issubclass(pytype, ButlerLogRecords):
            raise RuntimeError(f"Python type {pytype} does not seem to be a ButlerLogRecords type")

        return pytype.from_file(path)
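    # Illustrative sketch of the two file layouts the Notes above describe;
    # record contents are elided because the record schema is defined by
    # `ButlerLogRecord`, not by this formatter.
    #
    #   Form 1: a single JSON document serialized from `ButlerLogRecords`:
    #       [ {<record 1>}, {<record 2>}, ... ]
    #
    #   Form 2: a stream of standalone JSON documents, one record per line:
    #       {<record 1>}
    #       {<record 2>}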

    def _fromBytes(self, serializedDataset: bytes, pytype: Optional[Type[Any]] = None) -> Any:
        """Read the bytes object as a python object.

        Parameters
        ----------
        serializedDataset : `bytes`
            Bytes object to unserialize.
        pytype : `class`, optional
            Python type being read. Should be a `ButlerLogRecords` or
            subclass.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object or None if the string could
            not be read.
        """
        # Duplicates some of the logic from ButlerLogRecords.from_file.
        if pytype is None:
            pytype = ButlerLogRecords
        elif not issubclass(pytype, ButlerLogRecords):
            raise RuntimeError(f"Python type {pytype} does not seem to be a ButlerLogRecords type")

        return pytype.from_raw(serializedDataset)

    def _toBytes(self, inMemoryDataset: Any) -> bytes:
        """Write the in memory dataset to a bytestring.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to serialize.

        Returns
        -------
        serializedDataset : `bytes`
            Bytes representing the serialized dataset.

        Raises
        ------
        Exception
            The object could not be serialized.
        """
        return inMemoryDataset.json(exclude_unset=True, exclude_defaults=True).encode()
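
For orientation, here is a minimal round-trip sketch of the serialization path exercised above. It mirrors what _toBytes, _fromBytes and _readFile do rather than calling the formatter directly, and it assumes that ButlerLogRecord is importable from lsst.daf.butler.core.logging and that the helper classmethods ButlerLogRecord.from_record and ButlerLogRecords.from_records exist; those helpers are not shown in this file, so treat the snippet as an illustrative sketch rather than documented formatter API.

import logging
import tempfile
from pathlib import Path

from lsst.daf.butler.core.logging import ButlerLogRecord, ButlerLogRecords

# Build a ButlerLogRecords container from a plain logging.LogRecord.
# from_record/from_records are assumed helpers on the log-record classes.
raw = logging.LogRecord(
    name="example",
    level=logging.INFO,
    pathname="example.py",
    lineno=1,
    msg="hello from the coverage sketch",
    args=None,
    exc_info=None,
)
records = ButlerLogRecords.from_records([ButlerLogRecord.from_record(raw)])

# What _toBytes does: dump the pydantic model to JSON bytes.
payload = records.json(exclude_unset=True, exclude_defaults=True).encode()

# What _fromBytes does: parse those bytes back into a ButlerLogRecords object.
restored = ButlerLogRecords.from_raw(payload)

# What _readFile does: parse the same JSON from a file path.
path = Path(tempfile.mkdtemp()) / "records.json"
path.write_bytes(payload)
restored_from_file = ButlerLogRecords.from_file(str(path))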