Coverage for python / lsst / daf / butler / formatters / logs.py: 0%

17 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-05-06 08:30 +0000

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This software is dual licensed under the GNU General Public License and also 

10# under a 3-clause BSD license. Recipients may choose which of these licenses 

11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt, 

12# respectively. If you choose the GPL option then the following text applies 

13# (but note that there is still no warranty even if you opt for BSD instead): 

14# 

15# This program is free software: you can redistribute it and/or modify 

16# it under the terms of the GNU General Public License as published by 

17# the Free Software Foundation, either version 3 of the License, or 

18# (at your option) any later version. 

19# 

20# This program is distributed in the hope that it will be useful, 

21# but WITHOUT ANY WARRANTY; without even the implied warranty of 

22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

23# GNU General Public License for more details. 

24# 

25# You should have received a copy of the GNU General Public License 

26# along with this program. If not, see <http://www.gnu.org/licenses/>. 

27 

# Public API of this module: only the formatter class is exported.
__all__ = ("ButlerLogRecordsFormatter",)

29 

30from typing import Any 

31 

32from lsst.daf.butler import FormatterV2 

33from lsst.daf.butler.logging import ButlerLogRecords 

34 

35 

class ButlerLogRecordsFormatter(FormatterV2):
    """Read and write log records in JSON format.

    This is a naive implementation that treats everything as a pydantic
    model. In the future this may be changed to be able to read
    `ButlerLogRecord` one at a time from the file and return a subset
    of records given some filtering parameters.

    Notes
    -----
    Log files can be large and ResourcePath.open() does not support
    ``readline()`` or ``__iter__`` in all cases and
    ``ButlerLogRecords.from_stream`` does not use `.read()` for chunking.
    Therefore must use local file.
    """

    # File-name conventions and capabilities advertised to FormatterV2.
    default_extension = ".json"
    supported_extensions = frozenset({".log"})
    can_read_from_local_file = True

    def _get_read_pytype(self) -> type[ButlerLogRecords]:
        """Return the Python type to read, allowing for subclasses."""
        requested = self.file_descriptor.storageClass.pytype
        if issubclass(requested, ButlerLogRecords):
            return requested
        raise RuntimeError(f"Python type {requested} does not seem to be a ButlerLogRecords type")

    def read_from_local_file(self, path: str, component: str | None = None, expected_size: int = -1) -> Any:
        # ResourcePath open() cannot do a per-line read so can not use
        # `read_from_stream` and `read_from_uri` does not give any advantage
        # over pre-downloading the whole file (which can be very large).
        records_type = self._get_read_pytype()
        return records_type.from_file(path)

    def to_bytes(self, in_memory_dataset: ButlerLogRecords) -> bytes:
        """Serialize the log records to JSON-encoded bytes."""
        json_text = in_memory_dataset.to_json_data()
        return json_text.encode()