Coverage for python/lsst/daf/butler/_column_type_info.py: 37%

61 statements  

coverage.py v7.3.2, created at 2023-12-05 11:07 +0000

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ("ColumnTypeInfo", "LogicalColumn")

import dataclasses
import datetime
from collections.abc import Iterable
from typing import cast

import astropy.time
import sqlalchemy
from lsst.daf.relation import ColumnTag, sql

from . import ddl
from ._column_tags import DatasetColumnTag, DimensionKeyColumnTag, DimensionRecordColumnTag
from ._timespan import TimespanDatabaseRepresentation
from .dimensions import Dimension, DimensionUniverse

LogicalColumn = sqlalchemy.sql.ColumnElement | TimespanDatabaseRepresentation
"""A type alias for the types used to represent columns in SQL relations.

This is the butler specialization of the `lsst.daf.relation.sql.LogicalColumn`
concept.
"""
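
# Illustrative sketch (not part of the original module): a ``LogicalColumn``
# value is either a plain SQLAlchemy column expression or a
# ``TimespanDatabaseRepresentation`` that may wrap several real columns. The
# table and column names below are assumptions made only for this example.
def _example_logical_column() -> LogicalColumn:
    visit_table = sqlalchemy.table("visit", sqlalchemy.column("id"))
    # A ColumnElement such as this column reference is a valid LogicalColumn.
    return visit_table.columns["id"]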

@dataclasses.dataclass(frozen=True, eq=False)
class ColumnTypeInfo:
    """A struct that aggregates information about column types that can differ
    across data repositories due to `Registry` and dimension configuration.
    """

    timespan_cls: type[TimespanDatabaseRepresentation]
    """An abstraction around the column type or types used for timespans by
    this database engine.
    """

    universe: DimensionUniverse
    """Object that manages the definitions of all dimensions and dimension
    elements.
    """

    dataset_id_spec: ddl.FieldSpec
    """Field specification for the dataset primary key column."""

    run_key_spec: ddl.FieldSpec
    """Field specification for the `~CollectionType.RUN` primary key column."""

    ingest_date_dtype: type[ddl.AstropyTimeNsecTai] | type[sqlalchemy.TIMESTAMP]
    """Type of the ``ingest_date`` column; it can be either
    `~lsst.daf.butler.ddl.AstropyTimeNsecTai` or `sqlalchemy.TIMESTAMP`.
    """

    @property
    def ingest_date_pytype(self) -> type:
        """Python type corresponding to ``ingest_date`` column type."""
        if self.ingest_date_dtype is ddl.AstropyTimeNsecTai:
            return astropy.time.Time
        elif self.ingest_date_dtype is sqlalchemy.TIMESTAMP:
            return datetime.datetime
        else:
            raise TypeError(f"Unexpected type of ingest_date_dtype: {self.ingest_date_dtype}")

    def make_relation_table_spec(
        self,
        columns: Iterable[ColumnTag],
        unique_keys: Iterable[Iterable[ColumnTag]] = (),
    ) -> ddl.TableSpec:
        """Create a specification for a table with the given relation columns.

        This is used primarily to create temporary tables for query results.

        Parameters
        ----------
        columns : `~collections.abc.Iterable` [ `ColumnTag` ]
            Iterable of column identifiers.
        unique_keys : `~collections.abc.Iterable` \
                [ `~collections.abc.Iterable` [ `ColumnTag` ] ]
            Unique constraints to add to the table, as a nested iterable whose
            outer level iterates over constraints and whose inner level
            iterates over the columns within each constraint.

        Returns
        -------
        spec : `ddl.TableSpec`
            Specification for a table.
        """
        result = ddl.TableSpec(fields=())
        columns = list(columns)
        if not columns:
            result.fields.add(
                ddl.FieldSpec(
                    sql.Engine.EMPTY_COLUMNS_NAME,
                    dtype=sql.Engine.EMPTY_COLUMNS_TYPE,
                    nullable=True,
                    default=True,
                )
            )
        for tag in columns:
            match tag:
                case DimensionKeyColumnTag(dimension=dimension_name):
                    result.fields.add(
                        dataclasses.replace(
                            cast(Dimension, self.universe[dimension_name]).primaryKey,
                            name=tag.qualified_name,
                            primaryKey=False,
                            nullable=False,
                        )
                    )
                case DimensionRecordColumnTag(column="region"):
                    result.fields.add(ddl.FieldSpec.for_region(tag.qualified_name))
                case DimensionRecordColumnTag(column="timespan") | DatasetColumnTag(column="timespan"):
                    result.fields.update(
                        self.timespan_cls.makeFieldSpecs(nullable=True, name=tag.qualified_name)
                    )
                case DimensionRecordColumnTag(element=element_name, column=column):
                    element = self.universe[element_name]
                    result.fields.add(
                        dataclasses.replace(
                            element.RecordClass.fields.facts[column],
                            name=tag.qualified_name,
                            nullable=True,
                            primaryKey=False,
                        )
                    )
                case DatasetColumnTag(column="dataset_id"):
                    result.fields.add(
                        dataclasses.replace(
                            self.dataset_id_spec, name=tag.qualified_name, primaryKey=False, nullable=False
                        )
                    )
                case DatasetColumnTag(column="run"):
                    result.fields.add(
                        dataclasses.replace(
                            self.run_key_spec, name=tag.qualified_name, primaryKey=False, nullable=False
                        )
                    )
                case DatasetColumnTag(column="ingest_date"):
                    result.fields.add(
                        ddl.FieldSpec(tag.qualified_name, dtype=self.ingest_date_dtype, nullable=False)
                    )
                case _:
                    raise TypeError(f"Unexpected column tag {tag}.")
        for unique_key in unique_keys:
            result.unique.add(tuple(tag.qualified_name for tag in unique_key))
        return result
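
# Illustrative check (not part of the original module, assuming an existing
# ``ColumnTypeInfo`` instance obtained from a Registry): ``ingest_date_pytype``
# follows directly from the configured ``ingest_date_dtype``.
def _example_ingest_date_pytype(info: ColumnTypeInfo) -> None:
    if info.ingest_date_dtype is ddl.AstropyTimeNsecTai:
        # Nanosecond-precision TAI columns map to astropy times.
        assert info.ingest_date_pytype is astropy.time.Time
    else:
        # The SQL TIMESTAMP variant maps to the standard library type.
        assert info.ingest_date_pytype is datetime.datetime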
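
# Hypothetical usage sketch (not part of the original module): build a table
# specification for a temporary query-result table. The tag arguments
# ("visit", "raw", and the unique-key choice) are assumptions made only for
# this illustration; a real ``ColumnTypeInfo`` would come from the Registry's
# managers rather than being constructed by hand.
def _example_relation_table_spec(info: ColumnTypeInfo) -> ddl.TableSpec:
    tags: list[ColumnTag] = [
        DimensionKeyColumnTag("visit"),
        DimensionRecordColumnTag("visit", "region"),
        DatasetColumnTag("raw", "dataset_id"),
    ]
    # One unique constraint spanning the dimension key and the dataset ID.
    unique_keys = [(tags[0], tags[2])]
    return info.make_relation_table_spec(tags, unique_keys)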