Coverage for tests / test_postgres.py: 20%

69 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-04-17 08:49 +0000

1# This file is part of felis. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

import os
import unittest

import yaml
from sqlalchemy import inspect, text

from felis.datamodel import Schema
from felis.db.database_context import PostgreSQLContext
from felis.metadata import MetaDataBuilder
from felis.tests.postgresql import TemporaryPostgresInstance, setup_postgres_test_db  # type: ignore

32 

# Absolute path of the directory containing this test module.
TESTDIR = os.path.abspath(os.path.dirname(__file__))
# YAML fixture defining the "sales" test schema used by all tests below.
TEST_YAML = os.path.join(TESTDIR, "data", "sales.yaml")

35 

36 

class TestPostgresql(unittest.TestCase):
    """Test PostgreSQL database setup."""

    # Temporary server shared by every test in this class.
    postgresql: TemporaryPostgresInstance

    @classmethod
    def setUpClass(cls) -> None:
        """Start the temporary PostgreSQL server for the whole class.

        ``enterClassContext`` (Python 3.11+) registers the context manager so
        the server is torn down automatically in ``tearDownClass``.
        """
        cls.postgresql = cls.enterClassContext(setup_postgres_test_db())
        super().setUpClass()

    def _load_schema(self) -> Schema:
        """Load and validate the ``sales`` test schema from the YAML fixture."""
        with open(TEST_YAML) as test_file:
            yaml_data = yaml.safe_load(test_file)
        return Schema.model_validate(yaml_data)

    def test_initialize_create_and_drop(self) -> None:
        """Test database initialization, creation, and deletion in
        PostgreSQL.
        """
        # Create the schema and metadata.
        schema = self._load_schema()
        md = MetaDataBuilder(schema).build()

        # Initialize the database.
        ctx = PostgreSQLContext(self.postgresql.engine.url, md)
        # Fix: the context was never closed; register the close so it runs
        # even when an assertion below fails.
        self.addCleanup(ctx.close)
        ctx.initialize()
        ctx.create_all()

        # Metadata table names are qualified as "<schema>.<table>"; strip the
        # schema prefix so they compare against information_schema names.
        table_names = [name.split(".")[-1] for name in md.tables]

        # Check that the tables and columns are created.
        with self.postgresql.begin() as conn:
            res = conn.execute(text("SELECT table_name FROM information_schema.tables"))
            tables = [row[0] for row in res.fetchall()]
            for table_name in table_names:
                self.assertIn(table_name, tables)
                # Check that all columns are created.
                expected_columns = [col.name for col in md.tables[f"sales.{table_name}"].columns]
                res = conn.execute(
                    text("SELECT column_name FROM information_schema.columns WHERE table_name = :table_name"),
                    {"table_name": table_name},
                )
                actual_columns = [row[0] for row in res.fetchall()]
                self.assertSetEqual(set(expected_columns), set(actual_columns))

        # Drop the schema.
        ctx.drop()

        # Check that the "sales" schema was dropped.
        with self.postgresql.begin() as conn:
            res = conn.execute(
                text("SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'sales'")
            )
            schemas = [row[0] for row in res.fetchall()]
            self.assertNotIn("sales", schemas)

    def test_create_and_drop_indexes(self) -> None:
        """Test creating and dropping indexes separately from table
        creation.
        """
        # Load the schema.
        schema = self._load_schema()

        # Create metadata without indexes.
        md_no_indexes = MetaDataBuilder(schema, skip_indexes=True).build()

        # Initialize the database and create tables (without indexes).
        ctx = PostgreSQLContext(self.postgresql.engine.url, md_no_indexes)
        # Fix: close was only reached on success before; addCleanup guarantees
        # it runs even when an assertion fails part-way through.
        self.addCleanup(ctx.close)
        ctx.initialize()
        ctx.create_all()

        # Create metadata with indexes to get the index definitions.
        md_with_indexes = MetaDataBuilder(schema, skip_indexes=False).build()
        ctx_with_indexes = PostgreSQLContext(self.postgresql.engine.url, md_with_indexes)
        # Fix: this second context was never closed at all.
        self.addCleanup(ctx_with_indexes.close)

        def check_indexes_exist(should_exist: bool, message: str) -> None:
            """Assert that every named index in the metadata does (or does
            not) exist in the live database, per ``should_exist``.
            """
            with self.postgresql.begin() as conn:
                inspector = inspect(conn)
                for table in md_with_indexes.tables.values():
                    # Names of the indexes currently present for this table.
                    existing_indexes = {
                        ix["name"]
                        for ix in inspector.get_indexes(table.name, schema=table.schema)
                        if ix.get("name") is not None
                    }
                    for index in table.indexes:
                        # Unnamed indexes cannot be matched by name; skip them.
                        if index.name is None:
                            continue
                        exists = index.name in existing_indexes
                        if should_exist:
                            self.assertTrue(
                                exists,
                                f"Index '{index.name}' {message}",
                            )
                        else:
                            self.assertFalse(
                                exists,
                                f"Index '{index.name}' {message}",
                            )

        # Check that indexes don't exist yet.
        check_indexes_exist(False, "should not exist yet")

        # Create the indexes.
        ctx_with_indexes.create_indexes()

        # Check that indexes now exist.
        check_indexes_exist(True, "should exist after creation")

        # Create the indexes again; should not raise an error.
        ctx_with_indexes.create_indexes()

        # Drop the indexes.
        ctx_with_indexes.drop_indexes()

        # Check that indexes were dropped.
        check_indexes_exist(False, "should not exist after dropping")

        # Cleanup: drop the schema.  Context close is handled by addCleanup
        # above (calling close() here as well could close twice).
        ctx.drop()