Coverage for python/lsst/daf/butler/registry/databases/sqlite.py : 18%

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ["SqliteDatabase"]

from contextlib import closing
import copy
from typing import Any, ContextManager, Dict, Iterable, List, Optional
from dataclasses import dataclass
import os
import urllib.parse

import sqlite3
import sqlalchemy
import sqlalchemy.ext.compiler

from ..interfaces import Database, StaticTablesContext
from ...core import ddl


def _onSqlite3Connect(dbapiConnection: sqlite3.Connection,
                      connectionRecord: sqlalchemy.pool._ConnectionRecord) -> None:
    assert isinstance(dbapiConnection, sqlite3.Connection)
    # Prevent pysqlite from emitting BEGIN and COMMIT statements.
    dbapiConnection.isolation_level = None
    # Enable foreign keys.
    with closing(dbapiConnection.cursor()) as cursor:
        cursor.execute("PRAGMA foreign_keys=ON;")
        cursor.execute("PRAGMA busy_timeout = 300000;")  # in ms, so 5min (way longer than should be needed)


def _onSqlite3Begin(connection: sqlalchemy.engine.Connection) -> sqlalchemy.engine.Connection:
    assert connection.dialect.name == "sqlite"
    # Replace pysqlite's buggy transaction handling that never BEGINs with our
    # own that does, and tell SQLite to try to acquire a lock as soon as we
    # start a transaction (this should lead to more blocking and fewer
    # deadlocks).
    connection.execute("BEGIN IMMEDIATE")
    return connection


class _Replace(sqlalchemy.sql.Insert):
    """A SQLAlchemy query that compiles to INSERT ... ON CONFLICT DO UPDATE
    on the primary key constraint for the table, providing REPLACE-like
    upsert semantics.
    """
    pass


# SQLite and PostgreSQL use similar syntax for their ON CONFLICT extension,
# but SQLAlchemy only knows about PostgreSQL's, so we have to compile some
# custom text SQL ourselves.

# Hard to infer what types these should be from SQLAlchemy docs; just disable
# static typing by calling everything "Any".
@sqlalchemy.ext.compiler.compiles(_Replace, "sqlite")
def _replace(insert: Any, compiler: Any, **kwargs: Any) -> Any:
    """Generate an INSERT ... ON CONFLICT DO UPDATE query.
    """
    result = compiler.visit_insert(insert, **kwargs)
    preparer = compiler.preparer
    pk_columns = ", ".join([preparer.format_column(col) for col in insert.table.primary_key])
    result += f" ON CONFLICT ({pk_columns})"
    columns = [preparer.format_column(col) for col in insert.table.columns
               if col.name not in insert.table.primary_key]
    updates = ", ".join([f"{col} = excluded.{col}" for col in columns])
    result += f" DO UPDATE SET {updates}"
    return result
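

# Illustrative sketch (an assumption added for documentation, not used by the
# module): for a hypothetical table ``thing`` with primary key ``(id, origin)``
# and one additional column ``name``, the compiler above would emit SQL
# roughly like
#
#     INSERT INTO thing (id, origin, name) VALUES (?, ?, ?)
#     ON CONFLICT (id, origin) DO UPDATE SET name = excluded.name
#
# where ``excluded`` is SQLite's pseudo-table holding the row that was
# rejected by the conflict.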


class _Ensure(sqlalchemy.sql.Insert):
    """A SQLAlchemy query that compiles to INSERT ... ON CONFLICT DO NOTHING.
    """
    pass


@sqlalchemy.ext.compiler.compiles(_Ensure, "sqlite")
def _ensure(insert: Any, compiler: Any, **kwargs: Any) -> Any:
    """Generate an INSERT ... ON CONFLICT DO NOTHING query.
    """
    result = compiler.visit_insert(insert, **kwargs)
    result += " ON CONFLICT DO NOTHING"
    return result
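

# Illustrative sketch (assumption, mirroring the example above): for the same
# hypothetical ``thing`` table, ``_Ensure`` would compile to
#
#     INSERT INTO thing (id, origin, name) VALUES (?, ?, ?)
#     ON CONFLICT DO NOTHING
#
# so rows that would violate any unique constraint are silently skipped.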


_AUTOINCR_TABLE_SPEC = ddl.TableSpec(
    fields=[ddl.FieldSpec(name="id", dtype=sqlalchemy.Integer, primaryKey=True)]
)


@dataclass
class _AutoincrementCompoundKeyWorkaround:
    """A workaround for SQLite's lack of support for compound primary keys
    that include an autoincrement field.
    """

    table: sqlalchemy.schema.Table
    """A single-column internal table that can be inserted into to yield
    autoincrement values (`sqlalchemy.schema.Table`).
    """

    column: str
    """The name of the column in the original table that needs to be populated
    with values from the internal table (`str`).
    """


class SqliteDatabase(Database):
    """An implementation of the `Database` interface for SQLite3.

    Parameters
    ----------
    engine : `sqlalchemy.engine.Engine`
        An existing engine created by a previous call to `makeEngine`.
    origin : `int`
        An integer ID that should be used as the default for any datasets,
        quanta, or other entities that use a (autoincrement, origin) compound
        primary key.
    namespace : `str`, optional
        The namespace (schema) this database is associated with.  If `None`,
        the default schema for the connection is used (which may be `None`).
    writeable : `bool`, optional
        If `True`, allow write operations on the database, including
        ``CREATE TABLE``.

    Notes
    -----
    The case where ``namespace is not None`` is not yet tested, and may be
    broken; we need an API for attaching to different databases in order to
    write those tests, but haven't yet worked out what is common/different
    across databases well enough to define it.
    """

    def __init__(self, *, engine: sqlalchemy.engine.Engine, origin: int,
                 namespace: Optional[str] = None, writeable: bool = True):
        super().__init__(origin=origin, engine=engine, namespace=namespace)
        # Get the filename from a call to 'PRAGMA database_list'.
        with engine.connect() as connection:
            with closing(connection.connection.cursor()) as cursor:
                dbList = list(cursor.execute("PRAGMA database_list").fetchall())
        if len(dbList) == 0:
            raise RuntimeError("No database in connection.")
        if namespace is None:
            namespace = "main"
        for _, dbname, filename in dbList:
            if dbname == namespace:
                break
        else:
            raise RuntimeError(f"No '{namespace}' database in connection.")
        if not filename:
            self.filename = None
        else:
            self.filename = filename
        self._writeable = writeable
        self._autoincr: Dict[str, _AutoincrementCompoundKeyWorkaround] = {}

    @classmethod
    def makeDefaultUri(cls, root: str) -> Optional[str]:
        return "sqlite:///" + os.path.join(root, "gen3.sqlite3")

    @classmethod
    def makeEngine(cls, uri: Optional[str] = None, *, filename: Optional[str] = None,
                   writeable: bool = True) -> sqlalchemy.engine.Engine:
        """Create a `sqlalchemy.engine.Engine` from a SQLAlchemy URI or
        filename.

        Parameters
        ----------
        uri : `str`, optional
            A SQLAlchemy URI connection string.
        filename : `str`, optional
            Name of the SQLite database file, or `None` to use an in-memory
            database.  Ignored if ``uri is not None``.
        writeable : `bool`, optional
            If `True`, allow write operations on the database, including
            ``CREATE TABLE``.

        Returns
        -------
        engine : `sqlalchemy.engine.Engine`
            A database engine.
        """
        # In order to be able to tell SQLite that we want a read-only or
        # read-write connection, we need to make the SQLite DBAPI connection
        # with a "URI"-based connection string.  SQLAlchemy claims it can do
        # this
        # (https://docs.sqlalchemy.org/en/13/dialects/sqlite.html#uri-connections),
        # but it doesn't seem to work as advertised.  To work around this, we
        # use the 'creator' argument to sqlalchemy.engine.create_engine, which
        # lets us pass a callable that creates the DBAPI connection.
        if uri is None:
            if filename is None:
                target = ":memory:"
                uri = "sqlite://"
            else:
                target = f"file:{filename}"
                uri = f"sqlite:///{filename}"
        else:
            parsed = urllib.parse.urlparse(uri)
            queries = parsed.query.split("&")
            if "uri=true" in queries:
                # This is a SQLAlchemy URI that is already trying to make a
                # SQLite connection via a SQLite URI, and hence there may
                # be URI components for both SQLite and SQLAlchemy.  We
                # don't need to support that, and it'd be a
                # reimplementation of all of the (broken) logic in
                # SQLAlchemy for doing this, so we just don't.
                raise NotImplementedError("SQLite connection strings with 'uri=true' are not supported.")
            # This is just a SQLAlchemy URI with a non-URI SQLite
            # connection string inside it.  Pull that out so we can use it
            # in the creator call.
            if parsed.path.startswith("/"):
                filename = parsed.path[1:]
                target = f"file:{filename}"
            else:
                filename = None
                target = ":memory:"
        if filename is None:
            if not writeable:
                raise NotImplementedError("Read-only :memory: databases are not supported.")
        else:
            if writeable:
                target += '?mode=rwc&uri=true'
            else:
                target += '?mode=ro&uri=true'

        def creator() -> sqlite3.Connection:
            return sqlite3.connect(target, check_same_thread=False, uri=True)

        engine = sqlalchemy.engine.create_engine(uri, creator=creator)

        sqlalchemy.event.listen(engine, "connect", _onSqlite3Connect)
        sqlalchemy.event.listen(engine, "begin", _onSqlite3Begin)
        try:
            return engine
        except sqlalchemy.exc.OperationalError as err:
            raise RuntimeError(f"Error creating connection with uri='{uri}', filename='{filename}', "
                               f"target={target}.") from err

    @classmethod
    def fromEngine(cls, engine: sqlalchemy.engine.Engine, *, origin: int,
                   namespace: Optional[str] = None, writeable: bool = True) -> Database:
        return cls(engine=engine, origin=origin, writeable=writeable, namespace=namespace)

    def isWriteable(self) -> bool:
        return self._writeable

    def __str__(self) -> str:
        if self.filename:
            return f"SQLite3@{self.filename}"
        else:
            return "SQLite3@:memory:"

    def _lockTables(self, tables: Iterable[sqlalchemy.schema.Table] = ()) -> None:
        # Docstring inherited.
        # Our SQLite database always acquires full-database locks at the
        # beginning of a transaction, so there's no need to acquire
        # table-level locks - which is good, because SQLite doesn't have
        # table-level locking.
        pass

    # MyPy claims that the return type here isn't covariant with the return
    # type of the base class method, which is formally correct but irrelevant
    # - the base class return type is _GeneratorContextManager, but only
    # because it's generated by the contextmanager decorator.
    def declareStaticTables(self, *, create: bool) -> ContextManager[StaticTablesContext]:  # type: ignore
        # If the user asked for an in-memory, writeable database, then we may
        # need to re-create schema even if create=False because schema can be
        # lost on re-connect.  This is only really relevant for tests, and
        # it's convenient there.
        if self.filename is None and self.isWriteable():
            inspector = sqlalchemy.engine.reflection.Inspector(self._connection)
            tables = inspector.get_table_names(schema=self.namespace)
            if not tables:
                create = True
        return super().declareStaticTables(create=create)

    def _convertFieldSpec(self, table: str, spec: ddl.FieldSpec, metadata: sqlalchemy.MetaData,
                          **kwargs: Any) -> sqlalchemy.schema.Column:
        if spec.autoincrement:
            if not spec.primaryKey:
                raise RuntimeError(f"Autoincrement field {table}.{spec.name} that is not a "
                                   f"primary key is not supported.")
            if spec.dtype != sqlalchemy.Integer:
                # SQLite's autoincrement is really limited; it only works if
                # the column type is exactly "INTEGER".  But it also doesn't
                # care about the distinctions between different integer types,
                # so it's safe to change it.
                spec = copy.copy(spec)
                spec.dtype = sqlalchemy.Integer
        return super()._convertFieldSpec(table, spec, metadata, **kwargs)

    def _makeColumnConstraints(self, table: str, spec: ddl.FieldSpec) -> List[sqlalchemy.CheckConstraint]:
        # For SQLite we add explicit length constraints on all string columns,
        # because SQLite itself ignores declared string lengths; without the
        # check, over-long values would be accepted here but rejected by other
        # databases.
        constraints = []
        if spec.isStringType():
            name = self.shrinkDatabaseEntityName("_".join([table, "len", spec.name]))
            constraints.append(sqlalchemy.CheckConstraint(f"length({spec.name})<={spec.length}"
                                                          # Oracle converts empty strings to NULL,
                                                          # so also require a nonzero length.
                                                          f" AND length({spec.name})>=1",
                                                          name=name))
        constraints.extend(super()._makeColumnConstraints(table, spec))
        return constraints

    def _convertTableSpec(self, name: str, spec: ddl.TableSpec, metadata: sqlalchemy.MetaData,
                          **kwargs: Any) -> sqlalchemy.schema.Table:
        primaryKeyFieldNames = set(field.name for field in spec.fields if field.primaryKey)
        autoincrFieldNames = set(field.name for field in spec.fields if field.autoincrement)
        if len(autoincrFieldNames) > 1:
            raise RuntimeError("At most one autoincrement field per table is allowed.")
        if len(primaryKeyFieldNames) > 1 and len(autoincrFieldNames) > 0:
            # SQLite's default rowid-based autoincrement doesn't work if the
            # field is just one field in a compound primary key.  As a
            # workaround, we create an extra table with just one column that
            # we'll insert into to generate those IDs.  That's only safe if
            # that single-column table's records are unique on the
            # autoincrement field alone, without the rest of the primary key.
            # In practice, that means the single-column table only holds
            # records for which origin == self.origin.
            autoincrFieldName, = autoincrFieldNames
            otherPrimaryKeyFieldNames = primaryKeyFieldNames - autoincrFieldNames
            if otherPrimaryKeyFieldNames != {"origin"}:
                # We need the only other field in the key to be 'origin'.
                raise NotImplementedError(
                    "Compound primary keys with an autoincrement are only supported in SQLite "
                    "if the only non-autoincrement primary key field is 'origin'."
                )
            self._autoincr[name] = _AutoincrementCompoundKeyWorkaround(
                table=self._convertTableSpec(f"_autoinc_{name}", _AUTOINCR_TABLE_SPEC, metadata, **kwargs),
                column=autoincrFieldName
            )
        if not spec.recycleIds:
            kwargs = dict(kwargs, sqlite_autoincrement=True)
        return super()._convertTableSpec(name, spec, metadata, **kwargs)
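
    # Illustrative sketch (assumption; field names are hypothetical): a table
    # spec that triggers the workaround above would look roughly like
    #
    #     spec = ddl.TableSpec(fields=[
    #         ddl.FieldSpec(name="id", dtype=sqlalchemy.BigInteger,
    #                       primaryKey=True, autoincrement=True),
    #         ddl.FieldSpec(name="origin", dtype=sqlalchemy.BigInteger,
    #                       primaryKey=True),
    #     ])
    #
    # and would cause a companion "_autoinc_<name>" table to be registered in
    # self._autoincr.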

    def insert(self, table: sqlalchemy.schema.Table, *rows: dict, returnIds: bool = False,
               select: Optional[sqlalchemy.sql.Select] = None,
               names: Optional[Iterable[str]] = None,
               ) -> Optional[List[int]]:
        self.assertTableWriteable(table, f"Cannot insert into read-only table {table}.")
        autoincr = self._autoincr.get(table.name)
        if autoincr is not None:
            if select is not None:
                raise NotImplementedError(
                    "Cannot do INSERT INTO ... SELECT on a SQLite table with a simulated autoincrement "
                    "compound primary key"
                )
            # This table has a compound primary key that includes an
            # autoincrement.  That doesn't work natively in SQLite, so we
            # insert into a single-column table and use those IDs.
            if not rows:
                return [] if returnIds else None
            if autoincr.column in rows[0]:
                # Caller passed the autoincrement key values explicitly in the
                # first row.  They had better have done the same for all rows,
                # or SQLAlchemy would have a problem, even if we didn't.
                assert all(autoincr.column in row for row in rows)
                # We need to insert only the values that correspond to
                # ``origin == self.origin`` into the single-column table, to
                # make sure we don't generate conflicting keys there later.
                rowsForAutoincrTable = [dict(id=row[autoincr.column])
                                        for row in rows if row["origin"] == self.origin]
                # Insert into the autoincr table and the target table inside
                # a transaction.  The main-table insertion can take care of
                # returnIds for us.
                with self.transaction():
                    self._connection.execute(autoincr.table.insert(), *rowsForAutoincrTable)
                    return super().insert(table, *rows, returnIds=returnIds)
            else:
                # Caller did not pass autoincrement key values on the first
                # row.  Make sure they didn't ever do that, and also make
                # sure the origin that was passed in is always self.origin,
                # because we can't safely generate autoincrement values
                # otherwise.
                assert all(autoincr.column not in row and row["origin"] == self.origin for row in rows)
                # Insert into the autoincr table one by one to get the
                # primary key values back, then insert into the target table
                # in the same transaction.
                with self.transaction():
                    newRows = []
                    ids = []
                    for row in rows:
                        newRow = row.copy()
                        id = self._connection.execute(autoincr.table.insert()).inserted_primary_key[0]
                        newRow[autoincr.column] = id
                        newRows.append(newRow)
                        ids.append(id)
                    # Don't ever ask to returnIds here, because we've already
                    # got them.
                    super().insert(table, *newRows)
                if returnIds:
                    return ids
                else:
                    return None
        else:
            return super().insert(table, *rows, select=select, names=names, returnIds=returnIds)
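
    # Illustrative usage sketch (assumption; ``db`` and ``thing_table`` are
    # hypothetical): for a table registered with the compound-key workaround,
    # omitting the autoincrement column lets the database mint the IDs:
    #
    #     ids = db.insert(thing_table,
    #                     {"origin": db.origin, "name": "a"},
    #                     {"origin": db.origin, "name": "b"},
    #                     returnIds=True)
    #
    # while passing explicit IDs (e.g. rows copied from another repository)
    # requires that every row carry the autoincrement column.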

    def replace(self, table: sqlalchemy.schema.Table, *rows: dict) -> None:
        self.assertTableWriteable(table, f"Cannot replace into read-only table {table}.")
        if not rows:
            return
        if table.name in self._autoincr:
            raise NotImplementedError(
                "replace does not support compound primary keys with autoincrement fields."
            )
        self._connection.execute(_Replace(table), *rows)

    def ensure(self, table: sqlalchemy.schema.Table, *rows: dict) -> int:
        self.assertTableWriteable(table, f"Cannot ensure into read-only table {table}.")
        if not rows:
            return 0
        if table.name in self._autoincr:
            raise NotImplementedError(
                "ensure does not support compound primary keys with autoincrement fields."
            )
        return self._connection.execute(_Ensure(table), *rows).rowcount
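
    # Illustrative sketch (assumption; names are hypothetical): ``replace``
    # upserts on the primary key, while ``ensure`` skips conflicting rows and
    # reports how many were actually inserted:
    #
    #     db.replace(tag_table, {"id": 1, "origin": 0, "name": "new-name"})
    #     n_inserted = db.ensure(tag_table, {"id": 1, "origin": 0, "name": "x"})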

    filename: Optional[str]
    """Name of the file this database is connected to (`str` or `None`).

    Set to `None` for in-memory databases.
    """