# Coverage listing for python/lsst/daf/butler/registry/tests/_database.py (11%)

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ["DatabaseTests"]

from abc import ABC, abstractmethod
from collections import namedtuple
from typing import ContextManager

import sqlalchemy

from lsst.sphgeom import ConvexPolygon, UnitVector3d
from ..interfaces import (
    Database,
    ReadOnlyDatabaseError,
    DatabaseConflictError,
    SchemaAlreadyDefinedError
)
from ...core import ddl

StaticTablesTuple = namedtuple("StaticTablesTuple", ["a", "b", "c"])

STATIC_TABLE_SPECS = StaticTablesTuple(
    a=ddl.TableSpec(
        fields=[
            ddl.FieldSpec("name", dtype=sqlalchemy.String, length=16, primaryKey=True),
            ddl.FieldSpec("region", dtype=ddl.Base64Region, nbytes=128),
        ]
    ),
    b=ddl.TableSpec(
        fields=[
            ddl.FieldSpec("id", dtype=sqlalchemy.BigInteger, autoincrement=True, primaryKey=True),
            ddl.FieldSpec("name", dtype=sqlalchemy.String, length=16, nullable=False),
            ddl.FieldSpec("value", dtype=sqlalchemy.SmallInteger, nullable=True),
        ],
        unique=[("name",)],
    ),
    c=ddl.TableSpec(
        fields=[
            ddl.FieldSpec("id", dtype=sqlalchemy.BigInteger, autoincrement=True, primaryKey=True),
            ddl.FieldSpec("origin", dtype=sqlalchemy.BigInteger, primaryKey=True),
            ddl.FieldSpec("b_id", dtype=sqlalchemy.BigInteger, nullable=True),
        ],
        foreignKeys=[
            ddl.ForeignKeySpec("b", source=("b_id",), target=("id",), onDelete="SET NULL"),
        ]
    ),
)

DYNAMIC_TABLE_SPEC = ddl.TableSpec(
    fields=[
        ddl.FieldSpec("c_id", dtype=sqlalchemy.BigInteger, primaryKey=True),
        ddl.FieldSpec("c_origin", dtype=sqlalchemy.BigInteger, primaryKey=True),
        ddl.FieldSpec("a_name", dtype=sqlalchemy.String, length=16, nullable=False),
    ],
    foreignKeys=[
        ddl.ForeignKeySpec("c", source=("c_id", "c_origin"), target=("id", "origin"), onDelete="CASCADE"),
        ddl.ForeignKeySpec("a", source=("a_name",), target=("name",), onDelete="CASCADE"),
    ]
)

TEMPORARY_TABLE_SPEC = ddl.TableSpec(
    fields=[
        ddl.FieldSpec("a_name", dtype=sqlalchemy.String, length=16, primaryKey=True),
        ddl.FieldSpec("b_id", dtype=sqlalchemy.BigInteger, primaryKey=True),
    ],
)
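
# For orientation when reading the tests below: table "a" is keyed by "name"
# and carries a region; "b" has an autoincrement primary key and a unique
# "name"; "c" has a compound autoincrement+origin primary key and references
# "b" with onDelete="SET NULL"; the dynamic table declared from
# DYNAMIC_TABLE_SPEC (called "d" in the tests) references both "c" and "a"
# with onDelete="CASCADE"; TEMPORARY_TABLE_SPEC has no foreign keys and is
# keyed by ("a_name", "b_id").
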
class DatabaseTests(ABC):
    """Generic tests for the `Database` interface that can be subclassed to
    generate tests for concrete implementations.
    """
    @abstractmethod
    def makeEmptyDatabase(self, origin: int = 0) -> Database:
        """Return an empty `Database` with the given origin, or an
        automatically-generated one if ``origin`` is `None`.
        """
        raise NotImplementedError()

    @abstractmethod
    def asReadOnly(self, database: Database) -> ContextManager[Database]:
        """Return a context manager for a read-only connection into the given
        database.

        The original database should be considered unusable within the context
        but safe to use again afterwards (this allows the context manager to
        block write access by temporarily changing user permissions to really
        guarantee that write operations are not performed).
        """
        raise NotImplementedError()
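    # One possible shape for this hook, as a sketch only: it assumes a
    # file-backed database whose write access can be revoked through file
    # permissions (``self._filename`` is a hypothetical attribute of the
    # concrete test case; the interface itself does not require this).
    #
    #     @contextlib.contextmanager
    #     def asReadOnly(self, database):
    #         os.chmod(self._filename, stat.S_IREAD)
    #         try:
    #             yield self.getNewConnection(database, writeable=False)
    #         finally:
    #             os.chmod(self._filename, stat.S_IREAD | stat.S_IWRITE)
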
    @abstractmethod
    def getNewConnection(self, database: Database, *, writeable: bool) -> Database:
        """Return a new `Database` instance that points to the same underlying
        storage as the given one.
        """
        raise NotImplementedError()

    def checkTable(self, spec: ddl.TableSpec, table: sqlalchemy.schema.Table):
        self.assertCountEqual(spec.fields.names, table.columns.keys())
        # Checking more than this currently seems fragile, as it might restrict
        # what Database implementations do; we don't care if the spec is
        # actually preserved in terms of types and constraints as long as we
        # can use the returned table as if it was.

    def checkStaticSchema(self, tables: StaticTablesTuple):
        self.checkTable(STATIC_TABLE_SPECS.a, tables.a)
        self.checkTable(STATIC_TABLE_SPECS.b, tables.b)
        self.checkTable(STATIC_TABLE_SPECS.c, tables.c)

    def testDeclareStaticTables(self):
        """Tests for `Database.declareStaticTables` and the methods it
        delegates to.
        """
        # Create the static schema in a new, empty database.
        newDatabase = self.makeEmptyDatabase()
        with newDatabase.declareStaticTables(create=True) as context:
            tables = context.addTableTuple(STATIC_TABLE_SPECS)
        self.checkStaticSchema(tables)
        # Check that we can load that schema even from a read-only connection.
        with self.asReadOnly(newDatabase) as existingReadOnlyDatabase:
            with existingReadOnlyDatabase.declareStaticTables(create=False) as context:
                tables = context.addTableTuple(STATIC_TABLE_SPECS)
            self.checkStaticSchema(tables)

    def testDeclareStaticTablesTwice(self):
        """Tests for `Database.declareStaticTables` being called twice.
        """
        # Create the static schema in a new, empty database.
        newDatabase = self.makeEmptyDatabase()
        with newDatabase.declareStaticTables(create=True) as context:
            tables = context.addTableTuple(STATIC_TABLE_SPECS)
        self.checkStaticSchema(tables)
        # The second time it should raise.
        with self.assertRaises(SchemaAlreadyDefinedError):
            with newDatabase.declareStaticTables(create=True) as context:
                tables = context.addTableTuple(STATIC_TABLE_SPECS)
        # Check the schema; it should still contain all tables, and maybe some
        # extra.
        with newDatabase.declareStaticTables(create=False) as context:
            self.assertLessEqual(frozenset(STATIC_TABLE_SPECS._fields), context._tableNames)

    def testRepr(self):
        """Test that repr does not fall back to the generic object
        representation.
        """
        newDatabase = self.makeEmptyDatabase()
        rep = repr(newDatabase)
        # Check that stringification works and gives us something different.
        self.assertNotEqual(rep, str(newDatabase))
        self.assertNotIn("object at 0x", rep, "Check default repr was not used")
        self.assertIn("://", rep)

    def testDynamicTables(self):
        """Tests for `Database.ensureTableExists` and
        `Database.getExistingTable`.
        """
        # Need to start with the static schema.
        newDatabase = self.makeEmptyDatabase()
        with newDatabase.declareStaticTables(create=True) as context:
            context.addTableTuple(STATIC_TABLE_SPECS)
        # Try to ensure the dynamic table exists in a read-only version of
        # that database, which should fail because we can't create it.
        with self.asReadOnly(newDatabase) as existingReadOnlyDatabase:
            with existingReadOnlyDatabase.declareStaticTables(create=False) as context:
                context.addTableTuple(STATIC_TABLE_SPECS)
            with self.assertRaises(ReadOnlyDatabaseError):
                existingReadOnlyDatabase.ensureTableExists("d", DYNAMIC_TABLE_SPEC)
        # Just getting the dynamic table before it exists should return None.
        self.assertIsNone(newDatabase.getExistingTable("d", DYNAMIC_TABLE_SPEC))
        # Ensure the new table exists back in the original database, which
        # should create it.
        table = newDatabase.ensureTableExists("d", DYNAMIC_TABLE_SPEC)
        self.checkTable(DYNAMIC_TABLE_SPEC, table)
        # Ensuring that it exists should just return the exact same table
        # instance again.
        self.assertIs(newDatabase.ensureTableExists("d", DYNAMIC_TABLE_SPEC), table)
        # Try again from the read-only database.
        with self.asReadOnly(newDatabase) as existingReadOnlyDatabase:
            with existingReadOnlyDatabase.declareStaticTables(create=False) as context:
                context.addTableTuple(STATIC_TABLE_SPECS)
            # Just getting the dynamic table should now work...
            self.assertIsNotNone(existingReadOnlyDatabase.getExistingTable("d", DYNAMIC_TABLE_SPEC))
            # ...as should ensuring that it exists, since it now does.
            existingReadOnlyDatabase.ensureTableExists("d", DYNAMIC_TABLE_SPEC)
            self.checkTable(DYNAMIC_TABLE_SPEC, table)
        # Trying to get the table with a different specification (at least
        # in terms of what columns are present) should raise.
        with self.assertRaises(DatabaseConflictError):
            newDatabase.ensureTableExists(
                "d",
                ddl.TableSpec(
                    fields=[ddl.FieldSpec("name", dtype=sqlalchemy.String, length=4, primaryKey=True)]
                )
            )
        # Calling ensureTableExists inside a transaction block is an error,
        # even if it would do nothing.
        with newDatabase.transaction():
            with self.assertRaises(AssertionError):
                newDatabase.ensureTableExists("d", DYNAMIC_TABLE_SPEC)

    def testTemporaryTables(self):
        """Tests for `Database.makeTemporaryTable`,
        `Database.dropTemporaryTable`, and `Database.insert` with
        the ``select`` argument.
        """
        # Need to start with the static schema; also insert some test data.
        newDatabase = self.makeEmptyDatabase()
        with newDatabase.declareStaticTables(create=True) as context:
            static = context.addTableTuple(STATIC_TABLE_SPECS)
        newDatabase.insert(static.a,
                           {"name": "a1", "region": None},
                           {"name": "a2", "region": None})
        bIds = newDatabase.insert(static.b,
                                  {"name": "b1", "value": 11},
                                  {"name": "b2", "value": 12},
                                  {"name": "b3", "value": 13},
                                  returnIds=True)
        # Create the table.
        table1 = newDatabase.makeTemporaryTable(TEMPORARY_TABLE_SPEC, "e1")
        self.checkTable(TEMPORARY_TABLE_SPEC, table1)
        # Insert via an INSERT INTO ... SELECT query; the ``literal(True)``
        # onclause below makes the join a cross join.
        newDatabase.insert(
            table1,
            select=sqlalchemy.sql.select(
                [static.a.columns.name.label("a_name"), static.b.columns.id.label("b_id")]
            ).select_from(
                static.a.join(static.b, onclause=sqlalchemy.sql.literal(True))
            ).where(
                sqlalchemy.sql.and_(
                    static.a.columns.name == "a1",
                    static.b.columns.value <= 12,
                )
            )
        )
        # Check that the inserted rows are present.
        self.assertCountEqual(
            [{"a_name": "a1", "b_id": bId} for bId in bIds[:2]],
            [dict(row) for row in newDatabase.query(table1.select())]
        )
        # Create another one via a read-only connection to the database.
        # We _do_ allow temporary table modifications in read-only databases.
        with self.asReadOnly(newDatabase) as existingReadOnlyDatabase:
            with existingReadOnlyDatabase.declareStaticTables(create=False) as context:
                context.addTableTuple(STATIC_TABLE_SPECS)
            table2 = existingReadOnlyDatabase.makeTemporaryTable(TEMPORARY_TABLE_SPEC)
            self.checkTable(TEMPORARY_TABLE_SPEC, table2)
            # Those tables should not be the same, despite having the same DDL.
            self.assertIsNot(table1, table2)
            # Do a slightly different insert into this table, to check that
            # it works in a read-only database. This time we pass column
            # names as a kwarg to insert instead of by labeling the columns in
            # the select.
            existingReadOnlyDatabase.insert(
                table2,
                select=sqlalchemy.sql.select(
                    [static.a.columns.name, static.b.columns.id]
                ).select_from(
                    static.a.join(static.b, onclause=sqlalchemy.sql.literal(True))
                ).where(
                    sqlalchemy.sql.and_(
                        static.a.columns.name == "a2",
                        static.b.columns.value >= 12,
                    )
                ),
                names=["a_name", "b_id"],
            )
            # Check that the inserted rows are present.
            self.assertCountEqual(
                [{"a_name": "a2", "b_id": bId} for bId in bIds[1:]],
                [dict(row) for row in existingReadOnlyDatabase.query(table2.select())]
            )
            # Drop the temporary table from the read-only DB. It's unspecified
            # whether attempting to use it after this point is an error or just
            # never returns any results, so we can't test what it does, only
            # that it's not an error.
            existingReadOnlyDatabase.dropTemporaryTable(table2)
        # Drop the original temporary table.
        newDatabase.dropTemporaryTable(table1)

    def testSchemaSeparation(self):
        """Test that creating two different `Database` instances allows us
        to create different tables with the same name in each.
        """
        db1 = self.makeEmptyDatabase(origin=1)
        with db1.declareStaticTables(create=True) as context:
            tables = context.addTableTuple(STATIC_TABLE_SPECS)
        self.checkStaticSchema(tables)

        db2 = self.makeEmptyDatabase(origin=2)
        # Make the DDL here intentionally different so we'll definitely
        # notice if db1 and db2 are pointing at the same schema.
        spec = ddl.TableSpec(fields=[ddl.FieldSpec("id", dtype=sqlalchemy.Integer, primaryKey=True)])
        with db2.declareStaticTables(create=True) as context:
            table = context.addTable("a", spec)
        self.checkTable(spec, table)

    def testInsertQueryDelete(self):
        """Test the `Database.insert`, `Database.query`, and `Database.delete`
        methods, as well as the `Base64Region` type and the ``onDelete``
        argument to `ddl.ForeignKeySpec`.
        """
        db = self.makeEmptyDatabase(origin=1)
        with db.declareStaticTables(create=True) as context:
            tables = context.addTableTuple(STATIC_TABLE_SPECS)
        # Insert a single, non-autoincrement row that contains a region and
        # query to get it back.
        region = ConvexPolygon((UnitVector3d(1, 0, 0), UnitVector3d(0, 1, 0), UnitVector3d(0, 0, 1)))
        row = {"name": "a1", "region": region}
        db.insert(tables.a, row)
        self.assertEqual([dict(r) for r in db.query(tables.a.select()).fetchall()], [row])
        # Insert multiple autoincrement rows but do not try to get the IDs
        # back immediately.
        db.insert(tables.b, {"name": "b1", "value": 10}, {"name": "b2", "value": 20})
        results = [dict(r) for r in db.query(tables.b.select().order_by("id")).fetchall()]
        self.assertEqual(len(results), 2)
        for row in results:
            self.assertIn(row["name"], ("b1", "b2"))
            self.assertIsInstance(row["id"], int)
        self.assertGreater(results[1]["id"], results[0]["id"])
        # Insert multiple autoincrement rows and get the IDs back from insert.
        rows = [{"name": "b3", "value": 30}, {"name": "b4", "value": 40}]
        ids = db.insert(tables.b, *rows, returnIds=True)
        results = [
            dict(r) for r in db.query(
                tables.b.select().where(tables.b.columns.id > results[1]["id"])
            ).fetchall()
        ]
        expected = [dict(row, id=id) for row, id in zip(rows, ids)]
        self.assertCountEqual(results, expected)
        self.assertTrue(all(result["id"] is not None for result in results))
        # Insert multiple rows into a table with an autoincrement+origin
        # primary key, then use the returned IDs to insert into a dynamic
        # table.
        rows = [{"origin": db.origin, "b_id": results[0]["id"]},
                {"origin": db.origin, "b_id": None}]
        ids = db.insert(tables.c, *rows, returnIds=True)
        results = [dict(r) for r in db.query(tables.c.select()).fetchall()]
        expected = [dict(row, id=id) for row, id in zip(rows, ids)]
        self.assertCountEqual(results, expected)
        self.assertTrue(all(result["id"] is not None for result in results))
        # Add the dynamic table.
        d = db.ensureTableExists("d", DYNAMIC_TABLE_SPEC)
        # Insert into it.
        rows = [{"c_origin": db.origin, "c_id": id, "a_name": "a1"} for id in ids]
        db.insert(d, *rows)
        results = [dict(r) for r in db.query(d.select()).fetchall()]
        self.assertCountEqual(rows, results)
        # Insert multiple rows into a table with an autoincrement+origin
        # primary key (this is especially tricky for SQLite, but good to test
        # for all DBs), but pass in a value for the autoincrement key.
        # For extra complexity, we re-use the autoincrement value with a
        # different value for origin.
        rows2 = [{"id": 700, "origin": db.origin, "b_id": None},
                 {"id": 700, "origin": 60, "b_id": None},
                 {"id": 1, "origin": 60, "b_id": None}]
        db.insert(tables.c, *rows2)
        results = [dict(r) for r in db.query(tables.c.select()).fetchall()]
        self.assertCountEqual(results, expected + rows2)
        self.assertTrue(all(result["id"] is not None for result in results))

        # Define a 'SELECT COUNT(*)' query for later use.
        count = sqlalchemy.sql.select([sqlalchemy.sql.func.count()])
        # Get the values we inserted into table b.
        bValues = [dict(r) for r in db.query(tables.b.select()).fetchall()]
        # Remove two rows from table b by ID.
        n = db.delete(tables.b, ["id"], {"id": bValues[0]["id"]}, {"id": bValues[1]["id"]})
        self.assertEqual(n, 2)
        # Remove the other two rows from table b by name.
        n = db.delete(tables.b, ["name"], {"name": bValues[2]["name"]}, {"name": bValues[3]["name"]})
        self.assertEqual(n, 2)
        # There should now be no rows in table b.
        self.assertEqual(
            db.query(count.select_from(tables.b)).scalar(),
            0
        )
        # All b_id values in table c should now be NULL, because there's an
        # onDelete='SET NULL' foreign key.
        self.assertEqual(
            db.query(count.select_from(tables.c).where(tables.c.columns.b_id != None)).scalar(),  # noqa:E711
            0
        )
        # Remove all rows in table a (there's only one); this should remove
        # all rows in d due to onDelete='CASCADE'.
        n = db.delete(tables.a, [])
        self.assertEqual(n, 1)
        self.assertEqual(db.query(count.select_from(tables.a)).scalar(), 0)
        self.assertEqual(db.query(count.select_from(d)).scalar(), 0)

    def testUpdate(self):
        """Tests for `Database.update`.
        """
        db = self.makeEmptyDatabase(origin=1)
        with db.declareStaticTables(create=True) as context:
            tables = context.addTableTuple(STATIC_TABLE_SPECS)
        # Insert two rows into table a, both without regions.
        db.insert(tables.a, {"name": "a1"}, {"name": "a2"})
        # Update one of the rows with a region.  The {"name": "k"} argument
        # says rows should be matched on the "name" column, using the "k" key
        # in the row dict.
        region = ConvexPolygon((UnitVector3d(1, 0, 0), UnitVector3d(0, 1, 0), UnitVector3d(0, 0, 1)))
        n = db.update(tables.a, {"name": "k"}, {"k": "a2", "region": region})
        self.assertEqual(n, 1)
        sql = sqlalchemy.sql.select([tables.a.columns.name, tables.a.columns.region]).select_from(tables.a)
        self.assertCountEqual(
            [dict(r) for r in db.query(sql).fetchall()],
            [{"name": "a1", "region": None}, {"name": "a2", "region": region}]
        )

    def testSync(self):
        """Tests for `Database.sync`.
        """
        db = self.makeEmptyDatabase(origin=1)
        with db.declareStaticTables(create=True) as context:
            tables = context.addTableTuple(STATIC_TABLE_SPECS)
        # Insert a row with sync, because it doesn't exist yet.
        values, inserted = db.sync(tables.b, keys={"name": "b1"}, extra={"value": 10}, returning=["id"])
        self.assertTrue(inserted)
        self.assertEqual([{"id": values["id"], "name": "b1", "value": 10}],
                         [dict(r) for r in db.query(tables.b.select()).fetchall()])
        # Repeat that operation, which should do nothing but return the
        # requested values.
        values, inserted = db.sync(tables.b, keys={"name": "b1"}, extra={"value": 10}, returning=["id"])
        self.assertFalse(inserted)
        self.assertEqual([{"id": values["id"], "name": "b1", "value": 10}],
                         [dict(r) for r in db.query(tables.b.select()).fetchall()])
        # Repeat the operation without the 'extra' arg, which should also just
        # return the existing row.
        values, inserted = db.sync(tables.b, keys={"name": "b1"}, returning=["id"])
        self.assertFalse(inserted)
        self.assertEqual([{"id": values["id"], "name": "b1", "value": 10}],
                         [dict(r) for r in db.query(tables.b.select()).fetchall()])
        # Repeat the operation with a different value in 'extra'. That still
        # shouldn't be an error, because 'extra' is only used if we really do
        # insert. Also drop the 'returning' argument.
        _, inserted = db.sync(tables.b, keys={"name": "b1"}, extra={"value": 20})
        self.assertFalse(inserted)
        self.assertEqual([{"id": values["id"], "name": "b1", "value": 10}],
                         [dict(r) for r in db.query(tables.b.select()).fetchall()])
        # Repeat the operation with the correct value in 'compared' instead of
        # 'extra'.
        _, inserted = db.sync(tables.b, keys={"name": "b1"}, compared={"value": 10})
        self.assertFalse(inserted)
        self.assertEqual([{"id": values["id"], "name": "b1", "value": 10}],
                         [dict(r) for r in db.query(tables.b.select()).fetchall()])
        # Repeat the operation with an incorrect value in 'compared'; this
        # should raise.
        with self.assertRaises(DatabaseConflictError):
            db.sync(tables.b, keys={"name": "b1"}, compared={"value": 20})
        # Try to sync inside a transaction. That's always an error, regardless
        # of whether there would be an insertion or not.
        with self.assertRaises(AssertionError):
            with db.transaction():
                db.sync(tables.b, keys={"name": "b1"}, extra={"value": 10})
        with self.assertRaises(AssertionError):
            with db.transaction():
                db.sync(tables.b, keys={"name": "b2"}, extra={"value": 20})
        # Try to sync in a read-only database. This should work if and only
        # if the matching row already exists.
        with self.asReadOnly(db) as rodb:
            with rodb.declareStaticTables(create=False) as context:
                tables = context.addTableTuple(STATIC_TABLE_SPECS)
            _, inserted = rodb.sync(tables.b, keys={"name": "b1"})
            self.assertFalse(inserted)
            self.assertEqual([{"id": values["id"], "name": "b1", "value": 10}],
                             [dict(r) for r in rodb.query(tables.b.select()).fetchall()])
            with self.assertRaises(ReadOnlyDatabaseError):
                rodb.sync(tables.b, keys={"name": "b2"}, extra={"value": 20})

    def testReplace(self):
        """Tests for `Database.replace`.
        """
        db = self.makeEmptyDatabase(origin=1)
        with db.declareStaticTables(create=True) as context:
            tables = context.addTableTuple(STATIC_TABLE_SPECS)
        # Use 'replace' to insert a single row that contains a region and
        # query to get it back.
        region = ConvexPolygon((UnitVector3d(1, 0, 0), UnitVector3d(0, 1, 0), UnitVector3d(0, 0, 1)))
        row1 = {"name": "a1", "region": region}
        db.replace(tables.a, row1)
        self.assertEqual([dict(r) for r in db.query(tables.a.select()).fetchall()], [row1])
        # Insert another row without a region.
        row2 = {"name": "a2", "region": None}
        db.replace(tables.a, row2)
        self.assertCountEqual([dict(r) for r in db.query(tables.a.select()).fetchall()], [row1, row2])
        # Use replace to re-insert both of those rows again, which should do
        # nothing.
        db.replace(tables.a, row1, row2)
        self.assertCountEqual([dict(r) for r in db.query(tables.a.select()).fetchall()], [row1, row2])
        # Replace row1 with a row with no region, while reinserting row2.
        row1a = {"name": "a1", "region": None}
        db.replace(tables.a, row1a, row2)
        self.assertCountEqual([dict(r) for r in db.query(tables.a.select()).fetchall()], [row1a, row2])
        # Replace both rows, returning row1 to its original state, while
        # adding a new one.  Pass them in a different order.
        row2a = {"name": "a2", "region": region}
        row3 = {"name": "a3", "region": None}
        db.replace(tables.a, row3, row2a, row1)
        self.assertCountEqual([dict(r) for r in db.query(tables.a.select()).fetchall()], [row1, row2a, row3])