Coverage for tests/test_obscore.py: 18%
313 statements
coverage.py v7.4.0, created at 2024-01-25 10:50 +0000
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import gc
import os
import tempfile
import unittest
import warnings
from abc import abstractmethod
from typing import cast

import astropy.time
import sqlalchemy
from lsst.daf.butler import (
    CollectionType,
    Config,
    DataCoordinate,
    DatasetRef,
    DatasetType,
    StorageClassFactory,
)
from lsst.daf.butler.registry import RegistryConfig, _RegistryFactory
from lsst.daf.butler.registry.obscore import (
    DatasetTypeConfig,
    ObsCoreConfig,
    ObsCoreLiveTableManager,
    ObsCoreSchema,
)
from lsst.daf.butler.registry.obscore._schema import _STATIC_COLUMNS
from lsst.daf.butler.registry.sql_registry import SqlRegistry
from lsst.daf.butler.tests.utils import TestCaseMixin, makeTestTempDir, removeTestTempDir
from lsst.sphgeom import Box, ConvexPolygon, LonLat, UnitVector3d
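
# testing.postgresql is optional; the PostgreSQL-backed test cases below are
# skipped when it is not installed.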
try:
    import testing.postgresql  # type: ignore
except ImportError:
    testing = None

TESTDIR = os.path.abspath(os.path.dirname(__file__))


class ObsCoreTests(TestCaseMixin):
    """Base class for testing obscore manager functionality."""

    root: str

    def make_registry(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> SqlRegistry:
        """Create new empty Registry."""
        config = self.make_registry_config(collections, collection_type)
        registry = _RegistryFactory(config).create_from_config(butlerRoot=self.root)
        self.initialize_registry(registry)
        return registry

    @abstractmethod
    def make_registry_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> RegistryConfig:
        """Make Registry configuration."""
        raise NotImplementedError()

    def initialize_registry(self, registry: SqlRegistry) -> None:
        """Populate Registry with the things that we need for tests."""
        registry.insertDimensionData("instrument", {"name": "DummyCam"})
        registry.insertDimensionData(
            "physical_filter", {"instrument": "DummyCam", "name": "d-r", "band": "r"}
        )
        for detector in (1, 2, 3, 4):
            registry.insertDimensionData(
                "detector", {"instrument": "DummyCam", "id": detector, "full_name": f"detector{detector}"}
            )

        for exposure in (1, 2, 3, 4):
            registry.insertDimensionData(
                "exposure",
                {
                    "instrument": "DummyCam",
                    "id": exposure,
                    "obs_id": f"exposure{exposure}",
                    "physical_filter": "d-r",
                },
            )

        registry.insertDimensionData("visit_system", {"instrument": "DummyCam", "id": 1, "name": "default"})

        for visit in (1, 2, 3, 4, 9):
            visit_start = astropy.time.Time(f"2020-01-01 08:0{visit}:00", scale="tai")
            visit_end = astropy.time.Time(f"2020-01-01 08:0{visit}:45", scale="tai")
            registry.insertDimensionData(
                "visit",
                {
                    "instrument": "DummyCam",
                    "id": visit,
                    "name": f"visit{visit}",
                    "physical_filter": "d-r",
                    "datetime_begin": visit_start,
                    "datetime_end": visit_end,
                },
            )
            registry.insertDimensionData(
                "visit_system_membership",
                {"instrument": "DummyCam", "visit": visit, "visit_system": 1},
            )

        # Only a couple of exposures are linked to visits.
        for visit in (1, 2):
            registry.insertDimensionData(
                "visit_definition",
                {
                    "instrument": "DummyCam",
                    "exposure": visit,
                    "visit": visit,
                },
            )

        # Map (visit, detector) to a region; each combination gets its own
        # 2x2-degree patch of sky.
        self.regions: dict[tuple[int, int], ConvexPolygon] = {}
        for visit in (1, 2, 3, 4):
            for detector in (1, 2, 3, 4):
                lon = visit * 90 - 88
                lat = detector * 2 - 5
                region = ConvexPolygon(
                    [
                        UnitVector3d(LonLat.fromDegrees(lon - 1.0, lat - 1.0)),
                        UnitVector3d(LonLat.fromDegrees(lon + 1.0, lat - 1.0)),
                        UnitVector3d(LonLat.fromDegrees(lon + 1.0, lat + 1.0)),
                        UnitVector3d(LonLat.fromDegrees(lon - 1.0, lat + 1.0)),
                    ]
                )
                registry.insertDimensionData(
                    "visit_detector_region",
                    {
                        "instrument": "DummyCam",
                        "visit": visit,
                        "detector": detector,
                        "region": region,
                    },
                )
                self.regions[(visit, detector)] = region

        # Visit 9 has a non-polygon region.
        for detector in (1, 2, 3, 4):
            lat = detector * 2 - 5
            region = Box.fromDegrees(17.0, lat - 1.0, 19.0, lat + 1.0)
            registry.insertDimensionData(
                "visit_detector_region",
                {
                    "instrument": "DummyCam",
                    "visit": 9,
                    "detector": detector,
                    "region": region,
                },
            )

        # Add a few dataset types.
        storage_class_factory = StorageClassFactory()
        storage_class = storage_class_factory.getStorageClass("StructuredDataDict")

        self.dataset_types: dict[str, DatasetType] = {}

        dimensions = registry.dimensions.conform(["instrument", "physical_filter", "detector", "exposure"])
        self.dataset_types["raw"] = DatasetType("raw", dimensions, storage_class)

        dimensions = registry.dimensions.conform(["instrument", "physical_filter", "detector", "visit"])
        self.dataset_types["calexp"] = DatasetType("calexp", dimensions, storage_class)

        dimensions = registry.dimensions.conform(["instrument", "physical_filter", "detector", "visit"])
        self.dataset_types["no_obscore"] = DatasetType("no_obscore", dimensions, storage_class)

        dimensions = registry.dimensions.conform(["instrument", "physical_filter", "detector"])
        self.dataset_types["calib"] = DatasetType("calib", dimensions, storage_class, isCalibration=True)

        for dataset_type in self.dataset_types.values():
            registry.registerDatasetType(dataset_type)

        # Add a few run collections.
        for run in (1, 2, 3, 4, 5, 6):
            registry.registerRun(f"run{run}")

        # Add a few chained collections; run6 is not in any chained collection.
        registry.registerCollection("chain12", CollectionType.CHAINED)
        registry.setCollectionChain("chain12", ("run1", "run2"))
        registry.registerCollection("chain34", CollectionType.CHAINED)
        registry.setCollectionChain("chain34", ("run3", "run4"))
        registry.registerCollection("chain-all", CollectionType.CHAINED)
        registry.setCollectionChain("chain-all", ("chain12", "chain34", "run5"))

        # And a tagged collection.
        registry.registerCollection("tagged", CollectionType.TAGGED)

    def make_obscore_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> Config:
        """Make configuration for obscore manager."""
        obscore_config = Config(os.path.join(TESTDIR, "config", "basic", "obscore.yaml"))
        if collections is not None:
            obscore_config["collections"] = collections
        if collection_type is not None:
            obscore_config["collection_type"] = collection_type
        return obscore_config

    def _insert_dataset(
        self, registry: SqlRegistry, run: str, dataset_type: str, do_import: bool = False, **kwargs
    ) -> DatasetRef:
        """Insert or import one dataset into a specified run collection."""
        data_id = {"instrument": "DummyCam", "physical_filter": "d-r"}
        data_id.update(kwargs)
        coordinate = DataCoordinate.standardize(data_id, universe=registry.dimensions)
        if do_import:
            ds_type = self.dataset_types[dataset_type]
            ref = DatasetRef(ds_type, coordinate, run=run)
            [ref] = registry._importDatasets([ref])
        else:
            [ref] = registry.insertDatasets(dataset_type, [data_id], run=run)
        return ref

    def _insert_datasets(self, registry: SqlRegistry, do_import: bool = False) -> list[DatasetRef]:
        """Insert a small bunch of datasets into every run collection."""
        return [
            self._insert_dataset(registry, "run1", "raw", detector=1, exposure=1, do_import=do_import),
            self._insert_dataset(registry, "run2", "calexp", detector=2, visit=2, do_import=do_import),
            self._insert_dataset(registry, "run3", "raw", detector=3, exposure=3, do_import=do_import),
            self._insert_dataset(registry, "run4", "calexp", detector=4, visit=4, do_import=do_import),
            self._insert_dataset(registry, "run5", "calexp", detector=4, visit=4, do_import=do_import),
            # This dataset type is not configured, so it will not be in obscore.
            self._insert_dataset(registry, "run5", "no_obscore", detector=1, visit=1, do_import=do_import),
            self._insert_dataset(registry, "run6", "raw", detector=1, exposure=4, do_import=do_import),
        ]

    def test_config_errors(self):
        """Test for handling various configuration problems."""
        # This raises pydantic ValidationError, which wraps ValueError.
        exception_re = "'collections' must have one element"
        with self.assertRaisesRegex(ValueError, exception_re):
            self.make_registry(None, "TAGGED")

        with self.assertRaisesRegex(ValueError, exception_re):
            self.make_registry([], "TAGGED")

        with self.assertRaisesRegex(ValueError, exception_re):
            self.make_registry(["run1", "run2"], "TAGGED")

        # Invalid regex.
        with self.assertRaisesRegex(ValueError, "Failed to compile regex"):
            self.make_registry(["+run"], "RUN")

    def test_schema(self):
        """Check how the obscore schema is constructed."""
        config = ObsCoreConfig(obs_collection="", dataset_types={}, facility_name="FACILITY")
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(list(table_spec.fields.names), [col.name for col in _STATIC_COLUMNS])

        # Extra columns from top-level config.
        config = ObsCoreConfig(
            obs_collection="",
            extra_columns={"c1": 1, "c2": "string", "c3": {"template": "{calib_level}", "type": "float"}},
            dataset_types={},
            facility_name="FACILITY",
        )
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(
            list(table_spec.fields.names),
            [col.name for col in _STATIC_COLUMNS] + ["c1", "c2", "c3"],
        )
        self.assertEqual(table_spec.fields["c1"].dtype, sqlalchemy.BigInteger)
        self.assertEqual(table_spec.fields["c2"].dtype, sqlalchemy.String)
        self.assertEqual(table_spec.fields["c3"].dtype, sqlalchemy.Float)

        # Extra columns from per-dataset type configs.
        config = ObsCoreConfig(
            obs_collection="",
            extra_columns={"c1": 1},
            dataset_types={
                "raw": DatasetTypeConfig(
                    name="raw",
                    dataproduct_type="image",
                    calib_level=1,
                    extra_columns={"c2": "string"},
                ),
                "calexp": DatasetTypeConfig(
                    dataproduct_type="image",
                    calib_level=2,
                    extra_columns={"c3": 1e10},
                ),
            },
            facility_name="FACILITY",
        )
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(
            list(table_spec.fields.names),
            [col.name for col in _STATIC_COLUMNS] + ["c1", "c2", "c3"],
        )
        self.assertEqual(table_spec.fields["c1"].dtype, sqlalchemy.BigInteger)
        self.assertEqual(table_spec.fields["c2"].dtype, sqlalchemy.String)
        self.assertEqual(table_spec.fields["c3"].dtype, sqlalchemy.Float)

        # Columns in the configs with the same names as in the static list;
        # their types are not overridden.
        config = ObsCoreConfig(
            version=0,
            obs_collection="",
            extra_columns={"t_xel": 1e10},
            dataset_types={
                "raw": DatasetTypeConfig(
                    dataproduct_type="image",
                    calib_level=1,
                    extra_columns={"target_name": 1},
                ),
                "calexp": DatasetTypeConfig(
                    dataproduct_type="image",
                    calib_level=2,
                    extra_columns={"em_xel": "string"},
                ),
            },
            facility_name="FACILITY",
        )
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(list(table_spec.fields.names), [col.name for col in _STATIC_COLUMNS])
        self.assertEqual(table_spec.fields["t_xel"].dtype, sqlalchemy.Integer)
        self.assertEqual(table_spec.fields["target_name"].dtype, sqlalchemy.String)
        self.assertEqual(table_spec.fields["em_xel"].dtype, sqlalchemy.Integer)

    def test_insert_existing_collection(self):
        """Test insert and import registry methods, with various restrictions
        on collection names.
        """
        # First item is the collection list (entries may be regular
        # expressions), second item is the expected record count.
        test_data = (
            (None, 6),
            (["run1", "run2"], 2),
            (["run[34]"], 2),
            (["[rR]un[^6]"], 5),
        )

        for collections, count in test_data:
            for do_import in (False, True):
                registry = self.make_registry(collections)
                obscore = registry.obsCoreTableManager
                assert obscore is not None
                self._insert_datasets(registry, do_import)

                with obscore.query() as result:
                    rows = list(result)
                self.assertEqual(len(rows), count)

                # Also check the `query` method with COUNT(*).
                with obscore.query([sqlalchemy.sql.func.count()]) as result:
                    scalar = result.scalar_one()
                self.assertEqual(scalar, count)

    def test_drop_datasets(self):
        """Test for dropping datasets after obscore insert."""
        collections = None
        registry = self.make_registry(collections)
        obscore = registry.obsCoreTableManager
        assert obscore is not None
        refs = self._insert_datasets(registry)

        with obscore.query() as result:
            rows = list(result)
        self.assertEqual(len(rows), 6)

        # drop single dataset
        registry.removeDatasets(ref for ref in refs if ref.run == "run1")
        with obscore.query() as result:
            rows = list(result)
        self.assertEqual(len(rows), 5)

        # drop whole run collection
        registry.removeCollection("run6")
        with obscore.query() as result:
            rows = list(result)
        self.assertEqual(len(rows), 4)

    def test_associate(self):
        """Test for associating datasets to TAGGED collection."""
        collections = ["tagged"]
        registry = self.make_registry(collections, "TAGGED")
        obscore = registry.obsCoreTableManager
        assert obscore is not None
        refs = self._insert_datasets(registry)

        with obscore.query() as result:
            rows = list(result)
        self.assertEqual(len(rows), 0)

        # Associate datasets from run1; they should appear in obscore.
        registry.associate("tagged", (ref for ref in refs if ref.run == "run1"))
        with obscore.query() as result:
            rows = list(result)
        self.assertEqual(len(rows), 1)

        # Associate datasets that are not in obscore yet.
        registry.associate("tagged", (ref for ref in refs if ref.run == "run3"))
        with obscore.query() as result:
            rows = list(result)
        self.assertEqual(len(rows), 2)

        # Disassociate them.
        registry.disassociate("tagged", (ref for ref in refs if ref.run == "run3"))
        with obscore.query() as result:
            rows = list(result)
        self.assertEqual(len(rows), 1)

        # Disassociating a dataset that is not associated should be OK and
        # not throw.
        registry.disassociate("tagged", (ref for ref in refs if ref.run == "run2"))
        with obscore.query() as result:
            rows = list(result)
        self.assertEqual(len(rows), 1)

        registry.disassociate("tagged", (ref for ref in refs if ref.run == "run1"))
        with obscore.query() as result:
            rows = list(result)
        self.assertEqual(len(rows), 0)

    @unittest.skip("Temporary, while deprecation warnings are present.")
    def test_region_type_warning(self) -> None:
        """Test that non-polygon region generates one or more warnings."""
        collections = None
        registry = self.make_registry(collections)

        with warnings.catch_warnings(record=True) as warning_records:
            self._insert_dataset(registry, "run2", "calexp", detector=2, visit=9)
        self.assertEqual(len(warning_records), 1)
        for record in warning_records:
            self.assertRegex(
                str(record.message),
                "Unexpected region type: .*lsst.sphgeom._sphgeom.Box.*",
            )

    def test_update_exposure_region(self) -> None:
        """Test for update_exposure_regions method."""
        registry = self.make_registry(["run1"])
        obscore = registry.obsCoreTableManager
        assert obscore is not None

        # Exposure 4 is not associated with any visit.
        for detector in (1, 2, 3, 4):
            self._insert_dataset(registry, "run1", "raw", detector=detector, exposure=4)

        # All spatial columns should be None.
        with obscore.query() as result:
            rows = list(result)
        self.assertEqual(len(rows), 4)
        for row in rows:
            self.assertIsNone(row.s_ra)
            self.assertIsNone(row.s_dec)
            self.assertIsNone(row.s_region)

        # Assign regions from visit 4.
        count = obscore.update_exposure_regions(
            "DummyCam", [(4, 1, self.regions[(4, 1)]), (4, 2, self.regions[(4, 2)])]
        )
        self.assertEqual(count, 2)

        with obscore.query(["s_ra", "s_dec", "s_region", "lsst_detector"]) as result:
            rows = list(result)
        self.assertEqual(len(rows), 4)
        for row in rows:
            if row.lsst_detector in (1, 2):
                self.assertIsNotNone(row.s_ra)
                self.assertIsNotNone(row.s_dec)
                self.assertIsNotNone(row.s_region)
            else:
                self.assertIsNone(row.s_ra)
                self.assertIsNone(row.s_dec)
                self.assertIsNone(row.s_region)


class SQLiteObsCoreTest(ObsCoreTests, unittest.TestCase):
    """Unit test for obscore with SQLite backend."""

    def setUp(self):
        self.root = makeTestTempDir(TESTDIR)

    def tearDown(self):
        removeTestTempDir(self.root)

    def make_registry_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> RegistryConfig:
        # docstring inherited from a base class
        _, filename = tempfile.mkstemp(dir=self.root, suffix=".sqlite3")
        config = RegistryConfig()
        config["db"] = f"sqlite:///{filename}"
        config["managers", "obscore"] = {
            "cls": "lsst.daf.butler.registry.obscore.ObsCoreLiveTableManager",
            "config": self.make_obscore_config(collections, collection_type),
        }
        return config


class ClonedSqliteObscoreTest(SQLiteObsCoreTest, unittest.TestCase):
    """Unit test for obscore manager created via clone()."""

    def make_registry(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> SqlRegistry:
        """Create new empty Registry."""
        original = super().make_registry(collections, collection_type)
        return original.copy()


@unittest.skipUnless(testing is not None, "testing.postgresql module not found")
class PostgresObsCoreTest(ObsCoreTests, unittest.TestCase):
    """Unit test for obscore with PostgreSQL backend."""

    @classmethod
    def _handler(cls, postgresql):
        # The registry schema uses GiST exclusion constraints, so the
        # btree_gist extension must be available in the test database.
        engine = sqlalchemy.engine.create_engine(postgresql.url())
        with engine.begin() as connection:
            connection.execute(sqlalchemy.text("CREATE EXTENSION btree_gist;"))

    @classmethod
    def setUpClass(cls):
        # Create the postgres test server.
        cls.postgresql = testing.postgresql.PostgresqlFactory(
            cache_initialized_db=True, on_initialized=cls._handler
        )
        super().setUpClass()

    @classmethod
    def tearDownClass(cls):
        # Clean up any lingering SQLAlchemy engines/connections
        # so they're closed before we shut down the server.
        gc.collect()
        cls.postgresql.clear_cache()
        super().tearDownClass()

    def setUp(self):
        self.root = makeTestTempDir(TESTDIR)
        self.server = self.postgresql()
        self.count = 0

    def tearDown(self):
        removeTestTempDir(self.root)
        # Shut down the per-test server.
        self.server.stop()

    def make_registry_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> RegistryConfig:
        # docstring inherited from a base class
        self.count += 1
        config = RegistryConfig()
        config["db"] = self.server.url()
        # Use a unique namespace for each instance; some tests may use
        # sub-tests.
        config["namespace"] = f"namespace{self.count}"
        config["managers", "obscore"] = {
            "cls": "lsst.daf.butler.registry.obscore.ObsCoreLiveTableManager",
            "config": self.make_obscore_config(collections, collection_type),
        }
        return config


@unittest.skipUnless(testing is not None, "testing.postgresql module not found")
class PostgresPgSphereObsCoreTest(PostgresObsCoreTest):
    """Unit test for obscore with PostgreSQL backend and pgsphere plugin."""

    @classmethod
    def _handler(cls, postgresql):
        super()._handler(postgresql)
        engine = sqlalchemy.engine.create_engine(postgresql.url())
        with engine.begin() as connection:
            try:
                connection.execute(sqlalchemy.text("CREATE EXTENSION pg_sphere"))
            except sqlalchemy.exc.DatabaseError as exc:
                raise unittest.SkipTest(f"pg_sphere extension does not exist: {exc}") from None

    def make_obscore_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> Config:
        """Make configuration for obscore manager."""
        obscore_config = super().make_obscore_config(collections, collection_type)
        obscore_config["spatial_plugins"] = {
            "pgsphere": {
                "cls": "lsst.daf.butler.registry.obscore.pgsphere.PgSphereObsCorePlugin",
                "config": {
                    "region_column": "pgs_region",
                    "position_column": "pgs_center",
                },
            }
        }
        return obscore_config

    def test_spatial(self):
        """Test that pgsphere plugin fills spatial columns."""
        collections = None
        registry = self.make_registry(collections)
        obscore = registry.obsCoreTableManager
        assert obscore is not None
        self._insert_datasets(registry)

        # Select everything.
        with obscore.query() as result:
            rows = list(result)
        self.assertEqual(len(rows), 6)

        db = registry._db
        assert registry.obsCoreTableManager is not None
        table = cast(ObsCoreLiveTableManager, registry.obsCoreTableManager).table

        # It is not easy to generate spatial queries in SQLAlchemy, so use
        # plain-text queries for testing.

        # Position matching visit=1; there is a single dataset.
        query = f"SELECT * FROM {table.key} WHERE pgs_center <-> '(2d,0d)'::spoint < .1"
        with db.query(sqlalchemy.text(query)) as results:
            self.assertEqual(len(list(results)), 1)

        # Position matching visit=4; there are two datasets.
        query = f"SELECT * FROM {table.key} WHERE pgs_center <-> '(272d,0d)'::spoint < .1"
        with db.query(sqlalchemy.text(query)) as results:
            self.assertEqual(len(list(results)), 2)

        # Position inside a region of visit=1; there is a single dataset.
        query = f"SELECT * FROM {table.key} WHERE '(2d,-3d)'::spoint @ pgs_region"
        with db.query(sqlalchemy.text(query)) as results:
            self.assertEqual(len(list(results)), 1)

        # Position inside a region of visit=4; there are two datasets.
        query = f"SELECT * FROM {table.key} WHERE '(272d,3d)'::spoint @ pgs_region"
        with db.query(sqlalchemy.text(query)) as results:
            self.assertEqual(len(list(results)), 2)


if __name__ == "__main__":
    unittest.main()