Coverage for tests/test_obscore.py: 15%
308 statements
« prev ^ index » next coverage.py v6.5.0, created at 2023-04-04 02:06 -0700
« prev ^ index » next coverage.py v6.5.0, created at 2023-04-04 02:06 -0700
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22import gc
23import os
24import tempfile
25import unittest
26import warnings
27from abc import abstractmethod
28from typing import TYPE_CHECKING, Any, cast
30import astropy.time
31import sqlalchemy
32from lsst.daf.butler import (
33 CollectionType,
34 Config,
35 DataCoordinate,
36 DatasetIdGenEnum,
37 DatasetRef,
38 DatasetType,
39 StorageClassFactory,
40)
41from lsst.daf.butler.registries.sql import SqlRegistry
42from lsst.daf.butler.registry import Registry, RegistryConfig
43from lsst.daf.butler.registry.obscore import (
44 DatasetTypeConfig,
45 ObsCoreConfig,
46 ObsCoreLiveTableManager,
47 ObsCoreSchema,
48)
49from lsst.daf.butler.registry.obscore._schema import _STATIC_COLUMNS
50from lsst.daf.butler.tests.utils import makeTestTempDir, removeTestTempDir
51from lsst.sphgeom import Box, ConvexPolygon, LonLat, UnitVector3d
try:
    import testing.postgresql  # type: ignore
except ImportError:
    # Module not available; PostgreSQL-backed test classes check this
    # sentinel in their skipUnless decorators and are skipped.
    testing = None

# Directory containing this test module; used to locate config files
# (e.g. config/basic/obscore.yaml) and to create temporary test roots.
TESTDIR = os.path.abspath(os.path.dirname(__file__))
class ObsCoreTests:
    """Base class for testing obscore manager functionality.

    This is a mixin: concrete subclasses also derive from
    ``unittest.TestCase`` and provide a backend-specific
    `make_registry_config` plus a ``root`` directory.
    """

    # Temporary butler root directory; set by the concrete subclass' setUp.
    root: str

    def make_registry(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> Registry:
        """Create new empty Registry.

        Parameters are forwarded to the obscore manager configuration; the
        returned registry is pre-populated with the test dimension records,
        dataset types, and collections via `initialize_registry`.
        """
        config = self.make_registry_config(collections, collection_type)
        registry = Registry.createFromConfig(config, butlerRoot=self.root)
        self.initialize_registry(registry)
        return registry

    @abstractmethod
    def make_registry_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> RegistryConfig:
        """Make Registry configuration.

        Implemented by backend-specific subclasses (SQLite/PostgreSQL).
        """
        raise NotImplementedError()

    def initialize_registry(self, registry: Registry) -> None:
        """Populate Registry with the things that we need for tests."""
        registry.insertDimensionData("instrument", {"name": "DummyCam"})
        registry.insertDimensionData(
            "physical_filter", {"instrument": "DummyCam", "name": "d-r", "band": "r"}
        )
        for detector in (1, 2, 3, 4):
            registry.insertDimensionData(
                "detector", {"instrument": "DummyCam", "id": detector, "full_name": f"detector{detector}"}
            )

        for exposure in (1, 2, 3, 4):
            registry.insertDimensionData(
                "exposure",
                {
                    "instrument": "DummyCam",
                    "id": exposure,
                    "obs_id": f"exposure{exposure}",
                    "physical_filter": "d-r",
                },
            )

        registry.insertDimensionData("visit_system", {"instrument": "DummyCam", "id": 1, "name": "default"})

        # Each visit gets a 45-second observation window at a distinct
        # start time so that temporal columns are populated.
        for visit in (1, 2, 3, 4, 9):
            visit_start = astropy.time.Time(f"2020-01-01 08:0{visit}:00", scale="tai")
            visit_end = astropy.time.Time(f"2020-01-01 08:0{visit}:45", scale="tai")
            registry.insertDimensionData(
                "visit",
                {
                    "instrument": "DummyCam",
                    "id": visit,
                    "name": f"visit{visit}",
                    "physical_filter": "d-r",
                    "visit_system": 1,
                    "datetime_begin": visit_start,
                    "datetime_end": visit_end,
                },
            )

        # Only couple of exposures are linked to visits.
        for visit in (1, 2):
            registry.insertDimensionData(
                "visit_definition",
                {
                    "instrument": "DummyCam",
                    "exposure": visit,
                    "visit": visit,
                },
            )

        # map visit and detector to region
        self.regions: dict[tuple[int, int], ConvexPolygon] = {}
        for visit in (1, 2, 3, 4):
            for detector in (1, 2, 3, 4):
                # 2x2-degree squares: longitude depends on visit,
                # latitude on detector, so every (visit, detector)
                # combination gets a distinct, non-overlapping region.
                lon = visit * 90 - 88
                lat = detector * 2 - 5
                region = ConvexPolygon(
                    [
                        UnitVector3d(LonLat.fromDegrees(lon - 1.0, lat - 1.0)),
                        UnitVector3d(LonLat.fromDegrees(lon + 1.0, lat - 1.0)),
                        UnitVector3d(LonLat.fromDegrees(lon + 1.0, lat + 1.0)),
                        UnitVector3d(LonLat.fromDegrees(lon - 1.0, lat + 1.0)),
                    ]
                )
                registry.insertDimensionData(
                    "visit_detector_region",
                    {
                        "instrument": "DummyCam",
                        "visit": visit,
                        "detector": detector,
                        "region": region,
                    },
                )
                self.regions[(visit, detector)] = region

        # Visit 9 has non-polygon region
        for detector in (1, 2, 3, 4):
            lat = detector * 2 - 5
            region = Box.fromDegrees(17.0, lat - 1.0, 19.0, lat + 1.0)
            registry.insertDimensionData(
                "visit_detector_region",
                {
                    "instrument": "DummyCam",
                    "visit": 9,
                    "detector": detector,
                    "region": region,
                },
            )

        # Add few dataset types
        storage_class_factory = StorageClassFactory()
        storage_class = storage_class_factory.getStorageClass("StructuredDataDict")

        self.dataset_types: dict[str, DatasetType] = {}

        dimensions = registry.dimensions.extract(["instrument", "physical_filter", "detector", "exposure"])
        self.dataset_types["raw"] = DatasetType("raw", dimensions, storage_class)

        dimensions = registry.dimensions.extract(["instrument", "physical_filter", "detector", "visit"])
        self.dataset_types["calexp"] = DatasetType("calexp", dimensions, storage_class)

        # Same dimensions as "calexp", but this type is not configured for
        # obscore and must never produce obscore records.
        dimensions = registry.dimensions.extract(["instrument", "physical_filter", "detector", "visit"])
        self.dataset_types["no_obscore"] = DatasetType("no_obscore", dimensions, storage_class)

        dimensions = registry.dimensions.extract(["instrument", "physical_filter", "detector"])
        self.dataset_types["calib"] = DatasetType("calib", dimensions, storage_class, isCalibration=True)

        for dataset_type in self.dataset_types.values():
            registry.registerDatasetType(dataset_type)

        # Add few run collections.
        for run in (1, 2, 3, 4, 5, 6):
            registry.registerRun(f"run{run}")

        # Add few chained collections, run6 is not in any chained collections.
        registry.registerCollection("chain12", CollectionType.CHAINED)
        registry.setCollectionChain("chain12", ("run1", "run2"))
        registry.registerCollection("chain34", CollectionType.CHAINED)
        registry.setCollectionChain("chain34", ("run3", "run4"))
        registry.registerCollection("chain-all", CollectionType.CHAINED)
        registry.setCollectionChain("chain-all", ("chain12", "chain34", "run5"))

        # And a tagged collection
        registry.registerCollection("tagged", CollectionType.TAGGED)

    def make_obscore_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> Config:
        """Make configuration for obscore manager.

        Starts from the checked-in basic obscore.yaml and overrides the
        collection selection when the corresponding argument is given.
        """
        obscore_config = Config(os.path.join(TESTDIR, "config", "basic", "obscore.yaml"))
        if collections is not None:
            obscore_config["collections"] = collections
        if collection_type is not None:
            obscore_config["collection_type"] = collection_type
        return obscore_config

    def _insert_dataset(
        self, registry: Registry, run: str, dataset_type: str, do_import: bool = False, **kwargs: Any
    ) -> DatasetRef:
        """Insert or import one dataset into a specified run collection.

        ``do_import=True`` exercises the ``_importDatasets`` code path with a
        pre-built `DatasetRef`; otherwise ``insertDatasets`` is used.
        Extra keyword arguments extend the data ID.
        """
        data_id = {"instrument": "DummyCam", "physical_filter": "d-r"}
        data_id.update(kwargs)
        coordinate = DataCoordinate.standardize(data_id, universe=registry.dimensions)
        if do_import:
            ds_type = self.dataset_types[dataset_type]
            dataset_id = registry.datasetIdFactory.makeDatasetId(
                run, ds_type, coordinate, DatasetIdGenEnum.UNIQUE
            )
            ref = DatasetRef(ds_type, coordinate, id=dataset_id, run=run)
            [ref] = registry._importDatasets([ref])
        else:
            [ref] = registry.insertDatasets(dataset_type, [data_id], run=run)
        return ref

    def _insert_datasets(self, registry: Registry, do_import: bool = False) -> list[DatasetRef]:
        """Insert a small bunch of datasets into every run collection."""
        return [
            self._insert_dataset(registry, "run1", "raw", detector=1, exposure=1, do_import=do_import),
            self._insert_dataset(registry, "run2", "calexp", detector=2, visit=2, do_import=do_import),
            self._insert_dataset(registry, "run3", "raw", detector=3, exposure=3, do_import=do_import),
            self._insert_dataset(registry, "run4", "calexp", detector=4, visit=4, do_import=do_import),
            self._insert_dataset(registry, "run5", "calexp", detector=4, visit=4, do_import=do_import),
            # This dataset type is not configured, will not be in obscore.
            self._insert_dataset(registry, "run5", "no_obscore", detector=1, visit=1, do_import=do_import),
            self._insert_dataset(registry, "run6", "raw", detector=1, exposure=4, do_import=do_import),
        ]

    def test_config_errors(self):
        """Test for handling various configuration problems."""
        # This raises pydantic ValidationError, which wraps ValueError
        exception_re = "'collections' must have one element"
        # TAGGED collection type requires exactly one collection name.
        with self.assertRaisesRegex(ValueError, exception_re):
            self.make_registry(None, "TAGGED")

        with self.assertRaisesRegex(ValueError, exception_re):
            self.make_registry([], "TAGGED")

        with self.assertRaisesRegex(ValueError, exception_re):
            self.make_registry(["run1", "run2"], "TAGGED")

        # Invalid regex.
        with self.assertRaisesRegex(ValueError, "Failed to compile regex"):
            self.make_registry(["+run"], "RUN")

    def test_schema(self):
        """Check how obscore schema is constructed"""
        # Default config: schema is exactly the static column list.
        config = ObsCoreConfig(obs_collection="", dataset_types=[], facility_name="FACILITY")
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(list(table_spec.fields.names), [col.name for col in _STATIC_COLUMNS])

        # extra columns from top-level config
        config = ObsCoreConfig(
            obs_collection="",
            extra_columns={"c1": 1, "c2": "string", "c3": {"template": "{calib_level}", "type": "float"}},
            dataset_types=[],
            facility_name="FACILITY",
        )
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(
            list(table_spec.fields.names),
            [col.name for col in _STATIC_COLUMNS] + ["c1", "c2", "c3"],
        )
        # Column types are derived from the extra-column values.
        self.assertEqual(table_spec.fields["c1"].dtype, sqlalchemy.BigInteger)
        self.assertEqual(table_spec.fields["c2"].dtype, sqlalchemy.String)
        self.assertEqual(table_spec.fields["c3"].dtype, sqlalchemy.Float)

        # extra columns from per-dataset type configs
        config = ObsCoreConfig(
            obs_collection="",
            extra_columns={"c1": 1},
            dataset_types={
                "raw": DatasetTypeConfig(
                    name="raw",
                    dataproduct_type="image",
                    calib_level=1,
                    extra_columns={"c2": "string"},
                ),
                "calexp": DatasetTypeConfig(
                    dataproduct_type="image",
                    calib_level=2,
                    extra_columns={"c3": 1e10},
                ),
            },
            facility_name="FACILITY",
        )
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(
            list(table_spec.fields.names),
            [col.name for col in _STATIC_COLUMNS] + ["c1", "c2", "c3"],
        )
        self.assertEqual(table_spec.fields["c1"].dtype, sqlalchemy.BigInteger)
        self.assertEqual(table_spec.fields["c2"].dtype, sqlalchemy.String)
        self.assertEqual(table_spec.fields["c3"].dtype, sqlalchemy.Float)

        # Columns with the same names as in static list in configs, types
        # are not overriden.
        config = ObsCoreConfig(
            version=0,
            obs_collection="",
            extra_columns={"t_xel": 1e10},
            dataset_types={
                "raw": DatasetTypeConfig(
                    dataproduct_type="image",
                    calib_level=1,
                    extra_columns={"target_name": 1},
                ),
                "calexp": DatasetTypeConfig(
                    dataproduct_type="image",
                    calib_level=2,
                    extra_columns={"em_xel": "string"},
                ),
            },
            facility_name="FACILITY",
        )
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(list(table_spec.fields.names), [col.name for col in _STATIC_COLUMNS])
        self.assertEqual(table_spec.fields["t_xel"].dtype, sqlalchemy.Integer)
        self.assertEqual(table_spec.fields["target_name"].dtype, sqlalchemy.String)
        self.assertEqual(table_spec.fields["em_xel"].dtype, sqlalchemy.Integer)

    def test_insert_existing_collection(self):
        """Test insert and import registry methods, with various restrictions
        on collection names.
        """
        # First item is collections, second item is expected record count.
        # Collection names may be plain names or regular expressions.
        test_data = (
            (None, 6),
            (["run1", "run2"], 2),
            (["run[34]"], 2),
            (["[rR]un[^6]"], 5),
        )

        for collections, count in test_data:
            for do_import in (False, True):
                registry = self.make_registry(collections)
                obscore = registry.obsCoreTableManager
                assert obscore is not None
                self._insert_datasets(registry, do_import)

                with obscore.query() as result:
                    rows = list(result)
                    self.assertEqual(len(rows), count)

                # Also check `query` method with COUNT(*)
                with obscore.query([sqlalchemy.sql.func.count()]) as result:
                    scalar = result.scalar_one()
                    self.assertEqual(scalar, count)

    def test_drop_datasets(self):
        """Test for dropping datasets after obscore insert."""
        collections = None
        registry = self.make_registry(collections)
        obscore = registry.obsCoreTableManager
        assert obscore is not None
        refs = self._insert_datasets(registry)

        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 6)

        # drop single dataset
        registry.removeDatasets(ref for ref in refs if ref.run == "run1")
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 5)

        # drop whole run collection
        registry.removeCollection("run6")
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 4)

    def test_associate(self):
        """Test for associating datasets to TAGGED collection."""
        collections = ["tagged"]
        registry = self.make_registry(collections, "TAGGED")
        obscore = registry.obsCoreTableManager
        assert obscore is not None
        refs = self._insert_datasets(registry)

        # Nothing is tagged yet, so obscore table is empty.
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 0)

        # Associate run1 dataset; it shows up in obscore.
        registry.associate("tagged", (ref for ref in refs if ref.run == "run1"))
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 1)

        # Associate datasets that are not in obscore
        registry.associate("tagged", (ref for ref in refs if ref.run == "run3"))
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 2)

        # Disassociate them
        registry.disassociate("tagged", (ref for ref in refs if ref.run == "run3"))
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 1)

        # Non-associated dataset, should be OK and not throw.
        registry.disassociate("tagged", (ref for ref in refs if ref.run == "run2"))
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 1)

        registry.disassociate("tagged", (ref for ref in refs if ref.run == "run1"))
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 0)

    def test_region_type_warning(self) -> None:
        """Test that non-polygon region generates one or more warnings."""
        collections = None
        registry = self.make_registry(collections)

        # visit=9 has a Box region (see initialize_registry), which the
        # obscore manager cannot convert and warns about.
        with warnings.catch_warnings(record=True) as warning_records:
            self._insert_dataset(registry, "run2", "calexp", detector=2, visit=9)
        self.assertEqual(len(warning_records), 1)
        for record in warning_records:
            self.assertRegex(
                str(record.message),
                "Unexpected region type: .*lsst.sphgeom._sphgeom.Box.*",
            )

    def test_update_exposure_region(self) -> None:
        """Test for update_exposure_regions method."""
        registry = self.make_registry(["run1"])
        obscore = registry.obsCoreTableManager
        assert obscore is not None

        # Exposure 4 is not associated with any visit.
        for detector in (1, 2, 3, 4):
            self._insert_dataset(registry, "run1", "raw", detector=detector, exposure=4)

        # All spatial columns should be None.
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 4)
            for row in rows:
                self.assertIsNone(row.s_ra)
                self.assertIsNone(row.s_dec)
                self.assertIsNone(row.s_region)

        # Assign Region from visit 4.
        count = obscore.update_exposure_regions(
            "DummyCam", [(4, 1, self.regions[(4, 1)]), (4, 2, self.regions[(4, 2)])]
        )
        self.assertEqual(count, 2)

        # Only detectors 1 and 2 were given regions above.
        with obscore.query(["s_ra", "s_dec", "s_region", "lsst_detector"]) as result:
            rows = list(result)
            self.assertEqual(len(rows), 4)
            for row in rows:
                if row.lsst_detector in (1, 2):
                    self.assertIsNotNone(row.s_ra)
                    self.assertIsNotNone(row.s_dec)
                    self.assertIsNotNone(row.s_region)
                else:
                    self.assertIsNone(row.s_ra)
                    self.assertIsNone(row.s_dec)
                    self.assertIsNone(row.s_region)

    if TYPE_CHECKING:
        # This is a mixin class, some methods from unittest.TestCase declared
        # here to silence mypy.
        def assertEqual(self, first: Any, second: Any, msg: str | None = None) -> None:
            ...

        def assertIsNone(self, obj: Any, msg: str | None = None) -> None:
            ...

        def assertIsNotNone(self, obj: Any, msg: str | None = None) -> None:
            ...

        def assertRegex(self, text: Any, expected_regex: Any, msg: str | None = None) -> None:
            ...
class SQLiteObsCoreTest(ObsCoreTests, unittest.TestCase):
    """Unit test for obscore with SQLite backend."""

    def setUp(self):
        self.root = makeTestTempDir(TESTDIR)

    def tearDown(self):
        removeTestTempDir(self.root)

    def make_registry_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> RegistryConfig:
        # docstring inherited from a base class
        # Each registry gets its own database file under the temp root so
        # tests stay isolated from each other.
        _, filename = tempfile.mkstemp(dir=self.root, suffix=".sqlite3")
        config = RegistryConfig()
        # Fix: the connection string must interpolate the temporary file
        # name; previously it was an f-string without a placeholder, so
        # `filename` was unused and the URL pointed at a bogus literal path.
        config["db"] = f"sqlite:///{filename}"
        config["managers", "obscore"] = {
            "cls": "lsst.daf.butler.registry.obscore.ObsCoreLiveTableManager",
            "config": self.make_obscore_config(collections, collection_type),
        }
        return config
@unittest.skipUnless(testing is not None, "testing.postgresql module not found")
class PostgresObsCoreTest(ObsCoreTests, unittest.TestCase):
    """Unit test for obscore with PostgreSQL backend."""

    @classmethod
    def _handler(cls, postgresql):
        """Initialize a freshly created test database.

        Called by the PostgresqlFactory once per initialized database;
        installs the btree_gist extension required by the registry schema.
        """
        engine = sqlalchemy.engine.create_engine(postgresql.url())
        with engine.begin() as connection:
            connection.execute(sqlalchemy.text("CREATE EXTENSION btree_gist;"))

    @classmethod
    def setUpClass(cls):
        # Create the postgres test server.
        cls.postgresql = testing.postgresql.PostgresqlFactory(
            cache_initialized_db=True, on_initialized=cls._handler
        )
        super().setUpClass()

    @classmethod
    def tearDownClass(cls):
        # Clean up any lingering SQLAlchemy engines/connections
        # so they're closed before we shut down the server.
        gc.collect()
        cls.postgresql.clear_cache()
        super().tearDownClass()

    def setUp(self):
        self.root = makeTestTempDir(TESTDIR)
        self.server = self.postgresql()
        # Counter for unique per-registry namespaces (see below).
        self.count = 0

    def tearDown(self):
        removeTestTempDir(self.root)
        # Fix: stop the server started in setUp. Previously this line
        # created *another* server (`self.server = self.postgresql()`),
        # leaking a running PostgreSQL process for every test.
        self.server.stop()

    def make_registry_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> RegistryConfig:
        # docstring inherited from a base class
        self.count += 1
        config = RegistryConfig()
        config["db"] = self.server.url()
        # Use unique namespace for each instance, some tests may use sub-tests.
        config["namespace"] = f"namespace{self.count}"
        config["managers", "obscore"] = {
            "cls": "lsst.daf.butler.registry.obscore.ObsCoreLiveTableManager",
            "config": self.make_obscore_config(collections, collection_type),
        }
        return config
@unittest.skipUnless(testing is not None, "testing.postgresql module not found")
class PostgresPgSphereObsCoreTest(PostgresObsCoreTest):
    """Unit test for obscore with PostgreSQL backend and pgsphere plugin."""

    @classmethod
    def _handler(cls, postgresql):
        """Install pg_sphere on top of the base-class initialization,
        skipping these tests entirely when the extension is unavailable.
        """
        super()._handler(postgresql)
        engine = sqlalchemy.engine.create_engine(postgresql.url())
        with engine.begin() as conn:
            try:
                conn.execute(sqlalchemy.text("CREATE EXTENSION pg_sphere"))
            except sqlalchemy.exc.DatabaseError as exc:
                raise unittest.SkipTest(f"pg_sphere extension does not exist: {exc}")

    def make_obscore_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> Config:
        """Make configuration for obscore manager."""
        config = super().make_obscore_config(collections, collection_type)
        # Extend the base configuration with the pgsphere spatial plugin,
        # which maintains dedicated region/position columns.
        plugin_config = {
            "cls": "lsst.daf.butler.registry.obscore.pgsphere.PgSphereObsCorePlugin",
            "config": {
                "region_column": "pgs_region",
                "position_column": "pgs_center",
            },
        }
        config["spatial_plugins"] = {"pgsphere": plugin_config}
        return config

    def test_spatial(self):
        """Test that pgsphere plugin fills spatial columns."""
        registry = self.make_registry(None)
        obscore = registry.obsCoreTableManager
        assert obscore is not None
        self._insert_datasets(registry)

        # select everything
        with obscore.query() as result:
            self.assertEqual(len(list(result)), 6)

        db = cast(SqlRegistry, registry)._db
        assert registry.obsCoreTableManager is not None
        table = cast(ObsCoreLiveTableManager, registry.obsCoreTableManager).table

        # It's not easy to generate spatial queries in sqlalchemy, use plain
        # text queries for testing. Each entry pairs a WHERE clause with the
        # number of rows it should match.
        checks = (
            # position matching visit=1, there is a single dataset
            ("pgs_center <-> '(2d,0d)'::spoint < .1", 1),
            # position matching visit=4, there are two datasets
            ("pgs_center <-> '(272d,0d)'::spoint < .1", 2),
            # point inside the region of visit=1, a single dataset
            ("'(2d,-3d)'::spoint @ pgs_region", 1),
            # point inside the region of visit=4, two datasets
            ("'(272d,3d)'::spoint @ pgs_region", 2),
        )
        for where_clause, expected_count in checks:
            query = f"SELECT * FROM {table.key} WHERE {where_clause}"
            with db.query(sqlalchemy.text(query)) as results:
                self.assertEqual(len(list(results)), expected_count)
# Standard script entry point: run all tests in this module.
if __name__ == "__main__":
    unittest.main()