Coverage for tests/test_obscore.py: 15%

305 statements

Report generated by coverage.py v6.5.0 at 2023-03-30 02:32 -0700.

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

22import gc 

23import os 

24import tempfile 

25import unittest 

26import warnings 

27from abc import abstractmethod 

28from typing import TYPE_CHECKING, Any, cast 

29 

30import astropy.time 

31import sqlalchemy 

32from lsst.daf.butler import ( 

33 CollectionType, 

34 Config, 

35 DataCoordinate, 

36 DatasetIdGenEnum, 

37 DatasetRef, 

38 DatasetType, 

39 StorageClassFactory, 

40) 

41from lsst.daf.butler.registries.sql import SqlRegistry 

42from lsst.daf.butler.registry import Registry, RegistryConfig 

43from lsst.daf.butler.registry.obscore import ( 

44 DatasetTypeConfig, 

45 ObsCoreConfig, 

46 ObsCoreLiveTableManager, 

47 ObsCoreSchema, 

48) 

49from lsst.daf.butler.registry.obscore._schema import _STATIC_COLUMNS 

50from lsst.daf.butler.tests.utils import makeTestTempDir, removeTestTempDir 

51from lsst.sphgeom import Box, ConvexPolygon, LonLat, UnitVector3d 

52 

53try: 

54 import testing.postgresql # type: ignore 

55except ImportError: 

56 testing = None 

57 

58TESTDIR = os.path.abspath(os.path.dirname(__file__)) 

59 

60 

class ObsCoreTests:
    """Base class for testing obscore manager functionality.

    This is a mixin: concrete subclasses combine it with
    ``unittest.TestCase`` and implement `make_registry_config` for a
    specific database backend (SQLite or PostgreSQL below).
    """

    # Filesystem root of the test repository; set by subclass ``setUp``.
    root: str

    def make_registry(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> Registry:
        """Create new empty Registry.

        Parameters
        ----------
        collections : `list` [ `str` ], optional
            Collection names or patterns forwarded to the obscore
            configuration.
        collection_type : `str`, optional
            Collection type name forwarded to the obscore configuration.

        Returns
        -------
        registry : `Registry`
            Registry pre-populated with the dimension records, dataset
            types, and collections used by the tests.
        """
        config = self.make_registry_config(collections, collection_type)
        registry = Registry.createFromConfig(config, butlerRoot=self.root)
        self.initialize_registry(registry)
        return registry

    @abstractmethod
    def make_registry_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> RegistryConfig:
        """Make Registry configuration.

        Subclasses provide a backend-specific database URL and enable the
        obscore manager with the configuration produced by
        `make_obscore_config`.
        """
        raise NotImplementedError()

    def initialize_registry(self, registry: Registry) -> None:
        """Populate Registry with the things that we need for tests."""

        # Single instrument with one physical filter and four detectors.
        registry.insertDimensionData("instrument", {"name": "DummyCam"})
        registry.insertDimensionData(
            "physical_filter", {"instrument": "DummyCam", "name": "d-r", "band": "r"}
        )
        for detector in (1, 2, 3, 4):
            registry.insertDimensionData(
                "detector", {"instrument": "DummyCam", "id": detector, "full_name": f"detector{detector}"}
            )

        for exposure in (1, 2, 3, 4):
            registry.insertDimensionData(
                "exposure",
                {
                    "instrument": "DummyCam",
                    "id": exposure,
                    "obs_id": f"exposure{exposure}",
                    "physical_filter": "d-r",
                },
            )

        registry.insertDimensionData("visit_system", {"instrument": "DummyCam", "id": 1, "name": "default"})

        # Visits 1-4 are "regular"; visit 9 exists to carry a non-polygon
        # region (see below).
        for visit in (1, 2, 3, 4, 9):
            visit_start = astropy.time.Time(f"2020-01-01 08:0{visit}:00", scale="tai")
            visit_end = astropy.time.Time(f"2020-01-01 08:0{visit}:45", scale="tai")
            registry.insertDimensionData(
                "visit",
                {
                    "instrument": "DummyCam",
                    "id": visit,
                    "name": f"visit{visit}",
                    "physical_filter": "d-r",
                    "visit_system": 1,
                    "datetime_begin": visit_start,
                    "datetime_end": visit_end,
                },
            )

        # Only couple of exposures are linked to visits; exposures 3 and 4
        # deliberately have no visit (exercised by
        # test_update_exposure_region).
        for visit in (1, 2):
            registry.insertDimensionData(
                "visit_definition",
                {
                    "instrument": "DummyCam",
                    "exposure": visit,
                    "visit": visit,
                },
            )

        # Map (visit, detector) to a small 2x2-degree polygonal region;
        # kept in self.regions so tests can reuse the exact same regions.
        self.regions: dict[tuple[int, int], ConvexPolygon] = {}
        for visit in (1, 2, 3, 4):
            for detector in (1, 2, 3, 4):
                lon = visit * 90 - 88
                lat = detector * 2 - 5
                region = ConvexPolygon(
                    [
                        UnitVector3d(LonLat.fromDegrees(lon - 1.0, lat - 1.0)),
                        UnitVector3d(LonLat.fromDegrees(lon + 1.0, lat - 1.0)),
                        UnitVector3d(LonLat.fromDegrees(lon + 1.0, lat + 1.0)),
                        UnitVector3d(LonLat.fromDegrees(lon - 1.0, lat + 1.0)),
                    ]
                )
                registry.insertDimensionData(
                    "visit_detector_region",
                    {
                        "instrument": "DummyCam",
                        "visit": visit,
                        "detector": detector,
                        "region": region,
                    },
                )
                self.regions[(visit, detector)] = region

        # Visit 9 has non-polygon region (a Box) — used to check that the
        # obscore manager warns about unexpected region types.
        for detector in (1, 2, 3, 4):
            lat = detector * 2 - 5
            region = Box.fromDegrees(17.0, lat - 1.0, 19.0, lat + 1.0)
            registry.insertDimensionData(
                "visit_detector_region",
                {
                    "instrument": "DummyCam",
                    "visit": 9,
                    "detector": detector,
                    "region": region,
                },
            )

        # Add a few dataset types; "no_obscore" is intentionally absent
        # from the obscore configuration.
        storage_class_factory = StorageClassFactory()
        storage_class = storage_class_factory.getStorageClass("StructuredDataDict")

        self.dataset_types: dict[str, DatasetType] = {}

        dimensions = registry.dimensions.extract(["instrument", "physical_filter", "detector", "exposure"])
        self.dataset_types["raw"] = DatasetType("raw", dimensions, storage_class)

        dimensions = registry.dimensions.extract(["instrument", "physical_filter", "detector", "visit"])
        self.dataset_types["calexp"] = DatasetType("calexp", dimensions, storage_class)

        dimensions = registry.dimensions.extract(["instrument", "physical_filter", "detector", "visit"])
        self.dataset_types["no_obscore"] = DatasetType("no_obscore", dimensions, storage_class)

        dimensions = registry.dimensions.extract(["instrument", "physical_filter", "detector"])
        self.dataset_types["calib"] = DatasetType("calib", dimensions, storage_class, isCalibration=True)

        for dataset_type in self.dataset_types.values():
            registry.registerDatasetType(dataset_type)

        # Add a few run collections.
        for run in (1, 2, 3, 4, 5, 6):
            registry.registerRun(f"run{run}")

        # Add a few chained collections; run6 is not in any chained
        # collection.
        registry.registerCollection("chain12", CollectionType.CHAINED)
        registry.setCollectionChain("chain12", ("run1", "run2"))
        registry.registerCollection("chain34", CollectionType.CHAINED)
        registry.setCollectionChain("chain34", ("run3", "run4"))
        registry.registerCollection("chain-all", CollectionType.CHAINED)
        registry.setCollectionChain("chain-all", ("chain12", "chain34", "run5"))

        # And a tagged collection (used by TAGGED-type obscore configs).
        registry.registerCollection("tagged", CollectionType.TAGGED)

    def make_obscore_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> Config:
        """Make configuration for obscore manager.

        Starts from the packaged ``config/basic/obscore.yaml`` and
        overrides ``collections``/``collection_type`` when given.
        """
        obscore_config = Config(os.path.join(TESTDIR, "config", "basic", "obscore.yaml"))
        if collections is not None:
            obscore_config["collections"] = collections
        if collection_type is not None:
            obscore_config["collection_type"] = collection_type
        return obscore_config

    def _insert_dataset(
        self, registry: Registry, run: str, dataset_type: str, do_import: bool = False, **kwargs
    ) -> DatasetRef:
        """Insert or import one dataset into a specified run collection.

        ``kwargs`` supply the variable part of the data ID (detector plus
        either exposure or visit); instrument and physical_filter are
        fixed.  When ``do_import`` is true the dataset is added via the
        import code path (``_importDatasets``) instead of
        ``insertDatasets`` so both registry entry points are exercised.
        """
        data_id = {"instrument": "DummyCam", "physical_filter": "d-r"}
        data_id.update(kwargs)
        coordinate = DataCoordinate.standardize(data_id, universe=registry.dimensions)
        if do_import:
            ds_type = self.dataset_types[dataset_type]
            dataset_id = registry.datasetIdFactory.makeDatasetId(
                run, ds_type, coordinate, DatasetIdGenEnum.UNIQUE
            )
            ref = DatasetRef(ds_type, coordinate, id=dataset_id, run=run)
            [ref] = registry._importDatasets([ref])
        else:
            [ref] = registry.insertDatasets(dataset_type, [data_id], run=run)
        return ref

    def _insert_datasets(self, registry: Registry, do_import: bool = False) -> list[DatasetRef]:
        """Insert a small bunch of datasets into every run collection."""
        return [
            self._insert_dataset(registry, "run1", "raw", detector=1, exposure=1, do_import=do_import),
            self._insert_dataset(registry, "run2", "calexp", detector=2, visit=2, do_import=do_import),
            self._insert_dataset(registry, "run3", "raw", detector=3, exposure=3, do_import=do_import),
            self._insert_dataset(registry, "run4", "calexp", detector=4, visit=4, do_import=do_import),
            self._insert_dataset(registry, "run5", "calexp", detector=4, visit=4, do_import=do_import),
            # This dataset type is not configured, will not be in obscore.
            self._insert_dataset(registry, "run5", "no_obscore", detector=1, visit=1, do_import=do_import),
            self._insert_dataset(registry, "run6", "raw", detector=1, exposure=4, do_import=do_import),
        ]

    def test_config_errors(self):
        """Test for handling various configuration problems."""

        # TAGGED collection type requires exactly one collection name.
        # This raises pydantic ValidationError, which wraps ValueError.
        exception_re = "'collections' must have one element"
        with self.assertRaisesRegex(ValueError, exception_re):
            self.make_registry(None, "TAGGED")

        with self.assertRaisesRegex(ValueError, exception_re):
            self.make_registry([], "TAGGED")

        with self.assertRaisesRegex(ValueError, exception_re):
            self.make_registry(["run1", "run2"], "TAGGED")

        # Invalid regex.
        with self.assertRaisesRegex(ValueError, "Failed to compile regex"):
            self.make_registry(["+run"], "RUN")

    def test_schema(self):
        """Check how obscore schema is constructed"""

        # Minimal config: only the static columns should be present.
        config = ObsCoreConfig(obs_collection="", dataset_types=[], facility_name="FACILITY")
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(list(table_spec.fields.names), [col.name for col in _STATIC_COLUMNS])

        # extra columns from top-level config; column type is inferred
        # from the value (int -> BigInteger, str -> String) unless given
        # explicitly via the {"template": ..., "type": ...} form.
        config = ObsCoreConfig(
            obs_collection="",
            extra_columns={"c1": 1, "c2": "string", "c3": {"template": "{calib_level}", "type": "float"}},
            dataset_types=[],
            facility_name="FACILITY",
        )
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(
            list(table_spec.fields.names),
            [col.name for col in _STATIC_COLUMNS] + ["c1", "c2", "c3"],
        )
        self.assertEqual(table_spec.fields["c1"].dtype, sqlalchemy.BigInteger)
        self.assertEqual(table_spec.fields["c2"].dtype, sqlalchemy.String)
        self.assertEqual(table_spec.fields["c3"].dtype, sqlalchemy.Float)

        # extra columns from per-dataset type configs
        config = ObsCoreConfig(
            obs_collection="",
            extra_columns={"c1": 1},
            dataset_types={
                "raw": DatasetTypeConfig(
                    name="raw",
                    dataproduct_type="image",
                    calib_level=1,
                    extra_columns={"c2": "string"},
                ),
                "calexp": DatasetTypeConfig(
                    dataproduct_type="image",
                    calib_level=2,
                    extra_columns={"c3": 1e10},
                ),
            },
            facility_name="FACILITY",
        )
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(
            list(table_spec.fields.names),
            [col.name for col in _STATIC_COLUMNS] + ["c1", "c2", "c3"],
        )
        self.assertEqual(table_spec.fields["c1"].dtype, sqlalchemy.BigInteger)
        self.assertEqual(table_spec.fields["c2"].dtype, sqlalchemy.String)
        self.assertEqual(table_spec.fields["c3"].dtype, sqlalchemy.Float)

        # Columns with the same names as in static list in configs, types
        # are not overriden.
        config = ObsCoreConfig(
            version=0,
            obs_collection="",
            extra_columns={"t_xel": 1e10},
            dataset_types={
                "raw": DatasetTypeConfig(
                    dataproduct_type="image",
                    calib_level=1,
                    extra_columns={"target_name": 1},
                ),
                "calexp": DatasetTypeConfig(
                    dataproduct_type="image",
                    calib_level=2,
                    extra_columns={"em_xel": "string"},
                ),
            },
            facility_name="FACILITY",
        )
        schema = ObsCoreSchema(config, [])
        table_spec = schema.table_spec
        self.assertEqual(list(table_spec.fields.names), [col.name for col in _STATIC_COLUMNS])
        self.assertEqual(table_spec.fields["t_xel"].dtype, sqlalchemy.Integer)
        self.assertEqual(table_spec.fields["target_name"].dtype, sqlalchemy.String)
        self.assertEqual(table_spec.fields["em_xel"].dtype, sqlalchemy.Integer)

    def test_insert_existing_collection(self):
        """Test insert and import registry methods, with various restrictions
        on collection names.
        """

        # First item is collections, second item is expected record count.
        # Collection entries may be regular expressions; see
        # _insert_datasets for which runs receive obscore-eligible data.
        test_data = (
            (None, 6),
            (["run1", "run2"], 2),
            (["run[34]"], 2),
            (["[rR]un[^6]"], 5),
        )

        for collections, count in test_data:
            # Exercise both the insert and the import code paths.
            for do_import in (False, True):
                registry = self.make_registry(collections)
                obscore = registry.obsCoreTableManager
                assert obscore is not None
                self._insert_datasets(registry, do_import)

                with obscore.query() as result:
                    rows = list(result)
                    self.assertEqual(len(rows), count)

    def test_drop_datasets(self):
        """Test for dropping datasets after obscore insert."""

        collections = None
        registry = self.make_registry(collections)
        obscore = registry.obsCoreTableManager
        assert obscore is not None
        refs = self._insert_datasets(registry)

        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 6)

        # drop single dataset
        registry.removeDatasets(ref for ref in refs if ref.run == "run1")
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 5)

        # drop whole run collection
        registry.removeCollection("run6")
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 4)

    def test_associate(self):
        """Test for associating datasets to TAGGED collection."""

        collections = ["tagged"]
        registry = self.make_registry(collections, "TAGGED")
        obscore = registry.obsCoreTableManager
        assert obscore is not None
        refs = self._insert_datasets(registry)

        # Nothing is in the tagged collection yet, so obscore is empty.
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 0)

        # Associate a dataset into the tagged collection; it should now
        # appear in obscore.
        registry.associate("tagged", (ref for ref in refs if ref.run == "run1"))
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 1)

        # Associate datasets that are not in obscore
        registry.associate("tagged", (ref for ref in refs if ref.run == "run3"))
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 2)

        # Disassociate them
        registry.disassociate("tagged", (ref for ref in refs if ref.run == "run3"))
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 1)

        # Non-associated dataset, should be OK and not throw.
        registry.disassociate("tagged", (ref for ref in refs if ref.run == "run2"))
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 1)

        registry.disassociate("tagged", (ref for ref in refs if ref.run == "run1"))
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 0)

    def test_region_type_warning(self) -> None:
        """Test that non-polygon region generates one or more warnings."""

        collections = None
        registry = self.make_registry(collections)

        # Visit 9 has a Box region (see initialize_registry), which the
        # obscore manager cannot convert, so it should warn.
        with warnings.catch_warnings(record=True) as warning_records:
            self._insert_dataset(registry, "run2", "calexp", detector=2, visit=9)
        self.assertEqual(len(warning_records), 1)
        for record in warning_records:
            self.assertRegex(
                str(record.message),
                "Unexpected region type: .*lsst.sphgeom._sphgeom.Box.*",
            )

    def test_update_exposure_region(self) -> None:
        """Test for update_exposure_regions method."""

        registry = self.make_registry(["run1"])
        obscore = registry.obsCoreTableManager
        assert obscore is not None

        # Exposure 4 is not associated with any visit.
        for detector in (1, 2, 3, 4):
            self._insert_dataset(registry, "run1", "raw", detector=detector, exposure=4)

        # All spatial columns should be None.
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 4)
            for row in rows:
                self.assertIsNone(row.s_ra)
                self.assertIsNone(row.s_dec)
                self.assertIsNone(row.s_region)

        # Assign Region from visit 4, but only for detectors 1 and 2.
        count = obscore.update_exposure_regions(
            "DummyCam", [(4, 1, self.regions[(4, 1)]), (4, 2, self.regions[(4, 2)])]
        )
        self.assertEqual(count, 2)

        # Only the rows for detectors 1 and 2 gain spatial values.
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 4)
            for row in rows:
                if row.lsst_detector in (1, 2):
                    self.assertIsNotNone(row.s_ra)
                    self.assertIsNotNone(row.s_dec)
                    self.assertIsNotNone(row.s_region)
                else:
                    self.assertIsNone(row.s_ra)
                    self.assertIsNone(row.s_dec)
                    self.assertIsNone(row.s_region)

    if TYPE_CHECKING:
        # This is a mixin class, some methods from unittest.TestCase declared
        # here to silence mypy.
        def assertEqual(self, first: Any, second: Any, msg: str | None = None) -> None:
            ...

        def assertIsNone(self, obj: Any, msg: str | None = None) -> None:
            ...

        def assertIsNotNone(self, obj: Any, msg: str | None = None) -> None:
            ...

        def assertRegex(self, text: Any, expected_regex: Any, msg: str | None = None) -> None:
            ...

510 

class SQLiteObsCoreTest(ObsCoreTests, unittest.TestCase):
    """Unit test for obscore with SQLite backend."""

    def setUp(self):
        self.root = makeTestTempDir(TESTDIR)

    def tearDown(self):
        removeTestTempDir(self.root)

    def make_registry_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> RegistryConfig:
        # docstring inherited from a base class
        _, filename = tempfile.mkstemp(dir=self.root, suffix=".sqlite3")
        config = RegistryConfig()
        # Bug fix: the "db" URL was an f-string with no placeholder
        # (f"sqlite:///(unknown)"), leaving the temporary file created
        # above unused.  Point SQLite at that per-test file instead.
        config["db"] = f"sqlite:///{filename}"
        config["managers", "obscore"] = {
            "cls": "lsst.daf.butler.registry.obscore.ObsCoreLiveTableManager",
            "config": self.make_obscore_config(collections, collection_type),
        }
        return config

532 

533 

@unittest.skipUnless(testing is not None, "testing.postgresql module not found")
class PostgresObsCoreTest(ObsCoreTests, unittest.TestCase):
    """Unit test for obscore with PostgreSQL backend."""

    @classmethod
    def _handler(cls, postgresql):
        """Initialize a freshly-created test database.

        Called by the factory once per cached template database; installs
        the ``btree_gist`` extension required by the registry schema.
        """
        engine = sqlalchemy.engine.create_engine(postgresql.url())
        with engine.begin() as connection:
            connection.execute(sqlalchemy.text("CREATE EXTENSION btree_gist;"))

    @classmethod
    def setUpClass(cls):
        # Create the postgres test server factory; the initialized
        # database is cached so each test's server starts quickly.
        cls.postgresql = testing.postgresql.PostgresqlFactory(
            cache_initialized_db=True, on_initialized=cls._handler
        )
        super().setUpClass()

    @classmethod
    def tearDownClass(cls):
        # Clean up any lingering SQLAlchemy engines/connections
        # so they're closed before we shut down the server.
        gc.collect()
        cls.postgresql.clear_cache()
        super().tearDownClass()

    def setUp(self):
        self.root = makeTestTempDir(TESTDIR)
        self.server = self.postgresql()
        # Counter used to generate a unique schema namespace per registry.
        self.count = 0

    def tearDown(self):
        removeTestTempDir(self.root)
        # Bug fix: this previously re-ran ``self.server = self.postgresql()``,
        # starting a *new* server and leaking the one from setUp.  Stop the
        # running server instead.
        self.server.stop()

    def make_registry_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> RegistryConfig:
        # docstring inherited from a base class
        self.count += 1
        config = RegistryConfig()
        config["db"] = self.server.url()
        # Use unique namespace for each instance, some tests may use sub-tests.
        config["namespace"] = f"namespace{self.count}"
        config["managers", "obscore"] = {
            "cls": "lsst.daf.butler.registry.obscore.ObsCoreLiveTableManager",
            "config": self.make_obscore_config(collections, collection_type),
        }
        return config

583 

584 

@unittest.skipUnless(testing is not None, "testing.postgresql module not found")
class PostgresPgSphereObsCoreTest(PostgresObsCoreTest):
    """Unit test for obscore with PostgreSQL backend and pgsphere plugin."""

    @classmethod
    def _handler(cls, postgresql):
        # Install btree_gist via the base class, then pg_sphere; skip the
        # whole test case when the pg_sphere extension is not installed on
        # this machine.
        super()._handler(postgresql)
        engine = sqlalchemy.engine.create_engine(postgresql.url())
        with engine.begin() as connection:
            try:
                connection.execute(sqlalchemy.text("CREATE EXTENSION pg_sphere"))
            except sqlalchemy.exc.DatabaseError as exc:
                raise unittest.SkipTest(f"pg_sphere extension does not exist: {exc}")

    def make_obscore_config(
        self, collections: list[str] | None = None, collection_type: str | None = None
    ) -> Config:
        """Make configuration for obscore manager.

        Extends the base configuration with the pgsphere spatial plugin,
        which adds the ``pgs_region``/``pgs_center`` columns used below.
        """
        obscore_config = super().make_obscore_config(collections, collection_type)
        obscore_config["spatial_plugins"] = {
            "pgsphere": {
                "cls": "lsst.daf.butler.registry.obscore.pgsphere.PgSphereObsCorePlugin",
                "config": {
                    "region_column": "pgs_region",
                    "position_column": "pgs_center",
                },
            }
        }
        return obscore_config

    def test_spatial(self):
        """Test that pgsphere plugin fills spatial columns."""

        collections = None
        registry = self.make_registry(collections)
        obscore = registry.obsCoreTableManager
        assert obscore is not None
        self._insert_datasets(registry)

        # select everything
        with obscore.query() as result:
            rows = list(result)
            self.assertEqual(len(rows), 6)

        db = cast(SqlRegistry, registry)._db
        assert registry.obsCoreTableManager is not None
        table = cast(ObsCoreLiveTableManager, registry.obsCoreTableManager).table

        # It's not easy to generate spatial queries in sqlalchemy, use plain
        # text queries for testing.  Visit centers are at lon = visit*90-88
        # degrees (see initialize_registry), hence the 2d/272d probes.

        # position matching visit=1, there is a single dataset
        query = f"SELECT * FROM {table.key} WHERE pgs_center <-> '(2d,0d)'::spoint < .1"
        with db.query(sqlalchemy.text(query)) as results:
            self.assertEqual(len(list(results)), 1)

        # position matching visit=4, there are two datasets
        query = f"SELECT * FROM {table.key} WHERE pgs_center <-> '(272d,0d)'::spoint < .1"
        with db.query(sqlalchemy.text(query)) as results:
            self.assertEqual(len(list(results)), 2)

        # position matching visit=1, there is a single dataset
        query = f"SELECT * FROM {table.key} WHERE '(2d,-3d)'::spoint @ pgs_region"
        with db.query(sqlalchemy.text(query)) as results:
            self.assertEqual(len(list(results)), 1)

        # position matching visit=4, there are two datasets
        query = f"SELECT * FROM {table.key} WHERE '(272d,3d)'::spoint @ pgs_region"
        with db.query(sqlalchemy.text(query)) as results:
            self.assertEqual(len(list(results)), 2)

655 

656 

# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()