Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of obs_base. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22import gc 

23import os 

24import sqlite3 

25import unittest 

26import tempfile 

27 

28import numpy as np 

29 

30import lsst.utils.tests 

31import lsst.geom as geom 

32import lsst.afw.table as afwTable 

33import lsst.daf.persistence as dafPersist 

34import lsst.obs.base 

35import shutil 

36 

37from lsst.obs.base.test import BaseMapper 

38 

# Absolute path of the directory holding this test file; used as the butler
# repository root throughout these tests.
ROOT = os.path.abspath(os.path.dirname(__file__))

40 

41 

def setup_module(module):
    """Initialize the LSST test framework (invoked by the test runner once
    per module, before any tests execute)."""
    lsst.utils.tests.init()

44 

45 

class MinMapper1(lsst.obs.base.CameraMapper):
    """Minimal test mapper configured from MinMapper1.yaml.

    Declares an explicit ``packageName`` so that package-name lookups
    succeed (contrast with ``MinMapper3``).
    """

    packageName = 'larry'

    def __init__(self, **kwargs):
        mapperPolicy = dafPersist.Policy(os.path.join(ROOT, "MinMapper1.yaml"))
        super().__init__(policy=mapperPolicy, repositoryDir=ROOT, **kwargs)

    def std_x(self, item, dataId):
        # Standardization for the "x" dataset type: coerce to float.
        return float(item)

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        return "min"

    @classmethod
    def getPackageDir(cls):
        # The tests never need a real package directory.
        return "/path/to/nowhere"

65 

66 

class MinMapper2(lsst.obs.base.CameraMapper):
    """Minimal test mapper configured from MinMapper2.yaml.

    The policy supplies a calibRoot and requires a calib registry; the
    data registry is the cfhtls.sqlite3 file in the test directory.
    """

    packageName = 'moe'

    def __init__(self, **kwargs):
        mapperPolicy = dafPersist.Policy(os.path.join(ROOT, "MinMapper2.yaml"))
        super().__init__(policy=mapperPolicy, repositoryDir=ROOT,
                         registry="cfhtls.sqlite3", **kwargs)

    def _transformId(self, dataId):
        # Identity transform: data IDs are used exactly as given.
        return dataId

    def _extractDetectorName(self, dataId):
        # Every data ID resolves to the single test detector.
        return "ccd00"

    def std_x(self, item, dataId):
        # Standardization for the "x" dataset type: coerce to float.
        return float(item)

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        return "min"

    @classmethod
    def getPackageDir(cls):
        # The tests never need a real package directory.
        return "/path/to/nowhere"

95 

96 

97# does not assign packageName 

class MinMapper3(lsst.obs.base.CameraMapper):
    """Mapper that deliberately omits ``packageName``.

    Both instantiation and ``getPackageName`` are expected to raise
    ``ValueError``; see ``Mapper3TestCase``.
    """

    def __init__(self, **kwargs):
        # kwargs are accepted but intentionally ignored; the repository
        # root is pinned to ROOT for the test.
        mapperPolicy = dafPersist.Policy(os.path.join(ROOT, "MinMapper1.yaml"))
        super().__init__(policy=mapperPolicy, repositoryDir=ROOT, root=ROOT)

    @classmethod
    def getPackageDir(cls):
        # The tests never need a real package directory.
        return "/path/to/nowhere"

108 

109 

# Every compression/scaling setting that a mapped FITS location must carry,
# per image plane.
_COMPRESSION_ENTRIES = (
    "compression.algorithm",
    "compression.columns",
    "compression.rows",
    "compression.quantizeLevel",
    "scaling.algorithm",
    "scaling.bitpix",
    "scaling.maskPlanes",
    "scaling.seed",
    "scaling.quantizeLevel",
    "scaling.quantizePad",
    "scaling.fuzz",
    "scaling.bscale",
    "scaling.bzero",
)


def checkCompression(testCase, additionalData):
    """Check that compression settings are present

    We check that we can access the required settings, and that
    the seed is non-zero (zero causes lsst.afw.math.Random to fail).
    """
    for plane in ("image", "mask", "variance"):
        for entry in _COMPRESSION_ENTRIES:
            # getScalar raises if the setting is missing.
            additionalData.getScalar(".".join((plane, entry)))
        testCase.assertNotEqual(additionalData.getScalar(plane + ".scaling.seed"), 0)

133 

134 

class Mapper1TestCase(unittest.TestCase):
    """A test case for the mapper used by the data butler."""

    def setUp(self):
        self.mapper = MinMapper1(root=ROOT)

    def tearDown(self):
        del self.mapper

    def testGetDatasetTypes(self):
        # Start from what the base mapper provides, then add the dataset
        # types declared in MinMapper1.yaml.
        expected = BaseMapper(ROOT).getDatasetTypes()
        expected += ["x", "x_filename", "badSourceHist", "badSourceHist_filename"]
        self.assertEqual(set(self.mapper.getDatasetTypes()), set(expected))

    def testMap(self):
        loc = self.mapper.map("x", {"sensor": "1,1"}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.geom.BoxI")
        self.assertEqual(loc.getCppType(), "BoxI")
        self.assertEqual(loc.getStorageName(), "PickleStorage")
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getLocations(), ["foo-1,1.pickle"])
        self.assertEqual(loc.getAdditionalData().toString(), "sensor = \"1,1\"\n")

    def testQueryMetadata(self):
        self.assertEqual(self.mapper.queryMetadata("x", ["sensor"], None), [("1,1",)])

    def testStandardize(self):
        self.assertTrue(self.mapper.canStandardize("x"))
        self.assertFalse(self.mapper.canStandardize("badSourceHist"))
        self.assertFalse(self.mapper.canStandardize("notPresent"))
        # "x" standardization coerces ints, floats and numeric strings to
        # float (see MinMapper1.std_x).
        for raw in (3, 3.14, "3.14"):
            result = self.mapper.standardize("x", raw, None)
            self.assertIsInstance(result, float)
            self.assertEqual(result, float(raw))

    def testNames(self):
        self.assertEqual(MinMapper1.getCameraName(), "min")
        self.assertEqual(MinMapper1.getPackageName(), "larry")

183 

184 

class Mapper2TestCase(unittest.TestCase):
    """A test case for the mapper used by the data butler.

    Exercises MinMapper2 (and butlers built on it): dataset-type
    enumeration, mapping of raw/sub/calib datasets, catalog "extras"
    (_schema/_len/_md), compressed-image retrieval, metadata queries and
    parent/skymap lookups.
    """

    def testGetDatasetTypes(self):
        """All base dataset types plus the MinMapper2.yaml additions exist."""
        mapper = MinMapper2(root=ROOT)
        expectedTypes = BaseMapper(ROOT).getDatasetTypes()
        # Add the expected additional types to what the base class provides
        expectedTypes.extend(["flat", "flat_md", "flat_filename", "flat_sub",
                              "raw", "raw_md", "raw_filename", "raw_sub",
                              "some", "some_filename", "some_md", "some_sub",
                              "someCatalog", "someCatalog_md", "someCatalog_filename",
                              "someCatalog_len", "someCatalog_schema",
                              "forced_src", "forced_src_md", "forced_src_filename",
                              "forced_src_len", "forced_src_schema",
                              "other_sub", "other_filename", "other_md", "other",
                              "someGz", "someGz_filename", "someFz", "someFz_filename", "someGz_md",
                              "someFz_sub", "someFz_md", "someGz_sub",
                              "someGz_bbox", "someFz_bbox", "some_bbox", "other_bbox",
                              ])
        self.assertEqual(set(mapper.getDatasetTypes()),
                         set(expectedTypes))

    def testMap(self):
        """Mapping "raw" yields a FITS location with the expected metadata."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("raw", {"ccd": 13}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU")
        self.assertEqual(loc.getCppType(), "ImageU")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        self.assertEqual(loc.getLocations(), ["foo-13.fits"])
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        checkCompression(self, loc.getAdditionalData())

    def _checkSubMapLocation(self, loc, imageOrigin=None):
        """Common assertions for a "raw_sub" location mapped in testSubMap
        with ccd=13 and bbox llc=(200, 100), extent=(300, 400).

        Parameters
        ----------
        loc : butler location returned by ``mapper.map``.
        imageOrigin : `str`, optional
            Expected value of the "imageOrigin" key, if one was supplied
            in the data ID.
        """
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU")
        self.assertEqual(loc.getCppType(), "ImageU")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        self.assertEqual(loc.getLocations(), ["foo-13.fits"])
        self.assertEqual(loc.getStorage().root, ROOT)
        additionalData = loc.getAdditionalData()
        self.assertEqual(additionalData.getScalar("ccd"), 13)
        self.assertEqual(additionalData.getScalar("width"), 300)
        self.assertEqual(additionalData.getScalar("height"), 400)
        self.assertEqual(additionalData.getScalar("llcX"), 200)
        self.assertEqual(additionalData.getScalar("llcY"), 100)
        if imageOrigin is not None:
            self.assertEqual(additionalData.getScalar("imageOrigin"), imageOrigin)
        checkCompression(self, additionalData)

    def testSubMap(self):
        """Mapping "raw_sub" records the bbox (and optional imageOrigin)."""
        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("raw_sub", {"ccd": 13, "bbox": bbox}, write=True)
        self._checkSubMapLocation(loc)

        loc = mapper.map("raw_sub", {"ccd": 13, "bbox": bbox, "imageOrigin": "PARENT"}, write=True)
        self._checkSubMapLocation(loc, imageOrigin="PARENT")

    def testCatalogExtras(self):
        """The _schema, _len and _md derived types agree with a put catalog."""
        butler = dafPersist.Butler(root=ROOT, mapper=MinMapper2)
        schema = afwTable.Schema()
        aa = schema.addField("a", type=np.int32, doc="a")
        bb = schema.addField("b", type=np.float64, doc="b")
        # Use the afwTable alias for consistency with the rest of this file.
        catalog = afwTable.BaseCatalog(schema)
        row = catalog.addNew()
        row.set(aa, 12345)
        row.set(bb, 1.2345)
        size = len(catalog)
        dataId = dict(visit=123, ccd=45)
        butler.put(catalog, "someCatalog", dataId)
        filename = butler.get("someCatalog_filename", dataId)[0]
        try:
            self.assertTrue(os.path.exists(filename))
            self.assertEqual(butler.get("someCatalog_schema", dataId), schema)
            self.assertEqual(butler.get("someCatalog_len", dataId), size)
            header = butler.get("someCatalog_md", dataId)
            self.assertEqual(header.getScalar("NAXIS2"), size)
        finally:
            # Remove the file written by put() even if an assertion failed.
            try:
                os.remove(filename)
            except OSError as exc:
                print("Warning: could not remove file %r: %s" % (filename, exc))

    def testImage(self):
        """An uncompressed image can be mapped and read, whole and windowed."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("some", dict(ccd=35))
        expectedLocations = ["bar-35.fits"]
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("some", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")

        self.assertEqual(butler.get("some_bbox", ccd=35), image.getBBox())

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("some_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testDetector(self):
        """"raw_detector" resolves to the single test detector, "ccd00"."""
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        detector = butler.get("raw_detector", ccd=0)
        self.assertEqual(detector.getName(), "ccd00")

    def testGzImage(self):
        """A gzip-compressed image can be mapped and read, whole and windowed."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("someGz", dict(ccd=35))
        expectedLocations = [os.path.join("gz", "bar-35.fits.gz")]
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("someGz", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("someGz_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testFzImage(self):
        """An fz-compressed image can be mapped and read, whole and windowed."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("someFz", dict(ccd=35))
        expectedRoot = ROOT
        expectedLocations = [os.path.join("fz", "bar-35.fits.fz")]
        self.assertEqual(loc.getStorage().root, expectedRoot)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("someFz", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("someFz_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testButlerQueryMetadata(self):
        """Registry queries through the butler match on all data-ID keys."""
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        kwargs = {"ccd": 35, "filter": "r", "visit": 787731,
                  "taiObs": "2005-04-02T09:24:49.933440000"}
        self.assertEqual(butler.queryMetadata("other", "visit", **kwargs), [787731])
        self.assertEqual(butler.queryMetadata("other", "visit",
                                              visit=kwargs["visit"], ccd=kwargs["ccd"],
                                              taiObs=kwargs["taiObs"], filter=kwargs["filter"]),
                         [787731])
        # now test we get no matches if ccd is out of range
        self.assertEqual(butler.queryMetadata("raw", "ccd", ccd=36, filter="r", visit=787731), [])

    def testQueryMetadata(self):
        # ccd 3 is absent from the test registry, hence excluded here.
        mapper = MinMapper2(root=ROOT)
        self.assertEqual(mapper.queryMetadata("raw", ["ccd"], None),
                         [(x,) for x in range(36) if x != 3])

    def testStandardize(self):
        mapper = MinMapper2(root=ROOT)
        self.assertEqual(mapper.canStandardize("raw"), True)
        self.assertEqual(mapper.canStandardize("notPresent"), False)

    def testCalib(self):
        """Mapping "flat" resolves the derived run ID and filter."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("flat", {"visit": 787650, "ccd": 13}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureF")
        self.assertEqual(loc.getCppType(), "ExposureF")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        expectedRoot = ROOT
        expectedLocations = ["flat-05Am03-fi.fits"]
        self.assertEqual(loc.getStorage().root, expectedRoot)
        self.assertEqual(loc.getLocations(), expectedLocations)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        self.assertEqual(loc.getAdditionalData().getScalar("visit"), 787650)
        self.assertEqual(loc.getAdditionalData().getScalar("derivedRunId"), "05Am03")
        self.assertEqual(loc.getAdditionalData().getScalar("filter"), "i")
        checkCompression(self, loc.getAdditionalData())

    def testNames(self):
        self.assertEqual(MinMapper2.getCameraName(), "min")
        self.assertEqual(MinMapper2.getPackageName(), "moe")

    @unittest.expectedFailure
    def testParentSearch(self):
        """parentSearch follows the _parent link for files absent from the repo."""
        mapper = MinMapper2(root=ROOT)
        # Files present in the repo itself resolve in place.
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits')))
        self.assertEqual(paths, [os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits'))])
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT,
                                                 os.path.join('testParentSearch', 'bar.fits[1]')))
        self.assertEqual(paths, [os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits[1]'))])

        # Files only in the parent repo resolve through _parent.
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT, os.path.join('testParentSearch', 'baz.fits')))
        self.assertEqual(paths, [os.path.join(ROOT,
                                              os.path.join('testParentSearch', '_parent', 'baz.fits'))])
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT,
                                                 os.path.join('testParentSearch', 'baz.fits[1]')))
        self.assertEqual(paths, [os.path.join(ROOT,
                                              os.path.join('testParentSearch', '_parent', 'baz.fits[1]'))])

    def testSkymapLookups(self):
        """Test that metadata lookups don't try to get skymap data ID values
        from the registry.
        """
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.Butler(mapper=mapper)
        with self.assertRaises(RuntimeError) as manager:
            butler.dataRef("forced_src", visit=787650, ccd=13)
        self.assertIn("Cannot lookup skymap key 'tract'", str(manager.exception))
        # We're mostly concerned that the statements below will raise an
        # exception; if they don't, it's not likely the following tests will
        # fail.
        subset = butler.subset("forced_src", visit=787650, ccd=13, tract=0)
        self.assertEqual(len(subset), 1)
        dataRef = butler.dataRef("forced_src", visit=787650, ccd=13, tract=0)
        self.assertFalse(dataRef.datasetExists("forced_src"))

414 

415 

class Mapper3TestCase(unittest.TestCase):
    """A test case for a mapper subclass which does not assign packageName."""

    def testPackageName(self):
        # Without a packageName, both instantiation and the classmethod
        # lookup must fail.
        for failing in (MinMapper3, MinMapper3.getPackageName):
            with self.assertRaises(ValueError):
                failing()

424 

425 

426class ParentRegistryTestCase(unittest.TestCase): 

427 

428 @staticmethod 

429 def _createRegistry(path): 

430 cmd = """CREATE TABLE x( 

431 id INT, 

432 visit INT, 

433 filter TEXT, 

434 snap INT, 

435 raft TEXT, 

436 sensor TEXT, 

437 channel TEXT, 

438 taiObs TEXT, 

439 expTime REAL 

440 ); 

441 """ 

442 conn = sqlite3.connect(path) 

443 conn.cursor().execute(cmd) 

444 conn.commit() 

445 conn.close() 

446 

447 def setUp(self): 

448 self.ROOT = tempfile.mkdtemp(dir=ROOT, prefix="ParentRegistryTestCase-") 

449 self.repoARoot = os.path.join(self.ROOT, 'a') 

450 args = dafPersist.RepositoryArgs(root=self.repoARoot, mapper=MinMapper1) 

451 butler = dafPersist.Butler(outputs=args) 

452 self._createRegistry(os.path.join(self.repoARoot, 'registry.sqlite3')) 

453 del butler 

454 

455 def tearDown(self): 

456 # the butler sql registry closes its database connection in __del__. 

457 # To trigger __del__ we explicitly collect the garbage here. If we 

458 # find having or closing the open database connection is a problem in 

459 # production code, we may need to add api to butler to explicity 

460 # release database connections (and maybe other things like in-memory 

461 # cached objects). 

462 gc.collect() 

463 if os.path.exists(self.ROOT): 

464 shutil.rmtree(self.ROOT) 

465 

466 def test(self): 

467 """Verify that when the child repo does not have a registry it is 

468 assigned the registry from the parent. 

469 """ 

470 repoBRoot = os.path.join(self.ROOT, 'b') 

471 butler = dafPersist.Butler(inputs=self.repoARoot, outputs=repoBRoot) 

472 # This way of getting the registry from the mapping is obviously going 

473 # way into private members and the python lambda implementation code. 

474 # It is very brittle and should not be duplicated in user code 

475 # or any location that is not trivial to fix along with changes to the 

476 # CameraMapper or Mapping. 

477 registryA = butler._repos.inputs()[0].repo._mapper.registry 

478 registryB = butler._repos.outputs()[0].repo._mapper.registry 

479 self.assertEqual(id(registryA), id(registryB)) 

480 

481 self._createRegistry(os.path.join(repoBRoot, 'registry.sqlite3')) 

482 butler = dafPersist.Butler(inputs=self.repoARoot, outputs=repoBRoot) 

483 # see above; don't copy this way of getting the registry. 

484 registryA = butler._repos.inputs()[0].repo._mapper.registry 

485 registryB = butler._repos.outputs()[0].repo._mapper.registry 

486 self.assertNotEqual(id(registryA), id(registryB)) 

487 

488 

class MissingPolicyKeyTestCase(unittest.TestCase):
    """Tests for helpful errors on template-less dataset types.

    MinMapper1 does not specify a template for the raw dataset type, so
    any use of "raw" (or of derived types such as raw_filename / raw_wcs)
    must raise RuntimeError with a helpful message.  These tests
    demonstrate and verify that message.
    """

    # Message expected from every template-less access below.
    _EXPECTED_MESSAGE = ('Template is not defined for the raw dataset type, '
                         'it must be set before it can be used.')

    @staticmethod
    def _makeButler():
        # Fresh butler reading the test repository through MinMapper1.
        return dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1})

    def _assertRaisesTemplateError(self, func, *args):
        """Check that ``func(*args)`` raises RuntimeError with the expected
        missing-template message."""
        with self.assertRaises(RuntimeError) as contextManager:
            func(*args)
        self.assertEqual(str(contextManager.exception), self._EXPECTED_MESSAGE)

    def testGetRaises(self):
        # Note: a redundant, assertion-less repeat of the queryMetadata
        # check (already covered by testQueryMetadataRaises) was removed.
        butler = self._makeButler()
        self._assertRaisesTemplateError(butler.get, 'raw')

    def testQueryMetadataRaises(self):
        butler = self._makeButler()
        self._assertRaisesTemplateError(butler.queryMetadata, 'raw', 'unused', {})

    def testFilenameRaises(self):
        butler = self._makeButler()
        self._assertRaisesTemplateError(butler.get, 'raw_filename')

    def testWcsRaises(self):
        butler = self._makeButler()
        self._assertRaisesTemplateError(butler.get, 'raw_wcs')

    def testConflictRaises(self):
        """A policy with duplicate dataset-type mappings must be rejected."""
        policy = dafPersist.Policy(os.path.join(ROOT, "ConflictMapper.yaml"))
        with self.assertRaisesRegex(
                ValueError,
                r"Duplicate mapping policy for dataset type packages"):
            mapper = lsst.obs.base.CameraMapper(policy=policy, repositoryDir=ROOT, root=ROOT)  # noqa F841

551 

552 

class MemoryTester(lsst.utils.tests.MemoryTestCase):
    """Standard LSST test-suite hook; runs the checks provided by
    lsst.utils.tests.MemoryTestCase with no additions."""

555 

556 

if __name__ == '__main__':
    # Standard LSST test scaffolding: initialize the test utilities, then
    # hand control to unittest's runner.
    lsst.utils.tests.init()
    unittest.main()