# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

import gc
import os
import shutil
import sqlite3
import tempfile
import unittest

import numpy as np

import lsst.utils.tests
import lsst.geom as geom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
import lsst.daf.persistence as dafPersist
import lsst.obs.base

from lsst.obs.base.test import BaseMapper

ROOT = os.path.abspath(os.path.dirname(__file__))

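# setup_module is the module-level hook that pytest invokes once before
# running the tests in this file (assuming the usual pytest/lsst.utils.tests
# harness); the __main__ block at the bottom repeats the same initialization
# for direct execution.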

def setup_module(module):
    lsst.utils.tests.init()

class MinCam(lsst.obs.base.Instrument):
    @property
    def filterDefinitions(self):
        return lsst.obs.base.FilterDefinitionCollection(
            lsst.obs.base.FilterDefinition(physical_filter="u.MP9301", band="u", lambdaEff=374),
            lsst.obs.base.FilterDefinition(physical_filter="g.MP9401", band="g", lambdaEff=487),
            lsst.obs.base.FilterDefinition(physical_filter="r.MP9601", band="r", alias={"old-r"},
                                           lambdaEff=628),
            lsst.obs.base.FilterDefinition(physical_filter="i.MP9701", band="i", lambdaEff=778),
            lsst.obs.base.FilterDefinition(physical_filter="z.MP9801", band="z", lambdaEff=1170),
            # afw_name is so special-cased that only a real example will work
            lsst.obs.base.FilterDefinition(physical_filter="HSC-R2", band="r", afw_name="r2", lambdaEff=623),
        )

    @classmethod
    def getName(cls):
        return "min"

    def getCamera(self):
        raise NotImplementedError()

    def register(self, registry):
        raise NotImplementedError()

    def getRawFormatter(self, dataId):
        raise NotImplementedError()

    def makeDataIdTranslatorFactory(self):
        raise NotImplementedError()

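# Note on the filter definitions above (a reading aid, not additional test
# setup): physical filters "r.MP9601" and "HSC-R2" both map to band "r",
# "old-r" is an alias for "r.MP9601", and "r2" is the afw_name of "HSC-R2".
# The standardization tests in Mapper2TestCase below rely on exactly this
# mapping.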

class MinMapper1(lsst.obs.base.CameraMapper):
    packageName = 'larry'

    def __init__(self, **kwargs):
        policy = dafPersist.Policy(os.path.join(ROOT, "MinMapper1.yaml"))
        lsst.obs.base.CameraMapper.__init__(self, policy=policy, repositoryDir=ROOT, **kwargs)
        return

    def std_x(self, item, dataId):
        return float(item)

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        return "min"

    @classmethod
    def getPackageDir(cls):
        return "/path/to/nowhere"

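# A minimal sketch of how the test cases below drive these minimal mappers,
# drawn from Mapper1TestCase.testMap rather than any additional API:
#
#     mapper = MinMapper1(root=ROOT)
#     loc = mapper.map("x", {"sensor": "1,1"}, write=True)
#     loc.getPythonType()   # -> "lsst.afw.geom.BoxI"
#     loc.getLocations()    # -> ["foo-1,1.pickle"]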

class MinMapper2(lsst.obs.base.CameraMapper):
    packageName = 'moe'
    _gen3instrument = MinCam

    # CalibRoot in policy
    # needCalibRegistry
    def __init__(self, **kwargs):
        policy = dafPersist.Policy(os.path.join(ROOT, "MinMapper2.yaml"))
        lsst.obs.base.CameraMapper.__init__(self, policy=policy, repositoryDir=ROOT,
                                            registry="cfhtls.sqlite3", **kwargs)
        return

    def _transformId(self, dataId):
        return dataId

    def _extractDetectorName(self, dataId):
        return "ccd00"

    def std_x(self, item, dataId):
        return float(item)

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        return "min"

    @classmethod
    def getPackageDir(cls):
        return "/path/to/nowhere"

# does not assign packageName
class MinMapper3(lsst.obs.base.CameraMapper):

    def __init__(self, **kwargs):
        policy = dafPersist.Policy(os.path.join(ROOT, "MinMapper1.yaml"))
        lsst.obs.base.CameraMapper.__init__(self, policy=policy, repositoryDir=ROOT, root=ROOT)
        return

    @classmethod
    def getPackageDir(cls):
        return "/path/to/nowhere"

def checkCompression(testCase, additionalData):
    """Check that compression settings are present

    We check that we can access the required settings, and that
    the seed is non-zero (zero causes lsst.afw.math.Random to fail).
    """
    for plane in ("image", "mask", "variance"):
        for entry in ("compression.algorithm",
                      "compression.columns",
                      "compression.rows",
                      "compression.quantizeLevel",
                      "scaling.algorithm",
                      "scaling.bitpix",
                      "scaling.maskPlanes",
                      "scaling.seed",
                      "scaling.quantizeLevel",
                      "scaling.quantizePad",
                      "scaling.fuzz",
                      "scaling.bscale",
                      "scaling.bzero",
                      ):
            additionalData.getScalar(plane + "." + entry)
        testCase.assertNotEqual(additionalData.getScalar(plane + ".scaling.seed"), 0)

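# checkCompression is handed the additional data attached to a mapped FITS
# location; a sketch of the call pattern, as used in Mapper2TestCase.testMap
# below:
#
#     loc = mapper.map("raw", {"ccd": 13}, write=True)
#     checkCompression(self, loc.getAdditionalData())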

class Mapper1TestCase(unittest.TestCase):
    """A test case for the mapper used by the data butler."""

    def setUp(self):
        self.mapper = MinMapper1(root=ROOT)

    def tearDown(self):
        del self.mapper

    def testGetDatasetTypes(self):
        expectedTypes = BaseMapper(ROOT).getDatasetTypes()
        # Add the expected additional types to what the base class provides
        expectedTypes.extend(["x", "x_filename",
                              "badSourceHist", "badSourceHist_filename", ])
        self.assertEqual(set(self.mapper.getDatasetTypes()), set(expectedTypes))

    def testMap(self):
        loc = self.mapper.map("x", {"sensor": "1,1"}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.geom.BoxI")
        self.assertEqual(loc.getCppType(), "BoxI")
        self.assertEqual(loc.getStorageName(), "PickleStorage")
        expectedRoot = ROOT
        expectedLocations = ["foo-1,1.pickle"]
        self.assertEqual(loc.getStorage().root, expectedRoot)
        self.assertEqual(loc.getLocations(), expectedLocations)
        self.assertEqual(loc.getAdditionalData().toString(),
                         "sensor = \"1,1\"\n")

    def testQueryMetadata(self):
        self.assertEqual(self.mapper.queryMetadata("x", ["sensor"], None), [("1,1",)])

    def testStandardize(self):
        self.assertTrue(self.mapper.canStandardize("x"))
        self.assertFalse(self.mapper.canStandardize("badSourceHist"))
        self.assertFalse(self.mapper.canStandardize("notPresent"))
        result = self.mapper.standardize("x", 3, None)
        self.assertIsInstance(result, float)
        self.assertEqual(result, 3.0)
        result = self.mapper.standardize("x", 3.14, None)
        self.assertIsInstance(result, float)
        self.assertEqual(result, 3.14)
        result = self.mapper.standardize("x", "3.14", None)
        self.assertIsInstance(result, float)
        self.assertEqual(result, 3.14)

    def testNames(self):
        self.assertEqual(MinMapper1.getCameraName(), "min")
        self.assertEqual(MinMapper1.getPackageName(), "larry")

class Mapper2TestCase(unittest.TestCase):
    """A test case for the mapper used by the data butler."""

    def setUp(self):
        super().setUp()
        # Force a standard set of filters even for tests that don't use
        # MinCam directly.
        MinCam()

    def testGetDatasetTypes(self):
        mapper = MinMapper2(root=ROOT)
        expectedTypes = BaseMapper(ROOT).getDatasetTypes()
        # Add the expected additional types to what the base class provides
        expectedTypes.extend(["flat", "flat_md", "flat_filename", "flat_sub",
                              "raw", "raw_md", "raw_filename", "raw_sub",
                              "some", "some_filename", "some_md", "some_sub",
                              "someCatalog", "someCatalog_md", "someCatalog_filename",
                              "someCatalog_len", "someCatalog_schema",
                              "forced_src", "forced_src_md", "forced_src_filename",
                              "forced_src_len", "forced_src_schema",
                              "other_sub", "other_filename", "other_md", "other",
                              "someGz", "someGz_filename", "someFz", "someFz_filename", "someGz_md",
                              "someFz_sub", "someFz_md", "someGz_sub",
                              "someGz_bbox", "someFz_bbox", "some_bbox", "other_bbox",
                              ])
        self.assertEqual(set(mapper.getDatasetTypes()),
                         set(expectedTypes))

    def testMap(self):
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("raw", {"ccd": 13}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU")
        self.assertEqual(loc.getCppType(), "ImageU")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        self.assertEqual(loc.getLocations(), ["foo-13.fits"])
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        checkCompression(self, loc.getAdditionalData())

    def testSubMap(self):
        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("raw_sub", {"ccd": 13, "bbox": bbox}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU")
        self.assertEqual(loc.getCppType(), "ImageU")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        self.assertEqual(loc.getLocations(), ["foo-13.fits"])
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        self.assertEqual(loc.getAdditionalData().getScalar("width"), 300)
        self.assertEqual(loc.getAdditionalData().getScalar("height"), 400)
        self.assertEqual(loc.getAdditionalData().getScalar("llcX"), 200)
        self.assertEqual(loc.getAdditionalData().getScalar("llcY"), 100)
        checkCompression(self, loc.getAdditionalData())

        loc = mapper.map("raw_sub", {"ccd": 13, "bbox": bbox, "imageOrigin": "PARENT"}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU")
        self.assertEqual(loc.getCppType(), "ImageU")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        self.assertEqual(loc.getLocations(), ["foo-13.fits"])
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        self.assertEqual(loc.getAdditionalData().getScalar("width"), 300)
        self.assertEqual(loc.getAdditionalData().getScalar("height"), 400)
        self.assertEqual(loc.getAdditionalData().getScalar("llcX"), 200)
        self.assertEqual(loc.getAdditionalData().getScalar("llcY"), 100)
        self.assertEqual(loc.getAdditionalData().getScalar("imageOrigin"), "PARENT")
        checkCompression(self, loc.getAdditionalData())

    def testCatalogExtras(self):
        butler = dafPersist.Butler(root=ROOT, mapper=MinMapper2)
        schema = afwTable.Schema()
        aa = schema.addField("a", type=np.int32, doc="a")
        bb = schema.addField("b", type=np.float64, doc="b")
        catalog = lsst.afw.table.BaseCatalog(schema)
        row = catalog.addNew()
        row.set(aa, 12345)
        row.set(bb, 1.2345)
        size = len(catalog)
        dataId = dict(visit=123, ccd=45)
        butler.put(catalog, "someCatalog", dataId)
        filename = butler.get("someCatalog_filename", dataId)[0]
        try:
            self.assertTrue(os.path.exists(filename))
            self.assertEqual(butler.get("someCatalog_schema", dataId), schema)
            self.assertEqual(butler.get("someCatalog_len", dataId), size)
            header = butler.get("someCatalog_md", dataId)
            self.assertEqual(header.getScalar("NAXIS2"), size)
        finally:
            try:
                os.remove(filename)
            except OSError as exc:
                print("Warning: could not remove file %r: %s" % (filename, exc))

    def testImage(self):
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("some", dict(ccd=35))
        expectedLocations = ["bar-35.fits"]
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("some", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")
        self.assertEqual(image.getFilterLabel().bandLabel, "r")

        self.assertEqual(butler.get("some_bbox", ccd=35), image.getBBox())

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("some_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testDetector(self):
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        detector = butler.get("raw_detector", ccd=0)
        self.assertEqual(detector.getName(), "ccd00")

    def testGzImage(self):
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("someGz", dict(ccd=35))
        expectedLocations = [os.path.join("gz", "bar-35.fits.gz")]
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("someGz", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")
        self.assertEqual(image.getFilterLabel().bandLabel, "r")

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("someGz_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testFzImage(self):
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("someFz", dict(ccd=35))
        expectedRoot = ROOT
        expectedLocations = [os.path.join("fz", "bar-35.fits.fz")]
        self.assertEqual(loc.getStorage().root, expectedRoot)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("someFz", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")
        self.assertEqual(image.getFilterLabel().bandLabel, "r")

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("someFz_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testButlerQueryMetadata(self):
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        kwargs = {"ccd": 35, "filter": "r", "visit": 787731,
                  "taiObs": "2005-04-02T09:24:49.933440000"}
        self.assertEqual(butler.queryMetadata("other", "visit", **kwargs), [787731])
        self.assertEqual(butler.queryMetadata("other", "visit",
                                              visit=kwargs["visit"], ccd=kwargs["ccd"],
                                              taiObs=kwargs["taiObs"], filter=kwargs["filter"]),
                         [787731])
        # now test we get no matches if ccd is out of range
        self.assertEqual(butler.queryMetadata("raw", "ccd", ccd=36, filter="r", visit=787731), [])

    def testQueryMetadata(self):
        mapper = MinMapper2(root=ROOT)
        self.assertEqual(mapper.queryMetadata("raw", ["ccd"], None),
                         [(x,) for x in range(36) if x != 3])

    def testStandardize(self):
        mapper = MinMapper2(root=ROOT)
        self.assertEqual(mapper.canStandardize("raw"), True)
        self.assertEqual(mapper.canStandardize("notPresent"), False)

    def testStandardizeFiltersFilterDefs(self):
        testLabels = [
            (None, None),
            (afwImage.FilterLabel(band="r", physical="r.MP9601"),
             afwImage.FilterLabel(band="r", physical="r.MP9601")),
            (afwImage.FilterLabel(band="r"), afwImage.FilterLabel(band="r", physical="r.MP9601")),
            (afwImage.FilterLabel(physical="r.MP9601"),
             afwImage.FilterLabel(band="r", physical="r.MP9601")),
            (afwImage.FilterLabel(band="r", physical="old-r"),
             afwImage.FilterLabel(band="r", physical="r.MP9601")),
            (afwImage.FilterLabel(physical="old-r"),
             afwImage.FilterLabel(band="r", physical="r.MP9601")),
            (afwImage.FilterLabel(physical="r2"), afwImage.FilterLabel(band="r", physical="HSC-R2")),
        ]
        testIds = [{"visit": 12345, "ccd": 42, "filter": f} for f in {
            "r", "r.MP9601", "old-r", "r2",
        }]
        testData = []
        # Resolve special combinations where the expected output is different
        for input, corrected in testLabels:
            for dataId in testIds:
                if input is None:
                    if dataId["filter"] == "r":
                        data = (input, dataId, afwImage.FilterLabel(band="r"))
                    elif dataId["filter"] == "r2":
                        data = (input, dataId, afwImage.FilterLabel(band="r", physical="HSC-R2"))
                    else:
                        data = (input, dataId, afwImage.FilterLabel(band="r", physical="r.MP9601"))
                elif input == afwImage.FilterLabel(band="r"):
                    if dataId["filter"] == "r":
                        # There are two "r" filters, can't tell which
                        data = (input, dataId, input)
                    elif dataId["filter"] == "r2":
                        data = (input, dataId, afwImage.FilterLabel(band="r", physical="HSC-R2"))
                elif corrected.physicalLabel == "HSC-R2" and dataId["filter"] in ("r.MP9601", "old-r"):
                    # Contradictory inputs, leave as-is
                    data = (input, dataId, input)
                elif corrected.physicalLabel == "r.MP9601" and dataId["filter"] == "r2":
                    # Contradictory inputs, leave as-is
                    data = (input, dataId, input)
                else:
                    data = (input, dataId, corrected)
                testData.append(data)

        mapper = MinMapper2(root=ROOT)
        for label, dataId, corrected in testData:
            exposure = afwImage.ExposureF()
            exposure.setFilterLabel(label)
            mapper._setFilter(mapper.exposures['raw'], exposure, dataId)
            self.assertEqual(exposure.getFilterLabel(), corrected, msg=f"Started from {label} and {dataId}")

    def testStandardizeFiltersFilterNoDefs(self):
        testLabels = [
            None,
            afwImage.FilterLabel(band="r", physical="r.MP9601"),
            afwImage.FilterLabel(band="r"),
            afwImage.FilterLabel(physical="r.MP9601"),
            afwImage.FilterLabel(band="r", physical="old-r"),
            afwImage.FilterLabel(physical="old-r"),
            afwImage.FilterLabel(physical="r2"),
        ]
        testIds = [{"visit": 12345, "ccd": 42, "filter": f} for f in {
            "r", "r.MP9601", "old-r", "r2",
        }]
        testData = []
        # Resolve special combinations where the expected output is different
        for input in testLabels:
            for dataId in testIds:
                if input is None:
                    # Can still get some filter info out of the Filter registry
                    if dataId["filter"] == "r2":
                        data = (input, dataId,
                                afwImage.FilterLabel(band="r", physical="HSC-R2"))
                    else:
                        # Data ID maps to filter(s) with aliases; can't
                        # unambiguously determine physical filter.
                        data = (input, dataId, afwImage.FilterLabel(band="r"))
                else:
                    data = (input, dataId, input)
                testData.append(data)

        mapper = MinMapper1(root=ROOT)
        for label, dataId, corrected in testData:
            exposure = afwImage.ExposureF()
            exposure.setFilterLabel(label)
            mapper._setFilter(mapper.exposures['raw'], exposure, dataId)
            self.assertEqual(exposure.getFilterLabel(), corrected, msg=f"Started from {label} and {dataId}")

    def testCalib(self):
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("flat", {"visit": 787650, "ccd": 13}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureF")
        self.assertEqual(loc.getCppType(), "ExposureF")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        expectedRoot = ROOT
        expectedLocations = ["flat-05Am03-fi.fits"]
        self.assertEqual(loc.getStorage().root, expectedRoot)
        self.assertEqual(loc.getLocations(), expectedLocations)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        self.assertEqual(loc.getAdditionalData().getScalar("visit"), 787650)
        self.assertEqual(loc.getAdditionalData().getScalar("derivedRunId"), "05Am03")
        self.assertEqual(loc.getAdditionalData().getScalar("filter"), "i")
        checkCompression(self, loc.getAdditionalData())

    def testNames(self):
        self.assertEqual(MinMapper2.getCameraName(), "min")
        self.assertEqual(MinMapper2.getPackageName(), "moe")

    @unittest.expectedFailure
    def testParentSearch(self):
        mapper = MinMapper2(root=ROOT)
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits')))
        self.assertEqual(paths, [os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits'))])
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT,
                                                 os.path.join('testParentSearch', 'bar.fits[1]')))
        self.assertEqual(paths, [os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits[1]'))])

        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT, os.path.join('testParentSearch', 'baz.fits')))
        self.assertEqual(paths, [os.path.join(ROOT,
                                              os.path.join('testParentSearch', '_parent', 'baz.fits'))])
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT,
                                                 os.path.join('testParentSearch', 'baz.fits[1]')))
        self.assertEqual(paths, [os.path.join(ROOT,
                                              os.path.join('testParentSearch', '_parent', 'baz.fits[1]'))])

    def testSkymapLookups(self):
        """Test that metadata lookups don't try to get skymap data ID values
        from the registry.
        """
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.Butler(mapper=mapper)
        with self.assertRaises(RuntimeError) as manager:
            butler.dataRef("forced_src", visit=787650, ccd=13)
        self.assertIn("Cannot lookup skymap key 'tract'", str(manager.exception))
        # We're mostly concerned with whether the statements below raise an
        # exception; if they don't raise, the assertions that follow are
        # unlikely to fail.
        subset = butler.subset("forced_src", visit=787650, ccd=13, tract=0)
        self.assertEqual(len(subset), 1)
        dataRef = butler.dataRef("forced_src", visit=787650, ccd=13, tract=0)
        self.assertFalse(dataRef.datasetExists("forced_src"))

class Mapper3TestCase(unittest.TestCase):
    """A test case for a mapper subclass which does not assign packageName."""

    def testPackageName(self):
        with self.assertRaises(ValueError):
            MinMapper3()
        with self.assertRaises(ValueError):
            MinMapper3.getPackageName()

class ParentRegistryTestCase(unittest.TestCase):

    @staticmethod
    def _createRegistry(path):
        cmd = """CREATE TABLE x(
                 id INT,
                 visit INT,
                 filter TEXT,
                 snap INT,
                 raft TEXT,
                 sensor TEXT,
                 channel TEXT,
                 taiObs TEXT,
                 expTime REAL
        );
        """
        conn = sqlite3.connect(path)
        conn.cursor().execute(cmd)
        conn.commit()
        conn.close()

    def setUp(self):
        self.ROOT = tempfile.mkdtemp(dir=ROOT, prefix="ParentRegistryTestCase-")
        self.repoARoot = os.path.join(self.ROOT, 'a')
        args = dafPersist.RepositoryArgs(root=self.repoARoot, mapper=MinMapper1)
        butler = dafPersist.Butler(outputs=args)
        self._createRegistry(os.path.join(self.repoARoot, 'registry.sqlite3'))
        del butler

    def tearDown(self):
        # The butler SQL registry closes its database connection in __del__.
        # To trigger __del__ we explicitly collect the garbage here. If we
        # find that holding or closing the open database connection is a
        # problem in production code, we may need to add an API to butler to
        # explicitly release database connections (and maybe other things,
        # like in-memory cached objects).
        gc.collect()
        if os.path.exists(self.ROOT):
            shutil.rmtree(self.ROOT)

    def test(self):
        """Verify that when the child repo does not have a registry it is
        assigned the registry from the parent.
        """
        repoBRoot = os.path.join(self.ROOT, 'b')
        butler = dafPersist.Butler(inputs=self.repoARoot, outputs=repoBRoot)
        # This way of getting the registry from the mapping reaches deep into
        # private members and the Python implementation (the lambdas inside
        # Mapping). It is very brittle and should not be duplicated in user
        # code or in any location that is not trivial to fix along with
        # changes to the CameraMapper or Mapping.
        registryA = butler._repos.inputs()[0].repo._mapper.registry
        registryB = butler._repos.outputs()[0].repo._mapper.registry
        self.assertEqual(id(registryA), id(registryB))

        self._createRegistry(os.path.join(repoBRoot, 'registry.sqlite3'))
        butler = dafPersist.Butler(inputs=self.repoARoot, outputs=repoBRoot)
        # See above; don't copy this way of getting the registry.
        registryA = butler._repos.inputs()[0].repo._mapper.registry
        registryB = butler._repos.outputs()[0].repo._mapper.registry
        self.assertNotEqual(id(registryA), id(registryB))

class MissingPolicyKeyTestCase(unittest.TestCase):

    def testGetRaises(self):
        butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1})
        # MinMapper1 does not specify a template for the raw dataset type so
        # trying to use it for get should raise
        with self.assertRaises(RuntimeError) as contextManager:
            butler.get('raw')
        # This test demonstrates and verifies that simple use of the
        # incomplete dataset type returns a helpful (I hope) error message.
        self.assertEqual(
            str(contextManager.exception),
            'Template is not defined for the raw dataset type, '
            'it must be set before it can be used.')
        with self.assertRaises(RuntimeError) as contextManager:
            butler.queryMetadata('raw', 'unused', {})

    def testQueryMetadataRaises(self):
        butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1})
        # MinMapper1 does not specify a template for the raw dataset type so
        # trying to use it for queryMetadata should raise
        with self.assertRaises(RuntimeError) as contextManager:
            butler.queryMetadata('raw', 'unused', {})
        # This test demonstrates and verifies that simple use of the
        # incomplete dataset type returns a helpful (I hope) error message.
        self.assertEqual(
            str(contextManager.exception),
            'Template is not defined for the raw dataset type, '
            'it must be set before it can be used.')

    def testFilenameRaises(self):
        butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1})
        # MinMapper1 does not specify a template for the raw dataset type so
        # trying to use it for <datasetType>_filename should raise
        with self.assertRaises(RuntimeError) as contextManager:
            butler.get('raw_filename')
        # This test demonstrates and verifies that simple use of the
        # incomplete dataset type returns a helpful (I hope) error message.
        self.assertEqual(
            str(contextManager.exception),
            'Template is not defined for the raw dataset type, '
            'it must be set before it can be used.')

    def testWcsRaises(self):
        butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1})
        # MinMapper1 does not specify a template for the raw dataset type so
        # trying to use it for <datasetType>_wcs should raise
        with self.assertRaises(RuntimeError) as contextManager:
            butler.get('raw_wcs')
        # This test demonstrates and verifies that simple use of the
        # incomplete dataset type returns a helpful (I hope) error message.
        self.assertEqual(
            str(contextManager.exception),
            'Template is not defined for the raw dataset type, '
            'it must be set before it can be used.')

    def testConflictRaises(self):
        policy = dafPersist.Policy(os.path.join(ROOT, "ConflictMapper.yaml"))
        with self.assertRaisesRegex(
                ValueError,
                r"Duplicate mapping policy for dataset type packages"):
            mapper = lsst.obs.base.CameraMapper(policy=policy, repositoryDir=ROOT, root=ROOT)  # noqa F841

class MemoryTester(lsst.utils.tests.MemoryTestCase):
    pass


if __name__ == '__main__':
    lsst.utils.tests.init()
    unittest.main()