Coverage for tests/test_cameraMapper.py : 22%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# This file is part of obs_base.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22import gc
23import os
24import sqlite3
25import unittest
26import tempfile
28import numpy as np
30import lsst.utils.tests
31import lsst.geom as geom
32import lsst.afw.table as afwTable
33import lsst.daf.persistence as dafPersist
34import lsst.obs.base
35import shutil
37from lsst.obs.base.test import BaseMapper
39ROOT = os.path.abspath(os.path.dirname(__file__))
def setup_module(module):
    """Pytest per-module hook: initialize the LSST test framework."""
    lsst.utils.tests.init()
class MinMapper1(lsst.obs.base.CameraMapper):
    """Minimal test mapper driven by the MinMapper1.yaml policy under ROOT."""

    packageName = 'larry'

    def __init__(self, **kwargs):
        policyPath = os.path.join(ROOT, "MinMapper1.yaml")
        super().__init__(policy=dafPersist.Policy(policyPath),
                         repositoryDir=ROOT, **kwargs)

    def std_x(self, item, dataId):
        # Standardization hook for the "x" dataset: coerce to float.
        return float(item)

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        return "min"

    @classmethod
    def getPackageDir(cls):
        # Deliberately bogus path; the tests never read real package data.
        return "/path/to/nowhere"
class MinMapper2(lsst.obs.base.CameraMapper):
    """Test mapper driven by MinMapper2.yaml, with a sqlite registry.

    The policy file supplies calibRoot and needCalibRegistry settings.
    """

    packageName = 'moe'

    def __init__(self, **kwargs):
        policyPath = os.path.join(ROOT, "MinMapper2.yaml")
        super().__init__(policy=dafPersist.Policy(policyPath),
                         repositoryDir=ROOT,
                         registry="cfhtls.sqlite3", **kwargs)

    def _transformId(self, dataId):
        # Identity transform: data IDs are used as-is.
        return dataId

    def _extractDetectorName(self, dataId):
        # Every data ID maps to the single test detector.
        return "ccd00"

    def std_x(self, item, dataId):
        # Standardization hook for the "x" dataset: coerce to float.
        return float(item)

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        return "min"

    @classmethod
    def getPackageDir(cls):
        # Deliberately bogus path; the tests never read real package data.
        return "/path/to/nowhere"
97# does not assign packageName
class MinMapper3(lsst.obs.base.CameraMapper):
    """Mapper that deliberately omits packageName; construction must fail."""

    def __init__(self, **kwargs):
        # NOTE: kwargs are intentionally ignored and root is pinned to ROOT;
        # the class only exists to exercise the missing-packageName error.
        policyPath = os.path.join(ROOT, "MinMapper1.yaml")
        super().__init__(policy=dafPersist.Policy(policyPath),
                         repositoryDir=ROOT, root=ROOT)

    @classmethod
    def getPackageDir(cls):
        # Deliberately bogus path; the tests never read real package data.
        return "/path/to/nowhere"
def checkCompression(testCase, additionalData):
    """Check that compression settings are present.

    We check that we can access the required settings, and that
    the seed is non-zero (zero causes lsst.afw.math.Random to fail).
    """
    requiredSettings = (
        "compression.algorithm",
        "compression.columns",
        "compression.rows",
        "compression.quantizeLevel",
        "scaling.algorithm",
        "scaling.bitpix",
        "scaling.maskPlanes",
        "scaling.seed",
        "scaling.quantizeLevel",
        "scaling.quantizePad",
        "scaling.fuzz",
        "scaling.bscale",
        "scaling.bzero",
    )
    for planeName in ("image", "mask", "variance"):
        for settingName in requiredSettings:
            # getScalar raises if the entry is absent.
            additionalData.getScalar(planeName + "." + settingName)
        # A zero seed would break lsst.afw.math.Random downstream.
        testCase.assertNotEqual(additionalData.getScalar(planeName + ".scaling.seed"), 0)
class Mapper1TestCase(unittest.TestCase):
    """A test case for the mapper used by the data butler."""

    def setUp(self):
        # Fresh mapper per test; MinMapper1 reads MinMapper1.yaml under ROOT.
        self.mapper = MinMapper1(root=ROOT)

    def tearDown(self):
        del self.mapper

    def testGetDatasetTypes(self):
        """Dataset types must be the base set plus MinMapper1's additions."""
        expectedTypes = BaseMapper(ROOT).getDatasetTypes()
        # Add the expected additional types to what the base class provides
        expectedTypes.extend(["x", "x_filename",
                              "badSourceHist", "badSourceHist_filename", ])
        self.assertEqual(set(self.mapper.getDatasetTypes()), set(expectedTypes))

    def testMap(self):
        """Mapping dataset "x" for write must yield the expected location."""
        loc = self.mapper.map("x", {"sensor": "1,1"}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.geom.BoxI")
        self.assertEqual(loc.getCppType(), "BoxI")
        self.assertEqual(loc.getStorageName(), "PickleStorage")
        expectedRoot = ROOT
        expectedLocations = ["foo-1,1.pickle"]
        self.assertEqual(loc.getStorage().root, expectedRoot)
        self.assertEqual(loc.getLocations(), expectedLocations)
        # The data ID is carried along as additional data.
        self.assertEqual(loc.getAdditionalData().toString(),
                         "sensor = \"1,1\"\n")

    def testQueryMetadata(self):
        # Only one sensor value ("1,1") exists for dataset "x".
        self.assertEqual(self.mapper.queryMetadata("x", ["sensor"], None), [("1,1",)])

    def testStandardize(self):
        """std_x must coerce int, float, and str inputs to float."""
        self.assertTrue(self.mapper.canStandardize("x"))
        self.assertFalse(self.mapper.canStandardize("badSourceHist"))
        self.assertFalse(self.mapper.canStandardize("notPresent"))
        result = self.mapper.standardize("x", 3, None)
        self.assertIsInstance(result, float)
        self.assertEqual(result, 3.0)
        result = self.mapper.standardize("x", 3.14, None)
        self.assertIsInstance(result, float)
        self.assertEqual(result, 3.14)
        result = self.mapper.standardize("x", "3.14", None)
        self.assertIsInstance(result, float)
        self.assertEqual(result, 3.14)

    def testNames(self):
        # Camera name comes from the classmethod; package name from the attr.
        self.assertEqual(MinMapper1.getCameraName(), "min")
        self.assertEqual(MinMapper1.getPackageName(), "larry")
class Mapper2TestCase(unittest.TestCase):
    """A test case for the mapper used by the data butler."""

    def testGetDatasetTypes(self):
        """Dataset types must be the base set plus MinMapper2's additions."""
        mapper = MinMapper2(root=ROOT)
        expectedTypes = BaseMapper(ROOT).getDatasetTypes()
        # Add the expected additional types to what the base class provides
        expectedTypes.extend(["flat", "flat_md", "flat_filename", "flat_sub",
                              "raw", "raw_md", "raw_filename", "raw_sub",
                              "some", "some_filename", "some_md", "some_sub",
                              "someCatalog", "someCatalog_md", "someCatalog_filename",
                              "someCatalog_len", "someCatalog_schema",
                              "forced_src", "forced_src_md", "forced_src_filename",
                              "forced_src_len", "forced_src_schema",
                              "other_sub", "other_filename", "other_md", "other",
                              "someGz", "someGz_filename", "someFz", "someFz_filename", "someGz_md",
                              "someFz_sub", "someFz_md", "someGz_sub",
                              "someGz_bbox", "someFz_bbox", "some_bbox", "other_bbox",
                              ])
        self.assertEqual(set(mapper.getDatasetTypes()),
                         set(expectedTypes))

    def testMap(self):
        """Mapping "raw" for write must yield a FITS location with the data ID."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("raw", {"ccd": 13}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU")
        self.assertEqual(loc.getCppType(), "ImageU")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        self.assertEqual(loc.getLocations(), ["foo-13.fits"])
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        # The mapper must also attach FITS compression settings.
        checkCompression(self, loc.getAdditionalData())

    def testSubMap(self):
        """Mapping "raw_sub" must carry the bbox (and imageOrigin) through."""
        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("raw_sub", {"ccd": 13, "bbox": bbox}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU")
        self.assertEqual(loc.getCppType(), "ImageU")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        self.assertEqual(loc.getLocations(), ["foo-13.fits"])
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        # The bbox is decomposed into width/height and lower-left corner.
        self.assertEqual(loc.getAdditionalData().getScalar("width"), 300)
        self.assertEqual(loc.getAdditionalData().getScalar("height"), 400)
        self.assertEqual(loc.getAdditionalData().getScalar("llcX"), 200)
        self.assertEqual(loc.getAdditionalData().getScalar("llcY"), 100)
        checkCompression(self, loc.getAdditionalData())

        # Same mapping, now with an explicit imageOrigin in the data ID.
        loc = mapper.map("raw_sub", {"ccd": 13, "bbox": bbox, "imageOrigin": "PARENT"}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU")
        self.assertEqual(loc.getCppType(), "ImageU")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        self.assertEqual(loc.getLocations(), ["foo-13.fits"])
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        self.assertEqual(loc.getAdditionalData().getScalar("width"), 300)
        self.assertEqual(loc.getAdditionalData().getScalar("height"), 400)
        self.assertEqual(loc.getAdditionalData().getScalar("llcX"), 200)
        self.assertEqual(loc.getAdditionalData().getScalar("llcY"), 100)
        self.assertEqual(loc.getAdditionalData().getScalar("imageOrigin"), "PARENT")
        checkCompression(self, loc.getAdditionalData())

    def testCatalogExtras(self):
        """Round-trip a catalog and check the derived *_schema/_len/_md types."""
        butler = dafPersist.Butler(root=ROOT, mapper=MinMapper2)
        schema = afwTable.Schema()
        aa = schema.addField("a", type=np.int32, doc="a")
        bb = schema.addField("b", type=np.float64, doc="b")
        catalog = lsst.afw.table.BaseCatalog(schema)
        row = catalog.addNew()
        row.set(aa, 12345)
        row.set(bb, 1.2345)
        size = len(catalog)
        dataId = dict(visit=123, ccd=45)
        butler.put(catalog, "someCatalog", dataId)
        filename = butler.get("someCatalog_filename", dataId)[0]
        try:
            self.assertTrue(os.path.exists(filename))
            self.assertEqual(butler.get("someCatalog_schema", dataId), schema)
            self.assertEqual(butler.get("someCatalog_len", dataId), size)
            header = butler.get("someCatalog_md", dataId)
            # FITS table row count must match the catalog length.
            self.assertEqual(header.getScalar("NAXIS2"), size)
        finally:
            # Clean up the file written by put(); best effort only.
            try:
                os.remove(filename)
            except OSError as exc:
                print("Warning: could not remove file %r: %s" % (filename, exc))

    def testImage(self):
        """Read a plain image and a LOCAL-origin subimage through the butler."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("some", dict(ccd=35))
        expectedLocations = ["bar-35.fits"]
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("some", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")
        self.assertEqual(image.getFilterLabel().bandLabel, "r")

        # The *_bbox derived type must agree with the image itself.
        self.assertEqual(butler.get("some_bbox", ccd=35), image.getBBox())

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("some_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testDetector(self):
        # raw_detector must resolve through _extractDetectorName to "ccd00".
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        detector = butler.get("raw_detector", ccd=0)
        self.assertEqual(detector.getName(), "ccd00")

    def testGzImage(self):
        """Same as testImage but for the gzip-compressed dataset type."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("someGz", dict(ccd=35))
        expectedLocations = [os.path.join("gz", "bar-35.fits.gz")]
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("someGz", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")
        self.assertEqual(image.getFilterLabel().bandLabel, "r")

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("someGz_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testFzImage(self):
        """Same as testImage but for the fpack-compressed dataset type."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("someFz", dict(ccd=35))
        expectedRoot = ROOT
        expectedLocations = [os.path.join("fz", "bar-35.fits.fz")]
        self.assertEqual(loc.getStorage().root, expectedRoot)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("someFz", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")
        self.assertEqual(image.getFilterLabel().bandLabel, "r")

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("someFz_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testButlerQueryMetadata(self):
        """queryMetadata must be insensitive to keyword order and filter misses."""
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        kwargs = {"ccd": 35, "filter": "r", "visit": 787731,
                  "taiObs": "2005-04-02T09:24:49.933440000"}
        self.assertEqual(butler.queryMetadata("other", "visit", **kwargs), [787731])
        self.assertEqual(butler.queryMetadata("other", "visit",
                                              visit=kwargs["visit"], ccd=kwargs["ccd"],
                                              taiObs=kwargs["taiObs"], filter=kwargs["filter"]),
                         [787731])
        # now test we get no matches if ccd is out of range
        self.assertEqual(butler.queryMetadata("raw", "ccd", ccd=36, filter="r", visit=787731), [])

    def testQueryMetadata(self):
        # Registry holds ccds 0-35 except ccd 3.
        mapper = MinMapper2(root=ROOT)
        self.assertEqual(mapper.queryMetadata("raw", ["ccd"], None),
                         [(x,) for x in range(36) if x != 3])

    def testStandardize(self):
        mapper = MinMapper2(root=ROOT)
        self.assertEqual(mapper.canStandardize("raw"), True)
        self.assertEqual(mapper.canStandardize("notPresent"), False)

    def testCalib(self):
        """Calibration mapping must derive run ID and filter from the registry."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("flat", {"visit": 787650, "ccd": 13}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureF")
        self.assertEqual(loc.getCppType(), "ExposureF")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        expectedRoot = ROOT
        expectedLocations = ["flat-05Am03-fi.fits"]
        self.assertEqual(loc.getStorage().root, expectedRoot)
        self.assertEqual(loc.getLocations(), expectedLocations)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        self.assertEqual(loc.getAdditionalData().getScalar("visit"), 787650)
        self.assertEqual(loc.getAdditionalData().getScalar("derivedRunId"), "05Am03")
        self.assertEqual(loc.getAdditionalData().getScalar("filter"), "i")
        checkCompression(self, loc.getAdditionalData())

    def testNames(self):
        self.assertEqual(MinMapper2.getCameraName(), "min")
        self.assertEqual(MinMapper2.getPackageName(), "moe")

    # NOTE(review): marked expectedFailure upstream; the reason is not
    # recorded in this file — confirm against the package's issue tracker.
    @unittest.expectedFailure
    def testParentSearch(self):
        """parentSearch must find files locally or via the _parent link,
        preserving any FITS extension suffix (e.g. "[1]")."""
        mapper = MinMapper2(root=ROOT)
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits')))
        self.assertEqual(paths, [os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits'))])
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT,
                                                 os.path.join('testParentSearch', 'bar.fits[1]')))
        self.assertEqual(paths, [os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits[1]'))])

        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT, os.path.join('testParentSearch', 'baz.fits')))
        self.assertEqual(paths, [os.path.join(ROOT,
                                              os.path.join('testParentSearch', '_parent', 'baz.fits'))])
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT,
                                                 os.path.join('testParentSearch', 'baz.fits[1]')))
        self.assertEqual(paths, [os.path.join(ROOT,
                                              os.path.join('testParentSearch', '_parent', 'baz.fits[1]'))])

    def testSkymapLookups(self):
        """Test that metadata lookups don't try to get skymap data ID values
        from the registry.
        """
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.Butler(mapper=mapper)
        with self.assertRaises(RuntimeError) as manager:
            butler.dataRef("forced_src", visit=787650, ccd=13)
        self.assertIn("Cannot lookup skymap key 'tract'", str(manager.exception))
        # We're mostly concerned that the statements below will raise an
        # exception; if they don't, it's not likely the following tests will
        # fail.
        subset = butler.subset("forced_src", visit=787650, ccd=13, tract=0)
        self.assertEqual(len(subset), 1)
        dataRef = butler.dataRef("forced_src", visit=787650, ccd=13, tract=0)
        self.assertFalse(dataRef.datasetExists("forced_src"))
class Mapper3TestCase(unittest.TestCase):
    """A test case for a mapper subclass which does not assign packageName."""

    def testPackageName(self):
        """Construction and getPackageName must both raise ValueError."""
        for operation in (MinMapper3, MinMapper3.getPackageName):
            with self.assertRaises(ValueError):
                operation()
class ParentRegistryTestCase(unittest.TestCase):
    """Tests that a child repository without a registry inherits its parent's."""

    @staticmethod
    def _createRegistry(path):
        # Minimal registry schema matching what MinMapper1 lookups expect.
        cmd = """CREATE TABLE x(
                 id INT,
                 visit INT,
                 filter TEXT,
                 snap INT,
                 raft TEXT,
                 sensor TEXT,
                 channel TEXT,
                 taiObs TEXT,
                 expTime REAL
                 );
                 """
        conn = sqlite3.connect(path)
        conn.cursor().execute(cmd)
        conn.commit()
        conn.close()

    def setUp(self):
        # Build parent repository "a" with its own registry file; everything
        # lives in a per-test temp directory removed by tearDown.
        self.ROOT = tempfile.mkdtemp(dir=ROOT, prefix="ParentRegistryTestCase-")
        self.repoARoot = os.path.join(self.ROOT, 'a')
        args = dafPersist.RepositoryArgs(root=self.repoARoot, mapper=MinMapper1)
        butler = dafPersist.Butler(outputs=args)
        self._createRegistry(os.path.join(self.repoARoot, 'registry.sqlite3'))
        del butler

    def tearDown(self):
        # the butler sql registry closes its database connection in __del__.
        # To trigger __del__ we explicitly collect the garbage here. If we
        # find having or closing the open database connection is a problem in
        # production code, we may need to add api to butler to explicity
        # release database connections (and maybe other things like in-memory
        # cached objects).
        gc.collect()
        if os.path.exists(self.ROOT):
            shutil.rmtree(self.ROOT)

    def test(self):
        """Verify that when the child repo does not have a registry it is
        assigned the registry from the parent.
        """
        repoBRoot = os.path.join(self.ROOT, 'b')
        butler = dafPersist.Butler(inputs=self.repoARoot, outputs=repoBRoot)
        # This way of getting the registry from the mapping is obviously going
        # way into private members and the python lambda implementation code.
        # It is very brittle and should not be duplicated in user code
        # or any location that is not trivial to fix along with changes to the
        # CameraMapper or Mapping.
        registryA = butler._repos.inputs()[0].repo._mapper.registry
        registryB = butler._repos.outputs()[0].repo._mapper.registry
        # Repo "b" has no registry file, so it must share "a"'s registry object.
        self.assertEqual(id(registryA), id(registryB))

        # Once "b" has its own registry file, a fresh Butler must use it.
        self._createRegistry(os.path.join(repoBRoot, 'registry.sqlite3'))
        butler = dafPersist.Butler(inputs=self.repoARoot, outputs=repoBRoot)
        # see above; don't copy this way of getting the registry.
        registryA = butler._repos.inputs()[0].repo._mapper.registry
        registryB = butler._repos.outputs()[0].repo._mapper.registry
        self.assertNotEqual(id(registryA), id(registryB))
class MissingPolicyKeyTestCase(unittest.TestCase):
    """Tests for error reporting when a dataset type lacks a policy template.

    MinMapper1 does not define a template for the ``raw`` dataset type, so
    any attempt to use ``raw`` (or its derived ``_filename``/``_wcs`` types)
    must raise a RuntimeError with a helpful message.

    Fixes over the previous revision: ``testGetRaises`` previously ended with
    a stray, assertion-less ``queryMetadata`` assertRaises block that was a
    copy-paste leftover duplicating ``testQueryMetadataRaises``; it has been
    removed. The repeated expected message is now a single class constant.
    """

    # The message CameraMapper produces for a templateless dataset type.
    EXPECTED_MESSAGE = ('Template is not defined for the raw dataset type, '
                        'it must be set before it can be used.')

    def _makeButler(self):
        """Return a read-only butler over ROOT using the template-free MinMapper1."""
        return dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1})

    def testGetRaises(self):
        # get() on the templateless type must fail with the helpful message.
        butler = self._makeButler()
        with self.assertRaises(RuntimeError) as contextManager:
            butler.get('raw')
        self.assertEqual(str(contextManager.exception), self.EXPECTED_MESSAGE)

    def testQueryMetadataRaises(self):
        # queryMetadata() on the templateless type must fail the same way.
        butler = self._makeButler()
        with self.assertRaises(RuntimeError) as contextManager:
            butler.queryMetadata('raw', 'unused', {})
        self.assertEqual(str(contextManager.exception), self.EXPECTED_MESSAGE)

    def testFilenameRaises(self):
        # The derived <datasetType>_filename type must fail the same way.
        butler = self._makeButler()
        with self.assertRaises(RuntimeError) as contextManager:
            butler.get('raw_filename')
        self.assertEqual(str(contextManager.exception), self.EXPECTED_MESSAGE)

    def testWcsRaises(self):
        # The derived <datasetType>_wcs type must fail the same way.
        butler = self._makeButler()
        with self.assertRaises(RuntimeError) as contextManager:
            butler.get('raw_wcs')
        self.assertEqual(str(contextManager.exception), self.EXPECTED_MESSAGE)

    def testConflictRaises(self):
        # Two mappings for the same dataset type name must be rejected
        # at mapper construction time.
        policy = dafPersist.Policy(os.path.join(ROOT, "ConflictMapper.yaml"))
        with self.assertRaisesRegex(
                ValueError,
                r"Duplicate mapping policy for dataset type packages"):
            mapper = lsst.obs.base.CameraMapper(policy=policy, repositoryDir=ROOT, root=ROOT)  # noqa F841
class MemoryTester(lsst.utils.tests.MemoryTestCase):
    """Standard LSST file-level resource/leak check; no additional tests."""
    pass
# Allow running this test file directly (outside pytest).
if __name__ == '__main__':
    lsst.utils.tests.init()
    unittest.main()