# Coverage-report header (tests/test_cameraMapper.py : 22% covered)
#
# Hot-keys on that report page:
#   r, m, x, p : toggle line displays
#   j, k       : next / previous highlighted chunk
#   0 (zero)   : top of page
#   1 (one)    : first highlighted chunk
1# This file is part of obs_base.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22import gc
23import os
24import sqlite3
25import unittest
26import tempfile
28import numpy as np
30import lsst.utils.tests
31import lsst.geom as geom
32import lsst.afw.table as afwTable
33import lsst.daf.persistence as dafPersist
34import lsst.obs.base
35import shutil
37from lsst.obs.base.test import BaseMapper
# Absolute path to the directory holding this test file and its fixture data
# (policy YAML files, registry databases, FITS files).
ROOT = os.path.abspath(os.path.dirname(__file__))
def setup_module(module):
    """Initialize the LSST test machinery once for this module.

    Called automatically by the test runner (pytest ``setup_module`` hook).
    """
    lsst.utils.tests.init()
class MinMapper1(lsst.obs.base.CameraMapper):
    """Minimal mapper fixture driven by the MinMapper1.yaml policy file."""

    packageName = 'larry'

    def __init__(self, **kwargs):
        policyFile = os.path.join(ROOT, "MinMapper1.yaml")
        super().__init__(policy=dafPersist.Policy(policyFile),
                         repositoryDir=ROOT, **kwargs)

    def std_x(self, item, dataId):
        # Standardization hook for the "x" dataset: coerce the raw item
        # to a plain float.
        return float(item)

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        return "min"

    @classmethod
    def getPackageDir(cls):
        # A real package directory is never consulted by these tests.
        return "/path/to/nowhere"
class MinMapper2(lsst.obs.base.CameraMapper):
    """Mapper fixture exercising a calibration root and a SQLite registry.

    The MinMapper2.yaml policy sets CalibRoot and needCalibRegistry.
    """

    packageName = 'moe'

    def __init__(self, **kwargs):
        policyFile = os.path.join(ROOT, "MinMapper2.yaml")
        super().__init__(policy=dafPersist.Policy(policyFile),
                         repositoryDir=ROOT,
                         registry="cfhtls.sqlite3", **kwargs)

    def _transformId(self, dataId):
        # Identity transform: data IDs are used as-is.
        return dataId

    def _extractDetectorName(self, dataId):
        # Every data ID maps to the single test detector.
        return "ccd00"

    def std_x(self, item, dataId):
        # Standardization hook for the "x" dataset: coerce to float.
        return float(item)

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        return "min"

    @classmethod
    def getPackageDir(cls):
        # A real package directory is never consulted by these tests.
        return "/path/to/nowhere"
# does not assign packageName
class MinMapper3(lsst.obs.base.CameraMapper):
    """Mapper that deliberately omits ``packageName``.

    Used by Mapper3TestCase to check that construction and
    ``getPackageName()`` both raise ``ValueError`` when the attribute
    was never assigned.
    """

    def __init__(self, **kwargs):
        policy = dafPersist.Policy(os.path.join(ROOT, "MinMapper1.yaml"))
        # Forward **kwargs (previously accepted but silently ignored) so the
        # signature behaves consistently with MinMapper1/MinMapper2; the
        # no-argument call used by the tests is unaffected.
        lsst.obs.base.CameraMapper.__init__(self, policy=policy, repositoryDir=ROOT,
                                            root=ROOT, **kwargs)

    @classmethod
    def getPackageDir(cls):
        # A real package directory is never consulted by these tests.
        return "/path/to/nowhere"
def checkCompression(testCase, additionalData):
    """Check that compression settings are present.

    We check that we can access the required settings, and that
    the seed is non-zero (zero causes lsst.afw.math.Random to fail).
    """
    requiredSettings = (
        "compression.algorithm",
        "compression.columns",
        "compression.rows",
        "compression.quantizeLevel",
        "scaling.algorithm",
        "scaling.bitpix",
        "scaling.maskPlanes",
        "scaling.seed",
        "scaling.quantizeLevel",
        "scaling.quantizePad",
        "scaling.fuzz",
        "scaling.bscale",
        "scaling.bzero",
    )
    for plane in ("image", "mask", "variance"):
        # getScalar raises if a required setting is absent.
        for setting in requiredSettings:
            additionalData.getScalar(".".join((plane, setting)))
        testCase.assertNotEqual(additionalData.getScalar(".".join((plane, "scaling.seed"))), 0)
class Mapper1TestCase(unittest.TestCase):
    """A test case for the mapper used by the data butler."""

    def setUp(self):
        # Fresh mapper per test, rooted at the fixture directory.
        self.mapper = MinMapper1(root=ROOT)

    def tearDown(self):
        del self.mapper

    def testGetDatasetTypes(self):
        """Dataset types are the base mapper's plus those in MinMapper1.yaml."""
        expectedTypes = BaseMapper(ROOT).getDatasetTypes()
        # Add the expected additional types to what the base class provides
        expectedTypes.extend(["x", "x_filename",
                              "badSourceHist", "badSourceHist_filename", ])
        self.assertEqual(set(self.mapper.getDatasetTypes()), set(expectedTypes))

    def testMap(self):
        """Mapping "x" yields a pickle-backed location under the repo root."""
        loc = self.mapper.map("x", {"sensor": "1,1"}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.geom.BoxI")
        self.assertEqual(loc.getCppType(), "BoxI")
        self.assertEqual(loc.getStorageName(), "PickleStorage")
        expectedRoot = ROOT
        expectedLocations = ["foo-1,1.pickle"]
        self.assertEqual(loc.getStorage().root, expectedRoot)
        self.assertEqual(loc.getLocations(), expectedLocations)
        # The data ID is carried along as additional data.
        self.assertEqual(loc.getAdditionalData().toString(),
                         "sensor = \"1,1\"\n")

    def testQueryMetadata(self):
        """The registry-free lookup returns the single known sensor."""
        self.assertEqual(self.mapper.queryMetadata("x", ["sensor"], None), [("1,1",)])

    def testStandardize(self):
        """std_x coerces int/float/str to float; other types cannot standardize."""
        self.assertTrue(self.mapper.canStandardize("x"))
        self.assertFalse(self.mapper.canStandardize("badSourceHist"))
        self.assertFalse(self.mapper.canStandardize("notPresent"))
        result = self.mapper.standardize("x", 3, None)
        self.assertIsInstance(result, float)
        self.assertEqual(result, 3.0)
        result = self.mapper.standardize("x", 3.14, None)
        self.assertIsInstance(result, float)
        self.assertEqual(result, 3.14)
        result = self.mapper.standardize("x", "3.14", None)
        self.assertIsInstance(result, float)
        self.assertEqual(result, 3.14)

    def testNames(self):
        """Camera and package names come from the class attributes."""
        self.assertEqual(MinMapper1.getCameraName(), "min")
        self.assertEqual(MinMapper1.getPackageName(), "larry")
class Mapper2TestCase(unittest.TestCase):
    """A test case for the mapper used by the data butler."""

    def testGetDatasetTypes(self):
        """Dataset types are the base mapper's plus those in MinMapper2.yaml."""
        mapper = MinMapper2(root=ROOT)
        expectedTypes = BaseMapper(ROOT).getDatasetTypes()
        # Add the expected additional types to what the base class provides
        expectedTypes.extend(["flat", "flat_md", "flat_filename", "flat_sub",
                              "raw", "raw_md", "raw_filename", "raw_sub",
                              "some", "some_filename", "some_md", "some_sub",
                              "someCatalog", "someCatalog_md", "someCatalog_filename",
                              "someCatalog_len", "someCatalog_schema",
                              "forced_src", "forced_src_md", "forced_src_filename",
                              "forced_src_len", "forced_src_schema",
                              "other_sub", "other_filename", "other_md", "other",
                              "someGz", "someGz_filename", "someFz", "someFz_filename", "someGz_md",
                              "someFz_sub", "someFz_md", "someGz_sub",
                              "someGz_bbox", "someFz_bbox", "some_bbox", "other_bbox",
                              ])
        self.assertEqual(set(mapper.getDatasetTypes()),
                         set(expectedTypes))

    def testMap(self):
        """Mapping "raw" yields a FITS-backed location with compression settings."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("raw", {"ccd": 13}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU")
        self.assertEqual(loc.getCppType(), "ImageU")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        self.assertEqual(loc.getLocations(), ["foo-13.fits"])
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        checkCompression(self, loc.getAdditionalData())

    def testSubMap(self):
        """Mapping a _sub dataset records the bounding box in additional data."""
        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("raw_sub", {"ccd": 13, "bbox": bbox}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU")
        self.assertEqual(loc.getCppType(), "ImageU")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        self.assertEqual(loc.getLocations(), ["foo-13.fits"])
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        # Bounding box decomposed into width/height and lower-left corner.
        self.assertEqual(loc.getAdditionalData().getScalar("width"), 300)
        self.assertEqual(loc.getAdditionalData().getScalar("height"), 400)
        self.assertEqual(loc.getAdditionalData().getScalar("llcX"), 200)
        self.assertEqual(loc.getAdditionalData().getScalar("llcY"), 100)
        checkCompression(self, loc.getAdditionalData())

        # Same mapping, with an explicit image origin.
        loc = mapper.map("raw_sub", {"ccd": 13, "bbox": bbox, "imageOrigin": "PARENT"}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU")
        self.assertEqual(loc.getCppType(), "ImageU")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        self.assertEqual(loc.getLocations(), ["foo-13.fits"])
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        self.assertEqual(loc.getAdditionalData().getScalar("width"), 300)
        self.assertEqual(loc.getAdditionalData().getScalar("height"), 400)
        self.assertEqual(loc.getAdditionalData().getScalar("llcX"), 200)
        self.assertEqual(loc.getAdditionalData().getScalar("llcY"), 100)
        self.assertEqual(loc.getAdditionalData().getScalar("imageOrigin"), "PARENT")
        checkCompression(self, loc.getAdditionalData())

    def testCatalogExtras(self):
        """Round-trip a catalog and read back its _schema/_len/_md companions."""
        butler = dafPersist.Butler(root=ROOT, mapper=MinMapper2)
        schema = afwTable.Schema()
        aa = schema.addField("a", type=np.int32, doc="a")
        bb = schema.addField("b", type=np.float64, doc="b")
        catalog = lsst.afw.table.BaseCatalog(schema)
        row = catalog.addNew()
        row.set(aa, 12345)
        row.set(bb, 1.2345)
        size = len(catalog)
        dataId = dict(visit=123, ccd=45)
        butler.put(catalog, "someCatalog", dataId)
        filename = butler.get("someCatalog_filename", dataId)[0]
        try:
            self.assertTrue(os.path.exists(filename))
            self.assertEqual(butler.get("someCatalog_schema", dataId), schema)
            self.assertEqual(butler.get("someCatalog_len", dataId), size)
            header = butler.get("someCatalog_md", dataId)
            self.assertEqual(header.getScalar("NAXIS2"), size)
        finally:
            # Always clean up the file written into the fixture directory.
            try:
                os.remove(filename)
            except OSError as exc:
                print("Warning: could not remove file %r: %s" % (filename, exc))

    def testImage(self):
        """Read a plain FITS image, its bbox companion, and a LOCAL subimage."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("some", dict(ccd=35))
        expectedLocations = ["bar-35.fits"]
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("some", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")

        self.assertEqual(butler.get("some_bbox", ccd=35), image.getBBox())

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("some_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testDetector(self):
        """raw_detector resolves to the single test detector by name."""
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        detector = butler.get("raw_detector", ccd=0)
        self.assertEqual(detector.getName(), "ccd00")

    def testGzImage(self):
        """Same as testImage but for the gzip-compressed variant."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("someGz", dict(ccd=35))
        expectedLocations = [os.path.join("gz", "bar-35.fits.gz")]
        self.assertEqual(loc.getStorage().root, ROOT)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("someGz", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("someGz_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testFzImage(self):
        """Same as testImage but for the fpack (Rice) compressed variant."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("someFz", dict(ccd=35))
        expectedRoot = ROOT
        expectedLocations = [os.path.join("fz", "bar-35.fits.fz")]
        self.assertEqual(loc.getStorage().root, expectedRoot)
        self.assertEqual(loc.getLocations(), expectedLocations)

        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        image = butler.get("someFz", ccd=35)
        self.assertEqual(image.getFilter().getName(), "r")

        bbox = geom.BoxI(geom.Point2I(200, 100),
                         geom.Extent2I(300, 400))
        image = butler.get("someFz_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True)
        self.assertEqual(image.getHeight(), 400)
        self.assertEqual(image.getWidth(), 300)

    def testButlerQueryMetadata(self):
        """Registry queries work with keyword args in any order."""
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.ButlerFactory(mapper=mapper).create()
        kwargs = {"ccd": 35, "filter": "r", "visit": 787731,
                  "taiObs": "2005-04-02T09:24:49.933440000"}
        self.assertEqual(butler.queryMetadata("other", "visit", **kwargs), [787731])
        self.assertEqual(butler.queryMetadata("other", "visit",
                                              visit=kwargs["visit"], ccd=kwargs["ccd"],
                                              taiObs=kwargs["taiObs"], filter=kwargs["filter"]),
                         [787731])
        # now test we get no matches if ccd is out of range
        self.assertEqual(butler.queryMetadata("raw", "ccd", ccd=36, filter="r", visit=787731), [])

    def testQueryMetadata(self):
        """All ccds are in the registry except ccd 3."""
        mapper = MinMapper2(root=ROOT)
        self.assertEqual(mapper.queryMetadata("raw", ["ccd"], None),
                         [(x,) for x in range(36) if x != 3])

    def testStandardize(self):
        """Only the raw dataset has a standardization method."""
        mapper = MinMapper2(root=ROOT)
        self.assertEqual(mapper.canStandardize("raw"), True)
        self.assertEqual(mapper.canStandardize("notPresent"), False)

    def testCalib(self):
        """Calibration lookup derives run ID and filter from the calib registry."""
        mapper = MinMapper2(root=ROOT)
        loc = mapper.map("flat", {"visit": 787650, "ccd": 13}, write=True)
        self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureF")
        self.assertEqual(loc.getCppType(), "ExposureF")
        self.assertEqual(loc.getStorageName(), "FitsStorage")
        expectedRoot = ROOT
        expectedLocations = ["flat-05Am03-fi.fits"]
        self.assertEqual(loc.getStorage().root, expectedRoot)
        self.assertEqual(loc.getLocations(), expectedLocations)
        self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13)
        self.assertEqual(loc.getAdditionalData().getScalar("visit"), 787650)
        self.assertEqual(loc.getAdditionalData().getScalar("derivedRunId"), "05Am03")
        self.assertEqual(loc.getAdditionalData().getScalar("filter"), "i")
        checkCompression(self, loc.getAdditionalData())

    def testNames(self):
        """Camera and package names come from the class attributes."""
        self.assertEqual(MinMapper2.getCameraName(), "min")
        self.assertEqual(MinMapper2.getPackageName(), "moe")

    @unittest.expectedFailure
    def testParentSearch(self):
        """Search falls back to the _parent repository for missing files."""
        mapper = MinMapper2(root=ROOT)
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits')))
        self.assertEqual(paths, [os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits'))])
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT,
                                                 os.path.join('testParentSearch', 'bar.fits[1]')))
        self.assertEqual(paths, [os.path.join(ROOT, os.path.join('testParentSearch', 'bar.fits[1]'))])

        # baz.fits only exists in the parent repository.
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT, os.path.join('testParentSearch', 'baz.fits')))
        self.assertEqual(paths, [os.path.join(ROOT,
                                              os.path.join('testParentSearch', '_parent', 'baz.fits'))])
        paths = mapper.parentSearch(os.path.join(ROOT, 'testParentSearch'),
                                    os.path.join(ROOT,
                                                 os.path.join('testParentSearch', 'baz.fits[1]')))
        self.assertEqual(paths, [os.path.join(ROOT,
                                              os.path.join('testParentSearch', '_parent', 'baz.fits[1]'))])

    def testSkymapLookups(self):
        """Test that metadata lookups don't try to get skymap data ID values from the registry.
        """
        mapper = MinMapper2(root=ROOT)
        butler = dafPersist.Butler(mapper=mapper)
        with self.assertRaises(RuntimeError) as manager:
            butler.dataRef("forced_src", visit=787650, ccd=13)
        self.assertIn("Cannot lookup skymap key 'tract'", str(manager.exception))
        # We're mostly concerned that the statements below will raise an exception;
        # if they don't, it's not likely the following tests will fail.
        subset = butler.subset("forced_src", visit=787650, ccd=13, tract=0)
        self.assertEqual(len(subset), 1)
        dataRef = butler.dataRef("forced_src", visit=787650, ccd=13, tract=0)
        self.assertFalse(dataRef.datasetExists("forced_src"))
class Mapper3TestCase(unittest.TestCase):
    """A test case for a mapper subclass which does not assign packageName."""

    def testPackageName(self):
        # Both construction and the classmethod must fail when packageName
        # was never assigned.
        self.assertRaises(ValueError, MinMapper3)
        self.assertRaises(ValueError, MinMapper3.getPackageName)
424class ParentRegistryTestCase(unittest.TestCase):
426 @staticmethod
427 def _createRegistry(path):
428 cmd = """CREATE TABLE x(
429 id INT,
430 visit INT,
431 filter TEXT,
432 snap INT,
433 raft TEXT,
434 sensor TEXT,
435 channel TEXT,
436 taiObs TEXT,
437 expTime REAL
438 );
439 """
440 conn = sqlite3.connect(path)
441 conn.cursor().execute(cmd)
442 conn.commit()
443 conn.close()
445 def setUp(self):
446 self.ROOT = tempfile.mkdtemp(dir=ROOT, prefix="ParentRegistryTestCase-")
447 self.repoARoot = os.path.join(self.ROOT, 'a')
448 args = dafPersist.RepositoryArgs(root=self.repoARoot, mapper=MinMapper1)
449 butler = dafPersist.Butler(outputs=args)
450 self._createRegistry(os.path.join(self.repoARoot, 'registry.sqlite3'))
451 del butler
453 def tearDown(self):
454 # the butler sql registry closes its database connection in __del__. To trigger __del__ we explicitly
455 # collect the garbage here. If we find having or closing the open database connection is a problem in
456 # production code, we may need to add api to butler to explicity release database connections (and
457 # maybe other things like in-memory cached objects).
458 gc.collect()
459 if os.path.exists(self.ROOT):
460 shutil.rmtree(self.ROOT)
462 def test(self):
463 """Verify that when the child repo does not have a registry it is assigned the registry from the
464 parent."""
465 repoBRoot = os.path.join(self.ROOT, 'b')
466 butler = dafPersist.Butler(inputs=self.repoARoot, outputs=repoBRoot)
467 # This way of getting the registry from the mapping is obviously going way into private members and
468 # the python lambda implementation code. It is very brittle and should not be duplicated in user code
469 # or any location that is not trivial to fix along with changes to the CameraMapper or Mapping.
470 registryA = butler._repos.inputs()[0].repo._mapper.registry
471 registryB = butler._repos.outputs()[0].repo._mapper.registry
472 self.assertEqual(id(registryA), id(registryB))
474 self._createRegistry(os.path.join(repoBRoot, 'registry.sqlite3'))
475 butler = dafPersist.Butler(inputs=self.repoARoot, outputs=repoBRoot)
476 # see above; don't copy this way of getting the registry.
477 registryA = butler._repos.inputs()[0].repo._mapper.registry
478 registryB = butler._repos.outputs()[0].repo._mapper.registry
479 self.assertNotEqual(id(registryA), id(registryB))
class MissingPolicyKeyTestCase(unittest.TestCase):
    """Tests for helpful errors when a dataset type's template is undefined.

    MinMapper1.yaml deliberately omits a template for the ``raw`` dataset
    type; each test checks that using it raises a RuntimeError with a clear
    message.
    """

    def testGetRaises(self):
        butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1})
        # MinMapper1 does not specify a template for the raw dataset type so trying to use it for get should
        # raise
        with self.assertRaises(RuntimeError) as contextManager:
            butler.get('raw')
        # This test demonstrates and verifies that simple use of the incomplete dataset type returns a helpful
        # (I hope) error message.
        self.assertEqual(
            str(contextManager.exception),
            'Template is not defined for the raw dataset type, '
            'it must be set before it can be used.')
        # NOTE(review): this trailing check duplicates testQueryMetadataRaises
        # and does not verify the exception message — possibly a copy-paste
        # leftover; confirm whether it is intentional.
        with self.assertRaises(RuntimeError) as contextManager:
            butler.queryMetadata('raw', 'unused', {})

    def testQueryMetadataRaises(self):
        butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1})
        # MinMapper1 does not specify a template for the raw dataset type so trying to use it for
        # queryMetadata should raise
        with self.assertRaises(RuntimeError) as contextManager:
            butler.queryMetadata('raw', 'unused', {})
        # This test demonstrates and verifies that simple use of the incomplete dataset type returns a helpful
        # (I hope) error message.
        self.assertEqual(
            str(contextManager.exception),
            'Template is not defined for the raw dataset type, '
            'it must be set before it can be used.')

    def testFilenameRaises(self):
        butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1})
        # MinMapper1 does not specify a template for the raw dataset type so trying to use it for
        # <datasetType>_filename should raise
        with self.assertRaises(RuntimeError) as contextManager:
            butler.get('raw_filename')
        # This test demonstrates and verifies that simple use of the incomplete dataset type returns a helpful
        # (I hope) error message.
        self.assertEqual(
            str(contextManager.exception),
            'Template is not defined for the raw dataset type, '
            'it must be set before it can be used.')

    def testWcsRaises(self):
        butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1})
        # MinMapper1 does not specify a template for the raw dataset type so trying to use it for
        # <datasetType>_wcs should raise
        with self.assertRaises(RuntimeError) as contextManager:
            butler.get('raw_wcs')
        # This test demonstrates and verifies that simple use of the incomplete dataset type returns a helpful
        # (I hope) error message.
        self.assertEqual(
            str(contextManager.exception),
            'Template is not defined for the raw dataset type, '
            'it must be set before it can be used.')

    def testConflictRaises(self):
        # A policy defining the same dataset type twice must be rejected.
        policy = dafPersist.Policy(os.path.join(ROOT, "ConflictMapper.yaml"))
        with self.assertRaisesRegex(
                ValueError,
                r"Duplicate mapping policy for dataset type packages"):
            mapper = lsst.obs.base.CameraMapper(policy=policy, repositoryDir=ROOT, root=ROOT)  # noqa F841
class MemoryTester(lsst.utils.tests.MemoryTestCase):
    """Standard LSST test-suite boilerplate; behavior comes entirely from
    lsst.utils.tests.MemoryTestCase."""
    pass
# Allow running this file directly as well as through pytest.
if __name__ == '__main__':
    lsst.utils.tests.init()
    unittest.main()