Coverage for tests/test_cameraMapper.py : 22%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
# This file is part of obs_base. # # Developed for the LSST Data Management System. # This product includes software developed by the LSST Project # (https://www.lsst.org). # See the COPYRIGHT file at the top-level directory of this distribution # for details of code ownership. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>.
lsst.utils.tests.init()
policy = dafPersist.Policy(os.path.join(ROOT, "MinMapper1.yaml")) lsst.obs.base.CameraMapper.__init__(self, policy=policy, repositoryDir=ROOT, **kwargs) return
return float(item)
def getCameraName(cls):
    """Return the camera name handled by this mapper ("min")."""
    return "min"
def getPackageDir(cls):
    """Return a placeholder package directory; no real package backs this test mapper."""
    return "/path/to/nowhere"
# CalibRoot in policy # needCalibRegistry policy = dafPersist.Policy(os.path.join(ROOT, "MinMapper2.yaml")) lsst.obs.base.CameraMapper.__init__(self, policy=policy, repositoryDir=ROOT, registry="cfhtls.sqlite3", **kwargs) return
return dataId
return "ccd00"
return float(item)
def getCameraName(cls):
    """Return the name of the camera this CameraMapper serves."""
    return "min"
def getPackageDir(cls):
    """Return a dummy install directory for this test-only mapper."""
    return "/path/to/nowhere"
# does not assign packageName
policy = dafPersist.Policy(os.path.join(ROOT, "MinMapper1.yaml")) lsst.obs.base.CameraMapper.__init__(self, policy=policy, repositoryDir=ROOT, root=ROOT) return
def getPackageDir(cls):
    """Return a fake package directory (this mapper has no real package)."""
    return "/path/to/nowhere"
"""Check that compression settings are present
We check that we can access the required settings, and that the seed is non-zero (zero causes lsst.afw.math.Random to fail). """ for plane in ("image", "mask", "variance"): for entry in ("compression.algorithm", "compression.columns", "compression.rows", "compression.quantizeLevel", "scaling.algorithm", "scaling.bitpix", "scaling.maskPlanes", "scaling.seed", "scaling.quantizeLevel", "scaling.quantizePad", "scaling.fuzz", "scaling.bscale", "scaling.bzero", ): additionalData.getScalar(plane + "." + entry) testCase.assertNotEqual(additionalData.getScalar(plane + ".scaling.seed"), 0)
"""A test case for the mapper used by the data butler."""
self.mapper = MinMapper1(root=ROOT)
del self.mapper
expectedTypes = BaseMapper(ROOT).getDatasetTypes() # Add the expected additional types to what the base class provides expectedTypes.extend(["x", "x_filename", "badSourceHist", "badSourceHist_filename", ]) self.assertEqual(set(self.mapper.getDatasetTypes()), set(expectedTypes))
loc = self.mapper.map("x", {"sensor": "1,1"}, write=True) self.assertEqual(loc.getPythonType(), "lsst.afw.geom.BoxI") self.assertEqual(loc.getCppType(), "BoxI") self.assertEqual(loc.getStorageName(), "PickleStorage") expectedRoot = ROOT expectedLocations = ["foo-1,1.pickle"] self.assertEqual(loc.getStorage().root, expectedRoot) self.assertEqual(loc.getLocations(), expectedLocations) self.assertEqual(loc.getAdditionalData().toString(), "sensor = \"1,1\"\n")
self.assertEqual(self.mapper.queryMetadata("x", ["sensor"], None), [("1,1",)])
self.assertTrue(self.mapper.canStandardize("x")) self.assertFalse(self.mapper.canStandardize("badSourceHist")) self.assertFalse(self.mapper.canStandardize("notPresent")) result = self.mapper.standardize("x", 3, None) self.assertIsInstance(result, float) self.assertEqual(result, 3.0) result = self.mapper.standardize("x", 3.14, None) self.assertIsInstance(result, float) self.assertEqual(result, 3.14) result = self.mapper.standardize("x", "3.14", None) self.assertIsInstance(result, float) self.assertEqual(result, 3.14)
self.assertEqual(MinMapper1.getCameraName(), "min") self.assertEqual(MinMapper1.getPackageName(), "larry")
"""A test case for the mapper used by the data butler."""
mapper = MinMapper2(root=ROOT) expectedTypes = BaseMapper(ROOT).getDatasetTypes() # Add the expected additional types to what the base class provides expectedTypes.extend(["flat", "flat_md", "flat_filename", "flat_sub", "raw", "raw_md", "raw_filename", "raw_sub", "some", "some_filename", "some_md", "some_sub", "someCatalog", "someCatalog_md", "someCatalog_filename", "someCatalog_len", "someCatalog_schema", "forced_src", "forced_src_md", "forced_src_filename", "forced_src_len", "forced_src_schema", "other_sub", "other_filename", "other_md", "other", "someGz", "someGz_filename", "someFz", "someFz_filename", "someGz_md", "someFz_sub", "someFz_md", "someGz_sub", "someGz_bbox", "someFz_bbox", "some_bbox", "other_bbox", ]) self.assertEqual(set(mapper.getDatasetTypes()), set(expectedTypes))
mapper = MinMapper2(root=ROOT) loc = mapper.map("raw", {"ccd": 13}, write=True) self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU") self.assertEqual(loc.getCppType(), "ImageU") self.assertEqual(loc.getStorageName(), "FitsStorage") self.assertEqual(loc.getLocations(), ["foo-13.fits"]) self.assertEqual(loc.getStorage().root, ROOT) self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13) checkCompression(self, loc.getAdditionalData())
bbox = geom.BoxI(geom.Point2I(200, 100), geom.Extent2I(300, 400)) mapper = MinMapper2(root=ROOT) loc = mapper.map("raw_sub", {"ccd": 13, "bbox": bbox}, write=True) self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU") self.assertEqual(loc.getCppType(), "ImageU") self.assertEqual(loc.getStorageName(), "FitsStorage") self.assertEqual(loc.getLocations(), ["foo-13.fits"]) self.assertEqual(loc.getStorage().root, ROOT) self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13) self.assertEqual(loc.getAdditionalData().getScalar("width"), 300) self.assertEqual(loc.getAdditionalData().getScalar("height"), 400) self.assertEqual(loc.getAdditionalData().getScalar("llcX"), 200) self.assertEqual(loc.getAdditionalData().getScalar("llcY"), 100) checkCompression(self, loc.getAdditionalData())
loc = mapper.map("raw_sub", {"ccd": 13, "bbox": bbox, "imageOrigin": "PARENT"}, write=True) self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureU") self.assertEqual(loc.getCppType(), "ImageU") self.assertEqual(loc.getStorageName(), "FitsStorage") self.assertEqual(loc.getLocations(), ["foo-13.fits"]) self.assertEqual(loc.getStorage().root, ROOT) self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13) self.assertEqual(loc.getAdditionalData().getScalar("width"), 300) self.assertEqual(loc.getAdditionalData().getScalar("height"), 400) self.assertEqual(loc.getAdditionalData().getScalar("llcX"), 200) self.assertEqual(loc.getAdditionalData().getScalar("llcY"), 100) self.assertEqual(loc.getAdditionalData().getScalar("imageOrigin"), "PARENT") checkCompression(self, loc.getAdditionalData())
butler = dafPersist.Butler(root=ROOT, mapper=MinMapper2) schema = afwTable.Schema() aa = schema.addField("a", type=np.int32, doc="a") bb = schema.addField("b", type=np.float64, doc="b") catalog = lsst.afw.table.BaseCatalog(schema) row = catalog.addNew() row.set(aa, 12345) row.set(bb, 1.2345) size = len(catalog) dataId = dict(visit=123, ccd=45) butler.put(catalog, "someCatalog", dataId) filename = butler.get("someCatalog_filename", dataId)[0] try: self.assertTrue(os.path.exists(filename)) self.assertEqual(butler.get("someCatalog_schema", dataId), schema) self.assertEqual(butler.get("someCatalog_len", dataId), size) header = butler.get("someCatalog_md", dataId) self.assertEqual(header.getScalar("NAXIS2"), size) finally: try: os.remove(filename) except OSError as exc: print("Warning: could not remove file %r: %s" % (filename, exc))
mapper = MinMapper2(root=ROOT) loc = mapper.map("some", dict(ccd=35)) expectedLocations = ["bar-35.fits"] self.assertEqual(loc.getStorage().root, ROOT) self.assertEqual(loc.getLocations(), expectedLocations)
butler = dafPersist.ButlerFactory(mapper=mapper).create() image = butler.get("some", ccd=35) self.assertEqual(image.getFilter().getName(), "r")
self.assertEqual(butler.get("some_bbox", ccd=35), image.getBBox())
bbox = geom.BoxI(geom.Point2I(200, 100), geom.Extent2I(300, 400)) image = butler.get("some_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True) self.assertEqual(image.getHeight(), 400) self.assertEqual(image.getWidth(), 300)
mapper = MinMapper2(root=ROOT) butler = dafPersist.ButlerFactory(mapper=mapper).create() detector = butler.get("raw_detector", ccd=0) self.assertEqual(detector.getName(), "ccd00")
mapper = MinMapper2(root=ROOT) loc = mapper.map("someGz", dict(ccd=35)) expectedLocations = [os.path.join("gz", "bar-35.fits.gz")] self.assertEqual(loc.getStorage().root, ROOT) self.assertEqual(loc.getLocations(), expectedLocations)
butler = dafPersist.ButlerFactory(mapper=mapper).create() image = butler.get("someGz", ccd=35) self.assertEqual(image.getFilter().getName(), "r")
bbox = geom.BoxI(geom.Point2I(200, 100), geom.Extent2I(300, 400)) image = butler.get("someGz_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True) self.assertEqual(image.getHeight(), 400) self.assertEqual(image.getWidth(), 300)
mapper = MinMapper2(root=ROOT) loc = mapper.map("someFz", dict(ccd=35)) expectedRoot = ROOT expectedLocations = [os.path.join("fz", "bar-35.fits.fz")] self.assertEqual(loc.getStorage().root, expectedRoot) self.assertEqual(loc.getLocations(), expectedLocations)
butler = dafPersist.ButlerFactory(mapper=mapper).create() image = butler.get("someFz", ccd=35) self.assertEqual(image.getFilter().getName(), "r")
bbox = geom.BoxI(geom.Point2I(200, 100), geom.Extent2I(300, 400)) image = butler.get("someFz_sub", ccd=35, bbox=bbox, imageOrigin="LOCAL", immediate=True) self.assertEqual(image.getHeight(), 400) self.assertEqual(image.getWidth(), 300)
mapper = MinMapper2(root=ROOT) butler = dafPersist.ButlerFactory(mapper=mapper).create() kwargs = {"ccd": 35, "filter": "r", "visit": 787731, "taiObs": "2005-04-02T09:24:49.933440000"} self.assertEqual(butler.queryMetadata("other", "visit", **kwargs), [787731]) self.assertEqual(butler.queryMetadata("other", "visit", visit=kwargs["visit"], ccd=kwargs["ccd"], taiObs=kwargs["taiObs"], filter=kwargs["filter"]), [787731]) # now test we get no matches if ccd is out of range self.assertEqual(butler.queryMetadata("raw", "ccd", ccd=36, filter="r", visit=787731), [])
mapper = MinMapper2(root=ROOT) self.assertEqual(mapper.queryMetadata("raw", ["ccd"], None), [(x,) for x in range(36) if x != 3])
mapper = MinMapper2(root=ROOT) self.assertEqual(mapper.canStandardize("raw"), True) self.assertEqual(mapper.canStandardize("notPresent"), False)
mapper = MinMapper2(root=ROOT) loc = mapper.map("flat", {"visit": 787650, "ccd": 13}, write=True) self.assertEqual(loc.getPythonType(), "lsst.afw.image.ExposureF") self.assertEqual(loc.getCppType(), "ExposureF") self.assertEqual(loc.getStorageName(), "FitsStorage") expectedRoot = ROOT expectedLocations = ["flat-05Am03-fi.fits"] self.assertEqual(loc.getStorage().root, expectedRoot) self.assertEqual(loc.getLocations(), expectedLocations) self.assertEqual(loc.getAdditionalData().getScalar("ccd"), 13) self.assertEqual(loc.getAdditionalData().getScalar("visit"), 787650) self.assertEqual(loc.getAdditionalData().getScalar("derivedRunId"), "05Am03") self.assertEqual(loc.getAdditionalData().getScalar("filter"), "i") checkCompression(self, loc.getAdditionalData())
self.assertEqual(MinMapper2.getCameraName(), "min") self.assertEqual(MinMapper2.getPackageName(), "moe")
def testParentSearch(self):
    """Exercise CameraMapper.parentSearch for files found directly in the
    repository and files only present via the ``_parent`` link, including
    FITS HDU-suffixed names such as ``bar.fits[1]``."""
    mapper = MinMapper2(root=ROOT)
    searchRoot = os.path.join(ROOT, 'testParentSearch')
    # (name looked up, path expected back, both relative to ROOT)
    cases = [
        ('bar.fits',
         os.path.join('testParentSearch', 'bar.fits')),
        ('bar.fits[1]',
         os.path.join('testParentSearch', 'bar.fits[1]')),
        ('baz.fits',
         os.path.join('testParentSearch', '_parent', 'baz.fits')),
        ('baz.fits[1]',
         os.path.join('testParentSearch', '_parent', 'baz.fits[1]')),
    ]
    for lookupName, expectedRelPath in cases:
        found = mapper.parentSearch(
            searchRoot,
            os.path.join(ROOT, os.path.join('testParentSearch', lookupName)))
        self.assertEqual(found, [os.path.join(ROOT, expectedRelPath)])
"""Test that metadata lookups don't try to get skymap data ID values from the registry. """ mapper = MinMapper2(root=ROOT) butler = dafPersist.Butler(mapper=mapper) with self.assertRaises(RuntimeError) as manager: butler.dataRef("forced_src", visit=787650, ccd=13) self.assertIn("Cannot lookup skymap key 'tract'", str(manager.exception)) # We're mostly concerned that the statements below will raise an exception; # if they don't, it's not likely the following tests will fail. subset = butler.subset("forced_src", visit=787650, ccd=13, tract=0) self.assertEqual(len(subset), 1) dataRef = butler.dataRef("forced_src", visit=787650, ccd=13, tract=0) self.assertFalse(dataRef.datasetExists("forced_src"))
"""A test case for a mapper subclass which does not assign packageName."""
with self.assertRaises(ValueError): MinMapper3() with self.assertRaises(ValueError): MinMapper3.getPackageName()
def _createRegistry(path):
    """Create a minimal butler registry SQLite database at ``path``.

    The registry contains a single, empty table ``x`` with the columns
    the test mappers expect for metadata lookups.

    Parameters
    ----------
    path : `str`
        Filesystem path of the SQLite database file to create.
    """
    cmd = """CREATE TABLE x(
        id INT,
        visit INT,
        filter TEXT,
        snap INT,
        raft TEXT,
        sensor TEXT,
        channel TEXT,
        taiObs TEXT,
        expTime REAL
    );
    """
    conn = sqlite3.connect(path)
    # try/finally ensures the connection is released even if table
    # creation fails (the original leaked the connection on error).
    try:
        conn.cursor().execute(cmd)
        conn.commit()
    finally:
        conn.close()
self.ROOT = tempfile.mkdtemp(dir=ROOT, prefix="ParentRegistryTestCase-") self.repoARoot = os.path.join(self.ROOT, 'a') args = dafPersist.RepositoryArgs(root=self.repoARoot, mapper=MinMapper1) butler = dafPersist.Butler(outputs=args) self._createRegistry(os.path.join(self.repoARoot, 'registry.sqlite3')) del butler
# the butler sql registry closes its database connection in __del__. To trigger __del__ we explicitly # collect the garbage here. If we find having or closing the open database connection is a problem in # production code, we may need to add api to butler to explicity release database connections (and # maybe other things like in-memory cached objects). gc.collect() if os.path.exists(self.ROOT): shutil.rmtree(self.ROOT)
"""Verify that when the child repo does not have a registry it is assigned the registry from the parent.""" repoBRoot = os.path.join(self.ROOT, 'b') butler = dafPersist.Butler(inputs=self.repoARoot, outputs=repoBRoot) # This way of getting the registry from the mapping is obviously going way into private members and # the python lambda implementation code. It is very brittle and should not be duplicated in user code # or any location that is not trivial to fix along with changes to the CameraMapper or Mapping. registryA = butler._repos.inputs()[0].repo._mapper.registry registryB = butler._repos.outputs()[0].repo._mapper.registry self.assertEqual(id(registryA), id(registryB))
self._createRegistry(os.path.join(repoBRoot, 'registry.sqlite3')) butler = dafPersist.Butler(inputs=self.repoARoot, outputs=repoBRoot) # see above; don't copy this way of getting the registry. registryA = butler._repos.inputs()[0].repo._mapper.registry registryB = butler._repos.outputs()[0].repo._mapper.registry self.assertNotEqual(id(registryA), id(registryB))
butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1}) # MinMapper1 does not specify a template for the raw dataset type so trying to use it for get should # raise with self.assertRaises(RuntimeError) as contextManager: butler.get('raw') # This test demonstrates and verifies that simple use of the incomplete dataset type returns a helpful # (I hope) error message. self.assertEqual( str(contextManager.exception), 'Template is not defined for the raw dataset type, ' + 'it must be set before it can be used.') with self.assertRaises(RuntimeError) as contextManager: butler.queryMetadata('raw', 'unused', {})
butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1}) # MinMapper1 does not specify a template for the raw dataset type so trying to use it for # queryMetadata should raise with self.assertRaises(RuntimeError) as contextManager: butler.queryMetadata('raw', 'unused', {}) # This test demonstrates and verifies that simple use of the incomplete dataset type returns a helpful # (I hope) error message. self.assertEqual( str(contextManager.exception), 'Template is not defined for the raw dataset type, ' + 'it must be set before it can be used.')
butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1}) # MinMapper1 does not specify a template for the raw dataset type so trying to use it for # <datasetType>_filename should raise with self.assertRaises(RuntimeError) as contextManager: butler.get('raw_filename') # This test demonstrates and verifies that simple use of the incomplete dataset type returns a helpful # (I hope) error message. self.assertEqual( str(contextManager.exception), 'Template is not defined for the raw dataset type, ' + 'it must be set before it can be used.')
butler = dafPersist.Butler(inputs={'root': ROOT, 'mapper': MinMapper1}) # MinMapper1 does not specify a template for the raw dataset type so trying to use it for # <datasetType>_wcs should raise with self.assertRaises(RuntimeError) as contextManager: butler.get('raw_wcs') # This test demonstrates and verifies that simple use of the incomplete dataset type returns a helpful # (I hope) error message. self.assertEqual( str(contextManager.exception), 'Template is not defined for the raw dataset type, ' + 'it must be set before it can be used.')
policy = dafPersist.Policy(os.path.join(ROOT, "ConflictMapper.yaml")) with self.assertRaisesRegex( ValueError, r"Duplicate mapping policy for dataset type packages"): mapper = lsst.obs.base.CameraMapper(policy=policy, repositoryDir=ROOT, root=ROOT) # noqa F841
lsst.utils.tests.init() unittest.main() |