Coverage for tests/test_butlerFits.py: 35%

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
import os
import random
import shutil
import string
import tempfile
import unittest

try:
    import boto3
    import botocore
    from moto import mock_s3
except ImportError:
    boto3 = None

    def mock_s3(cls):
        """A no-op decorator in case moto mock_s3 can not be imported."""
        return cls

import lsst.utils.tests

try:
    import lsst.afw.image
    from lsst.afw.image import LOCAL
    from lsst.geom import Box2I, Extent2I, Point2I
except ImportError:
    lsst.afw = None

from lsst.daf.butler import Butler, ButlerURI, Config, DatasetType, StorageClassFactory
from lsst.daf.butler.core.s3utils import setAwsEnvCredentials, unsetAwsEnvCredentials

TESTDIR = os.path.abspath(os.path.dirname(__file__))
def registerDatasetTypes(datasetTypeName, dimensions, storageClass, registry):
    """Bulk register DatasetTypes."""
    datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
    registry.registerDatasetType(datasetType)

    for compName, compStorageClass in storageClass.components.items():
        compType = DatasetType(datasetType.componentTypeName(compName),
                               dimensions, compStorageClass)
        registry.registerDatasetType(compType)
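
# A minimal sketch (assuming lsst.daf.butler is importable) of the naming
# convention the bulk registration above relies on: each component of a
# composite gets its own dataset type whose name is derived from the parent
# name. The "calexp" / "wcs" values here are purely illustrative.
from lsst.daf.butler import DatasetType

compTypeName = DatasetType.nameWithComponent("calexp", "wcs")
print(compTypeName)  # expected to read back as "calexp.wcs"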
@classmethod
def setUpClass(cls):
    if lsst.afw is None:
        raise unittest.SkipTest("afw not available.")
    cls.storageClassFactory = StorageClassFactory()
    cls.storageClassFactory.addFromConfig(cls.configFile)
"""Create a new butler root for each test.""" if self.useTempRoot: self.root = tempfile.mkdtemp(dir=TESTDIR) Butler.makeRepo(self.root, config=Config(self.configFile)) self.tmpConfigFile = os.path.join(self.root, "butler.yaml") else: self.root = None self.tmpConfigFile = self.configFile
def tearDown(self):
    if self.root is not None and os.path.exists(self.root):
        shutil.rmtree(self.root, ignore_errors=True)
storageClass = self.storageClassFactory.getStorageClass("ExposureF")
self.runExposureCompositePutGetTest(storageClass, "calexp")
storageClass = self.storageClassFactory.getStorageClass("ExposureCompositeF")
self.runExposureCompositePutGetTest(storageClass, "unknown")
def runExposureCompositePutGetTest(self, storageClass, datasetTypeName):
    example = os.path.join(TESTDIR, "data", "basic", "small.fits")
    exposure = lsst.afw.image.ExposureF(example)
    butler = Butler(self.tmpConfigFile)
    dimensions = butler.registry.dimensions.extract(["instrument", "visit"])
    self.registerDatasetTypes(datasetTypeName, dimensions, storageClass, butler.registry)

    dataId = {"visit": 42, "instrument": "DummyCam", "physical_filter": "d-r"}
    # Add needed Dimensions
    butler.registry.insertDimensionData("instrument", {"instrument": "DummyCam"})
    butler.registry.insertDimensionData("physical_filter",
                                        {"instrument": "DummyCam",
                                         "name": "d-r", "abstract_filter": "R"})
    butler.registry.insertDimensionData("visit",
                                        {"instrument": "DummyCam", "id": 42,
                                         "name": "fortytwo",
                                         "physical_filter": "d-r"})
    butler.put(exposure, datasetTypeName, dataId)

    # Get the full thing
    butler.get(datasetTypeName, dataId)
    # TODO enable check for equality (fix for Exposure type)
    # self.assertEqual(full, exposure)

    # Get a component
    compsRead = {}
    for compName in ("wcs", "image", "mask", "coaddInputs", "psf"):
        compTypeName = DatasetType.nameWithComponent(datasetTypeName, compName)
        component = butler.get(compTypeName, dataId)
        # TODO enable check for component instance types
        # compRef = butler.registry.find(butler.run.collection,
        #                                f"calexp.{compName}", dataId)
        # self.assertIsInstance(component,
        #                       compRef.datasetType.storageClass.pytype)
        compsRead[compName] = component

    # Simple check of WCS
    bbox = Box2I(Point2I(0, 0), Extent2I(9, 9))
    self.assertWcsAlmostEqualOverBBox(compsRead["wcs"], exposure.getWcs(), bbox)

    # With parameters
    inBBox = Box2I(minimum=Point2I(0, 0), maximum=Point2I(3, 3))
    parameters = dict(bbox=inBBox, origin=LOCAL)
    subset = butler.get(datasetTypeName, dataId, parameters=parameters)
    outBBox = subset.getBBox()
    self.assertEqual(inBBox, outBBox)
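
# A minimal sketch (assuming lsst.geom is importable) of the bounding box used
# for the parameterized get above: an inclusive minimum of (0, 0) and maximum
# of (3, 3) describes a 4x4 pixel cutout anchored at the origin.
from lsst.geom import Box2I, Point2I

inBBox = Box2I(minimum=Point2I(0, 0), maximum=Point2I(3, 3))
print(inBBox.getWidth(), inBBox.getHeight())  # 4 4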
"""PosixDatastore specialization of a butler"""
"""InMemoryDatastore specialization of a butler"""
"""PosixDatastore specialization"""
"""S3Datastore specialization of a butler; an S3 storage Datastore + a local in-memory SqlRegistry. """
"""Name of the Bucket that will be used in the tests. The name is read from the config file used with the tests during set-up. """
"""Root repository directory expected to be used in case useTempRoot=False. Otherwise the root is set to a 20 characters long randomly generated string during set-up. """
"""Returns a random string of len 20 to serve as a root name for the temporary bucket repo.
This is equivalent to tempfile.mkdtemp as this is what self.root becomes when useTempRoot is True. """ rndstr = "".join( random.choice(string.ascii_uppercase + string.digits) for _ in range(20) ) return rndstr + "/"
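
# A minimal standalone sketch of how a root generated as above combines with
# the bucket name into the repository URI used during set-up below. The bucket
# name here is hypothetical; the real one is read from the test config file.
import random
import string

bucketName = "dummy-test-bucket"
root = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(20)) + "/"
rooturi = f"s3://{bucketName}/{root}"
print(rooturi)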
def setUp(self):
    config = Config(self.configFile)
    uri = ButlerURI(config[".datastore.datastore.root"])
    self.bucketName = uri.netloc

    if self.useTempRoot:
        self.root = self.genRoot()
    rooturi = f"s3://{self.bucketName}/{self.root}"
    config.update({"datastore": {"datastore": {"root": rooturi}}})

    # set up some fake credentials if they do not exist
    self.usingDummyCredentials = setAwsEnvCredentials()

    # MOTO needs to know that we expect Bucket bucketname to exist
    # (this used to be the class attribute bucketName)
    s3 = boto3.resource("s3")
    s3.create_bucket(Bucket=self.bucketName)

    self.datastoreStr = f"datastore={self.root}"
    self.datastoreName = [f"S3Datastore@{rooturi}"]
    Butler.makeRepo(rooturi, config=config, forceConfigRoot=False)
    self.tmpConfigFile = os.path.join(rooturi, "butler.yaml")
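
# A minimal sketch of the URI split the set-up above relies on: the bucket name
# is the network location of the configured s3 root. urlparse is used here only
# to illustrate the split; the test itself goes through ButlerURI.
from urllib.parse import urlparse

parts = urlparse("s3://dummy-test-bucket/some/repo/root")  # hypothetical URI
print(parts.netloc)  # "dummy-test-bucket"
print(parts.path)    # "/some/repo/root"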
s3 = boto3.resource("s3") bucket = s3.Bucket(self.bucketName) try: bucket.objects.all().delete() except botocore.exceptions.ClientError as err: errorcode = err.response["ResponseMetadata"]["HTTPStatusCode"] if errorcode == 404: # the key was not reachable - pass pass else: raise
bucket = s3.Bucket(self.bucketName) bucket.delete()
# unset any potentially set dummy credentials if self.usingDummyCredentials: unsetAwsEnvCredentials()
if __name__ == "__main__":
    unittest.main()