Coverage for tests/test_s3utils.py: 34% (49 statements)

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import unittest

try:
    import boto3
    from moto import mock_s3
except ImportError:
    boto3 = None

    def mock_s3(cls):
        """A no-op decorator in case moto's mock_s3 cannot be imported."""
        return cls

from lsst.resources import ResourcePath
from lsst.resources.location import Location
from lsst.resources.s3utils import (
    bucketExists,
    getS3Client,
    s3CheckFileExists,
    setAwsEnvCredentials,
    unsetAwsEnvCredentials,
)


@unittest.skipIf(not boto3, "Warning: boto3 AWS SDK not found!")
@mock_s3
class S3UtilsTestCase(unittest.TestCase):
    """Tests for the S3-related utilities."""
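
    # Bucket and object key shared by all tests; created in setUp and
    # cleaned up again in tearDown.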
    bucketName = "test_bucket_name"
    fileName = "testFileName"

    def setUp(self):
        # set up some fake credentials if they do not exist
        self.usingDummyCredentials = setAwsEnvCredentials()

        self.client = getS3Client()
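        # Create the test bucket and seed it with one object. The moto
        # mock_s3 decorator on the class keeps these calls local, so no
        # real AWS resources are ever touched.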
        try:
            self.client.create_bucket(Bucket=self.bucketName)
            self.client.put_object(Bucket=self.bucketName, Key=self.fileName, Body=b"test content")
        except self.client.exceptions.BucketAlreadyExists:
            pass

    def tearDown(self):
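        # S3 will not delete a non-empty bucket, so remove every object
        # first and only then delete the bucket itself.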
        objects = self.client.list_objects(Bucket=self.bucketName)
        if "Contents" in objects:
            for item in objects["Contents"]:
                self.client.delete_object(Bucket=self.bucketName, Key=item["Key"])

        self.client.delete_bucket(Bucket=self.bucketName)

        # unset any potentially set dummy credentials
        if self.usingDummyCredentials:
            unsetAwsEnvCredentials()

    def testBucketExists(self):
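        # The bucket created in setUp should be reported as existing;
        # a name that was never created should not.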
        self.assertTrue(bucketExists(f"{self.bucketName}"))
        self.assertFalse(bucketExists(f"{self.bucketName}_no_exist"))

    def testFileExists(self):
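        # s3CheckFileExists returns a tuple whose first element is the
        # existence flag, hence the [0] indexing below.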
        self.assertTrue(s3CheckFileExists(client=self.client, bucket=self.bucketName, path=self.fileName)[0])
        self.assertFalse(
            s3CheckFileExists(client=self.client, bucket=self.bucketName, path=self.fileName + "_NO_EXIST")[0]
        )
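
        # Build the same target as a plain URI string, a ResourcePath, and a
        # Location to exercise each of the accepted path types.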
        datastoreRootUri = f"s3://{self.bucketName}/"
        uri = f"s3://{self.bucketName}/{self.fileName}"

        buri = ResourcePath(uri)
        location = Location(datastoreRootUri, self.fileName)

        self.assertTrue(s3CheckFileExists(client=self.client, path=buri)[0])
        # just to make sure the overloaded keyword works correctly
        self.assertTrue(s3CheckFileExists(buri, client=self.client)[0])
        self.assertTrue(s3CheckFileExists(client=self.client, path=location)[0])

        # make sure supplying strings resolves correctly too; index the
        # returned tuple so the assertion actually tests existence
        self.assertTrue(s3CheckFileExists(uri, client=self.client)[0])
        self.assertTrue(s3CheckFileExists(uri)[0])


if __name__ == "__main__":
    unittest.main()