Coverage for tests/test_s3utils.py: 29% of 73 statements (coverage.py v7.5.1, created at 2024-05-16 02:51 -0700)
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
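
"""Tests for the S3 utility helpers in lsst.resources.s3utils."""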

import os
import unittest
from unittest import mock

try:
    import boto3
    from botocore.exceptions import ParamValidationError
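
    # moto 5 replaced the per-service decorators (mock_s3, etc.) with a single
    # mock_aws decorator; fall back to the old name for older moto releases.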
    try:
        from moto import mock_aws  # v5
    except ImportError:
        from moto import mock_s3 as mock_aws
except ImportError:
    boto3 = None

from lsst.resources import ResourcePath
from lsst.resources.location import Location
from lsst.resources.s3utils import (
    _parse_endpoint_config,
    bucketExists,
    clean_test_environment_for_s3,
    getS3Client,
    s3CheckFileExists,
)
from urllib3.exceptions import LocationParseError


@unittest.skipIf(not boto3, "Warning: boto3 AWS SDK not found!")
class S3UtilsTestCase(unittest.TestCase):
    """Test for the S3 related utilities."""

    bucketName = "test_bucket_name"
    fileName = "testFileName"
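
    # setUp keeps each test hermetic: clean_test_environment_for_s3 is expected
    # to clear any ambient S3 configuration so no real endpoint is contacted,
    # and mock_aws serves every boto3 call from moto's in-memory backend.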
    def setUp(self):
        self.enterContext(clean_test_environment_for_s3())
        self.enterContext(mock_aws())

        self.client = getS3Client()
        try:
            self.client.create_bucket(Bucket=self.bucketName)
            self.client.put_object(Bucket=self.bucketName, Key=self.fileName, Body=b"test content")
        except self.client.exceptions.BucketAlreadyExists:
            pass
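
    # S3 will not delete a non-empty bucket, so remove the objects first and
    # then the bucket itself.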
    def tearDown(self):
        objects = self.client.list_objects(Bucket=self.bucketName)
        if "Contents" in objects:
            for item in objects["Contents"]:
                self.client.delete_object(Bucket=self.bucketName, Key=item["Key"])

        self.client.delete_bucket(Bucket=self.bucketName)

    def testBucketExists(self):
        self.assertTrue(bucketExists(f"{self.bucketName}"))
        self.assertFalse(bucketExists(f"{self.bucketName}_no_exist"))
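
    # Ceph object stores use "tenant:bucket" style names that botocore's bucket
    # name validation rejects. With LSST_DISABLE_BUCKET_VALIDATION unset or
    # false-y, getS3Client leaves that validation in place and "foo:bar" raises
    # ParamValidationError; a truthy value disables it so the lookup proceeds.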
    def testCephBucket(self):
        with mock.patch.dict(os.environ, {"LSST_DISABLE_BUCKET_VALIDATION": "N"}):
            self.assertEqual(os.environ["LSST_DISABLE_BUCKET_VALIDATION"], "N")
            local_client = getS3Client()
            with self.assertRaises(ParamValidationError):
                bucketExists("foo:bar", local_client)
        with mock.patch.dict(os.environ, {"LSST_DISABLE_BUCKET_VALIDATION": "1"}):
            self.assertEqual(os.environ["LSST_DISABLE_BUCKET_VALIDATION"], "1")
            local_client = getS3Client()
            self.assertFalse(bucketExists("foo:bar", local_client))
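
    # s3CheckFileExists returns an (exists, size) tuple, hence the [0] indexing
    # on the assertions below.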
    def testFileExists(self):
        self.assertTrue(s3CheckFileExists(client=self.client, bucket=self.bucketName, path=self.fileName)[0])
        self.assertFalse(
            s3CheckFileExists(client=self.client, bucket=self.bucketName, path=self.fileName + "_NO_EXIST")[0]
        )

        datastoreRootUri = f"s3://{self.bucketName}/"
        uri = f"s3://{self.bucketName}/{self.fileName}"

        buri = ResourcePath(uri)
        location = Location(datastoreRootUri, self.fileName)

        self.assertTrue(s3CheckFileExists(client=self.client, path=buri)[0])
        # Make sure the overloaded positional/keyword forms work correctly.
        self.assertTrue(s3CheckFileExists(buri, client=self.client)[0])
        self.assertTrue(s3CheckFileExists(client=self.client, path=location)[0])

        # Make sure supplying plain string URIs resolves correctly too.
        self.assertTrue(s3CheckFileExists(uri, client=self.client)[0])
        self.assertTrue(s3CheckFileExists(uri)[0])
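
    # _parse_endpoint_config accepts credentials embedded in the endpoint URL,
    # https://<access key>:<secret>@<host>. A "/" inside the secret must be
    # percent-encoded as %2F; left unescaped it breaks urllib3's URL parsing.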
    def test_parsing_profile_config(self):
        with self.assertRaises(LocationParseError):
            _parse_endpoint_config(
                "https://AKIAIOSFODNN7EXAMPLE:wJalrXUtnFEMI/FK7MDENG/FbPxRfiCYEXAMPLEKEY@endpoint.com"
            )

        parsed = _parse_endpoint_config(
            "https://AKIAIOSFODNN7EXAMPLE:wJalrXUtnFEMI%2FK7MDENG%2FbPxRfiCYEXAMPLEKEY@endpoint.com"
        )
        self.assertEqual(parsed.endpoint_url, "https://endpoint.com")
        self.assertEqual(parsed.access_key_id, "AKIAIOSFODNN7EXAMPLE")
        self.assertEqual(parsed.secret_access_key, "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY")

        simple = _parse_endpoint_config("https://other.endpoint.com")
        self.assertEqual(simple.endpoint_url, "https://other.endpoint.com")
        self.assertIsNone(simple.access_key_id)
        self.assertIsNone(simple.secret_access_key)

        with self.assertRaisesRegex(ValueError, "S3 access key and secret not in expected format."):
            _parse_endpoint_config("https://key@endpoint.com")


if __name__ == "__main__":
    unittest.main()