# This file is part of lsst-resources.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

import hashlib
import importlib
import io
import os.path
import random
import shutil
import socket
import stat
import string
import tempfile
import time
import unittest
import unittest.mock
import warnings
from collections.abc import Callable
from threading import Thread
from typing import cast

try:
    from cheroot import wsgi
    from wsgidav.wsgidav_app import WsgiDAVApp
except ImportError:
    WsgiDAVApp = None

import lsst.resources
import requests
import responses
from lsst.resources import ResourcePath
from lsst.resources._resourceHandles._httpResourceHandle import HttpReadResourceHandle
from lsst.resources.http import (
    BearerTokenAuth,
    HttpResourcePathConfig,
    SessionStore,
    _is_protected,
    _is_webdav_endpoint,
)
from lsst.resources.tests import GenericReadWriteTestCase, GenericTestCase
from lsst.resources.utils import makeTestTempDir, removeTestTempDir

TESTDIR = os.path.abspath(os.path.dirname(__file__))


class GenericHttpTestCase(GenericTestCase, unittest.TestCase):
    """Generic tests of http URIs."""

    scheme = "http"
    netloc = "server.example"


class HttpReadWriteWebdavTestCase(GenericReadWriteTestCase, unittest.TestCase):
    """Test with a real webDAV server, as opposed to mocking responses."""

    scheme = "http"

    @classmethod
    def setUpClass(cls):
        cls.webdav_tmpdir = tempfile.mkdtemp(prefix="webdav-server-test-")
        cls.local_files_to_remove = []
        cls.server_thread = None

        # Disable warnings about socket connections left open. We purposely
        # keep network connections to the remote server open, and the API
        # exposed by Requests offers no way of actually closing the
        # underlying sockets, so the tests cannot pass without this filter.
        warnings.filterwarnings(action="ignore", message=r"unclosed.*socket", category=ResourceWarning)

        # Should we test against a running server?
        #
        # This is convenient for testing against real servers in the
        # developer environment by initializing the environment variable
        # LSST_RESOURCES_HTTP_TEST_SERVER_URL with the URL of the server, e.g.
        # https://dav.example.org:1234/path/to/top/dir
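        # For example (the pytest command below is only an illustration; use
        # whatever normally runs this test suite):
        #
        #   export LSST_RESOURCES_HTTP_TEST_SERVER_URL="https://dav.example.org:1234/path/to/top/dir"
        #   pytest tests/test_http.py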
        if (test_endpoint := os.getenv("LSST_RESOURCES_HTTP_TEST_SERVER_URL")) is not None:
            # Run this test case against the specified server.
            uri = ResourcePath(test_endpoint)
            cls.scheme = uri.scheme
            cls.netloc = uri.netloc
            cls.base_path = uri.path
        elif WsgiDAVApp is not None:
            # WsgiDAVApp is available, launch a local server in its own
            # thread to expose a local temporary directory and run this
            # test case against it.
            cls.port_number = cls._get_port_number()
            cls.stop_webdav_server = False
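            # Note: _serve_webdav() is an instance method, so `cls` is passed
            # explicitly as its first (self) argument when it is used as the
            # thread target below.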
            cls.server_thread = Thread(
                target=cls._serve_webdav,
                args=(cls, cls.webdav_tmpdir, cls.port_number, lambda: cls.stop_webdav_server),
                daemon=True,
            )
            cls.server_thread.start()

            # Wait for it to start
            time.sleep(1)

            # Initialize the server endpoint
            cls.netloc = f"127.0.0.1:{cls.port_number}"
        else:
            cls.skipTest(
                cls,
                "neither WsgiDAVApp is available nor a webDAV test endpoint is configured to test against",
            )

    @classmethod
    def tearDownClass(cls):
        # Stop the WsgiDAVApp server, if any.
        if WsgiDAVApp is not None:
            # Shut down the webdav server and wait for the thread to exit.
            cls.stop_webdav_server = True
            if cls.server_thread is not None:
                cls.server_thread.join()

        # Remove local temporary files.
        for file in cls.local_files_to_remove:
            if os.path.exists(file):
                os.remove(file)

        # Remove the temporary directory.
        if cls.webdav_tmpdir:
            shutil.rmtree(cls.webdav_tmpdir, ignore_errors=True)

        # Reset the warnings filter.
        warnings.resetwarnings()

    def tearDown(self):
        if self.tmpdir:
            self.tmpdir.remove()

        # Clear sessions. Some sockets may be left open, because urllib3
        # does not close in-flight connections.
        # See the "Pool Manager > clear()" section of the API reference at
        # https://urllib3.readthedocs.io (the full URL to that section is
        # too long to include here).
        self.tmpdir._clear_sessions()

        super().tearDown()

    def test_dav_file_handle(self):
        # Upload a new file with known contents.
        contents = "These are some \n bytes to read"
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        # Test that the correct handle is returned.
        with remote_file.open("rb") as handle:
            self.assertIsInstance(handle, HttpReadResourceHandle)

        # Test that reading byte ranges works.
        with remote_file.open("rb") as handle:
            sub_contents = contents[:10]
            handle = cast(HttpReadResourceHandle, handle)
            result = handle.read(len(sub_contents)).decode()
            self.assertEqual(result, sub_contents)
            # Verify there is no internal buffer.
            self.assertIsNone(handle._completeBuffer)
            # Verify the position.
            self.assertEqual(handle.tell(), len(sub_contents))

            # Jump back to the beginning and test if reading the whole file
            # prompts the internal buffer to be read.
            handle.seek(0)
            self.assertEqual(handle.tell(), 0)
            result = handle.read().decode()
            self.assertIsNotNone(handle._completeBuffer)
            self.assertEqual(result, contents)

            # Check that flush works on a read-only handle.
            handle.flush()

        # Verify reading as a string handle works as expected.
        with remote_file.open("r") as handle:
            self.assertIsInstance(handle, io.TextIOWrapper)

            handle = cast(io.TextIOWrapper, handle)
            self.assertIsInstance(handle.buffer, HttpReadResourceHandle)

            # Check that string methods work.
            result = handle.read()
            self.assertEqual(result, contents)

            # Check that flush works on a read-only handle.
            handle.flush()

        # Verify that write modes invoke the default base method.
        with remote_file.open("w") as handle:
            self.assertIsInstance(handle, io.StringIO)

    def test_dav_is_dav_endpoint(self):
        # Ensure the server is a webDAV endpoint.
        self.assertTrue(self.tmpdir.is_webdav_endpoint)

    def test_dav_mkdir(self):
        # Check creation and deletion of an empty directory.
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())

        # Creating an existing remote directory must succeed.
        self.assertIsNone(subdir.mkdir())

        # Deletion of an existing directory must succeed.
        self.assertIsNone(subdir.remove())

        # Deletion of a non-existing directory must succeed.
        subdir_not_exists = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir_not_exists.remove())

        # Creation of a directory at a path where a file exists must raise.
        file = self.tmpdir.join(self._get_file_name(), forceDirectory=False)
        file.write(data=None, overwrite=True)
        self.assertTrue(file.exists())

        existing_file = self.tmpdir.join(file.basename(), forceDirectory=True)
        with self.assertRaises(NotADirectoryError):
            self.assertIsNone(existing_file.mkdir())

    def test_dav_upload_download(self):
        # Test uploading a randomly-generated file via write(), with and
        # without overwrite.
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)

        # Writing without overwrite must raise since the target file exists.
        with self.assertRaises(FileExistsError):
            remote_file.write(data, overwrite=False)

        # Download the file we just uploaded. Compute and compare a digest of
        # the uploaded and downloaded data and ensure they match.
        downloaded_data = remote_file.read()
        self.assertEqual(len(downloaded_data), file_size)
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_as_local(self):
        contents = str.encode("12345")
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        local_path, is_temp = remote_file._as_local()
        self.assertTrue(is_temp)
        self.assertTrue(os.path.exists(local_path))
        self.assertEqual(os.stat(local_path).st_size, len(contents))
        self.assertEqual(ResourcePath(local_path).read(), contents)
        os.remove(local_path)

    def test_dav_size(self):
        # Size of a non-existent file must raise.
        remote_file = self.tmpdir.join(self._get_file_name())
        with self.assertRaises(FileNotFoundError):
            remote_file.size()

        # Retrieving the size of a remote directory using a file-like path
        # must raise.
        remote_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(remote_dir.mkdir())
        self.assertTrue(remote_dir.exists())

        dir_as_file = ResourcePath(remote_dir.geturl().rstrip("/"), forceDirectory=False)
        with self.assertRaises(IsADirectoryError):
            dir_as_file.size()

    def test_dav_upload_creates_dir(self):
        # Uploading a file to a non-existing directory must automatically
        # create its parent directories, and the upload must succeed.
        non_existing_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        remote_file = non_existing_dir.join(self._get_file_name())

        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()
        self.assertIsNone(remote_file.write(data, overwrite=True))

        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertTrue(remote_file.parent().exists())

        downloaded_data = remote_file.read()
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_transfer_from(self):
        # Transfer from a local file via "copy", with and without overwrite.
        remote_file = self.tmpdir.join(self._get_file_name())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        self.assertIsNone(remote_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), source_file.size())
        with self.assertRaises(FileExistsError):
            remote_file.transfer_from(ResourcePath(local_file), transfer="copy", overwrite=False)

        # Transfer from a remote file via "copy", with and without overwrite.
        source_file = remote_file
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_file.size())

        # Transfer without overwrite must raise since the target resource
        # exists.
        with self.assertRaises(FileExistsError):
            target_file.transfer_from(source_file, transfer="copy", overwrite=False)

        # Test transfer from a local file via "move", with and without
        # overwrite.
        source_file = ResourcePath(local_file)
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Transfer without overwrite must raise since the target resource
        # exists.
        local_file, file_size = self._generate_file()
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

        # Transfer from a remote file via "move", with and without overwrite,
        # must succeed.
        source_file = target_file
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Transfer without overwrite must raise since the target resource
        # exists.
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

    def test_dav_handle(self):
        # Reading via a resource handle must succeed.
        target_file = self.tmpdir.join(self._get_file_name())
        data = "abcdefghi"
        self.assertIsNone(target_file.write(data, overwrite=True))
        with target_file.open("rb") as handle:
            handle.seek(1)
            self.assertEqual(handle.read(4).decode("utf-8"), data[1:5])

    def test_dav_delete(self):
        # Deletion of an existing remote file must succeed.
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertIsNone(remote_file.remove())
        os.remove(local_file)

        # Deletion of a non-existing remote file must succeed.
        non_existing_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(non_existing_file.remove())

        # Deletion of a non-empty remote directory must succeed.
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        target_file = self.tmpdir.join(self._get_file_name(), forceDirectory=True)
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertIsNone(subdir.remove())
        self.assertFalse(subdir.exists())
        os.remove(local_file)

    @classmethod
    def _get_port_number(cls) -> int:
        """Return a port number the webDAV server can use to listen on."""
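        # Binding to port 0 makes the operating system pick a free ephemeral
        # port. The socket is closed immediately afterwards, so there is a
        # small window in which another process could claim the port before
        # the webDAV server binds to it; this is acceptable for these tests.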
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(("127.0.0.1", 0))
        s.listen()
        port = s.getsockname()[1]
        s.close()
        return port

    def _serve_webdav(self, local_path: str, port: int, stop_webdav_server: Callable[[], bool]):
        """Start a local webDAV server, listening on http://localhost:port
        and exposing local_path.

        This server only runs while this test class is being executed and
        shuts down afterwards. It must be started in a separate thread.

        Parameters
        ----------
        local_path : `str`
            Path to an existing local directory for the server to expose.
        port : `int`
            The port number on which the server should listen.
        stop_webdav_server : `Callable[[], bool]`
            Boolean function which returns True when the server should be
            stopped.
        """
        try:
            # Start the wsgi server in a separate thread.
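            # The configuration below exposes local_path at the root of the
            # server and, through the simple domain controller user mapping
            # ("*": True), allows anonymous read/write access, which is all
            # these tests require.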
            config = {
                "host": "127.0.0.1",
                "port": port,
                "provider_mapping": {"/": local_path},
                "http_authenticator": {"domain_controller": None},
                "simple_dc": {"user_mapping": {"*": True}},
                "verbose": 0,
                "lock_storage": False,
                "dir_browser": {
                    "enable": False,
                    "ms_sharepoint_support": False,
                    "libre_office_support": False,
                    "response_trailer": False,
                    "davmount_links": False,
                },
            }
            server = wsgi.Server(wsgi_app=WsgiDAVApp(config), bind_addr=(config["host"], config["port"]))
            t = Thread(target=server.start, daemon=True)
            t.start()

            # Shut down the server when done: stop_webdav_server() returns
            # True when this test suite is being torn down.
            while not stop_webdav_server():
                time.sleep(1)
        except KeyboardInterrupt:
            # Caught Ctrl-C, shut down the server.
            pass
        finally:
            server.stop()
            t.join()

    @classmethod
    def _get_name(cls, prefix: str) -> str:
        """Return a random name starting with the given prefix."""
        alphabet = string.ascii_lowercase + string.digits
        return f"{prefix}-" + "".join(random.choices(alphabet, k=8))

    @classmethod
    def _get_dir_name(cls) -> str:
        """Return a randomly selected name for a directory."""
        return cls._get_name(prefix="dir")

    @classmethod
    def _get_file_name(cls) -> str:
        """Return a randomly selected name for a file."""
        return cls._get_name(prefix="file")

    def _generate_file(self, remove_when_done=True) -> tuple[str, int]:
        """Create a local file of random size with random contents.

        Returns
        -------
        path : `str`
            Path to local temporary file. The caller is responsible for
            removing the file when appropriate.
        size : `int`
            Size of the generated file, in bytes.
        """
        megabyte = 1024 * 1024
        size = random.randint(2 * megabyte, 5 * megabyte)
        tmpfile, path = tempfile.mkstemp()
        self.assertEqual(os.write(tmpfile, os.urandom(size)), size)
        os.close(tmpfile)

        if remove_when_done:
            self.local_files_to_remove.append(path)

        return path, size

    @classmethod
    def _compute_digest(cls, data: bytes) -> str:
        """Compute a SHA256 hash of data."""
        m = hashlib.sha256()
        m.update(data)
        return m.hexdigest()

    @classmethod
    def _is_server_running(cls, port: int) -> bool:
        """Return True if there is a server listening on local address
        127.0.0.1:<port>.
        """
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            try:
                s.connect(("127.0.0.1", port))
                return True
            except ConnectionRefusedError:
                return False


class HttpResourcePathConfigTestCase(unittest.TestCase):
    """Test for the HttpResourcePathConfig class."""

    def test_send_expect_header(self):
        # Ensure environment variable LSST_HTTP_PUT_SEND_EXPECT_HEADER is
        # inspected to initialize the HttpResourcePathConfig class.
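        # Note: lsst.resources.http is reloaded inside each patched
        # environment so that any module-level state derived from these
        # variables is re-initialized before the config object is
        # constructed.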
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.send_expect_on_put)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_PUT_SEND_EXPECT_HEADER": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.send_expect_on_put)

    def test_collect_memory_usage(self):
        # Ensure environment variable LSST_HTTP_COLLECT_MEMORY_USAGE is
        # inspected to initialize the HttpResourcePathConfig class.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.collect_memory_usage)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_COLLECT_MEMORY_USAGE": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.collect_memory_usage)

    def test_timeout(self):
        # Ensure that when the connect and read timeouts are not specified
        # the default values are stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], config.DEFAULT_TIMEOUT_CONNECT)
            self.assertAlmostEqual(config.timeout[1], config.DEFAULT_TIMEOUT_READ)

        # Ensure that when both the connect and read timeouts are specified
        # they are stored in the config.
        connect_timeout, read_timeout = 100.5, 200.8
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_TIMEOUT_CONNECT": str(connect_timeout), "LSST_HTTP_TIMEOUT_READ": str(read_timeout)},
            clear=True,
        ):
            # Force module reload.
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], connect_timeout)
            self.assertAlmostEqual(config.timeout[1], read_timeout)

        # Ensure that NaN values are ignored and the default values are used.
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_TIMEOUT_CONNECT": "NaN", "LSST_HTTP_TIMEOUT_READ": "NaN"},
            clear=True,
        ):
            # Force module reload.
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], config.DEFAULT_TIMEOUT_CONNECT)
            self.assertAlmostEqual(config.timeout[1], config.DEFAULT_TIMEOUT_READ)

    def test_front_end_connections(self):
        # Ensure that when the number of front-end connections is not
        # specified, the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, config.DEFAULT_FRONTEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of front-end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_FRONTEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, connections)

    def test_back_end_connections(self):
        # Ensure that when the number of back-end connections is not
        # specified, the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, config.DEFAULT_BACKEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of back-end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, connections)

    def test_digest_algorithm(self):
        # Ensure that when no digest is specified in the environment, the
        # configured digest algorithm is the empty string.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an invalid digest algorithm is ignored.
        digest = "invalid"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an accepted digest algorithm is stored.
        for digest in HttpResourcePathConfig().ACCEPTED_DIGESTS:
            with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
                importlib.reload(lsst.resources.http)
                config = HttpResourcePathConfig()
                self.assertEqual(config.digest_algorithm, digest)

    def test_backoff_interval(self):
        # Ensure that when no backoff interval is defined, the default values
        # are used.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that an invalid value for the backoff interval is ignored
        # and the default values are used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "XXX", "LSST_HTTP_BACKOFF_MAX": "YYY"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that NaN values are ignored and the default values are used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "NaN", "LSST_HTTP_BACKOFF_MAX": "NaN"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that valid backoff interval limits are used when specified.
        backoff_min, backoff_max = 3.0, 8.0
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_BACKOFF_MIN": str(backoff_min), "LSST_HTTP_BACKOFF_MAX": str(backoff_max)},
            clear=True,
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, backoff_min)
            self.assertAlmostEqual(config.backoff_max, backoff_max)


class WebdavUtilsTestCase(unittest.TestCase):
    """Test for the webDAV-related utilities."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))

    def tearDown(self):
        if self.tmpdir:
            if self.tmpdir.isLocal:
                removeTestTempDir(self.tmpdir.ospath)

    @responses.activate
    def test_is_webdav_endpoint(self):
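        # A webDAV server advertises its capabilities via the "DAV" header
        # in its response to an OPTIONS request; a plain HTTP server does
        # not send that header.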
        davEndpoint = "http://www.lsstwithwebdav.org"
        responses.add(responses.OPTIONS, davEndpoint, status=200, headers={"DAV": "1,2,3"})
        self.assertTrue(_is_webdav_endpoint(davEndpoint))

        plainHttpEndpoint = "http://www.lsstwithoutwebdav.org"
        responses.add(responses.OPTIONS, plainHttpEndpoint, status=200)
        self.assertFalse(_is_webdav_endpoint(plainHttpEndpoint))

    def test_is_protected(self):
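        # _is_protected() must only return True for an existing file that is
        # accessible by its owner alone (no group or other permission bits).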
        self.assertFalse(_is_protected("/this-file-does-not-exist"))

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("XXXX")
            file_path = f.name

        os.chmod(file_path, stat.S_IRUSR)
        self.assertTrue(_is_protected(file_path))

        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(file_path, stat.S_IRUSR | mode)
            self.assertFalse(_is_protected(file_path))


class BearerTokenAuthTestCase(unittest.TestCase):
    """Test for the BearerTokenAuth class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.token = "ABCDE1234"

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_empty_token(self):
        """Ensure that when no token is provided the request is not
        modified.
        """
        auth = BearerTokenAuth(None)
        auth._refresh()
        self.assertIsNone(auth._token)
        self.assertIsNone(auth._path)
        req = requests.Request("GET", "https://example.org")
        self.assertEqual(auth(req), req)

    def test_token_value(self):
        """Ensure that when a token value is provided, the 'Authorization'
        header is added to the request.
        """
        auth = BearerTokenAuth(self.token)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

    def test_token_file(self):
        """Ensure that when the provided token is a file path, its contents
        are correctly used in the 'Authorization' header of the requests.
        """
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write(self.token)
            token_file_path = f.name

        # Ensure the request's "Authorization" header is set with the right
        # token value.
        os.chmod(token_file_path, stat.S_IRUSR)
        auth = BearerTokenAuth(token_file_path)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

        # Ensure an exception is raised if either group or other can read the
        # token file.
        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(token_file_path, stat.S_IRUSR | mode)
            with self.assertRaises(PermissionError):
                BearerTokenAuth(token_file_path)


class SessionStoreTestCase(unittest.TestCase):
    """Test for the SessionStore class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.rpath = ResourcePath("https://example.org")

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_ca_cert_bundle(self):
        """Ensure a certificate authorities bundle is used to authenticate
        the remote server.
        """
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT BUNDLE")
            cert_bundle = f.name

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_CACERT_BUNDLE": cert_bundle}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.verify, cert_bundle)

    def test_user_cert(self):
        """Ensure that if a user certificate and private key are provided,
        they are used for authenticating the client.
        """
        # Create mock certificate and private key files.
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT")
            client_cert = f.name

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("KEY")
            client_key = f.name

        # Check that both LSST_HTTP_AUTH_CLIENT_CERT and
        # LSST_HTTP_AUTH_CLIENT_KEY must be initialized.
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_KEY": client_key}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        # Check that the private key file must be accessible only by its
        # owner.
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert, "LSST_HTTP_AUTH_CLIENT_KEY": client_key},
            clear=True,
        ):
            # Ensure the session client certificate is initialized when
            # only the owner can read the private key file.
            os.chmod(client_key, stat.S_IRUSR)
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.cert[0], client_cert)
            self.assertEqual(session.cert[1], client_key)

            # Ensure an exception is raised if either group or other can
            # access the private key file.
            for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
                os.chmod(client_key, stat.S_IRUSR | mode)
                with self.assertRaises(PermissionError):
                    SessionStore().get(self.rpath)

    def test_token_env(self):
        """Ensure that when the token is provided via an environment
        variable, sessions are equipped with a BearerTokenAuth.
        """
        token = "ABCDE"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_BEARER_TOKEN": token}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(type(session.auth), lsst.resources.http.BearerTokenAuth)
            self.assertEqual(session.auth._token, token)
            self.assertIsNone(session.auth._path)

    def test_sessions(self):
        """Ensure the session caching mechanism works."""
        # Ensure the store provides a session for a given URL.
        root_url = "https://example.org"
        store = SessionStore()
        session = store.get(ResourcePath(root_url))
        self.assertIsNotNone(session)

        # Ensure the sessions retrieved from a single store with the same
        # root URIs are equal.
        for u in (f"{root_url}", f"{root_url}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))

        # Ensure sessions retrieved for different root URIs are different.
        another_url = "https://another.example.org"
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store for URLs with
        # different port numbers are different.
        root_url_with_port = f"{another_url}:12345"
        session = store.get(ResourcePath(root_url_with_port))
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store with the same
        # root URIs (including port numbers) are equal.
        for u in (f"{root_url_with_port}", f"{root_url_with_port}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))


if __name__ == "__main__":
    unittest.main()