# This file is part of lsst-resources.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

import hashlib
import importlib
import io
import os.path
import random
import shutil
import socket
import stat
import string
import tempfile
import time
import unittest
import unittest.mock
import warnings
from collections.abc import Callable
from threading import Thread
from typing import cast

try:
    from cheroot import wsgi
    from wsgidav.wsgidav_app import WsgiDAVApp
except ImportError:
    WsgiDAVApp = None

import lsst.resources
import requests
import responses
from lsst.resources import ResourcePath
from lsst.resources._resourceHandles._httpResourceHandle import HttpReadResourceHandle
from lsst.resources.http import (
    BearerTokenAuth,
    HttpResourcePathConfig,
    SessionStore,
    _is_protected,
    _is_webdav_endpoint,
)
from lsst.resources.tests import GenericReadWriteTestCase, GenericTestCase
from lsst.resources.utils import makeTestTempDir, removeTestTempDir
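
# The `responses` library is used below to mock HTTP responses in the tests
# that do not require a running webDAV server (e.g. WebdavUtilsTestCase).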

TESTDIR = os.path.abspath(os.path.dirname(__file__))


class GenericHttpTestCase(GenericTestCase, unittest.TestCase):
    """Generic tests of http URIs."""

    scheme = "http"
    netloc = "server.example"


class HttpReadWriteWebdavTestCase(GenericReadWriteTestCase, unittest.TestCase):
    """Test with a real webDAV server, as opposed to mocking responses."""

    scheme = "http"

    @classmethod
    def setUpClass(cls):
        cls.webdav_tmpdir = tempfile.mkdtemp(prefix="webdav-server-test-")
        cls.local_files_to_remove = []
        cls.server_thread = None

        # Disable warnings about socket connections left open. We purposely
        # keep network connections to the remote server open, and the API
        # exposed by Requests offers no way of actually closing the underlying
        # sockets, so this is needed for the tests to pass without warnings.
        warnings.filterwarnings(action="ignore", message=r"unclosed.*socket", category=ResourceWarning)

        # Should we test against a running server?
        #
        # This is convenient for testing against real servers in the
        # developer environment by initializing the environment variable
        # LSST_RESOURCES_HTTP_TEST_SERVER_URL with the URL of the server, e.g.
        # https://dav.example.org:1234/path/to/top/dir
        if (test_endpoint := os.getenv("LSST_RESOURCES_HTTP_TEST_SERVER_URL")) is not None:
            # Run this test case against the specified server.
            uri = ResourcePath(test_endpoint)
            cls.scheme = uri.scheme
            cls.netloc = uri.netloc
            cls.base_path = uri.path
        elif WsgiDAVApp is not None:
            # WsgiDAVApp is available: launch a local server in its own
            # thread to expose a local temporary directory and run this
            # test case against it.
            cls.port_number = cls._get_port_number()
            cls.stop_webdav_server = False
            cls.server_thread = Thread(
                target=cls._serve_webdav,
                args=(cls, cls.webdav_tmpdir, cls.port_number, lambda: cls.stop_webdav_server),
                daemon=True,
            )
            cls.server_thread.start()

            # Wait for it to start.
            time.sleep(1)

            # Initialize the server endpoint.
            cls.netloc = f"127.0.0.1:{cls.port_number}"
        else:
            cls.skipTest(
                cls,
                "WsgiDAVApp is not available and no webDAV test endpoint is configured to test against",
            )

    @classmethod
    def tearDownClass(cls):
        # Stop the WsgiDAVApp server, if any.
        if WsgiDAVApp is not None:
            # Shut down the webdav server and wait for the thread to exit.
            cls.stop_webdav_server = True
            if cls.server_thread is not None:
                cls.server_thread.join()

        # Remove local temporary files.
        for file in cls.local_files_to_remove:
            if os.path.exists(file):
                os.remove(file)

        # Remove the temporary directory.
        if cls.webdav_tmpdir:
            shutil.rmtree(cls.webdav_tmpdir, ignore_errors=True)

        # Reset the warnings filter.
        warnings.resetwarnings()

    def tearDown(self):
        if self.tmpdir:
            self.tmpdir.remove()

        # Clear sessions. Some sockets may be left open, because urllib3
        # does not close in-flight connections (see the `clear()` method of
        # urllib3's PoolManager, documented at https://urllib3.readthedocs.io).
        self.tmpdir._clear_sessions()

        super().tearDown()

    def test_dav_file_handle(self):
        # Upload a new file with known contents.
        contents = "These are some \n bytes to read"
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        # Test that the correct handle is returned.
        with remote_file.open("rb") as handle:
            self.assertIsInstance(handle, HttpReadResourceHandle)

        # Test that reading byte ranges works.
        with remote_file.open("rb") as handle:
            sub_contents = contents[:10]
            handle = cast(HttpReadResourceHandle, handle)
            result = handle.read(len(sub_contents)).decode()
            self.assertEqual(result, sub_contents)
            # Verify there is no internal buffer.
            self.assertIsNone(handle._completeBuffer)
            # Verify the position.
            self.assertEqual(handle.tell(), len(sub_contents))

            # Jump back to the beginning and test whether reading the whole
            # file prompts the internal buffer to be read.
            handle.seek(0)
            self.assertEqual(handle.tell(), 0)
            result = handle.read().decode()
            self.assertIsNotNone(handle._completeBuffer)
            self.assertEqual(result, contents)

            # Check that flush works on a read-only handle.
            handle.flush()

        # Verify that reading as a string handle works as expected.
        with remote_file.open("r") as handle:
            self.assertIsInstance(handle, io.TextIOWrapper)

            handle = cast(io.TextIOWrapper, handle)
            self.assertIsInstance(handle.buffer, HttpReadResourceHandle)

            # Check that string methods work.
            result = handle.read()
            self.assertEqual(result, contents)

            # Check that flush works on a read-only handle.
            handle.flush()

        # Verify that write modes invoke the default base method.
        with remote_file.open("w") as handle:
            self.assertIsInstance(handle, io.StringIO)

    def test_dav_is_dav_endpoint(self):
        # Ensure the server is a webDAV endpoint.
        self.assertTrue(self.tmpdir.is_webdav_endpoint)

    def test_dav_mkdir(self):
        # Check creation and deletion of an empty directory.
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())

        # Creating an existing remote directory must succeed.
        self.assertIsNone(subdir.mkdir())

        # Deletion of an existing directory must succeed.
        self.assertIsNone(subdir.remove())

        # Deletion of a non-existing directory must succeed.
        subdir_not_exists = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir_not_exists.remove())

        # Creation of a directory at a path where a file exists must raise.
        file = self.tmpdir.join(self._get_file_name(), forceDirectory=False)
        file.write(data=None, overwrite=True)
        self.assertTrue(file.exists())

        existing_file = self.tmpdir.join(file.basename(), forceDirectory=True)
        with self.assertRaises(NotADirectoryError):
            self.assertIsNone(existing_file.mkdir())

    def test_dav_upload_download(self):
        # Test uploading a randomly-generated file via write(), with and
        # without overwrite.
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)

        # Write without overwrite must raise since the target file exists.
        with self.assertRaises(FileExistsError):
            remote_file.write(data, overwrite=False)

        # Download the file we just uploaded. Compute and compare a digest of
        # the uploaded and downloaded data and ensure they match.
        downloaded_data = remote_file.read()
        self.assertEqual(len(downloaded_data), file_size)
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_as_local(self):
        contents = str.encode("12345")
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        local_path, is_temp = remote_file._as_local()
        self.assertTrue(is_temp)
        self.assertTrue(os.path.exists(local_path))
        self.assertEqual(os.stat(local_path).st_size, len(contents))
        self.assertEqual(ResourcePath(local_path).read(), contents)
        os.remove(local_path)

    def test_dav_size(self):
        # Size of a non-existent file must raise.
        remote_file = self.tmpdir.join(self._get_file_name())
        with self.assertRaises(FileNotFoundError):
            remote_file.size()

        # Retrieving the size of a remote directory using a file-like path
        # must raise.
        remote_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(remote_dir.mkdir())
        self.assertTrue(remote_dir.exists())

        dir_as_file = ResourcePath(remote_dir.geturl().rstrip("/"), forceDirectory=False)
        with self.assertRaises(IsADirectoryError):
            dir_as_file.size()

    def test_dav_upload_creates_dir(self):
        # Uploading a file to a non-existing directory must automatically
        # create its parent directories, and the upload must succeed.
        non_existing_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        remote_file = non_existing_dir.join(self._get_file_name())

        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()
        self.assertIsNone(remote_file.write(data, overwrite=True))

        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertTrue(remote_file.parent().exists())

        downloaded_data = remote_file.read()
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_transfer_from(self):
        # Transfer from a local file via "copy", with and without overwrite.
        remote_file = self.tmpdir.join(self._get_file_name())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        self.assertIsNone(remote_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), source_file.size())
        with self.assertRaises(FileExistsError):
            remote_file.transfer_from(ResourcePath(local_file), transfer="copy", overwrite=False)

        # Transfer from a remote file via "copy", with and without overwrite.
        source_file = remote_file
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_file.size())

        # Transfer without overwrite must raise since the target resource
        # exists.
        with self.assertRaises(FileExistsError):
            target_file.transfer_from(source_file, transfer="copy", overwrite=False)

        # Transfer from a local file via "move", with and without overwrite.
        source_file = ResourcePath(local_file)
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Transfer without overwrite must raise since the target resource
        # exists.
        local_file, file_size = self._generate_file()
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

        # Transfer from a remote file via "move", with and without overwrite,
        # must succeed.
        source_file = target_file
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Transfer without overwrite must raise since the target resource
        # exists.
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

    def test_dav_handle(self):
        # Opening a resource handle must succeed.
        target_file = self.tmpdir.join(self._get_file_name())
        data = "abcdefghi"
        self.assertIsNone(target_file.write(data, overwrite=True))
        with target_file.open("rb") as handle:
            handle.seek(1)
            self.assertEqual(handle.read(4).decode("utf-8"), data[1:5])

    def test_dav_delete(self):
        # Deletion of an existing remote file must succeed.
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertIsNone(remote_file.remove())
        os.remove(local_file)

        # Deletion of a non-existing remote file must succeed.
        non_existing_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(non_existing_file.remove())

        # Deletion of a non-empty remote directory must succeed.
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        target_file = self.tmpdir.join(self._get_file_name(), forceDirectory=True)
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertIsNone(subdir.remove())
        self.assertFalse(subdir.exists())
        os.remove(local_file)

    @classmethod
    def _get_port_number(cls) -> int:
        """Return a port number the webDAV server can use to listen to."""
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(("127.0.0.1", 0))
        s.listen()
        port = s.getsockname()[1]
        s.close()
        return port
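
    # Note: binding to port 0 above lets the operating system pick a free
    # ephemeral port. There is a small window between closing the probe socket
    # and the webDAV server binding to that same port during which another
    # process could claim it; this is accepted as unlikely for test purposes.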

    def _serve_webdav(self, local_path: str, port: int, stop_webdav_server: Callable[[], bool]):
        """Start a local webDAV server, listening on http://localhost:port
        and exposing local_path.

        This server only runs while this test class is instantiated,
        and then shuts down. The server must be started in a separate thread.

        Parameters
        ----------
        local_path : `str`
            Path to an existing local directory for the server to expose.
        port : `int`
            The port number on which the server should listen.
        stop_webdav_server : `Callable[[], bool]`
            Boolean function which returns True when the server should be
            stopped.
        """
        try:
            # Start the wsgi server in a separate thread.
            config = {
                "host": "127.0.0.1",
                "port": port,
                "provider_mapping": {"/": local_path},
                "http_authenticator": {"domain_controller": None},
                "simple_dc": {"user_mapping": {"*": True}},
                "verbose": 0,
                "lock_storage": False,
                "dir_browser": {
                    "enable": False,
                    "ms_sharepoint_support": False,
                    "libre_office_support": False,
                    "response_trailer": False,
                    "davmount_links": False,
                },
            }
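
            # Notes on the configuration above (descriptive, based on the
            # wsgidav configuration semantics): "provider_mapping" exposes
            # local_path at the root of the server, the "simple_dc" user
            # mapping accepts any user so the test client needs no
            # credentials, and the directory browser is disabled because the
            # tests only exercise the webDAV protocol itself.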
            server = wsgi.Server(wsgi_app=WsgiDAVApp(config), bind_addr=(config["host"], config["port"]))
            t = Thread(target=server.start, daemon=True)
            t.start()

            # Shut down the server when done: stop_webdav_server() returns
            # True when this test suite is being torn down.
            while not stop_webdav_server():
                time.sleep(1)
        except KeyboardInterrupt:
            # Caught Ctrl-C, shut down the server.
            pass
        finally:
            server.stop()
            t.join()

    @classmethod
    def _get_name(cls, prefix: str) -> str:
        alphabet = string.ascii_lowercase + string.digits
        return f"{prefix}-" + "".join(random.choices(alphabet, k=8))

    @classmethod
    def _get_dir_name(cls) -> str:
        """Return a randomly selected name for a directory."""
        return cls._get_name(prefix="dir")

    @classmethod
    def _get_file_name(cls) -> str:
        """Return a randomly selected name for a file."""
        return cls._get_name(prefix="file")

    def _generate_file(self, remove_when_done=True) -> tuple[str, int]:
        """Create a local file of random size with random contents.

        Returns
        -------
        path : `str`
            Path to local temporary file. The caller is responsible for
            removing the file when appropriate.
        size : `int`
            Size of the generated file, in bytes.
        """
        megabyte = 1024 * 1024
        size = random.randint(2 * megabyte, 5 * megabyte)
        tmpfile, path = tempfile.mkstemp()
        self.assertEqual(os.write(tmpfile, os.urandom(size)), size)
        os.close(tmpfile)

        if remove_when_done:
            self.local_files_to_remove.append(path)

        return path, size

    @classmethod
    def _compute_digest(cls, data: bytes) -> str:
        """Compute a SHA256 hash of data."""
        m = hashlib.sha256()
        m.update(data)
        return m.hexdigest()

    @classmethod
    def _is_server_running(cls, port: int) -> bool:
        """Return True if there is a server listening on local address
        127.0.0.1:<port>.
        """
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            try:
                s.connect(("127.0.0.1", port))
                return True
            except ConnectionRefusedError:
                return False


class HttpResourcePathConfigTestCase(unittest.TestCase):
    """Test for the HttpResourcePathConfig class."""

    def test_send_expect_header(self):
        # Ensure environment variable LSST_HTTP_PUT_SEND_EXPECT_HEADER is
        # inspected to initialize the HttpResourcePathConfig config class.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.send_expect_on_put)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_PUT_SEND_EXPECT_HEADER": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.send_expect_on_put)

    def test_collect_memory_usage(self):
        # Ensure environment variable LSST_HTTP_COLLECT_MEMORY_USAGE is
        # inspected to initialize the HttpResourcePathConfig class.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.collect_memory_usage)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_COLLECT_MEMORY_USAGE": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.collect_memory_usage)

    def test_timeout(self):
        # Ensure that when the connect and read timeouts are not specified
        # the default values are stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], config.DEFAULT_TIMEOUT_CONNECT)
            self.assertAlmostEqual(config.timeout[1], config.DEFAULT_TIMEOUT_READ)

        # Ensure that when both the connect and read timeouts are specified
        # they are both stored in the config.
        connect_timeout, read_timeout = 100.5, 200.8
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_TIMEOUT_CONNECT": str(connect_timeout), "LSST_HTTP_TIMEOUT_READ": str(read_timeout)},
            clear=True,
        ):
            # Force module reload.
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], connect_timeout)
            self.assertAlmostEqual(config.timeout[1], read_timeout)

        # Ensure that invalid float values (including NaN values) raise a
        # ValueError.
        for value in ("invalid", "NaN"):
            with unittest.mock.patch.dict(
                os.environ,
                {"LSST_HTTP_TIMEOUT_CONNECT": value, "LSST_HTTP_TIMEOUT_READ": value},
                clear=True,
            ):
                # Force module reload.
                importlib.reload(lsst.resources.http)
                with self.assertRaises(ValueError):
                    config = HttpResourcePathConfig()
                    config.timeout()

    def test_front_end_connections(self):
        # Ensure that when the number of front end connections is not
        # specified, the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, config.DEFAULT_FRONTEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of front end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_FRONTEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, connections)

    def test_back_end_connections(self):
        # Ensure that when the number of back end connections is not
        # specified, the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, config.DEFAULT_BACKEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of back end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, connections)

    def test_digest_algorithm(self):
        # Ensure that when no digest is specified in the environment, the
        # configured digest algorithm is the empty string.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an invalid digest algorithm is ignored.
        digest = "invalid"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an accepted digest algorithm is stored.
        for digest in HttpResourcePathConfig().ACCEPTED_DIGESTS:
            with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
                importlib.reload(lsst.resources.http)
                config = HttpResourcePathConfig()
                self.assertEqual(config.digest_algorithm, digest)

    def test_backoff_interval(self):
        # Ensure that when no backoff interval is defined, the default values
        # are used.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that an invalid value for the backoff interval is ignored and
        # the default value is used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "XXX", "LSST_HTTP_BACKOFF_MAX": "YYY"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that NaN values are ignored and the default values are used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "NaN", "LSST_HTTP_BACKOFF_MAX": "NaN"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that, when specified, valid backoff interval limits are used.
        backoff_min, backoff_max = 3.0, 8.0
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_BACKOFF_MIN": str(backoff_min), "LSST_HTTP_BACKOFF_MAX": str(backoff_max)},
            clear=True,
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, backoff_min)
            self.assertAlmostEqual(config.backoff_max, backoff_max)


class WebdavUtilsTestCase(unittest.TestCase):
    """Test for the webDAV-related utilities."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    @responses.activate
    def test_is_webdav_endpoint(self):
        davEndpoint = "http://www.lsstwithwebdav.org"
        responses.add(responses.OPTIONS, davEndpoint, status=200, headers={"DAV": "1,2,3"})
        self.assertTrue(_is_webdav_endpoint(davEndpoint))

        plainHttpEndpoint = "http://www.lsstwithoutwebdav.org"
        responses.add(responses.OPTIONS, plainHttpEndpoint, status=200)
        self.assertFalse(_is_webdav_endpoint(plainHttpEndpoint))

        notWebdavEndpoint = "http://www.notwebdav.org"
        responses.add(responses.OPTIONS, notWebdavEndpoint, status=403)
        self.assertFalse(_is_webdav_endpoint(notWebdavEndpoint))

    def test_is_protected(self):
        self.assertFalse(_is_protected("/this-file-does-not-exist"))

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("XXXX")
            file_path = f.name

        os.chmod(file_path, stat.S_IRUSR)
        self.assertTrue(_is_protected(file_path))

        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(file_path, stat.S_IRUSR | mode)
            self.assertFalse(_is_protected(file_path))


class BearerTokenAuthTestCase(unittest.TestCase):
    """Test for the BearerTokenAuth class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.token = "ABCDE1234"

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_empty_token(self):
        """Ensure that when no token is provided the request is not
        modified.
        """
        auth = BearerTokenAuth(None)
        auth._refresh()
        self.assertIsNone(auth._token)
        self.assertIsNone(auth._path)
        req = requests.Request("GET", "https://example.org")
        self.assertEqual(auth(req), req)

    def test_token_value(self):
        """Ensure that when a token value is provided, the 'Authorization'
        header is added to the requests.
        """
        auth = BearerTokenAuth(self.token)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

    def test_token_insecure_http(self):
        """Ensure that no 'Authorization' header is attached to a request when
        using insecure HTTP.
        """
        auth = BearerTokenAuth(self.token)
        for url in ("http://example.org", "HTTP://example.org", "HttP://example.org"):
            req = auth(requests.Request("GET", url).prepare())
            self.assertIsNone(req.headers.get("Authorization"))

    def test_token_file(self):
        """Ensure that when the provided token is a file path, its contents
        are correctly used in the 'Authorization' header of the requests.
        """
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write(self.token)
            token_file_path = f.name

        # Ensure the request's "Authorization" header is set with the right
        # token value.
        os.chmod(token_file_path, stat.S_IRUSR)
        auth = BearerTokenAuth(token_file_path)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

        # Ensure an exception is raised if either group or other can read the
        # token file.
        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(token_file_path, stat.S_IRUSR | mode)
            with self.assertRaises(PermissionError):
                BearerTokenAuth(token_file_path)


class SessionStoreTestCase(unittest.TestCase):
    """Test for the SessionStore class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.rpath = ResourcePath("https://example.org")

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_ca_cert_bundle(self):
        """Ensure a certificate authority bundle is used to authenticate
        the remote server.
        """
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT BUNDLE")
            cert_bundle = f.name

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_CACERT_BUNDLE": cert_bundle}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.verify, cert_bundle)

    def test_user_cert(self):
        """Ensure that if a user certificate and private key are provided,
        they are used for authenticating the client.
        """
        # Create mock certificate and private key files.
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT")
            client_cert = f.name

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("KEY")
            client_key = f.name

        # Check that both LSST_HTTP_AUTH_CLIENT_CERT and
        # LSST_HTTP_AUTH_CLIENT_KEY must be initialized.
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_KEY": client_key}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        # Check that the private key file must be accessible only by its
        # owner.
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert, "LSST_HTTP_AUTH_CLIENT_KEY": client_key},
            clear=True,
        ):
            # Ensure the session client certificate is initialized when
            # only the owner can read the private key file.
            os.chmod(client_key, stat.S_IRUSR)
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.cert[0], client_cert)
            self.assertEqual(session.cert[1], client_key)

            # Ensure an exception is raised if either group or other can
            # access the private key file.
            for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
                os.chmod(client_key, stat.S_IRUSR | mode)
                with self.assertRaises(PermissionError):
                    SessionStore().get(self.rpath)

    def test_token_env(self):
        """Ensure that when the token is provided via an environment variable
        the sessions are equipped with a BearerTokenAuth.
        """
        token = "ABCDE"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_BEARER_TOKEN": token}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(type(session.auth), lsst.resources.http.BearerTokenAuth)
            self.assertEqual(session.auth._token, token)
            self.assertIsNone(session.auth._path)

    def test_sessions(self):
        """Ensure the session caching mechanism works."""
        # Ensure the store provides a session for a given URL.
        root_url = "https://example.org"
        store = SessionStore()
        session = store.get(ResourcePath(root_url))
        self.assertIsNotNone(session)

        # Ensure the sessions retrieved from a single store for the same
        # root URI are equal.
        for u in (f"{root_url}", f"{root_url}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))

        # Ensure sessions retrieved for different root URIs are different.
        another_url = "https://another.example.org"
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store for URLs with
        # different port numbers are different.
        root_url_with_port = f"{another_url}:12345"
        session = store.get(ResourcePath(root_url_with_port))
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store for the same
        # root URI (including port number) are equal.
        for u in (f"{root_url_with_port}", f"{root_url_with_port}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))
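

# For reference, the caching behaviour exercised by SessionStoreTestCase boils
# down to the following usage pattern (a sketch based only on the assertions
# above; the exact configuration SessionStore applies to each requests.Session
# is an implementation detail of lsst.resources.http):
#
#   store = SessionStore()
#   session = store.get(ResourcePath("https://example.org/path/to/file"))
#   # Further calls with URLs sharing the same scheme, host and port return
#   # the same session object, so persistent connections can be reused.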


if __name__ == "__main__":
    unittest.main()