# This file is part of lsst-resources.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

import hashlib
import importlib
import io
import os.path
import random
import shutil
import socket
import stat
import string
import tempfile
import time
import unittest
import unittest.mock
import warnings
from collections.abc import Callable
from threading import Thread
from typing import cast
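
# The wsgidav and cheroot packages are optional test dependencies: when they
# cannot be imported, WsgiDAVApp is left as None and the tests that need a
# live webDAV server are skipped unless an external test server is configured
# through the environment variable LSST_RESOURCES_HTTP_TEST_SERVER_URL.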

try:
    from cheroot import wsgi
    from wsgidav.wsgidav_app import WsgiDAVApp
except ImportError:
    WsgiDAVApp = None

import lsst.resources
import requests
import responses
from lsst.resources import ResourcePath
from lsst.resources._resourceHandles._httpResourceHandle import (
    HttpReadResourceHandle,
    parse_content_range_header,
)
from lsst.resources.http import (
    BearerTokenAuth,
    HttpResourcePathConfig,
    SessionStore,
    _is_protected,
    _is_webdav_endpoint,
)
from lsst.resources.tests import GenericReadWriteTestCase, GenericTestCase
from lsst.resources.utils import makeTestTempDir, removeTestTempDir

TESTDIR = os.path.abspath(os.path.dirname(__file__))


class GenericHttpTestCase(GenericTestCase, unittest.TestCase):
    """Generic tests of http URIs."""

    scheme = "http"
    netloc = "server.example"


class HttpReadWriteWebdavTestCase(GenericReadWriteTestCase, unittest.TestCase):
    """Test with a real webDAV server, as opposed to mocking responses."""

    scheme = "http"

    @classmethod
    def setUpClass(cls):
        cls.webdav_tmpdir = tempfile.mkdtemp(prefix="webdav-server-test-")
        cls.local_files_to_remove = []
        cls.server_thread = None

        # Disable warnings about socket connections left open. We purposely
        # keep network connections to the remote server open, and the API
        # exposed by Requests provides no way of actually closing the
        # underlying sockets, so the tests cannot pass without silencing
        # these warnings.
        warnings.filterwarnings(action="ignore", message=r"unclosed.*socket", category=ResourceWarning)

        # Should we test against a running server?
        #
        # This is convenient for testing against real servers in the
        # developer environment by initializing the environment variable
        # LSST_RESOURCES_HTTP_TEST_SERVER_URL with the URL of the server, e.g.
        # https://dav.example.org:1234/path/to/top/dir
        if (test_endpoint := os.getenv("LSST_RESOURCES_HTTP_TEST_SERVER_URL")) is not None:
            # Run this test case against the specified server.
            uri = ResourcePath(test_endpoint)
            cls.scheme = uri.scheme
            cls.netloc = uri.netloc
            cls.base_path = uri.path
        elif WsgiDAVApp is not None:
            # WsgiDAVApp is available, launch a local server in its own
            # thread to expose a local temporary directory and run this
            # test case against it.
            cls.port_number = cls._get_port_number()
            cls.stop_webdav_server = False
            cls.server_thread = Thread(
                target=cls._serve_webdav,
                args=(cls, cls.webdav_tmpdir, cls.port_number, lambda: cls.stop_webdav_server),
                daemon=True,
            )
            cls.server_thread.start()

            # Wait for it to start
            time.sleep(1)
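            # Note: a fixed delay keeps this simple; alternatively,
            # _is_server_running() below could be used to poll until the
            # port actually accepts connections.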

            # Initialize the server endpoint
            cls.netloc = f"127.0.0.1:{cls.port_number}"
        else:
            cls.skipTest(
                cls,
                "neither WsgiDAVApp is available nor a webDAV test endpoint is configured to test against",
            )

    @classmethod
    def tearDownClass(cls):
        # Stop the WsgiDAVApp server, if any
        if WsgiDAVApp is not None:
            # Shut down the webdav server and wait for the thread to exit
            cls.stop_webdav_server = True
            if cls.server_thread is not None:
                cls.server_thread.join()

        # Remove local temporary files
        for file in cls.local_files_to_remove:
            if os.path.exists(file):
                os.remove(file)

        # Remove temp dir
        if cls.webdav_tmpdir:
            shutil.rmtree(cls.webdav_tmpdir, ignore_errors=True)

        # Reset the warnings filter.
        warnings.resetwarnings()

    def tearDown(self):
        if self.tmpdir:
            self.tmpdir.remove()

        # Clear the sessions. Some sockets may be left open, because urllib3
        # does not close in-flight connections; see the urllib3 API reference
        # (https://urllib3.readthedocs.io), section "Pool Manager",
        # method clear().
        self.tmpdir._clear_sessions()

        super().tearDown()

    def test_dav_file_handle(self):
        # Upload a new file with known contents.
        contents = "These are some \n bytes to read"
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        # Test that the correct handle is returned.
        with remote_file.open("rb") as handle:
            self.assertIsInstance(handle, HttpReadResourceHandle)

        # Test reading byte ranges works
        with remote_file.open("rb") as handle:
            sub_contents = contents[:10]
            handle = cast(HttpReadResourceHandle, handle)
            result = handle.read(len(sub_contents)).decode()
            self.assertEqual(result, sub_contents)
            # Verify there is no internal buffer.
            self.assertIsNone(handle._completeBuffer)
            # Verify the position.
            self.assertEqual(handle.tell(), len(sub_contents))

            # Jump back to the beginning and test if reading the whole file
            # prompts the internal buffer to be read.
            handle.seek(0)
            self.assertEqual(handle.tell(), 0)
            result = handle.read().decode()
            self.assertIsNotNone(handle._completeBuffer)
            self.assertEqual(result, contents)

            # Check that flush works on read-only handle.
            handle.flush()

        # Verify reading as a string handle works as expected.
        with remote_file.open("r") as handle:
            self.assertIsInstance(handle, io.TextIOWrapper)

            handle = cast(io.TextIOWrapper, handle)
            self.assertIsInstance(handle.buffer, HttpReadResourceHandle)

            # Check if string methods work.
            result = handle.read()
            self.assertEqual(result, contents)

            # Check that flush works on read-only handle.
            handle.flush()

        # Verify that write modes invoke the default base method
        with remote_file.open("w") as handle:
            self.assertIsInstance(handle, io.StringIO)

    def test_dav_is_dav_endpoint(self):
        # Ensure the server is a webDAV endpoint
        self.assertTrue(self.tmpdir.is_webdav_endpoint)

    def test_dav_mkdir(self):
        # Check creation and deletion of an empty directory
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())

        # Creating an existing remote directory must succeed
        self.assertIsNone(subdir.mkdir())

        # Deletion of an existing directory must succeed
        self.assertIsNone(subdir.remove())

        # Deletion of a non-existing directory must succeed
        subdir_not_exists = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir_not_exists.remove())

        # Creation of a directory at a path where a file exists must raise
        file = self.tmpdir.join(self._get_file_name(), forceDirectory=False)
        file.write(data=None, overwrite=True)
        self.assertTrue(file.exists())

        existing_file = self.tmpdir.join(file.basename(), forceDirectory=True)
        with self.assertRaises(NotADirectoryError):
            self.assertIsNone(existing_file.mkdir())

    def test_dav_upload_download(self):
        # Test uploading a randomly-generated file via write(), with and
        # without overwrite
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)

        # Write without overwrite must raise since the target file exists
        with self.assertRaises(FileExistsError):
            remote_file.write(data, overwrite=False)

        # Download the file we just uploaded. Compute and compare a digest of
        # the uploaded and downloaded data and ensure they match
        downloaded_data = remote_file.read()
        self.assertEqual(len(downloaded_data), file_size)
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_as_local(self):
        contents = str.encode("12345")
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        local_path, is_temp = remote_file._as_local()
        self.assertTrue(is_temp)
        self.assertTrue(os.path.exists(local_path))
        self.assertEqual(os.stat(local_path).st_size, len(contents))
        self.assertEqual(ResourcePath(local_path).read(), contents)
        os.remove(local_path)

    def test_dav_size(self):
        # Size of a non-existent file must raise.
        remote_file = self.tmpdir.join(self._get_file_name())
        with self.assertRaises(FileNotFoundError):
            remote_file.size()

        # Retrieving the size of a remote directory using a file-like path
        # must raise
        remote_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(remote_dir.mkdir())
        self.assertTrue(remote_dir.exists())

        dir_as_file = ResourcePath(remote_dir.geturl().rstrip("/"), forceDirectory=False)
        with self.assertRaises(IsADirectoryError):
            dir_as_file.size()

    def test_dav_upload_creates_dir(self):
        # Uploading a file to a non-existing directory must automatically
        # create its parent directories, and the upload must succeed
        non_existing_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        remote_file = non_existing_dir.join(self._get_file_name())

        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()
        self.assertIsNone(remote_file.write(data, overwrite=True))

        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertTrue(remote_file.parent().exists())

        downloaded_data = remote_file.read()
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_transfer_from(self):
        # Transfer from local file via "copy", with and without overwrite
        remote_file = self.tmpdir.join(self._get_file_name())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        self.assertIsNone(remote_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), source_file.size())
        with self.assertRaises(FileExistsError):
            remote_file.transfer_from(ResourcePath(local_file), transfer="copy", overwrite=False)

        # Transfer from remote file via "copy", with and without overwrite
        source_file = remote_file
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_file.size())

        # Transfer without overwrite must raise since the target resource
        # exists
        with self.assertRaises(FileExistsError):
            target_file.transfer_from(source_file, transfer="copy", overwrite=False)

        # Transfer from local file via "move", with and without overwrite
        source_file = ResourcePath(local_file)
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Transfer without overwrite must raise since the target resource
        # exists
        local_file, file_size = self._generate_file()
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

        # Transfer from remote file via "move", with and without overwrite,
        # must succeed
        source_file = target_file
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Transfer without overwrite must raise since the target resource
        # exists
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

    def test_dav_handle(self):
        # Resource handle must succeed
        target_file = self.tmpdir.join(self._get_file_name())
        data = "abcdefghi"
        self.assertIsNone(target_file.write(data, overwrite=True))
        with target_file.open("rb") as handle:
            handle.seek(1)
            self.assertEqual(handle.read(4).decode("utf-8"), data[1:5])

    def test_dav_delete(self):
        # Deletion of an existing remote file must succeed
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertIsNone(remote_file.remove())
        os.remove(local_file)

        # Deletion of a non-existing remote file must succeed
        non_existing_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(non_existing_file.remove())

        # Deletion of a non-empty remote directory must succeed
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        target_file = self.tmpdir.join(self._get_file_name(), forceDirectory=True)
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertIsNone(subdir.remove())
        self.assertFalse(subdir.exists())
        os.remove(local_file)

    @classmethod
    def _get_port_number(cls) -> int:
        """Return a port number the webDAV server can use to listen to."""
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(("127.0.0.1", 0))
        s.listen()
        port = s.getsockname()[1]
        s.close()
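        # Note: the port is released here before the webDAV server binds to
        # it, so in principle another process could grab it in the meantime;
        # that small race is accepted for these tests.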
        return port

    def _serve_webdav(self, local_path: str, port: int, stop_webdav_server: Callable[[], bool]):
        """Start a local webDAV server, listening on http://localhost:port
        and exposing local_path.

        This server only runs while this test class is instantiated,
        and then shuts down. It must be started in a separate thread.

        Parameters
        ----------
        local_path : `str`
            Path to an existing local directory for the server to expose.
        port : `int`
            The port number on which the server should listen.
        stop_webdav_server : `Callable[[], bool]`
            Boolean function which returns True when the server should be
            stopped.
        """
        try:
            # Start the wsgi server in a separate thread
            config = {
                "host": "127.0.0.1",
                "port": port,
                "provider_mapping": {"/": local_path},
                "http_authenticator": {"domain_controller": None},
                "simple_dc": {"user_mapping": {"*": True}},
                "verbose": 0,
                "lock_storage": False,
                "dir_browser": {
                    "enable": False,
                    "ms_sharepoint_support": False,
                    "libre_office_support": False,
                    "response_trailer": False,
                    "davmount_links": False,
                },
            }
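            # As configured above, the server exposes local_path at its root
            # with anonymous access allowed (user_mapping "*": True), and with
            # locking and the directory browser disabled to keep it minimal.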
            server = wsgi.Server(wsgi_app=WsgiDAVApp(config), bind_addr=(config["host"], config["port"]))
            t = Thread(target=server.start, daemon=True)
            t.start()

            # Shut down the server when done: stop_webdav_server() returns
            # True when this test suite is being torn down
            while not stop_webdav_server():
                time.sleep(1)
        except KeyboardInterrupt:
            # Caught Ctrl-C, shut down the server
            pass
        finally:
            server.stop()
            t.join()

    @classmethod
    def _get_name(cls, prefix: str) -> str:
        alphabet = string.ascii_lowercase + string.digits
        return f"{prefix}-" + "".join(random.choices(alphabet, k=8))

    @classmethod
    def _get_dir_name(cls) -> str:
        """Return a randomly selected name for a directory."""
        return cls._get_name(prefix="dir")

    @classmethod
    def _get_file_name(cls) -> str:
        """Return a randomly selected name for a file."""
        return cls._get_name(prefix="file")

    def _generate_file(self, remove_when_done=True) -> tuple[str, int]:
        """Create a local file of random size with random contents.

        Returns
        -------
        path : `str`
            Path to local temporary file. The caller is responsible for
            removing the file when appropriate.
        size : `int`
            Size of the generated file, in bytes.
        """
        megabyte = 1024 * 1024
        size = random.randint(2 * megabyte, 5 * megabyte)
        tmpfile, path = tempfile.mkstemp()
        self.assertEqual(os.write(tmpfile, os.urandom(size)), size)
        os.close(tmpfile)

        if remove_when_done:
            self.local_files_to_remove.append(path)

        return path, size

    @classmethod
    def _compute_digest(cls, data: bytes) -> str:
        """Compute a SHA256 hash of data."""
        m = hashlib.sha256()
        m.update(data)
        return m.hexdigest()

    @classmethod
    def _is_server_running(cls, port: int) -> bool:
        """Return True if there is a server listening on local address
        127.0.0.1:<port>.
        """
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            try:
                s.connect(("127.0.0.1", port))
                return True
            except ConnectionRefusedError:
                return False


class HttpResourcePathConfigTestCase(unittest.TestCase):
    """Tests for the HttpResourcePathConfig class."""

    def test_send_expect_header(self):
        # Ensure environment variable LSST_HTTP_PUT_SEND_EXPECT_HEADER is
        # inspected to initialize the HttpResourcePathConfig class.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.send_expect_on_put)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_PUT_SEND_EXPECT_HEADER": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.send_expect_on_put)

    def test_collect_memory_usage(self):
        # Ensure environment variable LSST_HTTP_COLLECT_MEMORY_USAGE is
        # inspected to initialize the HttpResourcePathConfig class.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.collect_memory_usage)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_COLLECT_MEMORY_USAGE": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.collect_memory_usage)

    def test_timeout(self):
        # Ensure that when the connect and read timeouts are not specified
        # the default values are stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], config.DEFAULT_TIMEOUT_CONNECT)
            self.assertAlmostEqual(config.timeout[1], config.DEFAULT_TIMEOUT_READ)

        # Ensure that when both the connect and read timeouts are specified
        # they are both stored in the config.
        connect_timeout, read_timeout = 100.5, 200.8
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_TIMEOUT_CONNECT": str(connect_timeout), "LSST_HTTP_TIMEOUT_READ": str(read_timeout)},
            clear=True,
        ):
            # Force module reload.
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], connect_timeout)
            self.assertAlmostEqual(config.timeout[1], read_timeout)

        # Ensure that invalid float values (including NaN values) raise a
        # ValueError.
        for value in ("invalid", "NaN"):
            with unittest.mock.patch.dict(
                os.environ,
                {"LSST_HTTP_TIMEOUT_CONNECT": value, "LSST_HTTP_TIMEOUT_READ": value},
                clear=True,
            ):
                # Force module reload.
                importlib.reload(lsst.resources.http)
                with self.assertRaises(ValueError):
                    config = HttpResourcePathConfig()
                    config.timeout()

    def test_front_end_connections(self):
        # Ensure that when the number of front end connections is not
        # specified the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, config.DEFAULT_FRONTEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of front end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_FRONTEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, connections)

    def test_back_end_connections(self):
        # Ensure that when the number of back end connections is not
        # specified the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, config.DEFAULT_BACKEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of back end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, connections)

    def test_digest_algorithm(self):
        # Ensure that when no digest is specified in the environment, the
        # configured digest algorithm is the empty string.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an invalid digest algorithm is ignored.
        digest = "invalid"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an accepted digest algorithm is stored.
        for digest in HttpResourcePathConfig().ACCEPTED_DIGESTS:
            with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
                importlib.reload(lsst.resources.http)
                config = HttpResourcePathConfig()
                self.assertEqual(config.digest_algorithm, digest)

    def test_backoff_interval(self):
        # Ensure that when no backoff interval is defined, the default values
        # are used.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that an invalid value for the backoff interval is ignored
        # and the default value is used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "XXX", "LSST_HTTP_BACKOFF_MAX": "YYY"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that NaN values are ignored and the default values are used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "NaN", "LSST_HTTP_BACKOFF_MAX": "NaN"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that valid backoff interval limits are used when specified.
        backoff_min, backoff_max = 3.0, 8.0
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_BACKOFF_MIN": str(backoff_min), "LSST_HTTP_BACKOFF_MAX": str(backoff_max)},
            clear=True,
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, backoff_min)
            self.assertAlmostEqual(config.backoff_max, backoff_max)


class WebdavUtilsTestCase(unittest.TestCase):
    """Tests for the webDAV-related utilities."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    @responses.activate
    def test_is_webdav_endpoint(self):
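        # _is_webdav_endpoint() is expected to report webDAV support based on
        # the presence of a "DAV" header in the response to an OPTIONS
        # request, which is what the mocked responses below exercise.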
        davEndpoint = "http://www.lsstwithwebdav.org"
        responses.add(responses.OPTIONS, davEndpoint, status=200, headers={"DAV": "1,2,3"})
        self.assertTrue(_is_webdav_endpoint(davEndpoint))

        plainHttpEndpoint = "http://www.lsstwithoutwebdav.org"
        responses.add(responses.OPTIONS, plainHttpEndpoint, status=200)
        self.assertFalse(_is_webdav_endpoint(plainHttpEndpoint))

        notWebdavEndpoint = "http://www.notwebdav.org"
        responses.add(responses.OPTIONS, notWebdavEndpoint, status=403)
        self.assertFalse(_is_webdav_endpoint(notWebdavEndpoint))

    def test_is_protected(self):
        self.assertFalse(_is_protected("/this-file-does-not-exist"))

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("XXXX")
            file_path = f.name

        os.chmod(file_path, stat.S_IRUSR)
        self.assertTrue(_is_protected(file_path))

        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(file_path, stat.S_IRUSR | mode)
            self.assertFalse(_is_protected(file_path))


class BearerTokenAuthTestCase(unittest.TestCase):
    """Tests for the BearerTokenAuth class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.token = "ABCDE1234"

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_empty_token(self):
        """Ensure that when no token is provided the request is not
        modified.
        """
        auth = BearerTokenAuth(None)
        auth._refresh()
        self.assertIsNone(auth._token)
        self.assertIsNone(auth._path)
        req = requests.Request("GET", "https://example.org")
        self.assertEqual(auth(req), req)

    def test_token_value(self):
        """Ensure that when a token value is provided, the 'Authorization'
        header is added to the requests.
        """
        auth = BearerTokenAuth(self.token)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

    def test_token_insecure_http(self):
        """Ensure that no 'Authorization' header is attached to a request when
        using insecure HTTP.
        """
        auth = BearerTokenAuth(self.token)
        for url in ("http://example.org", "HTTP://example.org", "HttP://example.org"):
            req = auth(requests.Request("GET", url).prepare())
            self.assertIsNone(req.headers.get("Authorization"))

    def test_token_file(self):
        """Ensure that when the provided token is a file path, its contents
        are correctly used in the 'Authorization' header of the requests.
        """
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write(self.token)
            token_file_path = f.name

        # Ensure the request's "Authorization" header is set with the right
        # token value
        os.chmod(token_file_path, stat.S_IRUSR)
        auth = BearerTokenAuth(token_file_path)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

        # Ensure an exception is raised if either group or other can read the
        # token file
        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(token_file_path, stat.S_IRUSR | mode)
            with self.assertRaises(PermissionError):
                BearerTokenAuth(token_file_path)


class SessionStoreTestCase(unittest.TestCase):
    """Tests for the SessionStore class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.rpath = ResourcePath("https://example.org")

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_ca_cert_bundle(self):
        """Ensure a certificate authority bundle is used to authenticate
        the remote server.
        """
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT BUNDLE")
            cert_bundle = f.name

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_CACERT_BUNDLE": cert_bundle}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.verify, cert_bundle)

    def test_user_cert(self):
        """Ensure that if a user certificate and private key are provided,
        they are used for authenticating the client.
        """
        # Create mock certificate and private key files.
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT")
            client_cert = f.name

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("KEY")
            client_key = f.name

        # Check that both LSST_HTTP_AUTH_CLIENT_CERT and
        # LSST_HTTP_AUTH_CLIENT_KEY must be initialized.
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_KEY": client_key}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        # Check that the private key file must be accessible only by its
        # owner.
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert, "LSST_HTTP_AUTH_CLIENT_KEY": client_key},
            clear=True,
        ):
            # Ensure the session client certificate is initialized when
            # only the owner can read the private key file.
            os.chmod(client_key, stat.S_IRUSR)
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.cert[0], client_cert)
            self.assertEqual(session.cert[1], client_key)

            # Ensure an exception is raised if either group or other can
            # access the private key file.
            for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
                os.chmod(client_key, stat.S_IRUSR | mode)
                with self.assertRaises(PermissionError):
                    SessionStore().get(self.rpath)

    def test_token_env(self):
        """Ensure that when the token is provided via an environment variable
        the sessions are equipped with a BearerTokenAuth.
        """
        token = "ABCDE"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_BEARER_TOKEN": token}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(type(session.auth), lsst.resources.http.BearerTokenAuth)
            self.assertEqual(session.auth._token, token)
            self.assertIsNone(session.auth._path)

    def test_sessions(self):
        """Ensure the session caching mechanism works."""
        # Ensure the store provides a session for a given URL
        root_url = "https://example.org"
        store = SessionStore()
        session = store.get(ResourcePath(root_url))
        self.assertIsNotNone(session)

        # Ensure the sessions retrieved from a single store with the same
        # root URIs are equal
        for u in (f"{root_url}", f"{root_url}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))

        # Ensure sessions retrieved for different root URIs are different
        another_url = "https://another.example.org"
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store for URLs with
        # different port numbers are different
        root_url_with_port = f"{another_url}:12345"
        session = store.get(ResourcePath(root_url_with_port))
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store with the same
        # root URIs (including port numbers) are equal
        for u in (f"{root_url_with_port}", f"{root_url_with_port}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))


class TestContentRange(unittest.TestCase):
    """Test parsing of Content-Range header."""

    def test_full_data(self):
        parsed = parse_content_range_header("bytes 123-2555/12345")
        self.assertEqual(parsed.range_start, 123)
        self.assertEqual(parsed.range_end, 2555)
        self.assertEqual(parsed.total, 12345)

        parsed = parse_content_range_header(" bytes 0-0/5 ")
        self.assertEqual(parsed.range_start, 0)
        self.assertEqual(parsed.range_end, 0)
        self.assertEqual(parsed.total, 5)

    def test_empty_total(self):
        parsed = parse_content_range_header("bytes 123-2555/*")
        self.assertEqual(parsed.range_start, 123)
        self.assertEqual(parsed.range_end, 2555)
        self.assertIsNone(parsed.total)

        parsed = parse_content_range_header(" bytes 0-0/* ")
        self.assertEqual(parsed.range_start, 0)
        self.assertEqual(parsed.range_end, 0)
        self.assertIsNone(parsed.total)

    def test_empty_range(self):
        parsed = parse_content_range_header("bytes */12345")
        self.assertIsNone(parsed.range_start)
        self.assertIsNone(parsed.range_end)
        self.assertEqual(parsed.total, 12345)

        parsed = parse_content_range_header(" bytes */5 ")
        self.assertIsNone(parsed.range_start)
        self.assertIsNone(parsed.range_end)
        self.assertEqual(parsed.total, 5)

    def test_invalid_input(self):
        with self.assertRaises(ValueError):
            parse_content_range_header("pages 0-10/12")


if __name__ == "__main__":
    unittest.main()