Coverage for tests/test_http.py: 16%

537 statements  

coverage.py v7.5.1, created at 2024-05-16 02:51 -0700

# This file is part of lsst-resources.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

import hashlib
import importlib
import io
import os.path
import random
import shutil
import socket
import stat
import string
import tempfile
import time
import unittest
import warnings
from collections.abc import Callable
from threading import Thread
from typing import cast

try:
    from cheroot import wsgi
    from wsgidav.wsgidav_app import WsgiDAVApp
except ImportError:
    WsgiDAVApp = None

import lsst.resources
import requests
import responses
from lsst.resources import ResourcePath
from lsst.resources._resourceHandles._httpResourceHandle import (
    HttpReadResourceHandle,
    parse_content_range_header,
)
from lsst.resources.http import (
    BearerTokenAuth,
    HttpResourcePathConfig,
    SessionStore,
    _is_protected,
    _is_webdav_endpoint,
)
from lsst.resources.tests import GenericReadWriteTestCase, GenericTestCase
from lsst.resources.utils import makeTestTempDir, removeTestTempDir

TESTDIR = os.path.abspath(os.path.dirname(__file__))


class GenericHttpTestCase(GenericTestCase, unittest.TestCase):
    """Generic tests of http URIs."""

    scheme = "http"
    netloc = "server.example"

    def test_root_uri(self):
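        # root_uri() is expected to keep only the scheme, credentials, host
        # and port of the original URI, dropping any path, parameters, query
        # or fragment.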

        self.assertEqual(ResourcePath("http://server.com").root_uri(), ResourcePath("http://server.com/"))
        self.assertEqual(
            ResourcePath("http://user:password@server.com:3000/").root_uri(),
            ResourcePath("http://user:password@server.com:3000/"),
        )
        self.assertEqual(
            ResourcePath("http://user:password@server.com:3000/some/path").root_uri(),
            ResourcePath("http://user:password@server.com:3000/"),
        )
        self.assertEqual(
            ResourcePath("http://user:password@server.com:3000/some/path#fragment").root_uri(),
            ResourcePath("http://user:password@server.com:3000/"),
        )
        self.assertEqual(
            ResourcePath("http://user:password@server.com:3000/some/path?param=value").root_uri(),
            ResourcePath("http://user:password@server.com:3000/"),
        )
        self.assertEqual(
            ResourcePath("http://user:password@server.com:3000/some/path;parameters").root_uri(),
            ResourcePath("http://user:password@server.com:3000/"),
        )


class HttpReadWriteWebdavTestCase(GenericReadWriteTestCase, unittest.TestCase):
    """Test with a real webDAV server, as opposed to mocking responses."""

    scheme = "http"

    @classmethod
    def setUpClass(cls):
        cls.webdav_tmpdir = tempfile.mkdtemp(prefix="webdav-server-test-")
        cls.local_files_to_remove = []
        cls.server_thread = None

        # Disable warnings about socket connections left open. We purposely
        # keep network connections to the remote server open and the API
        # exposed by Requests offers no way of actually closing the
        # underlying sockets, so filter the warning to make the tests pass.
        warnings.filterwarnings(action="ignore", message=r"unclosed.*socket", category=ResourceWarning)

        # Should we test against a running server?
        #
        # This is convenient for testing against real servers in the
        # developer environment by initializing the environment variable
        # LSST_RESOURCES_HTTP_TEST_SERVER_URL with the URL of the server, e.g.
        # https://dav.example.org:1234/path/to/top/dir
        if (test_endpoint := os.getenv("LSST_RESOURCES_HTTP_TEST_SERVER_URL")) is not None:
            # Run this test case against the specified server.
            uri = ResourcePath(test_endpoint)
            cls.scheme = uri.scheme
            cls.netloc = uri.netloc
            cls.base_path = uri.path
        elif WsgiDAVApp is not None:
            # WsgiDAVApp is available, launch a local server in its own
            # thread to expose a local temporary directory and run this
            # test case against it.
            cls.port_number = cls._get_port_number()
            cls.stop_webdav_server = False
            cls.server_thread = Thread(
                target=cls._serve_webdav,
                args=(cls, cls.webdav_tmpdir, cls.port_number, lambda: cls.stop_webdav_server),
                daemon=True,
            )
            cls.server_thread.start()

            # Wait for it to start
            time.sleep(1)

            # Initialize the server endpoint
            cls.netloc = f"127.0.0.1:{cls.port_number}"
        else:
            cls.skipTest(
                cls,
                "neither WsgiDAVApp is available nor a webDAV test endpoint is configured to test against",
            )

    @classmethod
    def tearDownClass(cls):
        # Stop the WsgiDAVApp server, if any
        if WsgiDAVApp is not None:
            # Shut down the webdav server and wait for the thread to exit
            cls.stop_webdav_server = True
            if cls.server_thread is not None:
                cls.server_thread.join()

        # Remove local temporary files
        for file in cls.local_files_to_remove:
            if os.path.exists(file):
                os.remove(file)

        # Remove temp dir
        if cls.webdav_tmpdir:
            shutil.rmtree(cls.webdav_tmpdir, ignore_errors=True)

        # Reset the warnings filter.
        warnings.resetwarnings()

    def tearDown(self):
        if self.tmpdir:
            self.tmpdir.remove()

        # Clear sessions. Some sockets may be left open, because urllib3
        # does not close in-flight connections.
        # See https://urllib3.readthedocs.io > API Reference >
        # Pool Manager > clear()
        # (The full URL is not included here because it would exceed the
        # 79-character line limit.)
        self.tmpdir._clear_sessions()

        super().tearDown()

    def test_dav_file_handle(self):
        # Upload a new file with known contents.
        contents = "These are some \n bytes to read"
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        # Test that the correct handle is returned.
        with remote_file.open("rb") as handle:
            self.assertIsInstance(handle, HttpReadResourceHandle)

        # Test reading byte ranges works
        with remote_file.open("rb") as handle:
            sub_contents = contents[:10]
            handle = cast(HttpReadResourceHandle, handle)
            result = handle.read(len(sub_contents)).decode()
            self.assertEqual(result, sub_contents)
            # Verify there is no internal buffer.
            self.assertIsNone(handle._completeBuffer)
            # Verify the position.
            self.assertEqual(handle.tell(), len(sub_contents))

            # Jump back to the beginning and test if reading the whole file
            # prompts the internal buffer to be read.
            handle.seek(0)
            self.assertEqual(handle.tell(), 0)
            result = handle.read().decode()
            self.assertIsNotNone(handle._completeBuffer)
            self.assertEqual(result, contents)

            # Check that flush works on read-only handle.
            handle.flush()

        # Verify reading as a string handle works as expected.
        with remote_file.open("r") as handle:
            self.assertIsInstance(handle, io.TextIOWrapper)

            handle = cast(io.TextIOWrapper, handle)
            self.assertIsInstance(handle.buffer, HttpReadResourceHandle)

            # Check if string methods work.
            result = handle.read()
            self.assertEqual(result, contents)

            # Check that flush works on read-only handle.
            handle.flush()

        # Verify that write modes invoke the default base method
        with remote_file.open("w") as handle:
            self.assertIsInstance(handle, io.StringIO)

    def test_dav_is_dav_endpoint(self):
        # Ensure the server is a webDAV endpoint
        self.assertTrue(self.tmpdir.is_webdav_endpoint)

    def test_dav_mkdir(self):
        # Check creation and deletion of an empty directory
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())

        # Creating an existing remote directory must succeed
        self.assertIsNone(subdir.mkdir())

        # Deletion of an existing directory must succeed
        self.assertIsNone(subdir.remove())

        # Deletion of a non-existing directory must succeed
        subdir_not_exists = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir_not_exists.remove())

        # Creation of a directory at a path where a file exists must raise
        file = self.tmpdir.join(self._get_file_name(), forceDirectory=False)
        file.write(data=None, overwrite=True)
        self.assertTrue(file.exists())

        existing_file = self.tmpdir.join(file.basename(), forceDirectory=True)
        with self.assertRaises(NotADirectoryError):
            self.assertIsNone(existing_file.mkdir())

    def test_dav_upload_download(self):
        # Test uploading a randomly-generated file via write() with and
        # without overwrite
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)

        # Write without overwrite must raise since target file exists
        with self.assertRaises(FileExistsError):
            remote_file.write(data, overwrite=False)

        # Download the file we just uploaded. Compute and compare a digest of
        # the uploaded and downloaded data and ensure they match
        downloaded_data = remote_file.read()
        self.assertEqual(len(downloaded_data), file_size)
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_as_local(self):
        contents = str.encode("12345")
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        local_path, is_temp = remote_file._as_local()
        self.assertTrue(is_temp)
        self.assertTrue(os.path.exists(local_path))
        self.assertEqual(os.stat(local_path).st_size, len(contents))
        self.assertEqual(ResourcePath(local_path).read(), contents)
        os.remove(local_path)

    def test_dav_size(self):
        # Size of a non-existent file must raise.
        remote_file = self.tmpdir.join(self._get_file_name())
        with self.assertRaises(FileNotFoundError):
            remote_file.size()

        # Retrieving the size of a remote directory using a file-like path must
        # raise
        remote_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(remote_dir.mkdir())
        self.assertTrue(remote_dir.exists())

        dir_as_file = ResourcePath(remote_dir.geturl().rstrip("/"), forceDirectory=False)
        with self.assertRaises(IsADirectoryError):
            dir_as_file.size()

    def test_dav_upload_creates_dir(self):
        # Uploading a file to a non-existing directory must ensure its
        # parent directories are automatically created and the upload succeeds
        non_existing_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        remote_file = non_existing_dir.join(self._get_file_name())

        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()
            self.assertIsNone(remote_file.write(data, overwrite=True))

        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertTrue(remote_file.parent().exists())

        downloaded_data = remote_file.read()
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_transfer_from(self):
        # Transfer from local file via "copy", with and without overwrite
        remote_file = self.tmpdir.join(self._get_file_name())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        self.assertIsNone(remote_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), source_file.size())
        with self.assertRaises(FileExistsError):
            remote_file.transfer_from(ResourcePath(local_file), transfer="copy", overwrite=False)

        # Transfer from remote file via "copy", with and without overwrite
        source_file = remote_file
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_file.size())

        # Transfer without overwrite must raise since target resource exists
        with self.assertRaises(FileExistsError):
            target_file.transfer_from(source_file, transfer="copy", overwrite=False)

        # Test transfer from local file via "move", with and without overwrite
        source_file = ResourcePath(local_file)
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Test transfer without overwrite must raise since target resource
        # exists
        local_file, file_size = self._generate_file()
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

        # Test transfer from remote file via "move" with and without overwrite
        # must succeed
        source_file = target_file
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Transfer without overwrite must raise since target resource exists
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

    def test_dav_handle(self):
        # Resource handle must succeed
        target_file = self.tmpdir.join(self._get_file_name())
        data = "abcdefghi"
        self.assertIsNone(target_file.write(data, overwrite=True))
        with target_file.open("rb") as handle:
            handle.seek(1)
            self.assertEqual(handle.read(4).decode("utf-8"), data[1:5])

    def test_dav_delete(self):
        # Deletion of an existing remote file must succeed
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertIsNone(remote_file.remove())
        os.remove(local_file)

        # Deletion of a non-existing remote file must succeed
        non_existing_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(non_existing_file.remove())

        # Deletion of a non-empty remote directory must succeed
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        target_file = self.tmpdir.join(self._get_file_name(), forceDirectory=True)
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertIsNone(subdir.remove())
        self.assertFalse(subdir.exists())
        os.remove(local_file)

    @classmethod
    def _get_port_number(cls) -> int:
422 """Return a port number the webDAV server can use to listen to.""" 

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(("127.0.0.1", 0))
        s.listen()
        port = s.getsockname()[1]
        s.close()
        return port

    def _serve_webdav(self, local_path: str, port: int, stop_webdav_server: Callable[[], bool]):
431 """Start a local webDAV server, listening on http://localhost:port 

432 and exposing local_path. 

433 

434 This server only runs when this test class is instantiated, 

435 and then shuts down. The server must be started is a separate thread. 

436 

437 Parameters 

438 ---------- 

439 port : `int` 

440 The port number on which the server should listen 

441 local_path : `str` 

442 Path to an existing local directory for the server to expose. 

443 stop_webdav_server : `Callable[[], bool]` 

444 Boolean function which returns True when the server should be 

445 stopped. 

446 """ 

        try:
            # Start the wsgi server in a separate thread
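            # The configuration below is meant to grant anonymous read/write
            # access to the exposed directory (no authentication), which is
            # sufficient for these tests.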

            config = {
                "host": "127.0.0.1",
                "port": port,
                "provider_mapping": {"/": local_path},
                "http_authenticator": {"domain_controller": None},
                "simple_dc": {"user_mapping": {"*": True}},
                "verbose": 0,
                "lock_storage": False,
                "dir_browser": {
                    "enable": False,
                    "ms_sharepoint_support": False,
                    "libre_office_support": False,
                    "response_trailer": False,
                    "davmount_links": False,
                },
            }
            server = wsgi.Server(wsgi_app=WsgiDAVApp(config), bind_addr=(config["host"], config["port"]))
            t = Thread(target=server.start, daemon=True)
            t.start()

            # Shut down the server when done: stop_webdav_server() returns
            # True when this test suite is being torn down
            while not stop_webdav_server():
                time.sleep(1)
        except KeyboardInterrupt:
            # Caught Ctrl-C, shut down the server
            pass
        finally:
            server.stop()
            t.join()

    @classmethod
    def _get_name(cls, prefix: str) -> str:
        alphabet = string.ascii_lowercase + string.digits
        return f"{prefix}-" + "".join(random.choices(alphabet, k=8))

    @classmethod
    def _get_dir_name(cls) -> str:
487 """Return a randomly selected name for a file""" 

        return cls._get_name(prefix="dir")

    @classmethod
    def _get_file_name(cls) -> str:
        """Return a randomly selected name for a file"""
        return cls._get_name(prefix="file")

    def _generate_file(self, remove_when_done=True) -> tuple[str, int]:
        """Create a local file of random size with random contents.

        Returns
        -------
        path : `str`
            Path to local temporary file. The caller is responsible for
            removing the file when appropriate.
        size : `int`
            Size of the generated file, in bytes.
        """
        megabyte = 1024 * 1024
        size = random.randint(2 * megabyte, 5 * megabyte)
        tmpfile, path = tempfile.mkstemp()
        self.assertEqual(os.write(tmpfile, os.urandom(size)), size)
        os.close(tmpfile)

        if remove_when_done:
            self.local_files_to_remove.append(path)

        return path, size

    @classmethod
    def _compute_digest(cls, data: bytes) -> str:
        """Compute a SHA256 hash of data."""
        m = hashlib.sha256()
        m.update(data)
        return m.hexdigest()

    @classmethod
    def _is_server_running(cls, port: int) -> bool:
        """Return True if there is a server listening on local address
        127.0.0.1:<port>.
        """
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            try:
                s.connect(("127.0.0.1", port))
                return True
            except ConnectionRefusedError:
                return False


class HttpResourcePathConfigTestCase(unittest.TestCase):
    """Test for the HttpResourcePathConfig class."""

    def test_send_expect_header(self):
        # Ensure environment variable LSST_HTTP_PUT_SEND_EXPECT_HEADER is
        # inspected to initialize the HttpResourcePathConfig config class.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.send_expect_on_put)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_PUT_SEND_EXPECT_HEADER": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.send_expect_on_put)

    def test_collect_memory_usage(self):
        # Ensure environment variable LSST_HTTP_COLLECT_MEMORY_USAGE is
        # inspected to initialize the HttpResourcePathConfig class.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.collect_memory_usage)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_COLLECT_MEMORY_USAGE": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.collect_memory_usage)

    def test_timeout(self):
        # Ensure that when the connect and read timeouts are not specified
        # the default values are stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], config.DEFAULT_TIMEOUT_CONNECT)
            self.assertAlmostEqual(config.timeout[1], config.DEFAULT_TIMEOUT_READ)

        # Ensure that when both the connect and read timeouts are specified
        # they are both stored in the config.
        connect_timeout, read_timeout = 100.5, 200.8
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_TIMEOUT_CONNECT": str(connect_timeout), "LSST_HTTP_TIMEOUT_READ": str(read_timeout)},
            clear=True,
        ):
            # Force module reload.
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], connect_timeout)
            self.assertAlmostEqual(config.timeout[1], read_timeout)

        # Ensure that invalid float values (including NaN values) raise a
        # ValueError.
        for value in ("invalid", "NaN"):
            with unittest.mock.patch.dict(
                os.environ,
                {"LSST_HTTP_TIMEOUT_CONNECT": value, "LSST_HTTP_TIMEOUT_READ": value},
                clear=True,
            ):
                # Force module reload.
                importlib.reload(lsst.resources.http)
                with self.assertRaises(ValueError):
                    config = HttpResourcePathConfig()
                    config.timeout()

    def test_front_end_connections(self):
        # Ensure that when the number of front end connections is not specified
        # the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, config.DEFAULT_FRONTEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of front end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_FRONTEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, connections)

    def test_back_end_connections(self):
        # Ensure that when the number of back end connections is not specified
        # the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, config.DEFAULT_BACKEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of back end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, connections)

    def test_digest_algorithm(self):
        # Ensure that when no digest is specified in the environment, the
        # configured digest algorithm is the empty string.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an invalid digest algorithm is ignored.
        digest = "invalid"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an accepted digest algorithm is stored.
        for digest in HttpResourcePathConfig().ACCEPTED_DIGESTS:
            with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
                importlib.reload(lsst.resources.http)
                config = HttpResourcePathConfig()
                self.assertEqual(config.digest_algorithm, digest)

    def test_backoff_interval(self):
        # Ensure that when no backoff interval is defined, the default values
        # are used.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that an invalid value for backoff interval is ignored and
        # the default value is used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "XXX", "LSST_HTTP_BACKOFF_MAX": "YYY"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that NaN values are ignored and the default values are used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "NaN", "LSST_HTTP_BACKOFF_MAX": "NaN"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that when specified, valid backoff interval limits are used.
        backoff_min, backoff_max = 3.0, 8.0
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_BACKOFF_MIN": str(backoff_min), "LSST_HTTP_BACKOFF_MAX": str(backoff_max)},
            clear=True,
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, backoff_min)
            self.assertAlmostEqual(config.backoff_max, backoff_max)


class WebdavUtilsTestCase(unittest.TestCase):
703 """Test for the Webdav related utilities.""" 


    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    @responses.activate
    def test_is_webdav_endpoint(self):
        davEndpoint = "http://www.lsstwithwebdav.org"
        responses.add(responses.OPTIONS, davEndpoint, status=200, headers={"DAV": "1,2,3"})
        self.assertTrue(_is_webdav_endpoint(davEndpoint))

        plainHttpEndpoint = "http://www.lsstwithoutwebdav.org"
        responses.add(responses.OPTIONS, plainHttpEndpoint, status=200)
        self.assertFalse(_is_webdav_endpoint(plainHttpEndpoint))

        notWebdavEndpoint = "http://www.notwebdav.org"
        responses.add(responses.OPTIONS, notWebdavEndpoint, status=403)
        self.assertFalse(_is_webdav_endpoint(notWebdavEndpoint))

    def test_is_protected(self):
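        # As exercised below, _is_protected() must be True only for an
        # existing file whose permission bits grant access exclusively to
        # its owner.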

        self.assertFalse(_is_protected("/this-file-does-not-exist"))

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("XXXX")
            file_path = f.name

        os.chmod(file_path, stat.S_IRUSR)
        self.assertTrue(_is_protected(file_path))

        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(file_path, stat.S_IRUSR | mode)
            self.assertFalse(_is_protected(file_path))


class BearerTokenAuthTestCase(unittest.TestCase):
    """Test for the BearerTokenAuth class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.token = "ABCDE1234"

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_empty_token(self):
        """Ensure that when no token is provided the request is not
        modified.
        """
        auth = BearerTokenAuth(None)
        auth._refresh()
        self.assertIsNone(auth._token)
        self.assertIsNone(auth._path)
        req = requests.Request("GET", "https://example.org")
        self.assertEqual(auth(req), req)

    def test_token_value(self):
        """Ensure that when a token value is provided, the 'Authorization'
        header is added to the requests.
        """
        auth = BearerTokenAuth(self.token)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

    def test_token_insecure_http(self):
        """Ensure that no 'Authorization' header is attached to a request when
        using insecure HTTP.
        """
        auth = BearerTokenAuth(self.token)
        for url in ("http://example.org", "HTTP://example.org", "HttP://example.org"):
            req = auth(requests.Request("GET", url).prepare())
            self.assertIsNone(req.headers.get("Authorization"))

    def test_token_file(self):
781 """Ensure when the provided token is a file path, its contents is 

782 correctly used in the the 'Authorization' header of the requests. 

783 """ 

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write(self.token)
            token_file_path = f.name

        # Ensure the request's "Authorization" header is set with the right
        # token value
        os.chmod(token_file_path, stat.S_IRUSR)
        auth = BearerTokenAuth(token_file_path)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

        # Ensure an exception is raised if either group or other can read the
        # token file
        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(token_file_path, stat.S_IRUSR | mode)
            with self.assertRaises(PermissionError):
                BearerTokenAuth(token_file_path)


class SessionStoreTestCase(unittest.TestCase):
    """Test for the SessionStore class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.rpath = ResourcePath("https://example.org")

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_ca_cert_bundle(self):
815 """Ensure a certificate authorities bundle is used to authentify 

816 the remote server. 

817 """ 

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT BUNDLE")
            cert_bundle = f.name

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_CACERT_BUNDLE": cert_bundle}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.verify, cert_bundle)

    def test_user_cert(self):
        """Ensure if user certificate and private key are provided, they are
        used for authenticating the client.
        """
        # Create mock certificate and private key files.
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT")
            client_cert = f.name

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("KEY")
            client_key = f.name

        # Check both LSST_HTTP_AUTH_CLIENT_CERT and LSST_HTTP_AUTH_CLIENT_KEY
        # must be initialized.
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_KEY": client_key}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        # Check private key file must be accessible only by its owner.
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert, "LSST_HTTP_AUTH_CLIENT_KEY": client_key},
            clear=True,
        ):
            # Ensure the session client certificate is initialized when
            # only the owner can read the private key file.
            os.chmod(client_key, stat.S_IRUSR)
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.cert[0], client_cert)
            self.assertEqual(session.cert[1], client_key)

            # Ensure an exception is raised if either group or other can access
            # the private key file.
            for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
                os.chmod(client_key, stat.S_IRUSR | mode)
                with self.assertRaises(PermissionError):
                    SessionStore().get(self.rpath)

    def test_token_env(self):
        """Ensure when the token is provided via an environment variable
        the sessions are equipped with a BearerTokenAuth.
        """
        token = "ABCDE"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_BEARER_TOKEN": token}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(type(session.auth), lsst.resources.http.BearerTokenAuth)
            self.assertEqual(session.auth._token, token)
            self.assertIsNone(session.auth._path)

    def test_sessions(self):
        """Ensure the session caching mechanism works."""
        # Ensure the store provides a session for a given URL
        root_url = "https://example.org"
        store = SessionStore()
        session = store.get(ResourcePath(root_url))
        self.assertIsNotNone(session)

        # Ensure the sessions retrieved from a single store with the same
        # root URIs are equal
        for u in (f"{root_url}", f"{root_url}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))

        # Ensure sessions retrieved for different root URIs are different
        another_url = "https://another.example.org"
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store for URLs with
        # different port numbers are different
        root_url_with_port = f"{another_url}:12345"
        session = store.get(ResourcePath(root_url_with_port))
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store with the same
        # root URIs (including port numbers) are equal
        for u in (f"{root_url_with_port}", f"{root_url_with_port}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))


class TestContentRange(unittest.TestCase):
    """Test parsing of Content-Range header."""

    def test_full_data(self):
        parsed = parse_content_range_header("bytes 123-2555/12345")
        self.assertEqual(parsed.range_start, 123)
        self.assertEqual(parsed.range_end, 2555)
        self.assertEqual(parsed.total, 12345)

        parsed = parse_content_range_header(" bytes 0-0/5 ")
        self.assertEqual(parsed.range_start, 0)
        self.assertEqual(parsed.range_end, 0)
        self.assertEqual(parsed.total, 5)

    def test_empty_total(self):
        parsed = parse_content_range_header("bytes 123-2555/*")
        self.assertEqual(parsed.range_start, 123)
        self.assertEqual(parsed.range_end, 2555)
        self.assertIsNone(parsed.total)

        parsed = parse_content_range_header(" bytes 0-0/* ")
        self.assertEqual(parsed.range_start, 0)
        self.assertEqual(parsed.range_end, 0)
        self.assertIsNone(parsed.total)

    def test_empty_range(self):
        parsed = parse_content_range_header("bytes */12345")
        self.assertIsNone(parsed.range_start)
        self.assertIsNone(parsed.range_end)
        self.assertEqual(parsed.total, 12345)

        parsed = parse_content_range_header(" bytes */5 ")
        self.assertIsNone(parsed.range_start)
        self.assertIsNone(parsed.range_end)
        self.assertEqual(parsed.total, 5)

    def test_invalid_input(self):
        with self.assertRaises(ValueError):
            parse_content_range_header("pages 0-10/12")


if __name__ == "__main__":
    unittest.main()