Coverage for tests/test_http.py: 14%

490 statements  

coverage.py v7.2.5, created at 2023-05-18 02:06 -0700

# This file is part of lsst-resources.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

import hashlib
import importlib
import io
import os.path
import random
import shutil
import socket
import stat
import string
import tempfile
import time
import unittest
import unittest.mock  # Explicit import so unittest.mock.patch is always available below.
import warnings
from threading import Thread
from typing import Callable, Tuple, cast

try:
    from cheroot import wsgi
    from wsgidav.wsgidav_app import WsgiDAVApp
except ImportError:
    WsgiDAVApp = None
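# If neither cheroot nor wsgidav is installed, WsgiDAVApp is None and the
# webDAV-backed test case below is skipped, unless a remote test endpoint is
# configured through LSST_RESOURCES_HTTP_TEST_SERVER_URL (see setUpClass).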

import lsst.resources
import requests
import responses
from lsst.resources import ResourcePath
from lsst.resources._resourceHandles._httpResourceHandle import HttpReadResourceHandle
from lsst.resources.http import (
    BearerTokenAuth,
    HttpResourcePathConfig,
    SessionStore,
    _is_protected,
    _is_webdav_endpoint,
)
from lsst.resources.tests import GenericReadWriteTestCase, GenericTestCase
from lsst.resources.utils import makeTestTempDir, removeTestTempDir

TESTDIR = os.path.abspath(os.path.dirname(__file__))


class GenericHttpTestCase(GenericTestCase, unittest.TestCase):
    scheme = "http"
    netloc = "server.example"


class HttpReadWriteWebdavTestCase(GenericReadWriteTestCase, unittest.TestCase):
    """Test with a real webDAV server, as opposed to mocking responses."""

    scheme = "http"

    @classmethod
    def setUpClass(cls):
        cls.webdav_tmpdir = tempfile.mkdtemp(prefix="webdav-server-test-")
        cls.local_files_to_remove = []
        cls.server_thread = None

        # Disable warnings about socket connections left open. We purposely
        # keep network connections to the remote server open, and the API
        # exposed by Requests gives us no way to actually close the
        # underlying sockets, so tests could not pass without this filter.
        warnings.filterwarnings(action="ignore", message=r"unclosed.*socket", category=ResourceWarning)

        # Should we test against a running server?
        #
        # This is convenient for testing against real servers in the
        # developer environment by initializing the environment variable
        # LSST_RESOURCES_HTTP_TEST_SERVER_URL with the URL of the server, e.g.
        # https://dav.example.org:1234/path/to/top/dir
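        # For example (hypothetical values), this test case can be pointed at
        # a real server with something like:
        #   LSST_RESOURCES_HTTP_TEST_SERVER_URL=https://dav.example.org:1234/path/to/top/dir \
        #       python -m pytest tests/test_http.py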

        if (test_endpoint := os.getenv("LSST_RESOURCES_HTTP_TEST_SERVER_URL")) is not None:
            # Run this test case against the specified server.
            uri = ResourcePath(test_endpoint)
            cls.scheme = uri.scheme
            cls.netloc = uri.netloc
            cls.base_path = uri.path
        elif WsgiDAVApp is not None:
            # WsgiDAVApp is available, launch a local server in its own
            # thread to expose a local temporary directory and run this
            # test case against it.
            cls.port_number = cls._get_port_number()
            cls.stop_webdav_server = False
            cls.server_thread = Thread(
                target=cls._serve_webdav,
                args=(cls, cls.webdav_tmpdir, cls.port_number, lambda: cls.stop_webdav_server),
                daemon=True,
            )
            cls.server_thread.start()

            # Wait for it to start.
            time.sleep(1)

            # Initialize the server endpoint.
            cls.netloc = f"127.0.0.1:{cls.port_number}"
        else:
            cls.skipTest(
                cls,
                "neither WsgiDAVApp is available nor a webDAV test endpoint is configured to test against",
            )

    @classmethod
    def tearDownClass(cls):
        # Stop the WsgiDAVApp server, if any.
        if WsgiDAVApp is not None:
            # Shut down the webDAV server and wait for the thread to exit.
            cls.stop_webdav_server = True
            if cls.server_thread is not None:
                cls.server_thread.join()

        # Remove local temporary files.
        for file in cls.local_files_to_remove:
            if os.path.exists(file):
                os.remove(file)

        # Remove the temporary directory.
        if cls.webdav_tmpdir:
            shutil.rmtree(cls.webdav_tmpdir, ignore_errors=True)

        # Reset the warnings filter.
        warnings.resetwarnings()

    def tearDown(self):
        if self.tmpdir:
            self.tmpdir.remove()

        # Clear sessions. Some sockets may be left open, because urllib3
        # does not close in-flight connections.
        # See https://urllib3.readthedocs.io > API Reference >
        # Pool Manager > clear()
        # The full URL is not included here because it is longer than 79
        # characters.
        self.tmpdir._clear_sessions()

        super().tearDown()

    def test_dav_file_handle(self):
        # Upload a new file with known contents.
        contents = "These are some \n bytes to read"
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        # Test that the correct handle is returned.
        with remote_file.open("rb") as handle:
            self.assertIsInstance(handle, HttpReadResourceHandle)

        # Test that reading byte ranges works.
        with remote_file.open("rb") as handle:
            sub_contents = contents[:10]
            handle = cast(HttpReadResourceHandle, handle)
            result = handle.read(len(sub_contents)).decode()
            self.assertEqual(result, sub_contents)
            # Verify there is no internal buffer.
            self.assertIsNone(handle._completeBuffer)
            # Verify the position.
            self.assertEqual(handle.tell(), len(sub_contents))

            # Jump back to the beginning and test if reading the whole file
            # prompts the internal buffer to be read.
            handle.seek(0)
            self.assertEqual(handle.tell(), 0)
            result = handle.read().decode()
            self.assertIsNotNone(handle._completeBuffer)
            self.assertEqual(result, contents)

            # Check that flush works on a read-only handle.
            handle.flush()

        # Verify that reading as a string handle works as expected.
        with remote_file.open("r") as handle:
            self.assertIsInstance(handle, io.TextIOWrapper)

            handle = cast(io.TextIOWrapper, handle)
            self.assertIsInstance(handle.buffer, HttpReadResourceHandle)

            # Check that string methods work.
            result = handle.read()
            self.assertEqual(result, contents)

            # Check that flush works on a read-only handle.
            handle.flush()

        # Verify that write modes invoke the default base method.
        with remote_file.open("w") as handle:
            self.assertIsInstance(handle, io.StringIO)

    def test_dav_is_dav_endpoint(self):
        # Ensure the server is a webDAV endpoint.
        self.assertTrue(self.tmpdir.is_webdav_endpoint)

    def test_dav_mkdir(self):
        # Check creation and deletion of an empty directory.
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())

        # Creating an existing remote directory must succeed.
        self.assertIsNone(subdir.mkdir())

        # Deletion of an existing directory must succeed.
        self.assertIsNone(subdir.remove())

        # Deletion of a non-existing directory must succeed.
        subdir_not_exists = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir_not_exists.remove())

        # Creation of a directory at a path where a file exists must raise.
        file = self.tmpdir.join(self._get_file_name(), forceDirectory=False)
        file.write(data=None, overwrite=True)
        self.assertTrue(file.exists())

        existing_file = self.tmpdir.join(file.basename(), forceDirectory=True)
        with self.assertRaises(NotADirectoryError):
            self.assertIsNone(existing_file.mkdir())

    def test_dav_upload_download(self):
        # Test uploading a randomly-generated file via write(), with and
        # without overwrite.
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)

        # Write without overwrite must raise since the target file exists.
        with self.assertRaises(FileExistsError):
            remote_file.write(data, overwrite=False)

        # Download the file we just uploaded. Compute and compare a digest of
        # the uploaded and downloaded data and ensure they match.
        downloaded_data = remote_file.read()
        self.assertEqual(len(downloaded_data), file_size)
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_as_local(self):
        contents = str.encode("12345")
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        local_path, is_temp = remote_file._as_local()
        self.assertTrue(is_temp)
        self.assertTrue(os.path.exists(local_path))
        self.assertEqual(os.stat(local_path).st_size, len(contents))
        self.assertEqual(ResourcePath(local_path).read(), contents)
        os.remove(local_path)

    def test_dav_size(self):
        # Size of a non-existent file must raise.
        remote_file = self.tmpdir.join(self._get_file_name())
        with self.assertRaises(FileNotFoundError):
            remote_file.size()

        # Retrieving the size of a remote directory using a file-like path
        # must raise.
        remote_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(remote_dir.mkdir())
        self.assertTrue(remote_dir.exists())

        dir_as_file = ResourcePath(remote_dir.geturl().rstrip("/"), forceDirectory=False)
        with self.assertRaises(IsADirectoryError):
            dir_as_file.size()

    def test_dav_upload_creates_dir(self):
        # Uploading a file to a non-existing directory must automatically
        # create its parent directories, and the upload must succeed.
        non_existing_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        remote_file = non_existing_dir.join(self._get_file_name())

        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()
        self.assertIsNone(remote_file.write(data, overwrite=True))

        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertTrue(remote_file.parent().exists())

        downloaded_data = remote_file.read()
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_transfer_from(self):
        # Transfer from a local file via "copy", with and without overwrite.
        remote_file = self.tmpdir.join(self._get_file_name())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        self.assertIsNone(remote_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), source_file.size())
        with self.assertRaises(FileExistsError):
            remote_file.transfer_from(ResourcePath(local_file), transfer="copy", overwrite=False)

        # Transfer from a remote file via "copy", with and without overwrite.
        source_file = remote_file
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_file.size())

        # Transfer without overwrite must raise since the target resource
        # exists.
        with self.assertRaises(FileExistsError):
            target_file.transfer_from(source_file, transfer="copy", overwrite=False)

        # Test transfer from a local file via "move", with and without
        # overwrite.
        source_file = ResourcePath(local_file)
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Transfer without overwrite must raise since the target resource
        # exists.
        local_file, file_size = self._generate_file()
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

        # Test that transfer from a remote file via "move", with and without
        # overwrite, succeeds.
        source_file = target_file
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Transfer without overwrite must raise since the target resource
        # exists.
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

    def test_dav_handle(self):
        # Resource handle must succeed.
        target_file = self.tmpdir.join(self._get_file_name())
        data = "abcdefghi"
        self.assertIsNone(target_file.write(data, overwrite=True))
        with target_file.open("rb") as handle:
            handle.seek(1)
            self.assertEqual(handle.read(4).decode("utf-8"), data[1:5])

    def test_dav_delete(self):
        # Deletion of an existing remote file must succeed.
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertIsNone(remote_file.remove())
        os.remove(local_file)

        # Deletion of a non-existing remote file must succeed.
        non_existing_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(non_existing_file.remove())

        # Deletion of a non-empty remote directory must succeed.
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        target_file = self.tmpdir.join(self._get_file_name(), forceDirectory=True)
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertIsNone(subdir.remove())
        self.assertFalse(subdir.exists())
        os.remove(local_file)

    @classmethod
    def _get_port_number(cls) -> int:
        """Return a port number the webDAV server can use to listen on."""
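        # Binding to port 0 asks the operating system for a free ephemeral
        # port; the socket is then closed and the port number handed to the
        # server. Note (assumption worth keeping in mind): another process
        # could in principle grab the port between close() and the server
        # binding to it.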

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(("127.0.0.1", 0))
        s.listen()
        port = s.getsockname()[1]
        s.close()
        return port

    def _serve_webdav(self, local_path: str, port: int, stop_webdav_server: Callable[[], bool]):
        """Start a local webDAV server, listening on http://localhost:port
        and exposing local_path.

        This server only runs while this test class is instantiated,
        and then shuts down. The server must be started in a separate thread.

        Parameters
        ----------
        local_path : `str`
            Path to an existing local directory for the server to expose.
        port : `int`
            The port number on which the server should listen.
        stop_webdav_server : `Callable[[], bool]`
            Boolean function which returns True when the server should be
            stopped.
        """
        try:
            # Start the wsgi server in a separate thread.
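            # The WsgiDAV configuration below exposes local_path at the URL
            # root and maps every user to True in simple_dc, which (as far as
            # these tests require) allows unauthenticated access; locking and
            # the directory browser are disabled.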

            config = {
                "host": "127.0.0.1",
                "port": port,
                "provider_mapping": {"/": local_path},
                "http_authenticator": {"domain_controller": None},
                "simple_dc": {"user_mapping": {"*": True}},
                "verbose": 0,
                "lock_storage": False,
                "dir_browser": {
                    "enable": False,
                    "ms_sharepoint_support": False,
                    "libre_office_support": False,
                    "response_trailer": False,
                    "davmount_links": False,
                },
            }
            server = wsgi.Server(wsgi_app=WsgiDAVApp(config), bind_addr=(config["host"], config["port"]))
            t = Thread(target=server.start, daemon=True)
            t.start()

            # Shut down the server when done: stop_webdav_server() returns
            # True when this test suite is being torn down.
            while not stop_webdav_server():
                time.sleep(1)
        except KeyboardInterrupt:
            # Caught Ctrl-C, shut down the server.
            pass
        finally:
            server.stop()
            t.join()

    @classmethod
    def _get_name(cls, prefix: str) -> str:
        alphabet = string.ascii_lowercase + string.digits
        return f"{prefix}-" + "".join(random.choices(alphabet, k=8))

    @classmethod
    def _get_dir_name(cls) -> str:
        """Return a randomly selected name for a directory."""
        return cls._get_name(prefix="dir")

    @classmethod
    def _get_file_name(cls) -> str:
        """Return a randomly selected name for a file."""
        return cls._get_name(prefix="file")

    def _generate_file(self, remove_when_done=True) -> Tuple[str, int]:
        """Create a local file of random size with random contents.

        Returns
        -------
        path : `str`
            Path to local temporary file. The caller is responsible for
            removing the file when appropriate.
        size : `int`
            Size of the generated file, in bytes.
        """
        megabyte = 1024 * 1024
        size = random.randint(2 * megabyte, 5 * megabyte)
        tmpfile, path = tempfile.mkstemp()
        self.assertEqual(os.write(tmpfile, os.urandom(size)), size)
        os.close(tmpfile)

        if remove_when_done:
            self.local_files_to_remove.append(path)

        return path, size

    @classmethod
    def _compute_digest(cls, data: bytes) -> str:
        """Compute a SHA256 hash of data."""
        m = hashlib.sha256()
        m.update(data)
        return m.hexdigest()

    @classmethod
    def _is_server_running(cls, port: int) -> bool:
        """Return True if there is a server listening on local address
        127.0.0.1:<port>.
        """
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            try:
                s.connect(("127.0.0.1", port))
                return True
            except ConnectionRefusedError:
                return False


class HttpResourcePathConfigTestCase(unittest.TestCase):
    """Test for the HttpResourcePathConfig class."""

    def test_send_expect_header(self):
        # Ensure the environment variable LSST_HTTP_PUT_SEND_EXPECT_HEADER is
        # inspected to initialize the HttpResourcePathConfig class.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.send_expect_on_put)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_PUT_SEND_EXPECT_HEADER": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.send_expect_on_put)

    def test_collect_memory_usage(self):
        # Ensure the environment variable LSST_HTTP_COLLECT_MEMORY_USAGE is
        # inspected to initialize the HttpResourcePathConfig class.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.collect_memory_usage)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_COLLECT_MEMORY_USAGE": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.collect_memory_usage)

    def test_timeout(self):
        # Ensure that when the connect and read timeouts are not specified
        # the default values are stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], config.DEFAULT_TIMEOUT_CONNECT)
            self.assertAlmostEqual(config.timeout[1], config.DEFAULT_TIMEOUT_READ)

        # Ensure that when both the connect and read timeouts are specified
        # they are stored in the config.
        connect_timeout, read_timeout = 100.5, 200.8
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_TIMEOUT_CONNECT": str(connect_timeout), "LSST_HTTP_TIMEOUT_READ": str(read_timeout)},
            clear=True,
        ):
            # Force module reload.
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], connect_timeout)
            self.assertAlmostEqual(config.timeout[1], read_timeout)

        # Ensure that NaN values are ignored and the default values are used.
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_TIMEOUT_CONNECT": "NaN", "LSST_HTTP_TIMEOUT_READ": "NaN"},
            clear=True,
        ):
            # Force module reload.
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], config.DEFAULT_TIMEOUT_CONNECT)
            self.assertAlmostEqual(config.timeout[1], config.DEFAULT_TIMEOUT_READ)

    def test_front_end_connections(self):
        # Ensure that when the number of front-end connections is not
        # specified the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, config.DEFAULT_FRONTEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of front-end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_FRONTEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, connections)

    def test_back_end_connections(self):
        # Ensure that when the number of back-end connections is not
        # specified the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, config.DEFAULT_BACKEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of back-end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, connections)

    def test_digest_algorithm(self):
        # Ensure that when no digest is specified in the environment, the
        # configured digest algorithm is the empty string.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an invalid digest algorithm is ignored.
        digest = "invalid"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an accepted digest algorithm is stored.
        for digest in HttpResourcePathConfig().ACCEPTED_DIGESTS:
            with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
                importlib.reload(lsst.resources.http)
                config = HttpResourcePathConfig()
                self.assertEqual(config.digest_algorithm, digest)

    def test_backoff_interval(self):
        # Ensure that when no backoff interval is defined, the default values
        # are used.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that an invalid value for the backoff interval is ignored
        # and the default values are used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "XXX", "LSST_HTTP_BACKOFF_MAX": "YYY"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that NaN values are ignored and the default values are used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "NaN", "LSST_HTTP_BACKOFF_MAX": "NaN"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that, when specified, valid backoff interval limits are used.
        backoff_min, backoff_max = 3.0, 8.0
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_BACKOFF_MIN": str(backoff_min), "LSST_HTTP_BACKOFF_MAX": str(backoff_max)},
            clear=True,
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, backoff_min)
            self.assertAlmostEqual(config.backoff_max, backoff_max)


class WebdavUtilsTestCase(unittest.TestCase):
    """Tests for the webDAV-related utilities."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))

    def tearDown(self):
        if self.tmpdir:
            if self.tmpdir.isLocal:
                removeTestTempDir(self.tmpdir.ospath)

    @responses.activate
    def test_is_webdav_endpoint(self):
        davEndpoint = "http://www.lsstwithwebdav.org"
        responses.add(responses.OPTIONS, davEndpoint, status=200, headers={"DAV": "1,2,3"})
        self.assertTrue(_is_webdav_endpoint(davEndpoint))

        plainHttpEndpoint = "http://www.lsstwithoutwebdav.org"
        responses.add(responses.OPTIONS, plainHttpEndpoint, status=200)
        self.assertFalse(_is_webdav_endpoint(plainHttpEndpoint))

    def test_is_protected(self):
        self.assertFalse(_is_protected("/this-file-does-not-exist"))

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("XXXX")
            file_path = f.name

        os.chmod(file_path, stat.S_IRUSR)
        self.assertTrue(_is_protected(file_path))

        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(file_path, stat.S_IRUSR | mode)
            self.assertFalse(_is_protected(file_path))


class BearerTokenAuthTestCase(unittest.TestCase):
    """Test for the BearerTokenAuth class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.token = "ABCDE1234"

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_empty_token(self):
        """Ensure that when no token is provided the request is not
        modified.
        """
        auth = BearerTokenAuth(None)
        auth._refresh()
        self.assertIsNone(auth._token)
        self.assertIsNone(auth._path)
        req = requests.Request("GET", "https://example.org")
        self.assertEqual(auth(req), req)

    def test_token_value(self):
        """Ensure that when a token value is provided, the 'Authorization'
        header is added to the requests.
        """
        auth = BearerTokenAuth(self.token)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

    def test_token_file(self):
        """Ensure that when the provided token is a file path, its contents
        are correctly used in the 'Authorization' header of the requests.
        """
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write(self.token)
            token_file_path = f.name

        # Ensure the request's "Authorization" header is set with the right
        # token value.
        os.chmod(token_file_path, stat.S_IRUSR)
        auth = BearerTokenAuth(token_file_path)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

        # Ensure an exception is raised if either group or other can read the
        # token file.
        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(token_file_path, stat.S_IRUSR | mode)
            with self.assertRaises(PermissionError):
                BearerTokenAuth(token_file_path)


class SessionStoreTestCase(unittest.TestCase):
    """Test for the SessionStore class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.rpath = ResourcePath("https://example.org")

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_ca_cert_bundle(self):
        """Ensure a certificate authorities bundle is used to authenticate
        the remote server.
        """
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT BUNDLE")
            cert_bundle = f.name

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_CACERT_BUNDLE": cert_bundle}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.verify, cert_bundle)

    def test_user_cert(self):
        """Ensure that if a user certificate and private key are provided,
        they are used for authenticating the client.
        """
        # Create mock certificate and private key files.
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT")
            client_cert = f.name

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("KEY")
            client_key = f.name

        # Check that both LSST_HTTP_AUTH_CLIENT_CERT and
        # LSST_HTTP_AUTH_CLIENT_KEY must be initialized.
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_KEY": client_key}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        # Check that the private key file must be accessible only by its
        # owner.
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert, "LSST_HTTP_AUTH_CLIENT_KEY": client_key},
            clear=True,
        ):
            # Ensure the session client certificate is initialized when
            # only the owner can read the private key file.
            os.chmod(client_key, stat.S_IRUSR)
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.cert[0], client_cert)
            self.assertEqual(session.cert[1], client_key)

            # Ensure an exception is raised if either group or other can
            # access the private key file.
            for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
                os.chmod(client_key, stat.S_IRUSR | mode)
                with self.assertRaises(PermissionError):
                    SessionStore().get(self.rpath)

    def test_token_env(self):
        """Ensure that when the token is provided via an environment variable
        the sessions are equipped with a BearerTokenAuth.
        """
        token = "ABCDE"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_BEARER_TOKEN": token}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(type(session.auth), lsst.resources.http.BearerTokenAuth)
            self.assertEqual(session.auth._token, token)
            self.assertIsNone(session.auth._path)

    def test_sessions(self):
        """Ensure the session caching mechanism works."""
        # Ensure the store provides a session for a given URL.
        root_url = "https://example.org"
        store = SessionStore()
        session = store.get(ResourcePath(root_url))
        self.assertIsNotNone(session)

        # Ensure the sessions retrieved from a single store with the same
        # root URIs are equal.
        for u in (f"{root_url}", f"{root_url}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))

        # Ensure sessions retrieved for different root URIs are different.
        another_url = "https://another.example.org"
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store for URLs with
        # different port numbers are different.
        root_url_with_port = f"{another_url}:12345"
        session = store.get(ResourcePath(root_url_with_port))
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store with the same
        # root URIs (including port numbers) are equal.
        for u in (f"{root_url_with_port}", f"{root_url_with_port}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))


if __name__ == "__main__":
    unittest.main()