Coverage for tests/test_http.py: 14%

488 statements  


# This file is part of lsst-resources.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

import hashlib
import importlib
import io
import os.path
import random
import shutil
import socket
import stat
import string
import tempfile
import time
import unittest
import warnings
from threading import Thread
from typing import Callable, Tuple, cast

try:
    from cheroot import wsgi
    from wsgidav.wsgidav_app import WsgiDAVApp
except ImportError:
    WsgiDAVApp = None

import lsst.resources
import requests
import responses
from lsst.resources import ResourcePath
from lsst.resources._resourceHandles._httpResourceHandle import HttpReadResourceHandle
from lsst.resources.http import (
    BearerTokenAuth,
    HttpResourcePathConfig,
    SessionStore,
    _is_protected,
    _is_webdav_endpoint,
)
from lsst.resources.tests import GenericReadWriteTestCase, GenericTestCase
from lsst.resources.utils import makeTestTempDir, removeTestTempDir

TESTDIR = os.path.abspath(os.path.dirname(__file__))


class GenericHttpTestCase(GenericTestCase, unittest.TestCase):
    scheme = "http"
    netloc = "server.example"


class HttpReadWriteWebdavTestCase(GenericReadWriteTestCase, unittest.TestCase):
    """Test with a real webDAV server, as opposed to mocking responses."""

    scheme = "http"

    @classmethod
    def setUpClass(cls):
        cls.webdav_tmpdir = tempfile.mkdtemp(prefix="webdav-server-test-")
        cls.local_files_to_remove = []
        cls.server_thread = None

        # Disable warnings about socket connections left open. We purposely
        # keep network connections to the remote server open, and the API
        # exposed by Requests gives us no means of actually closing the
        # underlying sockets, so ignore these warnings to make tests pass
        # cleanly.
        warnings.filterwarnings(action="ignore", message=r"unclosed.*socket", category=ResourceWarning)

        # Should we test against a running server?
        #
        # This is convenient for testing against real servers in the
        # developer environment by initializing the environment variable
        # LSST_RESOURCES_HTTP_TEST_SERVER_URL with the URL of the server, e.g.
        # https://dav.example.org:1234/path/to/top/dir
        if (test_endpoint := os.getenv("LSST_RESOURCES_HTTP_TEST_SERVER_URL")) is not None:
            # Run this test case against the specified server.
            uri = ResourcePath(test_endpoint)
            cls.scheme = uri.scheme
            cls.netloc = uri.netloc
            cls.base_path = uri.path
        elif WsgiDAVApp is not None:
            # WsgiDAVApp is available, launch a local server in its own
            # thread to expose a local temporary directory and run this
            # test case against it.
            cls.port_number = cls._get_port_number()
            cls.stop_webdav_server = False
            cls.server_thread = Thread(
                target=cls._serve_webdav,
                args=(cls, cls.webdav_tmpdir, cls.port_number, lambda: cls.stop_webdav_server),
                daemon=True,
            )
            cls.server_thread.start()

            # Wait for it to start
            time.sleep(1)

            # Initialize the server endpoint
            cls.netloc = f"127.0.0.1:{cls.port_number}"
        else:
            cls.skipTest(
                cls,
                "neither WsgiDAVApp is available nor a webDAV test endpoint is configured to test against",
            )

    @classmethod
    def tearDownClass(cls):
        # Stop the WsgiDAVApp server, if any
        if WsgiDAVApp is not None:
            # Shut down the webdav server and wait for the thread to exit
            cls.stop_webdav_server = True
            if cls.server_thread is not None:
                cls.server_thread.join()

        # Remove local temporary files
        for file in cls.local_files_to_remove:
            if os.path.exists(file):
                os.remove(file)

        # Remove temp dir
        if cls.webdav_tmpdir:
            shutil.rmtree(cls.webdav_tmpdir, ignore_errors=True)

        # Reset the warnings filter.
        warnings.resetwarnings()

    def tearDown(self):
        if self.tmpdir:
            self.tmpdir.remove()
        # Clear sessions. Some sockets may be left open, because urllib3
        # does not close in-flight connections.
        # See https://urllib3.readthedocs.io > API Reference >
        # Pool Manager > clear()
        # (The full URL is not included here because it is longer than 79
        # characters.)
        self.tmpdir._clear_sessions()

        super().tearDown()
    def test_dav_file_handle(self):
        # Upload a new file with known contents.
        contents = "These are some \n bytes to read"
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        # Test that the correct handle is returned.
        with remote_file.open("rb") as handle:
            self.assertIsInstance(handle, HttpReadResourceHandle)

        # Test reading byte ranges works
        with remote_file.open("rb") as handle:
            sub_contents = contents[:10]
            handle = cast(HttpReadResourceHandle, handle)
            result = handle.read(len(sub_contents)).decode()
            self.assertEqual(result, sub_contents)
            # Verify there is no internal buffer.
            self.assertIsNone(handle._completeBuffer)
            # Verify the position.
            self.assertEqual(handle.tell(), len(sub_contents))

            # Jump back to the beginning and test if reading the whole file
            # prompts the internal buffer to be read.
            handle.seek(0)
            self.assertEqual(handle.tell(), 0)
            result = handle.read().decode()
            self.assertIsNotNone(handle._completeBuffer)
            self.assertEqual(result, contents)

        # Verify reading as a string handle works as expected.
        with remote_file.open("r") as handle:
            self.assertIsInstance(handle, io.TextIOWrapper)

            handle = cast(io.TextIOWrapper, handle)
            self.assertIsInstance(handle.buffer, HttpReadResourceHandle)

            # Check if string methods work.
            result = handle.read()
            self.assertEqual(result, contents)

        # Verify that write modes invoke the default base method
        with remote_file.open("w") as handle:
            self.assertIsInstance(handle, io.StringIO)

    def test_dav_is_dav_endpoint(self):
        # Ensure the server is a webDAV endpoint
        self.assertTrue(self.tmpdir.is_webdav_endpoint)

    def test_dav_mkdir(self):
        # Check creation and deletion of an empty directory
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())

        # Creating an existing remote directory must succeed
        self.assertIsNone(subdir.mkdir())

        # Deletion of an existing directory must succeed
        self.assertIsNone(subdir.remove())

        # Deletion of a non-existing directory must succeed
        subdir_not_exists = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir_not_exists.remove())

        # Creation of a directory at a path where a file exists must raise
        file = self.tmpdir.join(self._get_file_name(), forceDirectory=False)
        file.write(data=None, overwrite=True)
        self.assertTrue(file.exists())

        existing_file = self.tmpdir.join(file.basename(), forceDirectory=True)
        with self.assertRaises(NotADirectoryError):
            self.assertIsNone(existing_file.mkdir())

    def test_dav_upload_download(self):
        # Test uploading a randomly-generated file via write(), with and
        # without overwrite
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)

        # Write without overwrite must raise since target file exists
        with self.assertRaises(FileExistsError):
            remote_file.write(data, overwrite=False)

        # Download the file we just uploaded. Compute and compare a digest of
        # the uploaded and downloaded data and ensure they match
        downloaded_data = remote_file.read()
        self.assertEqual(len(downloaded_data), file_size)
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_as_local(self):
        contents = str.encode("12345")
        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data=contents, overwrite=True))

        local_path, is_temp = remote_file._as_local()
        self.assertTrue(is_temp)
        self.assertTrue(os.path.exists(local_path))
        self.assertEqual(os.stat(local_path).st_size, len(contents))
        self.assertEqual(ResourcePath(local_path).read(), contents)
        os.remove(local_path)

    def test_dav_size(self):
        # Size of a non-existent file must raise.
        remote_file = self.tmpdir.join(self._get_file_name())
        with self.assertRaises(FileNotFoundError):
            remote_file.size()

        # Retrieving the size of a remote directory using a file-like path
        # must raise
        remote_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(remote_dir.mkdir())
        self.assertTrue(remote_dir.exists())

        dir_as_file = ResourcePath(remote_dir.geturl().rstrip("/"), forceDirectory=False)
        with self.assertRaises(IsADirectoryError):
            dir_as_file.size()

    def test_dav_upload_creates_dir(self):
        # Uploading a file to a non-existing directory must automatically
        # create its parent directories, and the upload must succeed
        non_existing_dir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        non_existing_dir = non_existing_dir.join(self._get_dir_name(), forceDirectory=True)
        remote_file = non_existing_dir.join(self._get_file_name())

        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()
        self.assertIsNone(remote_file.write(data, overwrite=True))

        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertTrue(remote_file.parent().exists())

        downloaded_data = remote_file.read()
        upload_digest = self._compute_digest(data)
        download_digest = self._compute_digest(downloaded_data)
        self.assertEqual(upload_digest, download_digest)
        os.remove(local_file)

    def test_dav_transfer_from(self):
        # Transfer from local file via "copy", with and without overwrite
        remote_file = self.tmpdir.join(self._get_file_name())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        self.assertIsNone(remote_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), source_file.size())
        with self.assertRaises(FileExistsError):
            remote_file.transfer_from(ResourcePath(local_file), transfer="copy", overwrite=False)

        # Transfer from remote file via "copy", with and without overwrite
        source_file = remote_file
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_file.size())

        # Transfer without overwrite must raise since target resource exists
        with self.assertRaises(FileExistsError):
            target_file.transfer_from(source_file, transfer="copy", overwrite=False)

        # Test transfer from local file via "move", with and without overwrite
        source_file = ResourcePath(local_file)
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Test transfer without overwrite must raise since target resource
        # exists
        local_file, file_size = self._generate_file()
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

        # Test transfer from remote file via "move" with and without overwrite
        # must succeed
        source_file = target_file
        source_size = source_file.size()
        target_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="move", overwrite=True))
        self.assertTrue(target_file.exists())
        self.assertEqual(target_file.size(), source_size)
        self.assertFalse(source_file.exists())

        # Transfer without overwrite must raise since target resource exists
        with self.assertRaises(FileExistsError):
            source_file = ResourcePath(local_file)
            target_file.transfer_from(source_file, transfer="move", overwrite=False)

    def test_dav_handle(self):
        # Resource handle must succeed
        target_file = self.tmpdir.join(self._get_file_name())
        data = "abcdefghi"
        self.assertIsNone(target_file.write(data, overwrite=True))
        with target_file.open("rb") as handle:
            handle.seek(1)
            self.assertEqual(handle.read(4).decode("utf-8"), data[1:5])

    def test_dav_delete(self):
        # Deletion of an existing remote file must succeed
        local_file, file_size = self._generate_file()
        with open(local_file, "rb") as f:
            data = f.read()

        remote_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(remote_file.write(data, overwrite=True))
        self.assertTrue(remote_file.exists())
        self.assertEqual(remote_file.size(), file_size)
        self.assertIsNone(remote_file.remove())
        os.remove(local_file)

        # Deletion of a non-existing remote file must succeed
        non_existing_file = self.tmpdir.join(self._get_file_name())
        self.assertIsNone(non_existing_file.remove())

        # Deletion of a non-empty remote directory must succeed
        subdir = self.tmpdir.join(self._get_dir_name(), forceDirectory=True)
        self.assertIsNone(subdir.mkdir())
        self.assertTrue(subdir.exists())
        local_file, _ = self._generate_file()
        source_file = ResourcePath(local_file)
        target_file = subdir.join(self._get_file_name())
        self.assertIsNone(target_file.transfer_from(source_file, transfer="copy", overwrite=True))
        self.assertIsNone(subdir.remove())
        self.assertFalse(subdir.exists())
        os.remove(local_file)

    @classmethod
    def _get_port_number(cls) -> int:
        """Return a port number the webDAV server can use to listen to."""
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(("127.0.0.1", 0))
        s.listen()
        port = s.getsockname()[1]
        s.close()
        return port

    def _serve_webdav(self, local_path: str, port: int, stop_webdav_server: Callable[[], bool]):
        """Start a local webDAV server, listening on http://localhost:port
        and exposing local_path.

        This server only runs while this test class is exercised, and then
        shuts down. The server must be started in a separate thread.

        Parameters
        ----------
        local_path : `str`
            Path to an existing local directory for the server to expose.
        port : `int`
            The port number on which the server should listen.
        stop_webdav_server : `Callable[[], bool]`
            Boolean function which returns True when the server should be
            stopped.
        """
        try:
            # Start the wsgi server in a separate thread
            config = {
                "host": "127.0.0.1",
                "port": port,
                "provider_mapping": {"/": local_path},
                "http_authenticator": {"domain_controller": None},
                "simple_dc": {"user_mapping": {"*": True}},
                "verbose": 0,
                "lock_storage": False,
                "dir_browser": {
                    "enable": False,
                    "ms_sharepoint_support": False,
                    "libre_office_support": False,
                    "response_trailer": False,
                    "davmount_links": False,
                },
            }
            server = wsgi.Server(wsgi_app=WsgiDAVApp(config), bind_addr=(config["host"], config["port"]))
            t = Thread(target=server.start, daemon=True)
            t.start()

            # Shut down the server when done: stop_webdav_server() returns
            # True when this test suite is being torn down
            while not stop_webdav_server():
                time.sleep(1)
        except KeyboardInterrupt:
            # Caught Ctrl-C, shut down the server
            pass
        finally:
            server.stop()
            t.join()

    @classmethod
    def _get_name(cls, prefix: str) -> str:
        alphabet = string.ascii_lowercase + string.digits
        return f"{prefix}-" + "".join(random.choices(alphabet, k=8))

    @classmethod
    def _get_dir_name(cls) -> str:
        """Return a randomly selected name for a directory"""
        return cls._get_name(prefix="dir")

    @classmethod
    def _get_file_name(cls) -> str:
        """Return a randomly selected name for a file"""
        return cls._get_name(prefix="file")

    def _generate_file(self, remove_when_done=True) -> Tuple[str, int]:
        """Create a local file of random size with random contents.

        Returns
        -------
        path : `str`
            Path to local temporary file. The caller is responsible for
            removing the file when appropriate.
        size : `int`
            Size of the generated file, in bytes.
        """
        megabyte = 1024 * 1024
        size = random.randint(2 * megabyte, 5 * megabyte)
        tmpfile, path = tempfile.mkstemp()
        self.assertEqual(os.write(tmpfile, os.urandom(size)), size)
        os.close(tmpfile)

        if remove_when_done:
            self.local_files_to_remove.append(path)

        return path, size

    @classmethod
    def _compute_digest(cls, data: bytes) -> str:
        """Compute a SHA256 hash of data."""
        m = hashlib.sha256()
        m.update(data)
        return m.hexdigest()

    @classmethod
    def _is_server_running(cls, port: int) -> bool:
        """Return True if there is a server listening on local address
        127.0.0.1:<port>.
        """
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            try:
                s.connect(("127.0.0.1", port))
                return True
            except ConnectionRefusedError:
                return False


class HttpResourcePathConfigTestCase(unittest.TestCase):
    """Test for the HttpResourcePathConfig class."""

    def test_send_expect_header(self):
        # Ensure environment variable LSST_HTTP_PUT_SEND_EXPECT_HEADER is
        # inspected to initialize the HttpResourcePathConfig class.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.send_expect_on_put)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_PUT_SEND_EXPECT_HEADER": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.send_expect_on_put)

    def test_collect_memory_usage(self):
        # Ensure environment variable LSST_HTTP_COLLECT_MEMORY_USAGE is
        # inspected to initialize the HttpResourcePathConfig class.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertFalse(config.collect_memory_usage)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_COLLECT_MEMORY_USAGE": "true"}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertTrue(config.collect_memory_usage)

    def test_timeout(self):
        # Ensure that when the connect and read timeouts are not specified
        # the default values are stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], config.DEFAULT_TIMEOUT_CONNECT)
            self.assertAlmostEqual(config.timeout[1], config.DEFAULT_TIMEOUT_READ)

        # Ensure that when both the connect and read timeouts are specified
        # they are stored in the config.
        connect_timeout, read_timeout = 100.5, 200.8
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_TIMEOUT_CONNECT": str(connect_timeout), "LSST_HTTP_TIMEOUT_READ": str(read_timeout)},
            clear=True,
        ):
            # Force module reload.
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], connect_timeout)
            self.assertAlmostEqual(config.timeout[1], read_timeout)

        # Ensure that NaN values are ignored and the default values are used.
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_TIMEOUT_CONNECT": "NaN", "LSST_HTTP_TIMEOUT_READ": "NaN"},
            clear=True,
        ):
            # Force module reload.
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.timeout[0], config.DEFAULT_TIMEOUT_CONNECT)
            self.assertAlmostEqual(config.timeout[1], config.DEFAULT_TIMEOUT_READ)

    def test_front_end_connections(self):
        # Ensure that when the number of front end connections is not
        # specified the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, config.DEFAULT_FRONTEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of front end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_FRONTEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.front_end_connections, connections)

    def test_back_end_connections(self):
        # Ensure that when the number of back end connections is not
        # specified the default is stored in the config.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, config.DEFAULT_BACKEND_PERSISTENT_CONNECTIONS)

        # Ensure that when the number of back end connections is specified
        # it is stored in the config.
        connections = 42
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKEND_PERSISTENT_CONNECTIONS": str(connections)}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.back_end_connections, connections)

    def test_digest_algorithm(self):
        # Ensure that when no digest is specified in the environment, the
        # configured digest algorithm is the empty string.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an invalid digest algorithm is ignored.
        digest = "invalid"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertEqual(config.digest_algorithm, "")

        # Ensure that an accepted digest algorithm is stored.
        for digest in HttpResourcePathConfig().ACCEPTED_DIGESTS:
            with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_DIGEST": digest}, clear=True):
                importlib.reload(lsst.resources.http)
                config = HttpResourcePathConfig()
                self.assertEqual(config.digest_algorithm, digest)

    def test_backoff_interval(self):
        # Ensure that when no backoff interval is defined, the default values
        # are used.
        with unittest.mock.patch.dict(os.environ, {}, clear=True):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that an invalid value for the backoff interval is ignored
        # and the default values are used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "XXX", "LSST_HTTP_BACKOFF_MAX": "YYY"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that NaN values are ignored and the default values are used.
        with unittest.mock.patch.dict(
            os.environ, {"LSST_HTTP_BACKOFF_MIN": "NaN", "LSST_HTTP_BACKOFF_MAX": "NaN"}, clear=True
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, config.DEFAULT_BACKOFF_MIN)
            self.assertAlmostEqual(config.backoff_max, config.DEFAULT_BACKOFF_MAX)

        # Ensure that when specified, valid backoff interval limits are used.
        backoff_min, backoff_max = 3.0, 8.0
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_BACKOFF_MIN": str(backoff_min), "LSST_HTTP_BACKOFF_MAX": str(backoff_max)},
            clear=True,
        ):
            importlib.reload(lsst.resources.http)
            config = HttpResourcePathConfig()
            self.assertAlmostEqual(config.backoff_min, backoff_min)
            self.assertAlmostEqual(config.backoff_max, backoff_max)

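# A minimal illustrative sketch, not part of the test suite: the tests above
# show that HttpResourcePathConfig reads its settings from environment
# variables at module load time, so in practice the variables must be set
# before lsst.resources.http is (re)loaded. The values below are arbitrary
# examples.
#
#   os.environ["LSST_HTTP_TIMEOUT_CONNECT"] = "30"
#   os.environ["LSST_HTTP_TIMEOUT_READ"] = "1500"
#   importlib.reload(lsst.resources.http)
#   config = lsst.resources.http.HttpResourcePathConfig()
#   # config.timeout is now approximately (30.0, 1500.0), i.e. (connect, read)

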
class WebdavUtilsTestCase(unittest.TestCase):
    """Test for the webDAV-related utilities."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))

    def tearDown(self):
        if self.tmpdir:
            if self.tmpdir.isLocal:
                removeTestTempDir(self.tmpdir.ospath)

    @responses.activate
    def test_is_webdav_endpoint(self):
        davEndpoint = "http://www.lsstwithwebdav.org"
        responses.add(responses.OPTIONS, davEndpoint, status=200, headers={"DAV": "1,2,3"})
        self.assertTrue(_is_webdav_endpoint(davEndpoint))

        plainHttpEndpoint = "http://www.lsstwithoutwebdav.org"
        responses.add(responses.OPTIONS, plainHttpEndpoint, status=200)
        self.assertFalse(_is_webdav_endpoint(plainHttpEndpoint))

    def test_is_protected(self):
        self.assertFalse(_is_protected("/this-file-does-not-exist"))

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("XXXX")
            file_path = f.name

        os.chmod(file_path, stat.S_IRUSR)
        self.assertTrue(_is_protected(file_path))

        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(file_path, stat.S_IRUSR | mode)
            self.assertFalse(_is_protected(file_path))


class BearerTokenAuthTestCase(unittest.TestCase):
    """Test for the BearerTokenAuth class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.token = "ABCDE1234"

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_empty_token(self):
        """Ensure that when no token is provided the request is not
        modified.
        """
        auth = BearerTokenAuth(None)
        auth._refresh()
        self.assertIsNone(auth._token)
        self.assertIsNone(auth._path)
        req = requests.Request("GET", "https://example.org")
        self.assertEqual(auth(req), req)

    def test_token_value(self):
        """Ensure that when a token value is provided, the 'Authorization'
        header is added to the requests.
        """
        auth = BearerTokenAuth(self.token)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

    def test_token_file(self):
        """Ensure that when the provided token is a file path, its contents
        are correctly used in the 'Authorization' header of the requests.
        """
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write(self.token)
            token_file_path = f.name

        # Ensure the request's "Authorization" header is set with the right
        # token value
        os.chmod(token_file_path, stat.S_IRUSR)
        auth = BearerTokenAuth(token_file_path)
        req = auth(requests.Request("GET", "https://example.org").prepare())
        self.assertEqual(req.headers.get("Authorization"), f"Bearer {self.token}")

        # Ensure an exception is raised if either group or other can read the
        # token file
        for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
            os.chmod(token_file_path, stat.S_IRUSR | mode)
            with self.assertRaises(PermissionError):
                BearerTokenAuth(token_file_path)


class SessionStoreTestCase(unittest.TestCase):
    """Test for the SessionStore class."""

    def setUp(self):
        self.tmpdir = ResourcePath(makeTestTempDir(TESTDIR))
        self.rpath = ResourcePath("https://example.org")

    def tearDown(self):
        if self.tmpdir and self.tmpdir.isLocal:
            removeTestTempDir(self.tmpdir.ospath)

    def test_ca_cert_bundle(self):
        """Ensure a certificate authorities bundle is used to authenticate
        the remote server.
        """
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT BUNDLE")
            cert_bundle = f.name

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_CACERT_BUNDLE": cert_bundle}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.verify, cert_bundle)

    def test_user_cert(self):
        """Ensure that if a user certificate and private key are provided,
        they are used for authenticating the client.
        """

        # Create mock certificate and private key files.
        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("CERT")
            client_cert = f.name

        with tempfile.NamedTemporaryFile(mode="wt", dir=self.tmpdir.ospath, delete=False) as f:
            f.write("KEY")
            client_key = f.name

        # Check that both LSST_HTTP_AUTH_CLIENT_CERT and
        # LSST_HTTP_AUTH_CLIENT_KEY must be initialized.
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_CLIENT_KEY": client_key}, clear=True):
            with self.assertRaises(ValueError):
                SessionStore().get(self.rpath)

        # Check that the private key file must be accessible only by its
        # owner.
        with unittest.mock.patch.dict(
            os.environ,
            {"LSST_HTTP_AUTH_CLIENT_CERT": client_cert, "LSST_HTTP_AUTH_CLIENT_KEY": client_key},
            clear=True,
        ):
            # Ensure the session client certificate is initialized when
            # only the owner can read the private key file.
            os.chmod(client_key, stat.S_IRUSR)
            session = SessionStore().get(self.rpath)
            self.assertEqual(session.cert[0], client_cert)
            self.assertEqual(session.cert[1], client_key)

            # Ensure an exception is raised if either group or other can
            # access the private key file.
            for mode in (stat.S_IRGRP, stat.S_IWGRP, stat.S_IXGRP, stat.S_IROTH, stat.S_IWOTH, stat.S_IXOTH):
                os.chmod(client_key, stat.S_IRUSR | mode)
                with self.assertRaises(PermissionError):
                    SessionStore().get(self.rpath)

    def test_token_env(self):
        """Ensure that when the token is provided via an environment variable
        the sessions are equipped with a BearerTokenAuth.
        """
        token = "ABCDE"
        with unittest.mock.patch.dict(os.environ, {"LSST_HTTP_AUTH_BEARER_TOKEN": token}, clear=True):
            session = SessionStore().get(self.rpath)
            self.assertEqual(type(session.auth), lsst.resources.http.BearerTokenAuth)
            self.assertEqual(session.auth._token, token)
            self.assertIsNone(session.auth._path)

    def test_sessions(self):
        """Ensure the session caching mechanism works."""

        # Ensure the store provides a session for a given URL
        root_url = "https://example.org"
        store = SessionStore()
        session = store.get(ResourcePath(root_url))
        self.assertIsNotNone(session)

        # Ensure the sessions retrieved from a single store with the same
        # root URIs are equal
        for u in (f"{root_url}", f"{root_url}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))

        # Ensure sessions retrieved for different root URIs are different
        another_url = "https://another.example.org"
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store for URLs with
        # different port numbers are different
        root_url_with_port = f"{another_url}:12345"
        session = store.get(ResourcePath(root_url_with_port))
        self.assertNotEqual(session, store.get(ResourcePath(another_url)))

        # Ensure the sessions retrieved from a single store with the same
        # root URIs (including port numbers) are equal
        for u in (f"{root_url_with_port}", f"{root_url_with_port}/path/to/file"):
            self.assertEqual(session, store.get(ResourcePath(u)))


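# A hedged usage note, not part of the original test module: the webDAV tests
# above run against a local WsgiDAVApp instance by default. To exercise them
# against a real webDAV server instead, set LSST_RESOURCES_HTTP_TEST_SERVER_URL
# before running the tests (the URL below is only an example value):
#
#   export LSST_RESOURCES_HTTP_TEST_SERVER_URL=https://dav.example.org:1234/path/to/top/dir
#   python tests/test_http.py

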
if __name__ == "__main__":
    unittest.main()