diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst
index dffb167c74771f..0fe1bdde8176c0 100644
--- a/Doc/library/hashlib.rst
+++ b/Doc/library/hashlib.rst
@@ -55,11 +55,14 @@ hash supplied more than 2047 bytes of data at once in its constructor or
 .. index:: single: OpenSSL; (use in module hashlib)
 
 Constructors for hash algorithms that are always present in this module are
-:func:`sha1`, :func:`sha224`, :func:`sha256`, :func:`sha384`, :func:`sha512`,
+:func:`md5`, :func:`sha1`, :func:`sha224`, :func:`sha256`, :func:`sha384`, :func:`sha512`,
 :func:`sha3_224`, :func:`sha3_256`, :func:`sha3_384`, :func:`sha3_512`,
 :func:`shake_128`, :func:`shake_256`, :func:`blake2b`, and :func:`blake2s`.
-:func:`md5` is normally available as well, though it may be missing or blocked
-if you are using a rare "FIPS compliant" build of Python.
+Some of these may be missing or blocked if you are running in an environment
+where OpenSSL's "FIPS mode" is configured to exclude some hash algorithms from
+its default provider and your Python runtime was built with that in mind.
+Such environments are unusual.
+
 These correspond to :data:`algorithms_guaranteed`.
 
 Additional algorithms may also be available if your Python distribution's
@@ -119,7 +122,7 @@ More condensed:
 Constructors
 ------------
 
-.. function:: new(name[, data], *, usedforsecurity=True)
+.. function:: new(name[, data], \*, usedforsecurity=True)
 
    Is a generic constructor that takes the string *name* of the desired
    algorithm as its first parameter.  It also exists to allow access to the
@@ -134,16 +137,16 @@ Using :func:`new` with an algorithm name:
    '031edd7d41651593c5fe5c006fa5752b37fddff7bc4e843aa6af0c950f4b9406'
 
 
-.. function:: md5([, data], *, usedforsecurity=True)
-.. function:: sha1([, data], *, usedforsecurity=True)
-.. function:: sha224([, data], *, usedforsecurity=True)
-.. function:: sha256([, data], *, usedforsecurity=True)
-.. function:: sha384([, data], *, usedforsecurity=True)
-.. function:: sha512([, data], *, usedforsecurity=True)
-.. function:: sha3_224([, data], *, usedforsecurity=True)
-.. function:: sha3_256([, data], *, usedforsecurity=True)
-.. function:: sha3_384([, data], *, usedforsecurity=True)
-.. function:: sha3_512([, data], *, usedforsecurity=True)
+.. function:: md5([, data], \*, usedforsecurity=True)
+.. function:: sha1([, data], \*, usedforsecurity=True)
+.. function:: sha224([, data], \*, usedforsecurity=True)
+.. function:: sha256([, data], \*, usedforsecurity=True)
+.. function:: sha384([, data], \*, usedforsecurity=True)
+.. function:: sha512([, data], \*, usedforsecurity=True)
+.. function:: sha3_224([, data], \*, usedforsecurity=True)
+.. function:: sha3_256([, data], \*, usedforsecurity=True)
+.. function:: sha3_384([, data], \*, usedforsecurity=True)
+.. function:: sha3_512([, data], \*, usedforsecurity=True)
 
 Named constructors such as these are faster than passing an algorithm name to
 :func:`new`.
@@ -156,9 +159,10 @@ Hashlib provides the following constant module attributes:
 
 .. data:: algorithms_guaranteed
 
    A set containing the names of the hash algorithms guaranteed to be supported
-   by this module on all platforms.  Note that 'md5' is in this list despite
-   some upstream vendors offering an odd "FIPS compliant" Python build that
-   excludes it.
+   by this module on all platforms.  Note that these guarantees do not hold
+   when vendors offer "FIPS compliant" Python builds that exclude some
+   algorithms entirely, or when an OpenSSL FIPS mode configuration disables
+   some of them in the default provider.
 
    .. versionadded:: 3.2
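The failure mode described by the new wording can be guarded against in application code. A minimal sketch (illustrative only, not part of this patch; the helper name is invented for the example) that feature-tests an algorithm before relying on it:

```python
import hashlib

def hexdigest_of(name: str, payload: bytes) -> str:
    """Return a hex digest, failing clearly if *name* is blocked."""
    if name not in hashlib.algorithms_available:
        raise RuntimeError(f"hash algorithm {name!r} is unavailable "
                           "(possibly excluded by a FIPS configuration)")
    # usedforsecurity=False marks a non-cryptographic use, which some
    # restricted configurations still permit for otherwise blocked algorithms.
    return hashlib.new(name, payload, usedforsecurity=False).hexdigest()

print(hexdigest_of("sha256", b"hello"))
```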
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
index 1b2c30cc32f564..8c00a50f262d9f 100644
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -55,17 +55,18 @@
 
 # This tuple and __get_builtin_constructor() must be modified if a new
 # always available algorithm is added.
-__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
-                      'blake2b', 'blake2s',
-                      'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
-                      'shake_128', 'shake_256')
+__always_supported = [
+    'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
+    'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
+    'shake_128', 'shake_256', 'blake2b', 'blake2s'
+]
 
 
 algorithms_guaranteed = set(__always_supported)
 algorithms_available = set(__always_supported)
 
-__all__ = __always_supported + ('new', 'algorithms_guaranteed',
-                                'algorithms_available', 'file_digest')
+__all__ = __always_supported + [
+    'new', 'algorithms_guaranteed', 'algorithms_available', 'file_digest']
 
 
 __builtin_constructor_cache = {}
@@ -243,9 +244,11 @@ def file_digest(fileobj, digest, /, *, _bufsize=2**18):
     # version not supporting that algorithm.
     try:
         globals()[__func_name] = __get_hash(__func_name)
     except ValueError:
-        import logging
-        logging.exception('code for hash %s was not found.', __func_name)
+        # Errors logged here would be seen as noise by most people.
+        # Code using a missing hash will get an obvious exception.
+        __all__.remove(__func_name)
+        algorithms_available.remove(__func_name)
 
 
 # Cleanup locals()
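A consequence of the change above: a constructor that cannot be created at import time is now dropped from both the module namespace and algorithms_available instead of being logged, so the advertised names should always be constructible. A small sanity-check sketch (illustrative, not part of the patch):

```python
import hashlib

# Every advertised name should now be constructible; anything blocked at
# import time has already been removed from the set by the loop above.
for name in sorted(hashlib.algorithms_available):
    hashlib.new(name, usedforsecurity=False)

# Guaranteed names that were dropped on this host (empty in most builds):
missing = hashlib.algorithms_guaranteed - hashlib.algorithms_available
print("blocked algorithms:", sorted(missing) or "none")
```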
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py
index 710aba9685efda..5dcffe396fe56b 100644
--- a/Lib/multiprocessing/connection.py
+++ b/Lib/multiprocessing/connection.py
@@ -23,6 +23,7 @@
 
 from . import util
 
 from . import AuthenticationError, BufferTooShort
+from . import context
 from .context import reduction
 _ForkingPickler = reduction.ForkingPickler
@@ -899,9 +900,9 @@ def _create_response(authkey, message):
             return hmac.new(authkey, message, 'md5').digest()
         except ValueError:
             # HMAC-MD5 is not available (FIPS mode?), fall back to
-            # HMAC-SHA2-256 modern protocol. The legacy server probably
+            # our modern HMAC-SHA* protocol. The legacy server probably
             # doesn't support it and will reject us anyways. :shrug:
-            digest_name = 'sha256'
+            digest_name = context._DIGEST_FOR_CONNECTION_HMAC
     # Modern protocol, indicate the digest used in the reply.
     response = hmac.new(authkey, message, digest_name).digest()
     return b'{%s}%s' % (digest_name.encode('ascii'), response)
@@ -932,10 +933,12 @@ def _verify_challenge(authkey, message, response):
         raise AuthenticationError('digest received was wrong')
 
 
-def deliver_challenge(connection, authkey: bytes, digest_name='sha256'):
+def deliver_challenge(connection, authkey: bytes, digest_name: str = ''):
     if not isinstance(authkey, bytes):
         raise ValueError(
             "Authkey must be bytes, not {0!s}".format(type(authkey)))
+    if not digest_name:
+        digest_name = context._DIGEST_FOR_CONNECTION_HMAC
     assert MESSAGE_LENGTH > _MD5ONLY_MESSAGE_LENGTH, "protocol constraint"
     message = os.urandom(MESSAGE_LENGTH)
     message = b'{%s}%s' % (digest_name.encode('ascii'), message)
diff --git a/Lib/multiprocessing/context.py b/Lib/multiprocessing/context.py
index d0a3ad00e53ad8..a45a992d359889 100644
--- a/Lib/multiprocessing/context.py
+++ b/Lib/multiprocessing/context.py
@@ -23,6 +23,12 @@ class TimeoutError(ProcessError):
 class AuthenticationError(ProcessError):
     pass
 
+# The default digest for multiprocessing.connection to use for auth.
+# We configure it here so that it can be tested when choosing a
+# default context without a circular import.
+# Must be the str of a value seen in connection._ALLOWED_DIGESTS.
+_DIGEST_FOR_CONNECTION_HMAC = 'sha256'
+
 #
 # Base type for contexts. Bound methods of an instance of this type are included in __all__ of __init__.py
 #
@@ -313,6 +319,21 @@ class ForkServerContext(BaseContext):
         def _check_available(self):
             if not reduction.HAVE_SEND_HANDLE:
                 raise ValueError('forkserver start method not available')
+            if not _test_if_connection_can_work():
+                raise ValueError(f'forkserver start method not available due to missing hmac-{_DIGEST_FOR_CONNECTION_HMAC}')
+
+    def _test_if_connection_can_work() -> bool:
+        # forkserver requires authenticated connections, which use hmac.
+        # If the algorithm is unavailable (poor FIPS mode config?) at
+        # import time, we cannot default to forkserver.  If a user
+        # changes _DIGEST_FOR_CONNECTION_HMAC to one that works in
+        # their unusual config, the forkserver context will still work.
+        import hmac
+        try:
+            hmac.new(b'test-key'*8, b'', _DIGEST_FOR_CONNECTION_HMAC)
+        except ValueError:
+            return False
+        return True
 
     _concrete_contexts = {
         'fork': ForkContext(),
@@ -322,7 +343,8 @@ def _check_available(self):
     # bpo-33725: running arbitrary code after fork() is no longer reliable
     # on macOS since macOS 10.14 (Mojave). Use spawn by default instead.
     # gh-84559: We changed everyones default to a thread safeish one in 3.14.
-    if reduction.HAVE_SEND_HANDLE and sys.platform != 'darwin':
+    if (reduction.HAVE_SEND_HANDLE and sys.platform != 'darwin' and
+            _test_if_connection_can_work()):
         _default_context = DefaultContext(_concrete_contexts['forkserver'])
     else:
         _default_context = DefaultContext(_concrete_contexts['spawn'])
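The probe added to context.py reduces to asking whether an HMAC object can be constructed with the configured digest. A standalone sketch of the same idea (illustrative; the real helper lives inside the platform-specific block of context.py):

```python
import hmac

def hmac_digest_usable(digest_name: str) -> bool:
    """True if HMAC with *digest_name* works under the current OpenSSL config."""
    try:
        hmac.new(b"k" * 32, b"", digest_name)
    except ValueError:  # raised when the digest is unavailable or blocked
        return False
    return True

# With this patch, "forkserver" is only chosen as the default start method
# when this probe succeeds for 'sha256'.
print(hmac_digest_usable("sha256"))
```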
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
index 80b08b8ac66899..4b5449c89eafca 100644
--- a/Lib/test/_test_multiprocessing.py
+++ b/Lib/test/_test_multiprocessing.py
@@ -1431,6 +1431,7 @@ def _acquire_event(lock, event):
         event.set()
         time.sleep(1.0)
 
+    @hashlib_helper.requires_hashdigest('sha256')  # Manager RPC connection auth
     def test_repr_lock(self):
         if self.TYPE != 'processes':
             self.skipTest('test not appropriate for {}'.format(self.TYPE))
@@ -1496,6 +1497,7 @@ def _acquire_release(lock, timeout, l=None, n=1):
         for _ in range(n):
             lock.release()
 
+    @hashlib_helper.requires_hashdigest('sha256')  # Manager RPC connection auth
     def test_repr_rlock(self):
         if self.TYPE != 'processes':
             self.skipTest('test not appropriate for {}'.format(self.TYPE))
@@ -3417,6 +3419,7 @@ def tearDown(self):
         self.mgr.shutdown()
         self.mgr.join()
 
+    @hashlib_helper.requires_hashdigest('sha256')  # multiprocessing.connection
     def test_queue_get(self):
         queue = self.mgr.Queue()
         if gc.isenabled():
@@ -3730,6 +3733,7 @@ def test_context(self):
         if self.TYPE == 'processes':
             self.assertRaises(OSError, l.accept)
 
+    @hashlib_helper.requires_hashdigest('sha256')  # connection auth
     def test_empty_authkey(self):
         # bpo-43952: allow empty bytes as authkey
         def handler(*args):
@@ -5782,9 +5786,11 @@ def test_get_all_start_methods(self):
         self.assertIn(methods[0], {'forkserver', 'spawn'},
                       msg='3.14+ default must not be fork')
         if methods[0] == 'spawn':
-            # Confirm that the current default selection logic prefers
-            # forkserver vs spawn when available.
-            self.assertNotIn('forkserver', methods)
+            if not hashlib_helper.in_openssl_fips_mode():
+                # Confirm that the current default selection logic prefers
+                # forkserver vs spawn when available.
+                # OpenSSL FIPS mode can disable this by blocking sha256.
+                self.assertNotIn('forkserver', methods)
 
     def test_preload_resources(self):
         if multiprocessing.get_start_method() != 'forkserver':
@@ -5805,7 +5811,11 @@ def test_mixed_startmethod(self):
         # Fork-based locks cannot be used with spawned process
         for process_method in ["spawn", "forkserver"]:
             queue = multiprocessing.get_context("fork").Queue()
-            process_ctx = multiprocessing.get_context(process_method)
+            try:
+                process_ctx = multiprocessing.get_context(process_method)
+            except ValueError as err:
+                self.skipTest(err)
+                continue
             p = process_ctx.Process(target=close_queue, args=(queue,))
             err_msg = "A SemLock created in a fork"
             with self.assertRaisesRegex(RuntimeError, err_msg):
@@ -5814,8 +5824,13 @@ def test_mixed_startmethod(self):
         # non-fork-based locks can be used with all other start methods
         for queue_method in ["spawn", "forkserver"]:
             for process_method in multiprocessing.get_all_start_methods():
-                queue = multiprocessing.get_context(queue_method).Queue()
-                process_ctx = multiprocessing.get_context(process_method)
+                try:
+                    queue_ctx = multiprocessing.get_context(queue_method)
+                    process_ctx = multiprocessing.get_context(process_method)
+                except ValueError as err:
+                    self.skipTest(err)
+                    continue
+                queue = queue_ctx.Queue()
                 p = process_ctx.Process(target=close_queue, args=(queue,))
                 p.start()
                 p.join()
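Outside the test suite, code that prefers forkserver can apply the same pattern the tests use: ask for the context and fall back when it raises ValueError. A sketch under the assumption that spawn is an acceptable fallback (not part of the patch):

```python
import multiprocessing

def pick_context():
    """Prefer the forkserver start method, falling back to spawn."""
    try:
        # Raises ValueError when forkserver cannot work on this host, e.g.
        # when hmac-sha256 is blocked by a FIPS configuration (this patch).
        return multiprocessing.get_context("forkserver")
    except ValueError:
        return multiprocessing.get_context("spawn")

if __name__ == "__main__":
    ctx = pick_context()
    print(ctx.get_start_method())
```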
diff --git a/Lib/test/support/hashlib_helper.py b/Lib/test/support/hashlib_helper.py
index a4e6c92203ab50..ade83d6bbc9bad 100644
--- a/Lib/test/support/hashlib_helper.py
+++ b/Lib/test/support/hashlib_helper.py
@@ -8,6 +8,13 @@
     _hashlib = None
 
 
+def in_openssl_fips_mode() -> bool:
+    """Is the OpenSSL-based _hashlib present and operating in FIPS mode?"""
+    if _hashlib and _hashlib.get_fips_mode() != 0:
+        return True
+    return False
+
+
 def requires_hashdigest(digestname, openssl=None, usedforsecurity=True):
     """Decorator raising SkipTest if a hashing algorithm is not available
 
diff --git a/Lib/test/test_concurrent_futures/test_init.py b/Lib/test/test_concurrent_futures/test_init.py
index df640929309318..e4ec2d5ee8edf0 100644
--- a/Lib/test/test_concurrent_futures/test_init.py
+++ b/Lib/test/test_concurrent_futures/test_init.py
@@ -141,6 +141,11 @@ def test_spawn(self):
 
     @support.skip_if_sanitizer("TSAN doesn't support threads after fork",
                                thread=True)
     def test_forkserver(self):
+        import multiprocessing
+        try:
+            multiprocessing.get_context("forkserver")
+        except ValueError as err:
+            self.skipTest(str(err))
         self._test(ProcessPoolForkserverFailingInitializerTest)
 
diff --git a/Lib/test/test_concurrent_futures/util.py b/Lib/test/test_concurrent_futures/util.py
index 52baab51340fc9..e2c69e3015aa5f 100644
--- a/Lib/test/test_concurrent_futures/util.py
+++ b/Lib/test/test_concurrent_futures/util.py
@@ -119,7 +119,10 @@ def get_context(self):
             self.skipTest("require unix system")
         if support.check_sanitizer(thread=True):
             self.skipTest("TSAN doesn't support threads after fork")
-        return super().get_context()
+        try:
+            return super().get_context()
+        except ValueError as err:
+            self.skipTest(str(err))
 
 
 def create_executor_tests(remote_globals, mixin, bases=(BaseTestCase,),
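Other test modules can reuse the helper in the same way; a hypothetical test (class and method names invented for the example) that should be skipped, not fail, when HMAC-SHA-256 is blocked:

```python
import unittest
from test.support import hashlib_helper

class ConnectionAuthTest(unittest.TestCase):
    # Skipped entirely when sha256 cannot be used for security, mirroring
    # the guards added to the multiprocessing tests in this patch.
    @hashlib_helper.requires_hashdigest('sha256', usedforsecurity=True)
    def test_uses_connection_auth(self):
        self.assertTrue(True)  # real assertions would exercise auth here

if __name__ == "__main__":
    unittest.main()
```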
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index 575b2cd0da7056..0e2d44442f34f9 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -66,6 +66,11 @@ def get_fips_mode():
 SKIP_SHA3 = support.check_sanitizer(ub=True)
 requires_sha3 = unittest.skipUnless(not SKIP_SHA3, 'requires _sha3')
 
+requires_usedforsecurity = unittest.skipIf(
+    get_fips_mode(),
+    "If an OpenSSL FIPS mode configuration has disabled any algorithms"
+    " in the default provider, this test would fail."
+)
 
 def hexstr(s):
     assert isinstance(s, bytes), repr(s)
@@ -102,6 +107,7 @@ class HashLibTestCase(unittest.TestCase):
                              'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
                              'shake_128', 'shake_256')
 
+    blakes = {'blake2b', 'blake2s'}
     shakes = {'shake_128', 'shake_256'}
 
     # gh-58898: Fallback modules are always compiled under POSIX.
@@ -121,9 +127,12 @@ def __init__(self, *args, **kwargs):
         for algorithm in self.supported_hash_names:
             algorithms.add(algorithm.lower())
 
+        # blake2s and blake2b *require* the _blake2 builtin.
         _blake2 = self._conditional_import_module('_blake2')
         if _blake2:
-            algorithms.update({'blake2b', 'blake2s'})
+            algorithms.update(self.blakes)
+        else:
+            algorithms.difference_update(self.blakes)
 
         self.constructors_to_test = {}
         for algorithm in algorithms:
@@ -134,7 +143,15 @@ def __init__(self, *args, **kwargs):
         # For each algorithm, test the direct constructor and the use
         # of hashlib.new given the algorithm name.
         for algorithm, constructors in self.constructors_to_test.items():
-            constructors.add(getattr(hashlib, algorithm))
+            if get_fips_mode():
+                # Arbitrary algorithms may be missing via openssl.cnf
+                try:
+                    constructor = getattr(hashlib, algorithm)
+                except AttributeError:
+                    continue
+                constructors.add(constructor)
+            else:
+                constructors.add(getattr(hashlib, algorithm))
             def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, **kwargs):
                 if data is None:
                     return hashlib.new(_alg, **kwargs)
@@ -196,10 +213,6 @@ def hash_constructors(self):
         constructors = self.constructors_to_test.values()
         return itertools.chain.from_iterable(constructors)
 
-    @property
-    def is_fips_mode(self):
-        return get_fips_mode()
-
     def test_hash_array(self):
         a = array.array("b", range(10))
         for cons in self.hash_constructors:
@@ -214,18 +227,24 @@ def test_algorithms_guaranteed(self):
             set(_algo for _algo in self.supported_hash_names
                   if _algo.islower()))
 
+    @unittest.skipIf(get_fips_mode(), reason="guaranteed algorithms may not be available in FIPS mode")
     def test_algorithms_available(self):
         self.assertTrue(set(hashlib.algorithms_guaranteed).
-                            issubset(hashlib.algorithms_available))
+                            issubset(hashlib.algorithms_available),
+                        msg=f"\n{sorted(hashlib.algorithms_guaranteed)=}\n{sorted(hashlib.algorithms_available)=}")
         # all available algorithms must be loadable, bpo-47101
         self.assertNotIn("undefined", hashlib.algorithms_available)
         for name in hashlib.algorithms_available:
-            digest = hashlib.new(name, usedforsecurity=False)
-
+            with self.subTest(name=name):
+                if name in self.blakes and not _blake2:
+                    self.skipTest("requires _blake2")
+                hashlib.new(name, usedforsecurity=False)
+
+    @requires_usedforsecurity
+    @unittest.skipUnless(hasattr(hashlib, "sha256"), "sha256 unavailable")
+    @unittest.skipUnless(hasattr(hashlib, "md5"), "md5 unavailable")
     def test_usedforsecurity_true(self):
         hashlib.new("sha256", usedforsecurity=True)
-        if self.is_fips_mode:
-            self.skipTest("skip in FIPS mode")
         for cons in self.hash_constructors:
             cons(usedforsecurity=True)
             cons(b'', usedforsecurity=True)
@@ -235,6 +255,8 @@ def test_usedforsecurity_true(self):
             self._hashlib.new("md5", usedforsecurity=True)
             self._hashlib.openssl_md5(usedforsecurity=True)
 
+    @unittest.skipUnless(hasattr(hashlib, "sha256"), "sha256 unavailable")
+    @unittest.skipUnless(hasattr(hashlib, "md5"), "md5 unavailable")
     def test_usedforsecurity_false(self):
         hashlib.new("sha256", usedforsecurity=False)
         for cons in self.hash_constructors:
@@ -250,8 +272,9 @@ def test_unknown_hash(self):
         self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam')
         self.assertRaises(TypeError, hashlib.new, 1)
 
+    @unittest.skipUnless(hasattr(hashlib, "sha256"), "sha256 unavailable")
     def test_new_upper_to_lower(self):
-        self.assertEqual(hashlib.new("SHA256").name, "sha256")
+        self.assertEqual(hashlib.new("SHA256", usedforsecurity=False).name, "sha256")
 
     def test_get_builtin_constructor(self):
         get_builtin_constructor = getattr(hashlib,
@@ -309,10 +332,6 @@ def test_name_attribute(self):
         for cons in self.hash_constructors:
             h = cons(usedforsecurity=False)
             self.assertIsInstance(h.name, str)
-            if h.name in self.supported_hash_names:
-                self.assertIn(h.name, self.supported_hash_names)
-            else:
-                self.assertNotIn(h.name, self.supported_hash_names)
             self.assertEqual(
                 h.name,
                 hashlib.new(h.name, usedforsecurity=False).name
@@ -353,7 +372,7 @@ def test_large_update(self):
     @requires_resource('cpu')
     def test_sha256_update_over_4gb(self):
         zero_1mb = b"\0" * 1024 * 1024
-        h = hashlib.sha256()
+        h = hashlib.sha256(usedforsecurity=False)
         for i in range(0, 4096):
             h.update(zero_1mb)
         h.update(b"hello world")
@@ -362,29 +381,33 @@ def test_sha256_update_over_4gb(self):
     @requires_resource('cpu')
     def test_sha3_256_update_over_4gb(self):
         zero_1mb = b"\0" * 1024 * 1024
-        h = hashlib.sha3_256()
+        h = hashlib.sha3_256(usedforsecurity=False)
         for i in range(0, 4096):
             h.update(zero_1mb)
         h.update(b"hello world")
         self.assertEqual(h.hexdigest(), "e2d4535e3b613135c14f2fe4e026d7ad8d569db44901740beffa30d430acb038")
 
+    @requires_blake2
     @requires_resource('cpu')
     def test_blake2_update_over_4gb(self):
         # blake2s or blake2b doesn't matter based on how our C code is structured, this tests the
         # common loop macro logic.
         zero_1mb = b"\0" * 1024 * 1024
-        h = hashlib.blake2s()
+        h = hashlib.blake2s(usedforsecurity=False)
         for i in range(0, 4096):
             h.update(zero_1mb)
         h.update(b"hello world")
         self.assertEqual(h.hexdigest(), "8a268e83dd30528bc0907fa2008c91de8f090a0b6e0e60a5ff0d999d8485526f")
 
     def check(self, name, data, hexdigest, shake=False, **kwargs):
+        if 'usedforsecurity' not in kwargs:
+            kwargs['usedforsecurity'] = False
         length = len(hexdigest)//2
         hexdigest = hexdigest.lower()
         constructors = self.constructors_to_test[name]
         # 2 is for hashlib.name(...) and hashlib.new(name, ...)
-        self.assertGreaterEqual(len(constructors), 2)
+        if get_fips_mode() == 0:
+            self.assertGreaterEqual(len(constructors), 2)
         for hash_object_constructor in constructors:
             m = hash_object_constructor(data, **kwargs)
             computed = m.hexdigest() if not shake else m.hexdigest(length)
@@ -434,7 +457,8 @@ def check_no_unicode(self, algorithm_name):
         # Unicode objects are not allowed as input.
         constructors = self.constructors_to_test[algorithm_name]
         for hash_object_constructor in constructors:
-            self.assertRaises(TypeError, hash_object_constructor, 'spam')
+            with self.assertRaises(TypeError):
+                hash_object_constructor('spam', usedforsecurity=False)
 
     def test_no_unicode(self):
         self.check_no_unicode('md5')
@@ -497,7 +521,7 @@ def test_blocksize_name_sha3(self):
     def check_sha3(self, name, capacity, rate, suffix):
         constructors = self.constructors_to_test[name]
         for hash_object_constructor in constructors:
-            m = hash_object_constructor()
+            m = hash_object_constructor(usedforsecurity=False)
             if HASH is not None and isinstance(m, HASH):
                 # _hashopenssl's variant does not have extra SHA3 attributes
                 continue
@@ -661,7 +685,7 @@ def check_blake2(self, constructor, salt_size, person_size, key_size,
                      digest_size, max_offset):
         self.assertEqual(constructor.SALT_SIZE, salt_size)
         for i in range(salt_size + 1):
-            constructor(salt=b'a' * i)
+            constructor(salt=b'a' * i, usedforsecurity=False)
         salt = b'a' * (salt_size + 1)
         self.assertRaises(ValueError, constructor, salt=salt)
 
@@ -908,6 +932,7 @@ def test_case_shake256_vector(self):
         for msg, md in read_vectors('shake_256'):
             self.check('shake_256', msg, md, True)
 
+    @unittest.skipUnless(hasattr(hashlib, "sha256"), "sha256 unavailable")
     def test_gil(self):
         # Check things work fine with an input larger than the size required
         # for multithreaded operation (which is hardwired to 2048).
@@ -922,7 +947,7 @@ def test_gil(self):
             m = cons(b'x' * gil_minsize, usedforsecurity=False)
             m.update(b'1')
 
-        m = hashlib.sha256()
+        m = hashlib.sha256(usedforsecurity=False)
         m.update(b'1')
         m.update(b'#' * gil_minsize)
         m.update(b'1')
@@ -931,7 +956,8 @@ def test_gil(self):
             '1cfceca95989f51f658e3f3ffe7f1cd43726c9e088c13ee10b46f57cef135b94'
         )
 
-        m = hashlib.sha256(b'1' + b'#' * gil_minsize + b'1')
+        m = hashlib.sha256(b'1' + b'#' * gil_minsize + b'1',
+                           usedforsecurity=False)
         self.assertEqual(
             m.hexdigest(),
             '1cfceca95989f51f658e3f3ffe7f1cd43726c9e088c13ee10b46f57cef135b94'
@@ -939,6 +965,7 @@ def test_gil(self):
 
     @threading_helper.reap_threads
     @threading_helper.requires_working_threading()
+    @unittest.skipUnless(hasattr(hashlib, "sha1"), "sha1 unavailable")
     def test_threaded_hashing(self):
         # Updating the same hash object from several threads at once
         # using data chunk sizes containing the same byte sequences.
         # If the internal locks are working to prevent multiple
         # updates on the same object from running at once, the resulting
         # hash will be the same as doing it single threaded upfront.
-        hasher = hashlib.sha1()
+        hasher = hashlib.sha1(usedforsecurity=False)
         num_threads = 5
         smallest_data = b'swineflu'
         data = smallest_data * 200000
@@ -975,8 +1002,7 @@ def hash_in_chunks(chunk_size):
         self.assertEqual(expected_hash, hasher.hexdigest())
 
     def test_get_fips_mode(self):
-        fips_mode = self.is_fips_mode
-        if fips_mode is not None:
+        if (fips_mode := get_fips_mode()) is not None:
             self.assertIsInstance(fips_mode, int)
 
     @support.cpython_only
@@ -1169,6 +1195,9 @@ def test_normalized_name(self):
         self.assertNotIn("blake2b512", hashlib.algorithms_available)
         self.assertNotIn("sha3-512", hashlib.algorithms_available)
 
+    # defaults to True because file_digest() doesn't support usedforsecurity.
+    @requires_usedforsecurity
+    @unittest.skipUnless(hasattr(hashlib, "sha256"), "sha256 unavailable")
     def test_file_digest(self):
         data = b'a' * 65536
         d1 = hashlib.sha256()
diff --git a/Misc/NEWS.d/next/Library/2024-12-01-07-46-23.gh-issue-84559.nj2wpQ.rst b/Misc/NEWS.d/next/Library/2024-12-01-07-46-23.gh-issue-84559.nj2wpQ.rst
new file mode 100644
index 00000000000000..4eb5f4a87125ca
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-12-01-07-46-23.gh-issue-84559.nj2wpQ.rst
@@ -0,0 +1,4 @@
+On hosts without the hmac-sha256 algorithm available (rare),
+:mod:`multiprocessing` will default to the ``"spawn"`` start method instead
+of ``"forkserver"``, which requires the algorithm for control socket
+authentication.
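For programs that must behave identically on every host, forcing the spawn start method sidesteps the question of whether forkserver's hmac-sha256 authentication is usable. A minimal sketch (illustrative, not part of the patch):

```python
import multiprocessing

def main():
    # "spawn" is part of every platform's start-method set, so this works
    # whether or not forkserver is available on the host.
    ctx = multiprocessing.get_context("spawn")
    with ctx.Pool(2) as pool:
        print(pool.map(abs, [-3, -2, -1]))

if __name__ == "__main__":
    main()
```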