Skip to content

Commit

Permalink
Cap maximum shard size at the size of an integer
Browse files Browse the repository at this point in the history
Supersedes dask#5134

Copying over the summary of that PR

Works around the OpenSSL 1.0.2 bug demonstrated in issue (dask#4538). Unlike PR (dask#5115), which applied this fix on the read path, this change does the same thing for the write path. The error may be less likely to show up when writing (as frames may simply be smaller than this limit); still, it seems like a good idea to protect against OverflowErrors from OpenSSL.
  • Loading branch information
mrocklin committed Jul 29, 2021
1 parent 83e209b commit 8bf63b3
Showing 1 changed file with 5 additions and 0 deletions.
5 changes: 5 additions & 0 deletions distributed/comm/tcp.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,8 @@ class TCP(Comm):
An established communication based on an underlying Tornado IOStream.
"""

max_shard_size = dask.utils.parse_bytes(dask.config.get("distributed.comm.shard"))

def __init__(self, stream, local_addr, peer_addr, deserialize=True):
self._closed = False
Comm.__init__(self)
Expand Down Expand Up @@ -248,6 +250,7 @@ async def write(self, msg, serializers=None, on_error="message"):
"recipient": self.remote_info,
**self.handshake_options,
},
frame_split_size=self.max_shard_size,
)
frames_nbytes = [nbytes(f) for f in frames]
frames_nbytes_total = sum(frames_nbytes)
Expand Down Expand Up @@ -335,6 +338,8 @@ class TLS(TCP):
A TLS-specific version of TCP.
"""

max_shard_size = min(C_INT_MAX, TCP.max_shard_size)

def _read_extra(self):
TCP._read_extra(self)
sock = self.stream.socket
Expand Down

0 comments on commit 8bf63b3

Please sign in to comment.