add support for on-demand topic metadata fetch #1541

Merged: 1 commit, Aug 31, 2018
13 changes: 10 additions & 3 deletions kafka/client_async.py
@@ -144,6 +144,7 @@ class KafkaClient(object):
 
     DEFAULT_CONFIG = {
         'bootstrap_servers': 'localhost',
+        'bootstrap_topics_filter': set(),
         'client_id': 'kafka-python-' + __version__,
         'request_timeout_ms': 30000,
         'connections_max_idle_ms': 9 * 60 * 1000,
@@ -231,9 +232,15 @@ def _bootstrap(self, hosts):
         self._last_bootstrap = time.time()
 
         if self.config['api_version'] is None or self.config['api_version'] < (0, 10):
-            metadata_request = MetadataRequest[0]([])
+            if self.config['bootstrap_topics_filter']:
+                metadata_request = MetadataRequest[0](list(self.config['bootstrap_topics_filter']))
+            else:
+                metadata_request = MetadataRequest[0]([])
         else:
-            metadata_request = MetadataRequest[1](None)
+            if self.config['bootstrap_topics_filter']:
+                metadata_request = MetadataRequest[1](list(self.config['bootstrap_topics_filter']))
+            else:
+                metadata_request = MetadataRequest[1](None)
 
         for host, port, afi in hosts:
             log.debug("Attempting to bootstrap via node at %s:%s", host, port)
@@ -825,7 +832,7 @@ def check_version(self, node_id=None, timeout=2, strict=False):
             self._refresh_on_disconnects = False
             try:
                 remaining = end - time.time()
-                version = conn.check_version(timeout=remaining, strict=strict)
+                version = conn.check_version(timeout=remaining, strict=strict, topics=list(self.config['bootstrap_topics_filter']))
                 return version
             except Errors.NodeNotReadyError:
                 # Only raise to user if this is a node-specific request
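A hedged usage sketch of the new client setting; the broker address and topic names below are illustrative only, and the only parameter introduced by this change is bootstrap_topics_filter:

from kafka.client_async import KafkaClient

# Illustrative only: restrict bootstrap metadata to a known subset of topics
# instead of fetching metadata for every topic in the cluster.
client = KafkaClient(
    bootstrap_servers='localhost:9092',              # hypothetical broker address
    bootstrap_topics_filter={'orders', 'payments'},  # hypothetical topic names
)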
6 changes: 3 additions & 3 deletions kafka/conn.py
@@ -892,7 +892,7 @@ def _infer_broker_version_from_api_versions(self, api_versions):
         # so if all else fails, choose that
         return (0, 10, 0)
 
-    def check_version(self, timeout=2, strict=False):
+    def check_version(self, timeout=2, strict=False, topics=[]):
         """Attempt to guess the broker version.
 
         Note: This is a blocking call.
@@ -925,7 +925,7 @@ def check_version(self, timeout=2, strict=False):
             ((0, 9), ListGroupsRequest[0]()),
             ((0, 8, 2), GroupCoordinatorRequest[0]('kafka-python-default-group')),
             ((0, 8, 1), OffsetFetchRequest[0]('kafka-python-default-group', [])),
-            ((0, 8, 0), MetadataRequest[0]([])),
+            ((0, 8, 0), MetadataRequest[0](topics)),
         ]
 
         for version, request in test_cases:
@@ -941,7 +941,7 @@ def check_version(self, timeout=2, strict=False):
             # the attempt to write to a disconnected socket should
             # immediately fail and allow us to infer that the prior
             # request was unrecognized
-            mr = self.send(MetadataRequest[0]([]))
+            mr = self.send(MetadataRequest[0](topics))
 
             selector = self.config['selector']()
             selector.register(self._sock, selectors.EVENT_READ)
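With the new topics parameter, the probe MetadataRequests used during version detection carry the same filter. A minimal sketch, assuming conn is an already-constructed kafka.conn.BrokerConnection and 'orders' is a hypothetical topic name:

# Illustrative only: `conn` is assumed to be a connected BrokerConnection.
version = conn.check_version(timeout=2, strict=False, topics=['orders'])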
1 change: 1 addition & 0 deletions kafka/producer/kafka.py
@@ -276,6 +276,7 @@ class KafkaProducer(object):
         'key_serializer': None,
         'value_serializer': None,
         'acks': 1,
+        'bootstrap_topics_filter': set(),
         'compression_type': None,
         'retries': 0,
         'batch_size': 16384,
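On the producer side, the new key is accepted by the producer's config so it can be forwarded to the underlying KafkaClient. A hedged example, with hypothetical broker address and topic name:

from kafka import KafkaProducer

producer = KafkaProducer(
    bootstrap_servers='localhost:9092',   # hypothetical broker address
    bootstrap_topics_filter={'orders'},   # limit bootstrap metadata to this topic
)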