Improve throughput and resiliency of batching algo #1371

Closed
wants to merge 8 commits
10 changes: 5 additions & 5 deletions integration/test_batch_v4.py
@@ -553,9 +553,9 @@ def test_add_1000_tenant_objects_with_async_indexing_and_wait_for_only_one(
lambda client: client.batch.rate_limit(1000),
],
ids=[
"test_add_one_hundred_objects_and_references_between_all_dynamic",
"test_add_one_hundred_objects_and_references_between_all_fixed_size",
"test_add_one_hundred_objects_and_references_between_all_rate_limit",
"test_add_one_object_and_a_self_reference_dynamic",
"test_add_one_object_and_a_self_reference_fixed_size",
"test_add_one_object_and_a_self_reference_rate_limit",
],
)
def test_add_one_object_and_a_self_reference(
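
The fixture list above parametrizes the test over the client's three batching modes (dynamic, fixed-size, and rate-limited, the last mirroring `client.batch.rate_limit(1000)` in the diff). A minimal sketch of what the renamed test ids describe, assuming a local Weaviate instance, a collection named "TestCollection", and a self-reference property named "ref" (none of these names come from this diff):

```python
import weaviate

with weaviate.connect_to_local() as client:  # connection helper is an assumption
    # Dynamic batching: the client adapts batch size to server feedback.
    with client.batch.dynamic() as batch:
        uuid = batch.add_object(collection="TestCollection", properties={"name": "self"})
        batch.add_reference(
            from_uuid=uuid,
            from_collection="TestCollection",
            from_property="ref",  # hypothetical self-reference property
            to=uuid,              # the object points back at itself
        )

    # Fixed-size batching: flush after every batch_size objects.
    with client.batch.fixed_size(batch_size=100) as batch:
        batch.add_object(collection="TestCollection", properties={"name": "fixed"})

    # Rate-limited batching: cap objects sent per minute, as in the fixture above.
    with client.batch.rate_limit(1000) as batch:
        batch.add_object(collection="TestCollection", properties={"name": "limited"})
```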
@@ -596,8 +596,8 @@ def batch_insert(batch: BatchClient) -> None:
with concurrent.futures.ThreadPoolExecutor() as executor:
with client.batch.dynamic() as batch:
futures = [executor.submit(batch_insert, batch) for _ in range(nr_threads)]
for future in concurrent.futures.as_completed(futures):
future.result()
for future in concurrent.futures.as_completed(futures):
future.result()
objs = client.collections.get(name).query.fetch_objects(limit=nr_objects * nr_threads).objects
assert len(objs) == nr_objects * nr_threads
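
For readers skimming the hunk above: the test fans inserts out from several threads into a single batch context, then waits on every future so that per-thread errors surface before the batch flushes, and finally checks that all objects landed. A self-contained sketch of that pattern, with the collection name, connection helper, and counts as assumptions rather than the test's actual values:

```python
import concurrent.futures
import weaviate

nr_threads = 5
nr_objects = 100  # per thread

def batch_insert(batch) -> None:
    # Each worker thread adds its own objects to the shared batch.
    for i in range(nr_objects):
        batch.add_object(collection="TestCollection", properties={"text": str(i)})

with weaviate.connect_to_local() as client:  # connection helper is an assumption
    with concurrent.futures.ThreadPoolExecutor() as executor:
        with client.batch.dynamic() as batch:
            futures = [executor.submit(batch_insert, batch) for _ in range(nr_threads)]
            # Re-raise any worker exception before the batch context exits and flushes.
            for future in concurrent.futures.as_completed(futures):
                future.result()

    objs = (
        client.collections.get("TestCollection")
        .query.fetch_objects(limit=nr_objects * nr_threads)
        .objects
    )
    assert len(objs) == nr_objects * nr_threads
```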
