Skip to content

Commit

Permalink
fix the batch writer data loss bug which occurs with concurrent tasks…
Browse files Browse the repository at this point in the history
… using the same batch writer
  • Loading branch information
JamesVerrill authored and terrycain committed Aug 10, 2022
1 parent 7f23e5b commit 2deccf0
Show file tree
Hide file tree
Showing 2 changed files with 52 additions and 7 deletions.
15 changes: 8 additions & 7 deletions aioboto3/dynamodb/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,14 +111,15 @@ async def _flush(self):
RequestItems={self._table_name: items_to_send})
unprocessed_items = response['UnprocessedItems']

if unprocessed_items and unprocessed_items[self._table_name]:
# Any unprocessed_items are immediately added to the
# next batch we send.
self._items_buffer.extend(unprocessed_items[self._table_name])
else:
self._items_buffer = []
if not unprocessed_items:
unprocessed_items = {}
item_list = unprocessed_items.get(self._table_name, [])
# Any unprocessed_items are immediately added to the
# next batch we send.
self._items_buffer.extend(item_list)
logger.debug(
"Batch write sent %s, unprocessed: %s", len(items_to_send), len(self._items_buffer)
"Batch write sent %s, unprocessed: %s, buffer %s",
len(items_to_send), len(item_list), len(self._items_buffer)
)

async def __aenter__(self):
Expand Down
44 changes: 44 additions & 0 deletions tests/test_dynamo.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,50 @@ async def test_dynamo_resource_batch_write_flush_amount(event_loop, dynamodb_res
assert result['Count'] == 9


@pytest.mark.asyncio
async def test_flush_doesnt_reset_item_buffer(event_loop, dynamodb_resource, random_table_name):
    """Regression test: a flush must only remove the items it actually sent.

    Items sitting in the writer's buffer beyond the flush amount must survive
    the flush, and exiting the context manager must keep flushing until the
    buffer is fully drained.
    """
    await dynamodb_resource.create_table(
        TableName=random_table_name,
        KeySchema=[{'AttributeName': 'pk', 'KeyType': 'HASH'}],
        AttributeDefinitions=[{'AttributeName': 'pk', 'AttributeType': 'S'}],
        ProvisionedThroughput={'ReadCapacityUnits': 2, 'WriteCapacityUnits': 1}
    )

    def put_requests(start, stop):
        # Build PutRequest entries with pk 'test<start>' .. 'test<stop-1>'.
        return [
            {'PutRequest': {'Item': {'pk': f'test{i}', 'test_col1': 'col'}}}
            for i in range(start, stop)
        ]

    table = await dynamodb_resource.Table(random_table_name)
    async with table.batch_writer(flush_amount=5, on_exit_loop_sleep=0.1) as dynamo_writer:
        # Seed the buffer directly; nothing has been flushed yet.
        dynamo_writer._items_buffer.extend(put_requests(1, 7))
        scan = await table.scan()
        assert scan['Count'] == 0

        await dynamo_writer.put_item(Item={'pk': 'test7', 'test_col1': 'col'})

        # Flush amount is 5 so count should be 5 not 6
        scan = await table.scan()
        assert scan['Count'] == 5

        assert len(dynamo_writer._items_buffer) == 2
        # the buffer doesn't have unprocessed items deleted

        # add more items than the flush size to check exit iterates over all items
        dynamo_writer._items_buffer.extend(put_requests(8, 12))

    # On exit it should flush so count should be 11
    scan = await table.scan()
    assert scan['Count'] == 11


@pytest.mark.asyncio
async def test_dynamo_resource_property(event_loop, dynamodb_resource, random_table_name):
await dynamodb_resource.create_table(
Expand Down

0 comments on commit 2deccf0

Please sign in to comment.