fix data downloaded size sum race when loading chunks (#6586)
Signed-off-by: Ben Ye <benye@amazon.com>
yeya24 authored Aug 7, 2023
1 parent 7d0d563 commit 7432fc6
Showing 1 changed file with 3 additions and 4 deletions.

pkg/store/bucket.go
@@ -3317,17 +3317,16 @@ func (r *bucketChunkReader) loadChunks(ctx context.Context, res []seriesEntry, a
 		}
 
 		// If we didn't fetch enough data for the chunk, fetch more.
-		r.mtx.Unlock()
-		locked = false
-
 		fetchBegin = time.Now()
-
 		// Read entire chunk into new buffer.
 		// TODO: readChunkRange call could be avoided for any chunk but last in this particular part.
 		if err := bytesLimiter.Reserve(uint64(chunkLen)); err != nil {
 			return httpgrpc.Errorf(int(codes.ResourceExhausted), "exceeded bytes limit while fetching chunks: %s", err)
 		}
 		r.stats.DataDownloadedSizeSum += units.Base2Bytes(chunkLen)
+		r.mtx.Unlock()
+		locked = false
+
 		nb, err := r.block.readChunkRange(ctx, seq, int64(pIdx.offset), int64(chunkLen), []byteRange{{offset: 0, length: chunkLen}})
 		if err != nil {
 			return errors.Wrapf(err, "preloaded chunk too small, expecting %d, and failed to fetch full chunk", chunkLen)
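
For context, the race fixed here comes from updating r.stats.DataDownloadedSizeSum after the reader's mutex had already been released, while other goroutines loading chunks could update the same stats concurrently; the commit simply moves the unlock to after the accounting. Below is a minimal, self-contained sketch (hypothetical type and method names, not the Thanos code) of the invariant the commit restores: the shared counter is only ever written while the mutex is held, and the slow fetch still happens outside the lock.

// Minimal sketch (hypothetical names, not the Thanos code) of the invariant
// the commit restores: the shared download counter is written only while the
// reader's mutex is held, and the unlock happens after the accounting.
package main

import (
	"fmt"
	"sync"
)

type chunkReader struct {
	mtx   sync.Mutex
	stats struct {
		dataDownloadedSizeSum int64 // bytes; Thanos tracks this as units.Base2Bytes
	}
}

// accountAndFetch mirrors the fixed ordering: account under the lock,
// then unlock, then do the (slow) object-store read outside the lock.
func (r *chunkReader) accountAndFetch(chunkLen int) {
	r.mtx.Lock()
	r.stats.dataDownloadedSizeSum += int64(chunkLen) // safe: still under r.mtx
	r.mtx.Unlock()

	// ...the actual chunk range read would happen here, without the lock.
}

func main() {
	r := &chunkReader{}
	var wg sync.WaitGroup
	for g := 0; g < 8; g++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for i := 0; i < 1000; i++ {
				r.accountAndFetch(16 * 1024)
			}
		}()
	}
	wg.Wait()
	fmt.Println("downloaded bytes:", r.stats.dataDownloadedSizeSum)
}

Incrementing the counter after the Unlock, as the pre-commit code did, is the kind of unsynchronized write that go run -race would typically flag when several goroutines account for chunks at the same time.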
