diff --git a/util/arena.cc b/util/arena.cc
index 46e3b2eb8f..45db99df10 100644
--- a/util/arena.cc
+++ b/util/arena.cc
@@ -36,6 +36,7 @@ char* Arena::AllocateFallback(size_t bytes) {
 }
 
 char* Arena::AllocateAligned(size_t bytes) {
+  std::lock_guard<std::mutex> lock(mutex_);  // Protect access with a mutex
   const int align = (sizeof(void*) > 8) ? sizeof(void*) : 8;
   static_assert((align & (align - 1)) == 0,
                 "Pointer size should be a power of 2");
diff --git a/util/arena.h b/util/arena.h
index 68fc55d4dd..95b8ca58b2 100644
--- a/util/arena.h
+++ b/util/arena.h
@@ -9,6 +9,7 @@
 #include <cassert>
 #include <cstddef>
 #include <cstdint>
+#include <mutex>
 #include <vector>
 
 namespace leveldb {
@@ -45,6 +46,10 @@ class Arena {
   // Array of new[] allocated memory blocks
   std::vector<char*> blocks_;
 
+  // Mutex protects alloc_ptr_, alloc_bytes_remaining_, and blocks_ in
+  // Allocate() and AllocateAligned().
+  std::mutex mutex_;
+
   // Total memory usage of the arena.
   //
   // TODO(costan): This member is accessed via atomics, but the others are
@@ -56,6 +61,7 @@ inline char* Arena::Allocate(size_t bytes) {
   // The semantics of what to return are a bit messy if we allow
   // 0-byte allocations, so we disallow them here (we don't need
   // them for our internal use).
+  std::lock_guard<std::mutex> lock(mutex_);  // Protect access with a mutex
   assert(bytes > 0);
   if (bytes <= alloc_bytes_remaining_) {
     char* result = alloc_ptr_;
diff --git a/util/arena_test.cc b/util/arena_test.cc
index 3e2011eca8..880a910d2a 100644
--- a/util/arena_test.cc
+++ b/util/arena_test.cc
@@ -4,9 +4,15 @@
 
 #include "util/arena.h"
 
-#include "gtest/gtest.h"
+#include <atomic>
+#include <cstring>
+#include <thread>
+#include <vector>
+
 #include "util/random.h"
 
+#include "gtest/gtest.h"
+
 namespace leveldb {
 
 TEST(ArenaTest, Empty) { Arena arena; }
@@ -58,4 +64,44 @@ TEST(ArenaTest, Simple) {
   }
 }
 
+// Each thread performs N allocations of pseudo-random sizes from the shared
+// arena, zero-fills each one, and tallies the bytes it requested.  Each
+// thread constructs its own Random from `seed` (Random is not thread-safe,
+// so a shared instance would itself be a data race).
+void ThreadedAllocation(Arena* arena, std::atomic<size_t>* bytes_allocated,
+                        int seed) {
+  Random rnd(seed);
+  const int N = 10000;  // Number of allocations per thread
+  for (int i = 0; i < N; ++i) {
+    size_t s = rnd.OneIn(4000)
+                   ? rnd.Uniform(1000)
+                   : (rnd.OneIn(10) ? rnd.Uniform(100) : rnd.Uniform(20));
+    if (s == 0) {
+      s = 1;  // Ensure we never allocate 0 bytes.
+    }
+    char* r = arena->Allocate(s);
+    std::memset(r, 0, s);  // Fill allocated memory with zeros.
+    *bytes_allocated += s;
+  }
+}
+
+TEST(ArenaTest, ThreadSafety) {
+  Arena arena;
+  std::atomic<size_t> bytes_allocated(0);
+  const int kNumThreads = 4;
+  std::vector<std::thread> threads;
+
+  for (int i = 0; i < kNumThreads; ++i) {
+    // Distinct seeds so the threads do not allocate in lock-step.
+    threads.emplace_back(ThreadedAllocation, &arena, &bytes_allocated,
+                         301 + i);
+  }
+
+  for (auto& t : threads) {
+    t.join();
+  }
+
+  // The arena must account for at least every byte it handed out.
+  ASSERT_GE(arena.MemoryUsage(), bytes_allocated.load());
+}
+
 }  // namespace leveldb