diff --git a/src/support/lockedpool.cpp b/src/support/lockedpool.cpp
index 67d8c892e..422a1f686 100644
--- a/src/support/lockedpool.cpp
+++ b/src/support/lockedpool.cpp
@@ -320,6 +320,11 @@ void* LockedPool::alloc(size_t size)
 
 void LockedPool::free(void *ptr)
 {
+    // Freeing the nullptr pointer is OK.
+    if (ptr == nullptr) {
+        return;
+    }
+
     std::lock_guard<std::mutex> lock(mutex);
     // TODO we can do better than this linear search by keeping a map of arena
     // extents to arena, and looking up the address.
diff --git a/src/test/allocator_tests.cpp b/src/test/allocator_tests.cpp
index 68e987e42..198c940eb 100644
--- a/src/test/allocator_tests.cpp
+++ b/src/test/allocator_tests.cpp
@@ -124,6 +124,9 @@ BOOST_AUTO_TEST_CASE(arena_tests)
 
     BOOST_CHECK(b.stats().total == synth_size);
     BOOST_CHECK(b.stats().free == synth_size);
+
+    // Check that Arena::free may be called on nullptr.
+    b.free(nullptr);
 }
 
 /** Mock LockedPageAllocator for testing */
@@ -229,6 +232,9 @@ BOOST_AUTO_TEST_CASE(lockedpool_tests_live)
     BOOST_CHECK(pool.stats().total <= (initial.total + LockedPool::ARENA_SIZE));
     // Usage must be back to where it started
     BOOST_CHECK(pool.stats().used == initial.used);
+
+    // Check that LockedPool::free may be called on nullptr.
+    pool.free(nullptr);
 }
 
 BOOST_AUTO_TEST_SUITE_END()
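
Context note, not part of the patch: accepting nullptr brings LockedPool::free in line with std::free and operator delete, both of which treat a null argument as a no-op, and with Arena::free, which presumably already tolerates nullptr since the new test exercises it without this patch touching it. The main practical benefit is for callers such as RAII wrappers, whose cleanup paths no longer need a null check, e.g. after a move. The sketch below illustrates that pattern under stated assumptions: Pool and ScopedAllocation are hypothetical stand-ins written for illustration, not Bitcoin Core types.

```cpp
#include <cstdlib>
#include <utility>

// Hypothetical stand-in for LockedPool: free(nullptr) is a no-op,
// matching the contract this patch establishes.
struct Pool {
    void* alloc(std::size_t n) { return std::malloc(n); }
    void free(void* p) {
        if (p == nullptr) return; // freeing nullptr is OK
        std::free(p);
    }
};

// RAII guard over a pool allocation. Because Pool::free tolerates
// nullptr, neither the destructor nor the moved-from path needs a
// null check.
class ScopedAllocation {
public:
    ScopedAllocation(Pool& pool, std::size_t n) : pool_(pool), ptr_(pool.alloc(n)) {}
    ScopedAllocation(ScopedAllocation&& other) noexcept
        : pool_(other.pool_), ptr_(std::exchange(other.ptr_, nullptr)) {}
    ~ScopedAllocation() { pool_.free(ptr_); } // safe even after a move
    void* get() const { return ptr_; }
private:
    Pool& pool_;
    void* ptr_;
};

int main() {
    Pool pool;
    ScopedAllocation a(pool, 32);
    ScopedAllocation b(std::move(a)); // a now holds nullptr
    return 0;                         // both destructors run; no crash
}
```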
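
A possible follow-up assertion for lockedpool_tests_live (a sketch, not in the patch): verify that free(nullptr) is a true no-op on the pool's accounting. Only names already used in this diff appear here (pool, stats(), the used and total fields, BOOST_CHECK); that the stats object is copyable is an assumption.

```cpp
// Sketch: free(nullptr) must leave the pool statistics untouched.
const auto before = pool.stats();
pool.free(nullptr);
BOOST_CHECK(pool.stats().used == before.used);
BOOST_CHECK(pool.stats().total == before.total);
```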