Avoid taking pageheap_lock for span deallocations.
span_allocator is internally thread-safe, so we do not need to take the
pageheap_lock to guard it.

PiperOrigin-RevId: 706034172
Change-Id: Icf0d63afd9f5e7eb6b704700938646b3612b6a0b
ckennelly authored and copybara-github committed Dec 14, 2024
1 parent cb05bc4 commit e4308a4
Showing 11 changed files with 171 additions and 16 deletions.
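At a glance, the change splits span deallocation into two phases, keeping the
old single-phase path behind TCMALLOC_INTERNAL_LEGACY_LOCKING. A minimal
sketch of the new calling convention, assembled from the hunks below rather
than taken verbatim from the commit:

  // Phase 1: capture what the page allocator still needs, then free the
  // Span metadata without holding pageheap_lock (span_allocator is
  // internally thread-safe).
  PageAllocatorInterface::AllocationState a{
      Range(span->first_page(), span->num_pages()),
      span->donated(),
  };
  Span::Delete(span);
  // Phase 2: take pageheap_lock only to return the page range itself.
  PageHeapSpinLockHolder l;
  tc_globals.page_allocator().Delete(a, tag);  // tag: the span's MemoryTag.

This shrinks the pageheap_lock critical section to the page-range bookkeeping
alone.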
25 changes: 25 additions & 0 deletions tcmalloc/central_freelist.cc
@@ -25,6 +25,7 @@
#include "tcmalloc/internal/config.h"
#include "tcmalloc/internal/logging.h"
#include "tcmalloc/internal/prefetch.h"
#include "tcmalloc/page_allocator_interface.h"
#include "tcmalloc/pagemap.h"
#include "tcmalloc/pages.h"
#include "tcmalloc/selsan/selsan.h"
@@ -100,6 +101,7 @@ Span* StaticForwarder::AllocateSpan(int size_class, size_t objects_per_span,
return span;
}

#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
static void ReturnSpansToPageHeap(MemoryTag tag, absl::Span<Span*> free_spans,
size_t objects_per_span)
ABSL_LOCKS_EXCLUDED(pageheap_lock) {
@@ -109,6 +111,17 @@ static void ReturnSpansToPageHeap(MemoryTag tag, absl::Span<Span*> free_spans,
tc_globals.page_allocator().Delete(free_span, tag);
}
}
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING

static void ReturnAllocsToPageHeap(
MemoryTag tag,
absl::Span<PageAllocatorInterface::AllocationState> free_allocs)
ABSL_LOCKS_EXCLUDED(pageheap_lock) {
PageHeapSpinLockHolder l;
for (const auto& alloc : free_allocs) {
tc_globals.page_allocator().Delete(alloc, tag);
}
}

void StaticForwarder::DeallocateSpans(size_t objects_per_span,
absl::Span<Span*> free_spans) {
@@ -134,7 +147,19 @@ void StaticForwarder::DeallocateSpans(size_t objects_per_span,
ABSL_CACHELINE_SIZE));
}

#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
ReturnSpansToPageHeap(tag, free_spans, objects_per_span);
#else
PageAllocatorInterface::AllocationState allocs[kMaxObjectsToMove];
for (int i = 0, n = free_spans.size(); i < n; ++i) {
Span* s = free_spans[i];
TC_ASSERT_EQ(tag, GetMemoryTag(s->start_address()));
allocs[i].r = Range(s->first_page(), s->num_pages());
allocs[i].donated = s->donated();
Span::Delete(s);
}
ReturnAllocsToPageHeap(tag, absl::MakeSpan(allocs, free_spans.size()));
#endif
}

} // namespace central_freelist_internal
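Note that DeallocateSpans now frees the Span metadata for the entire batch
before ReturnAllocsToPageHeap takes pageheap_lock, so the critical section
covers only the page-range returns. The fixed-size stack buffer relies on
central-free-list batches never exceeding kMaxObjectsToMove; a hypothetical
guard (not part of the commit) would be:

  TC_ASSERT_LE(free_spans.size(), kMaxObjectsToMove);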
30 changes: 25 additions & 5 deletions tcmalloc/huge_page_aware_allocator.h
@@ -114,7 +114,10 @@ class StaticForwarder {
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING
ABSL_ATTRIBUTE_RETURNS_NONNULL;
static void DeleteSpan(Span* span)
ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock) ABSL_ATTRIBUTE_NONNULL();
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock)
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING
ABSL_ATTRIBUTE_NONNULL();

// SystemAlloc state.
static AddressRange AllocatePages(size_t bytes, size_t align, MemoryTag tag) {
@@ -170,7 +173,12 @@ class HugePageAwareAllocator final : public PageAllocatorInterface {
// Delete the span "[p, p+n-1]".
// REQUIRES: span was returned by earlier call to New() and
// has not yet been deleted.
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
void Delete(Span* span) ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock) override;
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING

void Delete(AllocationState s)
ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock) override;

BackingStats stats() const
ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock) override;
@@ -751,21 +759,33 @@ inline bool HugePageAwareAllocator<Forwarder>::AddRegion() {
return true;
}

#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
template <class Forwarder>
inline void HugePageAwareAllocator<Forwarder>::Delete(Span* span) {
TC_ASSERT(!span || GetMemoryTag(span->start_address()) == tag_);
PageId p = span->first_page();
HugePage hp = HugePageContaining(p);
Length n = span->num_pages();
info_.RecordFree(Range(p, n));

bool might_abandon = span->donated();
// TODO(b/175334169): Lift this out of Delete's pageheap_lock.
bool donated = span->donated();
forwarder_.DeleteSpan(span);

Delete(AllocationState{Range{p, n}, donated});
}
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING

template <class Forwarder>
inline void HugePageAwareAllocator<Forwarder>::Delete(AllocationState s) {
const PageId p = s.r.p;
const HugePage hp = HugePageContaining(p);
const Length n = s.r.n;
info_.RecordFree(Range(p, n));

// Clear the descriptor of the page so a second pass through the same page
// could trigger the check on `span != nullptr` in do_free_pages.
forwarder_.Set(p, nullptr);

const bool might_abandon = s.donated;

// The tricky part, as with so many allocators: where did we come from?
// There are several possibilities.
FillerType::Tracker* pt = GetTracker(hp);
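The two Delete overloads above divide the work: the legacy Delete(Span*) is
now a thin shim that captures the range and donated bit, frees the Span
(still under pageheap_lock in legacy mode, per the TODO), and forwards to
Delete(AllocationState), which also clears the pagemap entry so a double free
of the same pages trips the span != nullptr check in do_free_pages. A
condensed sketch of the shim, using the names above:

  void Delete(Span* span) {               // Legacy entry point, lock held.
    const Range r(span->first_page(), span->num_pages());
    const bool donated = span->donated();
    forwarder_.DeleteSpan(span);          // TODO(b/175334169): lift out of the lock.
    Delete(AllocationState{r, donated});  // Range + donated bit suffice from here.
  }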
27 changes: 24 additions & 3 deletions tcmalloc/huge_page_aware_allocator_fuzz.cc
@@ -243,8 +243,19 @@ void FuzzHPAA(const std::string& s) {
allocated -= span_info.span->num_pages();

{
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
PageHeapSpinLockHolder l;
allocator->Delete(span_info.span);
#else
PageAllocatorInterface::AllocationState a{
Range(span_info.span->first_page(),
span_info.span->num_pages()),
span_info.span->donated(),
};
allocator->forwarder().DeleteSpan(span_info.span);
PageHeapSpinLockHolder l;
allocator->Delete(a);
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING
}
break;
}
@@ -492,14 +503,24 @@ void FuzzHPAA(const std::string& s) {

// Clean up.
const PageReleaseStats final_stats = [&] {
PageHeapSpinLockHolder l;

for (auto span_info : allocs) {
Span* span = span_info.span;
allocated -= span->num_pages();
allocator->Delete(span);
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
PageHeapSpinLockHolder l;
allocator->Delete(span_info.span);
#else
PageAllocatorInterface::AllocationState a{
Range(span_info.span->first_page(), span_info.span->num_pages()),
span_info.span->donated(),
};
allocator->forwarder().DeleteSpan(span_info.span);
PageHeapSpinLockHolder l;
allocator->Delete(a);
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING
}

PageHeapSpinLockHolder l;
return allocator->GetReleaseStats();
}();

40 changes: 34 additions & 6 deletions tcmalloc/huge_page_aware_allocator_test.cc
@@ -141,8 +141,18 @@ class HugePageAwareAllocatorTest
}

void AllocatorDelete(Span* s, size_t objects_per_span) {
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
PageHeapSpinLockHolder l;
allocator_->Delete(s);
#else
PageAllocatorInterface::AllocationState a{
Range(s->first_page(), s->num_pages()),
s->donated(),
};
allocator_->forwarder().DeleteSpan(s);
PageHeapSpinLockHolder l;
allocator_->Delete(a);
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING
}

Span* New(Length n, SpanAllocInfo span_alloc_info) {
@@ -1374,10 +1384,18 @@ class StatTest : public testing::Test {
void Free(Span* s, SpanAllocInfo span_info) {
Length n = s->num_pages();
total_ -= n;
{
PageHeapSpinLockHolder l;
alloc_->Delete(s);
}
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
PageHeapSpinLockHolder l;
alloc_->Delete(s);
#else
PageAllocatorInterface::AllocationState a{
Range(s->first_page(), s->num_pages()),
s->donated(),
};
alloc_->forwarder().DeleteSpan(s);
PageHeapSpinLockHolder l;
alloc_->Delete(a);
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING
}

void CheckStats() {
@@ -1556,8 +1574,18 @@ struct SpanDeleter {
: allocator(*allocator) {}

void operator()(Span* s) ABSL_LOCKS_EXCLUDED(pageheap_lock) {
const PageHeapSpinLockHolder l;
allocator.Delete(s);
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
PageHeapSpinLockHolder l;
allocator.Delete(s);
#else
PageAllocatorInterface::AllocationState a{
Range(s->first_page(), s->num_pages()),
s->donated(),
};
allocator.forwarder().DeleteSpan(s);
PageHeapSpinLockHolder l;
allocator.Delete(a);
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING
}

HugePageAwareAllocator& allocator;
7 changes: 5 additions & 2 deletions tcmalloc/mock_huge_page_static_forwarder.h
@@ -141,8 +141,11 @@ class FakeStaticForwarder {
reinterpret_cast<uintptr_t>(result);
return span;
}
void DeleteSpan(Span* span) ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock)
ABSL_ATTRIBUTE_NONNULL() {
void DeleteSpan(Span* span)
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock)
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING
ABSL_ATTRIBUTE_NONNULL() {
absl::base_internal::LowLevelAlloc::Free(
reinterpret_cast<void*>(*(reinterpret_cast<uintptr_t*>(span + 1))));
}
12 changes: 12 additions & 0 deletions tcmalloc/page_allocator.h
@@ -63,8 +63,13 @@ class PageAllocator {
// Delete the span "[p, p+n-1]".
// REQUIRES: span was returned by earlier call to New() with the same value of
// "tag" and has not yet been deleted.
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
void Delete(Span* span, MemoryTag tag)
ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock);
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING

void Delete(PageAllocatorInterface::AllocationState s, MemoryTag tag)
ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock);

BackingStats stats() const ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock);

@@ -211,9 +216,16 @@ inline Span* PageAllocator::NewAligned(Length n, Length align,
return impl(tag)->NewAligned(n, align, span_alloc_info);
}

#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
inline void PageAllocator::Delete(Span* span, MemoryTag tag) {
impl(tag)->Delete(span);
}
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING

inline void PageAllocator::Delete(PageAllocatorInterface::AllocationState s,
MemoryTag tag) {
impl(tag)->Delete(s);
}

inline BackingStats PageAllocator::stats() const {
BackingStats ret = normal_impl_[0]->stats();
12 changes: 12 additions & 0 deletions tcmalloc/page_allocator_interface.h
@@ -52,8 +52,20 @@ class PageAllocatorInterface {
// Delete the span "[p, p+n-1]".
// REQUIRES: span was returned by earlier call to New() and
// has not yet been deleted.
//
// TODO(b/175334169): Prefer the AllocationState-ful API.
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
virtual void Delete(Span* span)
ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock) = 0;
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING

struct AllocationState {
Range r;
bool donated;
};

virtual void Delete(AllocationState s)
ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock) = 0;

virtual BackingStats stats() const
ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock) = 0;
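AllocationState carries the only two facts the page allocator still needs
once the Span metadata is gone: r, the page range handed out by
New()/NewAligned(), and donated, which (as the huge_page_aware_allocator.h
hunks above suggest) Delete(AllocationState) reads as might_abandon. Callers
are expected to free the Span first and only then call Delete(AllocationState)
under pageheap_lock, as the tests and fuzzer changes in this commit
illustrate.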
11 changes: 11 additions & 0 deletions tcmalloc/page_allocator_test.cc
@@ -31,6 +31,7 @@
#include "tcmalloc/internal/config.h"
#include "tcmalloc/internal/logging.h"
#include "tcmalloc/malloc_extension.h"
#include "tcmalloc/page_allocator_interface.h"
#include "tcmalloc/page_allocator_test_util.h"
#include "tcmalloc/span.h"
#include "tcmalloc/static_vars.h"
@@ -70,8 +71,18 @@ class PageAllocatorTest : public testing::Test {
return allocator_->NewAligned(n, align, span_alloc_info, tag);
}
void Delete(Span* s, MemoryTag tag = MemoryTag::kNormal) {
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
PageHeapSpinLockHolder l;
allocator_->Delete(s, tag);
#else
PageAllocatorInterface::AllocationState a{
Range(s->first_page(), s->num_pages()),
s->donated(),
};
Span::Delete(s);
PageHeapSpinLockHolder l;
allocator_->Delete(a, tag);
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING
}

std::string Print() {
10 changes: 10 additions & 0 deletions tcmalloc/span_benchmark.cc
@@ -175,8 +175,18 @@ void BM_NewDelete(benchmark::State& state) {

benchmark::DoNotOptimize(sp);

#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
PageHeapSpinLockHolder l;
tc_globals.page_allocator().Delete(sp, MemoryTag::kNormal);
#else
PageAllocatorInterface::AllocationState a{
Range(sp->first_page(), sp->num_pages()),
sp->donated(),
};
Span::Delete(sp);
PageHeapSpinLockHolder l;
tc_globals.page_allocator().Delete(a, MemoryTag::kNormal);
#endif
}
state.SetItemsProcessed(state.iterations());
}
2 changes: 2 additions & 0 deletions tcmalloc/span_test.cc
@@ -283,7 +283,9 @@ TEST(SpanAllocatorTest, Alignment) {
}

{
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
PageHeapSpinLockHolder l;
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING
for (Span* s : spans) {
Span::Delete(s);
}
11 changes: 11 additions & 0 deletions tcmalloc/tcmalloc.cc
@@ -103,6 +103,7 @@
#include "tcmalloc/malloc_tracing_extension.h"
#include "tcmalloc/metadata_object_allocator.h"
#include "tcmalloc/page_allocator.h"
#include "tcmalloc/page_allocator_interface.h"
#include "tcmalloc/pagemap.h"
#include "tcmalloc/pages.h"
#include "tcmalloc/parameters.h"
@@ -659,8 +660,18 @@ static void InvokeHooksAndFreePages(void* ptr, std::optional<size_t> size) {
} else {
TC_ASSERT_EQ(span->first_page(), p);
TC_ASSERT_EQ(reinterpret_cast<uintptr_t>(ptr) % kPageSize, 0);
#ifdef TCMALLOC_INTERNAL_LEGACY_LOCKING
PageHeapSpinLockHolder l;
tc_globals.page_allocator().Delete(span, GetMemoryTag(ptr));
#else
PageAllocatorInterface::AllocationState a{
Range(p, span->num_pages()),
span->donated(),
};
Span::Delete(span);
PageHeapSpinLockHolder l;
tc_globals.page_allocator().Delete(a, GetMemoryTag(ptr));
#endif // TCMALLOC_INTERNAL_LEGACY_LOCKING
}
}
