From 970ae43af654392cf8c0de7f634cc39666ff0ce4 Mon Sep 17 00:00:00 2001
From: Chris Kennelly
Date: Tue, 3 Dec 2024 11:29:19 -0800
Subject: [PATCH] Remove unused parameter.

For span-driven allocation decisions, we need to look up the correct
huge page on the correct list either way on deallocation.  Knowledge of
the number of objects is unused by the allocator.

PiperOrigin-RevId: 702410863
Change-Id: Ie13f4b44597c6c910c87d7eb6ae1d0de970f4847
---
 tcmalloc/central_freelist.cc               |  2 +-
 tcmalloc/huge_page_aware_allocator.h       |  6 ++----
 tcmalloc/huge_page_aware_allocator_fuzz.cc |  4 ++--
 tcmalloc/huge_page_aware_allocator_test.cc |  6 +++---
 tcmalloc/page_allocator.h                  |  7 +++----
 tcmalloc/page_allocator_interface.h        |  2 +-
 tcmalloc/page_allocator_test.cc            | 22 ++++++++++------------
 tcmalloc/span_benchmark.cc                 |  3 +--
 tcmalloc/tcmalloc.cc                       |  3 +--
 9 files changed, 24 insertions(+), 31 deletions(-)

diff --git a/tcmalloc/central_freelist.cc b/tcmalloc/central_freelist.cc
index f081a37d8..47916fb7e 100644
--- a/tcmalloc/central_freelist.cc
+++ b/tcmalloc/central_freelist.cc
@@ -106,7 +106,7 @@ static void ReturnSpansToPageHeap(MemoryTag tag, absl::Span<Span*> free_spans,
   PageHeapSpinLockHolder l;
   for (Span* const free_span : free_spans) {
     TC_ASSERT_EQ(tag, GetMemoryTag(free_span->start_address()));
-    tc_globals.page_allocator().Delete(free_span, objects_per_span, tag);
+    tc_globals.page_allocator().Delete(free_span, tag);
   }
 }
 
diff --git a/tcmalloc/huge_page_aware_allocator.h b/tcmalloc/huge_page_aware_allocator.h
index b79121c18..ea999786b 100644
--- a/tcmalloc/huge_page_aware_allocator.h
+++ b/tcmalloc/huge_page_aware_allocator.h
@@ -164,8 +164,7 @@ class HugePageAwareAllocator final : public PageAllocatorInterface {
   // Delete the span "[p, p+n-1]".
   // REQUIRES: span was returned by earlier call to New() and
   //           has not yet been deleted.
-  void Delete(Span* span, size_t objects_per_span)
-      ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock) override;
+  void Delete(Span* span) ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock) override;
 
   BackingStats stats() const
       ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock) override;
@@ -697,8 +696,7 @@ inline bool HugePageAwareAllocator<Forwarder>::AddRegion() {
 }
 
 template <class Forwarder>
-inline void HugePageAwareAllocator<Forwarder>::Delete(Span* span,
-                                                      size_t objects_per_span) {
+inline void HugePageAwareAllocator<Forwarder>::Delete(Span* span) {
   TC_ASSERT(!span || GetMemoryTag(span->start_address()) == tag_);
   PageId p = span->first_page();
   HugePage hp = HugePageContaining(p);
diff --git a/tcmalloc/huge_page_aware_allocator_fuzz.cc b/tcmalloc/huge_page_aware_allocator_fuzz.cc
index 7b8bb630f..4063bb181 100644
--- a/tcmalloc/huge_page_aware_allocator_fuzz.cc
+++ b/tcmalloc/huge_page_aware_allocator_fuzz.cc
@@ -244,7 +244,7 @@ void FuzzHPAA(const std::string& s) {
 
       {
         PageHeapSpinLockHolder l;
-        allocator->Delete(span_info.span, span_info.objects_per_span);
+        allocator->Delete(span_info.span);
       }
       break;
     }
@@ -497,7 +497,7 @@ void FuzzHPAA(const std::string& s) {
   for (auto span_info : allocs) {
     Span* span = span_info.span;
     allocated -= span->num_pages();
-    allocator->Delete(span, span_info.objects_per_span);
+    allocator->Delete(span);
   }
 
   return allocator->GetReleaseStats();
diff --git a/tcmalloc/huge_page_aware_allocator_test.cc b/tcmalloc/huge_page_aware_allocator_test.cc
index 39dc04a8f..77d5ad86d 100644
--- a/tcmalloc/huge_page_aware_allocator_test.cc
+++ b/tcmalloc/huge_page_aware_allocator_test.cc
@@ -142,7 +142,7 @@ class HugePageAwareAllocatorTest
 
   void AllocatorDelete(Span* s, size_t objects_per_span) {
     PageHeapSpinLockHolder l;
-    allocator_->Delete(s, objects_per_span);
+    allocator_->Delete(s);
   }
 
   Span* New(Length n, SpanAllocInfo span_alloc_info) {
@@ -1376,7 +1376,7 @@ class StatTest : public testing::Test {
     total_ -= n;
     {
       PageHeapSpinLockHolder l;
-      alloc_->Delete(s, span_info.objects_per_span);
+      alloc_->Delete(s);
     }
   }
 
@@ -1557,7 +1557,7 @@ struct SpanDeleter {
 
   void operator()(Span* s) ABSL_LOCKS_EXCLUDED(pageheap_lock) {
     const PageHeapSpinLockHolder l;
-    allocator.Delete(s, /*objects_per_span=*/1);
+    allocator.Delete(s);
   }
 
   HugePageAwareAllocator& allocator;
diff --git a/tcmalloc/page_allocator.h b/tcmalloc/page_allocator.h
index 086ef982b..2390207ea 100644
--- a/tcmalloc/page_allocator.h
+++ b/tcmalloc/page_allocator.h
@@ -63,7 +63,7 @@ class PageAllocator {
   // Delete the span "[p, p+n-1]".
   // REQUIRES: span was returned by earlier call to New() with the same value of
   //           "tag" and has not yet been deleted.
-  void Delete(Span* span, size_t objects_per_span, MemoryTag tag)
+  void Delete(Span* span, MemoryTag tag)
       ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock);
 
   BackingStats stats() const ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock);
@@ -211,9 +211,8 @@ inline Span* PageAllocator::NewAligned(Length n, Length align,
   return impl(tag)->NewAligned(n, align, span_alloc_info);
 }
 
-inline void PageAllocator::Delete(Span* span, size_t objects_per_span,
-                                  MemoryTag tag) {
-  impl(tag)->Delete(span, objects_per_span);
+inline void PageAllocator::Delete(Span* span, MemoryTag tag) {
+  impl(tag)->Delete(span);
 }
 
 inline BackingStats PageAllocator::stats() const {
diff --git a/tcmalloc/page_allocator_interface.h b/tcmalloc/page_allocator_interface.h
index a0c8c589a..adeb99e42 100644
--- a/tcmalloc/page_allocator_interface.h
+++ b/tcmalloc/page_allocator_interface.h
@@ -52,7 +52,7 @@ class PageAllocatorInterface {
   // Delete the span "[p, p+n-1]".
   // REQUIRES: span was returned by earlier call to New() and
   //           has not yet been deleted.
-  virtual void Delete(Span* span, size_t num_objects)
+  virtual void Delete(Span* span)
       ABSL_EXCLUSIVE_LOCKS_REQUIRED(pageheap_lock) = 0;
 
   virtual BackingStats stats() const
diff --git a/tcmalloc/page_allocator_test.cc b/tcmalloc/page_allocator_test.cc
index cddab2dcc..ae1a4339b 100644
--- a/tcmalloc/page_allocator_test.cc
+++ b/tcmalloc/page_allocator_test.cc
@@ -69,10 +69,9 @@ class PageAllocatorTest : public testing::Test {
                    MemoryTag tag = MemoryTag::kNormal) {
     return allocator_->NewAligned(n, align, span_alloc_info, tag);
   }
-  void Delete(Span* s, size_t objects_per_span,
-              MemoryTag tag = MemoryTag::kNormal) {
+  void Delete(Span* s, MemoryTag tag = MemoryTag::kNormal) {
     PageHeapSpinLockHolder l;
-    allocator_->Delete(s, objects_per_span, tag);
+    allocator_->Delete(s, tag);
   }
 
   std::string Print() {
@@ -94,7 +93,7 @@ TEST_F(PageAllocatorTest, Record) {
   constexpr SpanAllocInfo kSpanInfo = {/*objects_per_span=*/7,
                                        AccessDensityPrediction::kSparse};
   for (int i = 0; i < 15; ++i) {
-    Delete(New(Length(1), kSpanInfo), kSpanInfo.objects_per_span);
+    Delete(New(Length(1), kSpanInfo));
   }
 
   std::vector<Span*> spans;
@@ -103,8 +102,7 @@ TEST_F(PageAllocatorTest, Record) {
   }
 
   for (int i = 0; i < 25; ++i) {
-    Delete(NewAligned(Length(3), Length(2), kSpanInfo),
-           kSpanInfo.objects_per_span);
+    Delete(NewAligned(Length(3), Length(2), kSpanInfo));
   }
   {
     PageHeapSpinLockHolder l;
@@ -131,14 +129,14 @@ TEST_F(PageAllocatorTest, Record) {
       ASSERT_EQ(0, info.counts_for(i).nfree);
     }
   }
-  for (auto s : spans) Delete(s, kSpanInfo.objects_per_span);
+  for (auto s : spans) Delete(s);
 }
 
 // And that we call the print method properly.
 TEST_F(PageAllocatorTest, PrintIt) {
   constexpr SpanAllocInfo kSpanInfo = {/*objects_per_span=*/17,
                                        AccessDensityPrediction::kDense};
-  Delete(New(Length(1), kSpanInfo), kSpanInfo.objects_per_span);
+  Delete(New(Length(1), kSpanInfo));
   std::string output = Print();
   EXPECT_THAT(output, testing::ContainsRegex("stats on allocation sizes"));
 }
@@ -172,8 +170,8 @@ TEST_F(PageAllocatorTest, ShrinkFailureTest) {
   EXPECT_LE(
       0, allocator_->successful_shrinks_after_limit_hit(PageAllocator::kSoft));
 
-  Delete(normal, kSpanInfo.objects_per_span, MemoryTag::kNormal);
-  Delete(sampled, kSpanInfo.objects_per_span, MemoryTag::kSampled);
+  Delete(normal, MemoryTag::kNormal);
+  Delete(sampled, MemoryTag::kSampled);
 
   Parameters::set_hpaa_subrelease(old_subrelease);
 }
@@ -215,8 +213,8 @@ TEST_F(PageAllocatorTest, b270916852) {
   EXPECT_LE(
       1, allocator_->successful_shrinks_after_limit_hit(PageAllocator::kSoft));
 
-  Delete(normal, kSpanInfo.objects_per_span, MemoryTag::kNormal);
-  Delete(sampled, kSpanInfo.objects_per_span, MemoryTag::kSampled);
+  Delete(normal, MemoryTag::kNormal);
+  Delete(sampled, MemoryTag::kSampled);
 
   Parameters::set_hpaa_subrelease(old_subrelease);
 }
diff --git a/tcmalloc/span_benchmark.cc b/tcmalloc/span_benchmark.cc
index 4f2f5d598..535132abe 100644
--- a/tcmalloc/span_benchmark.cc
+++ b/tcmalloc/span_benchmark.cc
@@ -176,8 +176,7 @@ void BM_NewDelete(benchmark::State& state) {
     benchmark::DoNotOptimize(sp);
 
     PageHeapSpinLockHolder l;
-    tc_globals.page_allocator().Delete(sp, kSpanInfo.objects_per_span,
-                                       MemoryTag::kNormal);
+    tc_globals.page_allocator().Delete(sp, MemoryTag::kNormal);
  }
   state.SetItemsProcessed(state.iterations());
 }
diff --git a/tcmalloc/tcmalloc.cc b/tcmalloc/tcmalloc.cc
index 500781e05..6484cc22a 100644
--- a/tcmalloc/tcmalloc.cc
+++ b/tcmalloc/tcmalloc.cc
@@ -649,8 +649,7 @@ static void InvokeHooksAndFreePages(void* ptr, std::optional<size_t> size) {
     TC_ASSERT_EQ(span->first_page(), p);
     TC_ASSERT_EQ(reinterpret_cast<uintptr_t>(ptr) % kPageSize, 0);
     PageHeapSpinLockHolder l;
-    tc_globals.page_allocator().Delete(span, /*objects_per_span=*/1,
-                                       GetMemoryTag(ptr));
+    tc_globals.page_allocator().Delete(span, GetMemoryTag(ptr));
   }
 }
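Illustrative usage after this change (a minimal sketch, not part of the patch: it assumes tcmalloc-internal context like the call sites above, and ReturnSpanExample is a hypothetical helper name):

static void ReturnSpanExample(Span* span)
    ABSL_LOCKS_EXCLUDED(pageheap_lock) {
  // PageAllocator::Delete() still requires pageheap_lock to be held.
  PageHeapSpinLockHolder l;
  // objects_per_span is no longer passed: the span and its memory tag are
  // enough for the allocator to find the correct huge page on deallocation.
  tc_globals.page_allocator().Delete(span, GetMemoryTag(span->start_address()));
}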