
[PA/shim] Support sized-delete

This change adds support for C++'s sized deallocation to the Allocator
Shim in Chrome. There is a planned change,
https://crrev.com/c/6445033, to enable sized deallocation support on the
compiler side. As the current Shim does not export symbols with the
extra size parameter, that information is simply ignored. The size can
be useful in some scenarios, and it would be great if Shim users could
choose to utilize it. Therefore, this patch makes the following changes:

1. Adds new dispatcher functions (see the sketch after this list):
  - `free_with_size_function`:
    `free()` but with a known size; this already existed on `IS_APPLE`
    platforms as `free_definite_size_function` and is now available on
    all platforms.
  - `free_with_alignment_function`:
    `free()` but with a known alignment.
  - `free_with_size_and_alignment_function`:
    `free()` but with a known size and alignment.

2. Adds implementations of the functions above to the existing shim
   dispatchers.

3. Exported C++ symbols for sized deallocation now route free requests
   through the new dispatcher functions.
   See `allocator_shim_override_cpp_symbols.h` for details.

4. Expands a unit test for the C++ operators.
   See `allocator_shim_unittest.cc` for details.

5. Adds a `shim_supports_sized_dealloc` GN arg, which is disabled by
   default. Change 3 is gated behind this flag, so this patch is a
   no-op unless the flag is explicitly flipped. Changes 1 and 2 are
   intentionally not gated so that dispatcher users do not need to pay
   attention to the build configuration in use; the new dispatcher
   functions are simply never called when the flag is disabled.
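
For illustration, here is a minimal sketch (all names hypothetical) of
a shim user consuming the size information through the new hooks. It
forwards with plain calls rather than the MUSTTAIL tail calls used by
the in-tree dispatchers, and it assumes the shim fills nullptr hooks
from the next dispatch in the chain (the COPY_IF_NULLPTR logic this
patch extends):

  #include <cstddef>

  #include "partition_alloc/shim/allocator_shim.h"

  namespace {

  // Hypothetical observer dispatch; hooks left as nullptr are filled
  // from the chained dispatch on insertion.
  allocator_shim::AllocatorDispatch g_sized_free_observer;

  // Consume the size, then forward down the chain.
  void ObserveFreeWithSize(void* address, size_t size, void* context) {
    // ... account for `size` here; must not allocate or free ...
    g_sized_free_observer.next->free_with_size_function(address, size,
                                                        context);
  }

  void ObserveFreeWithSizeAndAlignment(void* address,
                                       size_t size,
                                       size_t alignment,
                                       void* context) {
    g_sized_free_observer.next->free_with_size_and_alignment_function(
        address, size, alignment, context);
  }

  }  // namespace

  void InstallSizedFreeObserver() {
    g_sized_free_observer.free_with_size_function = &ObserveFreeWithSize;
    g_sized_free_observer.free_with_size_and_alignment_function =
        &ObserveFreeWithSizeAndAlignment;
    allocator_shim::InsertAllocatorDispatch(&g_sized_free_observer);
  }

With `shim_supports_sized_dealloc` disabled, `operator delete(void*,
size_t)` falls back to the plain `free_function` path and these hooks
are never reached.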

Bug: 410190984, 410192659
Change-Id: I350fe94ce84a100ead22cb6eb9a557031fd57b29
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6442080
Reviewed-by: Francois Pierre Doray <fdoray@chromium.org>
Reviewed-by: Takashi Sakamoto <tasak@google.com>
Mega-CQ: Mikihito Matsuura <mikt@google.com>
Commit-Queue: Mikihito Matsuura <mikt@google.com>
Reviewed-by: Yuki Shiino <yukishiino@chromium.org>
Reviewed-by: Paul Semel <paulsemel@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1450497}
Author: mikt
Date: 2025-04-23 05:45:18 -07:00
Committed by: Chromium LUCI CQ
Commit: 989d97897b (parent: 51324be11d)
25 changed files with 1333 additions and 351 deletions

@@ -195,6 +195,30 @@ struct DispatcherImpl {
MUSTTAIL return allocator_dispatch_.next->free_function(address, context);
}
static void FreeWithSizeFn(void* address, size_t size, void* context) {
DoNotifyFreeForShim(address);
MUSTTAIL return allocator_dispatch_.next->free_with_size_function(
address, size, context);
}
static void FreeWithAlignmentFn(void* address,
size_t alignment,
void* context) {
DoNotifyFreeForShim(address);
MUSTTAIL return allocator_dispatch_.next->free_with_alignment_function(
address, alignment, context);
}
static void FreeWithSizeAndAlignmentFn(void* address,
size_t size,
size_t alignment,
void* context) {
DoNotifyFreeForShim(address);
MUSTTAIL return allocator_dispatch_.next
->free_with_size_and_alignment_function(address, size, alignment,
context);
}
static unsigned BatchMallocFn(size_t size,
void** results,
unsigned num_requested,
@@ -219,12 +243,6 @@ struct DispatcherImpl {
to_be_freed, num_to_be_freed, context);
}
static void FreeDefiniteSizeFn(void* address, size_t size, void* context) {
DoNotifyFreeForShim(address);
MUSTTAIL return allocator_dispatch_.next->free_definite_size_function(
address, size, context);
}
static void TryFreeDefaultFn(void* address, void* context) {
DoNotifyFreeForShim(address);
MUSTTAIL return allocator_dispatch_.next->try_free_default_function(
@@ -324,26 +342,28 @@ std::tuple<ObserverTypes*...> DispatcherImpl<ObserverTypes...>::s_observers;
#if PA_BUILDFLAG(USE_ALLOCATOR_SHIM)
template <typename... ObserverTypes>
AllocatorDispatch DispatcherImpl<ObserverTypes...>::allocator_dispatch_ = {
AllocFn, // alloc_function
AllocUncheckedFn, // alloc_unchecked_function
AllocZeroInitializedFn, // alloc_zero_initialized_function
AllocAlignedFn, // alloc_aligned_function
ReallocFn, // realloc_function
ReallocUncheckedFn, // realloc_unchecked_function
FreeFn, // free_function
nullptr, // get_size_estimate_function
nullptr, // good_size_function
nullptr, // claimed_address_function
BatchMallocFn, // batch_malloc_function
BatchFreeFn, // batch_free_function
FreeDefiniteSizeFn, // free_definite_size_function
TryFreeDefaultFn, // try_free_default_function
AlignedMallocFn, // aligned_malloc_function
AlignedMallocUncheckedFn, // aligned_malloc_unchecked_function
AlignedReallocFn, // aligned_realloc_function
AlignedReallocUncheckedFn, // aligned_realloc_unchecked_function
AlignedFreeFn, // aligned_free_function
nullptr // next
AllocFn, // alloc_function
AllocUncheckedFn, // alloc_unchecked_function
AllocZeroInitializedFn, // alloc_zero_initialized_function
AllocAlignedFn, // alloc_aligned_function
ReallocFn, // realloc_function
ReallocUncheckedFn, // realloc_unchecked_function
FreeFn, // free_function
FreeWithSizeFn, // free_with_size_function
FreeWithAlignmentFn, // free_with_alignment_function
FreeWithSizeAndAlignmentFn, // free_with_size_and_alignment_function
nullptr, // get_size_estimate_function
nullptr, // good_size_function
nullptr, // claimed_address_function
BatchMallocFn, // batch_malloc_function
BatchFreeFn, // batch_free_function
TryFreeDefaultFn, // try_free_default_function
AlignedMallocFn, // aligned_malloc_function
AlignedMallocUncheckedFn, // aligned_malloc_unchecked_function
AlignedReallocFn, // aligned_realloc_function
AlignedReallocUncheckedFn, // aligned_realloc_unchecked_function
AlignedFreeFn, // aligned_free_function
nullptr // next
};
#endif // PA_BUILDFLAG(USE_ALLOCATOR_SHIM)

@@ -146,12 +146,14 @@ struct AllocationEventDispatcherInternalTest : public DispatcherTest {
&realloc_function,
&realloc_unchecked_function,
[](void*, void*) {},
[](void*, size_t, void*) {},
[](void*, size_t, void*) {},
[](void*, size_t, size_t, void*) {},
&get_size_estimate_function,
&good_size_function,
&claimed_address_function,
&batch_malloc_function,
[](void**, unsigned, void*) {},
[](void*, size_t, void*) {},
[](void*, void*) {},
&aligned_malloc_function,
&aligned_malloc_unchecked_function,
@@ -259,7 +261,7 @@ TEST_F(AllocationEventDispatcherInternalTest, VerifyAllocatorShimDataIsSet) {
EXPECT_NE(nullptr, allocator_dispatch->free_function);
EXPECT_NE(nullptr, allocator_dispatch->batch_malloc_function);
EXPECT_NE(nullptr, allocator_dispatch->batch_free_function);
EXPECT_NE(nullptr, allocator_dispatch->free_definite_size_function);
EXPECT_NE(nullptr, allocator_dispatch->free_with_size_function);
EXPECT_NE(nullptr, allocator_dispatch->try_free_default_function);
EXPECT_NE(nullptr, allocator_dispatch->aligned_malloc_function);
EXPECT_NE(nullptr, allocator_dispatch->aligned_malloc_unchecked_function);
@@ -515,7 +517,7 @@ TEST_F(AllocationEventDispatcherInternalTest,
}
TEST_F(AllocationEventDispatcherInternalTest,
VerifyAllocatorShimHooksTriggerCorrectly_free_definite_size_function) {
VerifyAllocatorShimHooksTriggerCorrectly_free_with_size_function) {
std::array<ObserverMock, kMaximumNumberOfObservers> observers;
for (auto& mock : observers) {
@@ -530,12 +532,12 @@ TEST_F(AllocationEventDispatcherInternalTest,
GetNotificationHooks(CreateTupleOfPointers(observers));
auto* const allocator_dispatch = dispatch_data.GetAllocatorDispatch();
EXPECT_NE(allocator_dispatch->free_definite_size_function, nullptr);
EXPECT_NE(allocator_dispatch->free_with_size_function, nullptr);
allocator_dispatch->next = GetNextAllocatorDispatch();
allocator_dispatch->free_definite_size_function(GetAllocatedAddress(),
GetAllocatedSize(), nullptr);
allocator_dispatch->free_with_size_function(GetAllocatedAddress(),
GetAllocatedSize(), nullptr);
}
TEST_F(AllocationEventDispatcherInternalTest,

@@ -12,6 +12,7 @@ build_with_chromium = false
# configuration.
use_partition_alloc_as_malloc_default = false
use_allocator_shim_default = false
shim_supports_sized_dealloc_default = false
enable_backup_ref_ptr_support_default = false
enable_backup_ref_ptr_slow_checks_default = false
enable_dangling_raw_ptr_checks_default = false

@@ -142,6 +142,9 @@ declare_args() {
# calls to PartitionAlloc, rather than some other platform allocator.
use_partition_alloc_as_malloc = use_partition_alloc && use_allocator_shim &&
use_partition_alloc_as_malloc_default
shim_supports_sized_dealloc =
use_allocator_shim && shim_supports_sized_dealloc_default
}
declare_args() {

@ -183,6 +183,7 @@ pa_buildflag_header("buildflags") {
"RAW_PTR_ZERO_ON_MOVE=$raw_ptr_zero_on_move",
"REALLOC_GROWTH_FACTOR_MITIGATION=$partition_alloc_realloc_growth_factor_mitigation",
"RECORD_ALLOC_INFO=$record_alloc_info",
"SHIM_SUPPORTS_SIZED_DEALLOC=$shim_supports_sized_dealloc",
"SMALLER_PARTITION_COOKIE=$smaller_partition_cookie",
"STACK_SCAN_SUPPORTED=$stack_scan_supported",
"USE_ALLOCATOR_SHIM=$use_allocator_shim",

@@ -31,7 +31,12 @@ struct AllocatorDispatch {
using BatchFreeFn = void(void** to_be_freed,
unsigned num_to_be_freed,
void* context);
using FreeDefiniteSizeFn = void(void* ptr, size_t size, void* context);
using FreeWithSizeFn = void(void* ptr, size_t size, void* context);
using FreeWithAlignmentFn = void(void* ptr, size_t alignment, void* context);
using FreeWithSizeAndAlignmentFn = void(void* ptr,
size_t size,
size_t alignment,
void* context);
using TryFreeDefaultFn = void(void* ptr, void* context);
using AlignedMallocFn = void*(size_t size, size_t alignment, void* context);
using AlignedMallocUncheckedFn = void*(size_t size,
@@ -54,14 +59,16 @@ struct AllocatorDispatch {
ReallocFn* realloc_function;
ReallocUncheckedFn* realloc_unchecked_function;
FreeFn* free_function;
FreeWithSizeFn* free_with_size_function;
FreeWithAlignmentFn* free_with_alignment_function;
FreeWithSizeAndAlignmentFn* free_with_size_and_alignment_function;
GetSizeEstimateFn* get_size_estimate_function;
GoodSizeFn* good_size_function;
// claimed_address, batch_malloc, batch_free, free_definite_size and
// claimed_address, batch_malloc, batch_free and
// try_free_default are specific to the OSX and iOS allocators.
ClaimedAddressFn* claimed_address_function;
BatchMallocFn* batch_malloc_function;
BatchFreeFn* batch_free_function;
FreeDefiniteSizeFn* free_definite_size_function;
TryFreeDefaultFn* try_free_default_function;
// _aligned_malloc, _aligned_realloc, and _aligned_free are specific to the
// Windows allocator.
@@ -125,12 +132,14 @@ struct AllocatorDispatch {
COPY_IF_NULLPTR(realloc_function);
COPY_IF_NULLPTR(realloc_unchecked_function);
COPY_IF_NULLPTR(free_function);
COPY_IF_NULLPTR(free_with_size_function);
COPY_IF_NULLPTR(free_with_alignment_function);
COPY_IF_NULLPTR(free_with_size_and_alignment_function);
COPY_IF_NULLPTR(get_size_estimate_function);
COPY_IF_NULLPTR(good_size_function);
COPY_IF_NULLPTR(claimed_address_function);
COPY_IF_NULLPTR(batch_malloc_function);
COPY_IF_NULLPTR(batch_free_function);
COPY_IF_NULLPTR(free_definite_size_function);
COPY_IF_NULLPTR(try_free_default_function);
COPY_IF_NULLPTR(aligned_malloc_function);
COPY_IF_NULLPTR(aligned_malloc_unchecked_function);

@@ -40,6 +40,26 @@ void FreeImpl(void* ptr, void* context) {
functions.free(reinterpret_cast<struct _malloc_zone_t*>(context), ptr);
}
void FreeWithSizeImpl(void* ptr, size_t size, void* context) {
MallocZoneFunctions& functions = GetFunctionsForZone(context);
functions.free_definite_size(
reinterpret_cast<struct _malloc_zone_t*>(context), ptr, size);
}
void FreeWithAlignmentImpl(void* ptr, size_t, void* context) {
MallocZoneFunctions& functions = GetFunctionsForZone(context);
functions.free(reinterpret_cast<struct _malloc_zone_t*>(context), ptr);
}
void FreeWithSizeAndAlignmentImpl(void* ptr,
size_t size,
size_t,
void* context) {
MallocZoneFunctions& functions = GetFunctionsForZone(context);
functions.free_definite_size(
reinterpret_cast<struct _malloc_zone_t*>(context), ptr, size);
}
size_t GetSizeEstimateImpl(void* ptr, void* context) {
MallocZoneFunctions& functions = GetFunctionsForZone(context);
return functions.size(reinterpret_cast<struct _malloc_zone_t*>(context), ptr);
@@ -84,12 +104,6 @@ void BatchFreeImpl(void** to_be_freed,
to_be_freed, num_to_be_freed);
}
void FreeDefiniteSizeImpl(void* ptr, size_t size, void* context) {
MallocZoneFunctions& functions = GetFunctionsForZone(context);
functions.free_definite_size(
reinterpret_cast<struct _malloc_zone_t*>(context), ptr, size);
}
void TryFreeDefaultImpl(void* ptr, void* context) {
MallocZoneFunctions& functions = GetFunctionsForZone(context);
if (functions.try_free_default) {
@@ -102,26 +116,28 @@ void TryFreeDefaultImpl(void* ptr, void* context) {
} // namespace
const AllocatorDispatch AllocatorDispatch::default_dispatch = {
&MallocImpl, /* alloc_function */
&MallocImpl, /* alloc_unchecked_function */
&CallocImpl, /* alloc_zero_initialized_function */
&MemalignImpl, /* alloc_aligned_function */
&ReallocImpl, /* realloc_function */
&ReallocImpl, /* realloc_unchecked_function */
&FreeImpl, /* free_function */
&GetSizeEstimateImpl, /* get_size_estimate_function */
&GoodSizeImpl, /* good_size_function */
&ClaimedAddressImpl, /* claimed_address_function */
&BatchMallocImpl, /* batch_malloc_function */
&BatchFreeImpl, /* batch_free_function */
&FreeDefiniteSizeImpl, /* free_definite_size_function */
&TryFreeDefaultImpl, /* try_free_default_function */
nullptr, /* aligned_malloc_function */
nullptr, /* aligned_malloc_unchecked_function */
nullptr, /* aligned_realloc_function */
nullptr, /* aligned_realloc_unchecked_function */
nullptr, /* aligned_free_function */
nullptr, /* next */
&MallocImpl, /* alloc_function */
&MallocImpl, /* alloc_unchecked_function */
&CallocImpl, /* alloc_zero_initialized_function */
&MemalignImpl, /* alloc_aligned_function */
&ReallocImpl, /* realloc_function */
&ReallocImpl, /* realloc_unchecked_function */
&FreeImpl, /* free_function */
&FreeWithSizeImpl, /* free_with_size_function */
&FreeWithAlignmentImpl, /* free_with_alignment_function */
&FreeWithSizeAndAlignmentImpl, /* free_with_size_and_alignment_function */
&GetSizeEstimateImpl, /* get_size_estimate_function */
&GoodSizeImpl, /* good_size_function */
&ClaimedAddressImpl, /* claimed_address_function */
&BatchMallocImpl, /* batch_malloc_function */
&BatchFreeImpl, /* batch_free_function */
&TryFreeDefaultImpl, /* try_free_default_function */
nullptr, /* aligned_malloc_function */
nullptr, /* aligned_malloc_unchecked_function */
nullptr, /* aligned_realloc_function */
nullptr, /* aligned_realloc_unchecked_function */
nullptr, /* aligned_free_function */
nullptr, /* next */
};
} // namespace allocator_shim

@@ -88,6 +88,21 @@ void GlibcFree(void* address, void* context) {
__libc_free(address);
}
void GlibcFreeWithSize(void* address, size_t, void* context) {
__libc_free(address);
}
void GlibcFreeWithAlignment(void* address, size_t, void* context) {
__libc_free(address);
}
void GlibcFreeWithSizeAndAlignment(void* address,
size_t,
size_t,
void* context) {
__libc_free(address);
}
PA_NO_SANITIZE("cfi-icall")
size_t GlibcGetSizeEstimate(void* address, void* context) {
// glibc does not expose an alias to resolve malloc_usable_size. Dynamically
@@ -112,17 +127,21 @@ const AllocatorDispatch AllocatorDispatch::default_dispatch = {
&GlibcRealloc, /* realloc_function */
&GlibcUncheckedRealloc, /* realloc_unchecked_function */
&GlibcFree, /* free_function */
&GlibcGetSizeEstimate, /* get_size_estimate_function */
nullptr, /* good_size_function */
nullptr, /* claimed_address */
nullptr, /* batch_malloc_function */
nullptr, /* batch_free_function */
nullptr, /* free_definite_size_function */
nullptr, /* try_free_default_function */
nullptr, /* aligned_malloc_function */
nullptr, /* aligned_malloc_unchecked_function */
nullptr, /* aligned_realloc_function */
nullptr, /* aligned_realloc_unchecked_function */
nullptr, /* aligned_free_function */
nullptr, /* next */
&GlibcFreeWithSize, /* free_with_size_function */
&GlibcFreeWithAlignment, /* free_with_alignment_function */
&GlibcFreeWithSizeAndAlignment, /* free_with_size_and_alignment_function */
&GlibcGetSizeEstimate, /* get_size_estimate_function */
nullptr, /* good_size_function */
nullptr, /* claimed_address */
nullptr, /* batch_malloc_function */
nullptr, /* batch_free_function */
nullptr, /* try_free_default_function */
nullptr, /* aligned_malloc_function */
nullptr, /* aligned_malloc_unchecked_function */
nullptr, /* aligned_realloc_function */
nullptr, /* aligned_realloc_unchecked_function */
nullptr, /* aligned_free_function */
nullptr, /* next */
};

@@ -49,6 +49,21 @@ void RealFree(void* address, void* context) {
__real_free(address);
}
void RealFreeWithSize(void* address, size_t, void* context) {
__real_free(address);
}
void RealFreeWithAlignment(void* address, size_t, void* context) {
__real_free(address);
}
void RealFreeWithSizeAndAlignment(void* address,
size_t,
size_t,
void* context) {
__real_free(address);
}
size_t RealSizeEstimate(void* address, void* context) {
return __real_malloc_usable_size(address);
}
@@ -56,24 +71,26 @@ size_t RealSizeEstimate(void* address, void* context) {
} // namespace
const AllocatorDispatch AllocatorDispatch::default_dispatch = {
&RealMalloc, /* alloc_function */
&RealMalloc, /* alloc_unchecked_function */
&RealCalloc, /* alloc_zero_initialized_function */
&RealMemalign, /* alloc_aligned_function */
&RealRealloc, /* realloc_function */
&RealRealloc, /* realloc_unchecked_function */
&RealFree, /* free_function */
&RealSizeEstimate, /* get_size_estimate_function */
nullptr, /* good_size_function */
nullptr, /* claimed_address */
nullptr, /* batch_malloc_function */
nullptr, /* batch_free_function */
nullptr, /* free_definite_size_function */
nullptr, /* try_free_default_function */
nullptr, /* aligned_malloc_function */
nullptr, /* aligned_malloc_unchecked_function */
nullptr, /* aligned_realloc_function */
nullptr, /* aligned_realloc_unchecked_function */
nullptr, /* aligned_free_function */
nullptr, /* next */
&RealMalloc, /* alloc_function */
&RealMalloc, /* alloc_unchecked_function */
&RealCalloc, /* alloc_zero_initialized_function */
&RealMemalign, /* alloc_aligned_function */
&RealRealloc, /* realloc_function */
&RealRealloc, /* realloc_unchecked_function */
&RealFree, /* free_function */
&RealFreeWithSize, /* free_with_size_function */
&RealFreeWithAlignment, /* free_with_alignment_function */
&RealFreeWithSizeAndAlignment, /* free_with_size_and_alignment_function */
&RealSizeEstimate, /* get_size_estimate_function */
nullptr, /* good_size_function */
nullptr, /* claimed_address */
nullptr, /* batch_malloc_function */
nullptr, /* batch_free_function */
nullptr, /* try_free_default_function */
nullptr, /* aligned_malloc_function */
nullptr, /* aligned_malloc_unchecked_function */
nullptr, /* aligned_realloc_function */
nullptr, /* aligned_realloc_unchecked_function */
nullptr, /* aligned_free_function */
nullptr, /* next */
};

@@ -435,25 +435,46 @@ PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::Free(
object);
}
#if PA_BUILDFLAG(IS_APPLE)
// Normal free() path on Apple OSes:
// 1. size = GetSizeEstimate(ptr);
// 2. if (size) FreeDefiniteSize(ptr, size)
//
// So we don't need to re-check that the pointer is owned in Free(), and we
// can use the size.
// static
template <partition_alloc::AllocFlags base_alloc_flags,
partition_alloc::FreeFlags base_free_flags>
void PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::
FreeDefiniteSize(void* address, size_t size, void* context) {
partition_alloc::ScopedDisallowAllocations guard{};
PA_ALWAYS_INLINE void
PartitionAllocFunctionsInternal<base_alloc_flags,
base_free_flags>::FreeWithSize(void* object,
size_t size,
void* context) {
// TODO(lizeb): Optimize PartitionAlloc to use the size information. This is
// still useful though, as we avoid double-checking that the address is owned.
partition_alloc::PartitionRoot::FreeInlineInUnknownRoot<base_free_flags>(
address);
PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::Free(
object, context);
}
// static
template <partition_alloc::AllocFlags base_alloc_flags,
partition_alloc::FreeFlags base_free_flags>
PA_ALWAYS_INLINE void
PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::
FreeWithAlignment(void* object, size_t alignment, void* context) {
// TODO(lizeb): Optimize PartitionAlloc to use the size information. This is
// still useful though, as we avoid double-checking that the address is owned.
PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::Free(
object, context);
}
// static
template <partition_alloc::AllocFlags base_alloc_flags,
partition_alloc::FreeFlags base_free_flags>
PA_ALWAYS_INLINE void PartitionAllocFunctionsInternal<
base_alloc_flags,
base_free_flags>::FreeWithSizeAndAlignment(void* object,
size_t size,
size_t alignment,
void* context) {
// TODO(lizeb): Optimize PartitionAlloc to use the size information. This is
// still useful though, as we avoid double-checking that the address is owned.
PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::Free(
object, context);
}
#endif // PA_BUILDFLAG(IS_APPLE)
// static
template <partition_alloc::AllocFlags base_alloc_flags,

@@ -62,9 +62,14 @@ class PartitionAllocFunctionsInternal {
static void Free(void* object, void* context);
#if PA_BUILDFLAG(IS_APPLE)
static void FreeDefiniteSize(void* address, size_t size, void* context);
#endif // PA_BUILDFLAG(IS_APPLE)
static void FreeWithSize(void* object, size_t size, void* context);
static void FreeWithAlignment(void* object, size_t alignment, void* context);
static void FreeWithSizeAndAlignment(void* object,
size_t size,
size_t alignment,
void* context);
static size_t GetSizeEstimate(void* address, void* context);
@@ -89,14 +94,17 @@ class PartitionAllocFunctionsInternal {
static constexpr AllocatorDispatch MakeDispatch() {
return {
&Malloc, // alloc_function
&MallocUnchecked, // alloc_unchecked_function
&Calloc, // alloc_zero_initialized_function
&Memalign, // alloc_aligned_function
&Realloc, // realloc_function
&ReallocUnchecked, // realloc_unchecked_function
&Free, // free_function
&GetSizeEstimate, // get_size_estimate_function
&Malloc, // alloc_function
&MallocUnchecked, // alloc_unchecked_function
&Calloc, // alloc_zero_initialized_function
&Memalign, // alloc_aligned_function
&Realloc, // realloc_function
&ReallocUnchecked, // realloc_unchecked_function
&Free, // free_function
&FreeWithSize, // free_with_size_function
&FreeWithAlignment, // free_with_alignment_function
&FreeWithSizeAndAlignment, // free_with_size_and_alignment_function
&GetSizeEstimate, // get_size_estimate_function
#if PA_BUILDFLAG(IS_APPLE)
&GoodSize, // good_size
&ClaimedAddress, // claimed_address
@@ -107,15 +115,10 @@ class PartitionAllocFunctionsInternal {
&BatchMalloc, // batch_malloc_function
&BatchFree, // batch_free_function
#if PA_BUILDFLAG(IS_APPLE)
// On Apple OSes, free_definite_size() is always called from free(),
// since get_size_estimate() is used to determine whether an allocation
// belongs to the current zone. It makes sense to optimize for it.
&FreeDefiniteSize,
// On Apple OSes, try_free_default() is sometimes called as an
// optimization of free().
&TryFreeDefault,
#else
nullptr, // free_definite_size_function
nullptr, // try_free_default_function
#endif
&AlignedAlloc, // aligned_malloc_function

@@ -58,6 +58,28 @@ void DelegatedFreeFn(void* address, void* context) {
PA_MUSTTAIL return delegate->free_function(address, context);
}
void DelegatedFreeWithSizeFn(void* address, size_t size, void* context) {
const AllocatorDispatch* delegate = GetDelegate();
PA_MUSTTAIL return delegate->free_with_size_function(address, size, context);
}
void DelegatedFreeWithAlignmentFn(void* address,
size_t alignment,
void* context) {
const AllocatorDispatch* delegate = GetDelegate();
PA_MUSTTAIL return delegate->free_with_alignment_function(address, alignment,
context);
}
void DelegatedFreeWithSizeAndAlignmentFn(void* address,
size_t size,
size_t alignment,
void* context) {
const AllocatorDispatch* delegate = GetDelegate();
PA_MUSTTAIL return delegate->free_with_size_and_alignment_function(
address, size, alignment, context);
}
size_t DelegatedGetSizeEstimateFn(void* address, void* context) {
const AllocatorDispatch* delegate = GetDelegate();
PA_MUSTTAIL return delegate->get_size_estimate_function(address, context);
@@ -90,12 +112,6 @@ void DelegatedBatchFreeFn(void** to_be_freed,
context);
}
void DelegatedFreeDefiniteSizeFn(void* address, size_t size, void* context) {
const AllocatorDispatch* delegate = GetDelegate();
PA_MUSTTAIL return delegate->free_definite_size_function(address, size,
context);
}
void DelegatedTryFreeDefaultFn(void* address, void* context) {
const AllocatorDispatch* delegate = GetDelegate();
PA_MUSTTAIL return delegate->try_free_default_function(address, context);
@@ -158,7 +174,7 @@ void InstallCustomDispatch(AllocatorDispatch* dispatch) {
PA_DCHECK(dispatch->batch_malloc_function != nullptr);
PA_DCHECK(dispatch->batch_free_function != nullptr);
#if PA_BUILDFLAG(IS_APPLE)
PA_DCHECK(dispatch->free_definite_size_function != nullptr);
PA_DCHECK(dispatch->free_with_size_function != nullptr);
PA_DCHECK(dispatch->try_free_default_function != nullptr);
#endif // PA_BUILDFLAG(IS_APPLE)
PA_DCHECK(dispatch->aligned_malloc_function != nullptr);
@@ -210,12 +226,15 @@ const AllocatorDispatch AllocatorDispatch::default_dispatch = {
.realloc_function = &DelegatedReallocFn,
.realloc_unchecked_function = &DelegatedReallocUncheckedFn,
.free_function = &DelegatedFreeFn,
.free_with_size_function = &DelegatedFreeWithSizeFn,
.free_with_alignment_function = &DelegatedFreeWithAlignmentFn,
.free_with_size_and_alignment_function =
&DelegatedFreeWithSizeAndAlignmentFn,
.get_size_estimate_function = &DelegatedGetSizeEstimateFn,
.good_size_function = &DelegatedGoodSizeFn,
.claimed_address_function = &DelegatedClaimedAddressFn,
.batch_malloc_function = &DelegatedBatchMallocFn,
.batch_free_function = &DelegatedBatchFreeFn,
.free_definite_size_function = &DelegatedFreeDefiniteSizeFn,
.try_free_default_function = &DelegatedTryFreeDefaultFn,
.aligned_malloc_function = &DelegatedAlignedMallocFn,
.aligned_malloc_unchecked_function = &DelegatedAlignedMallocUncheckedFn,

@@ -43,6 +43,23 @@ void DefaultWinHeapFreeImpl(void* address, void* context) {
allocator_shim::WinHeapFree(address);
}
void DefaultWinHeapFreeWithSizeImpl(void* address, size_t size, void* context) {
allocator_shim::WinHeapFree(address);
}
void DefaultWinHeapFreeWithAlignmentImpl(void* address,
size_t alignment,
void* context) {
allocator_shim::WinHeapFree(address);
}
void DefaultWinHeapFreeWithSizeAndAlignmentImpl(void* address,
size_t size,
size_t alignment,
void* context) {
allocator_shim::WinHeapFree(address);
}
size_t DefaultWinHeapGetSizeEstimateImpl(void* address, void* context) {
return allocator_shim::WinHeapGetSizeEstimate(address);
}
@@ -77,12 +94,14 @@ constexpr AllocatorDispatch AllocatorDispatch::default_dispatch = {
&DefaultWinHeapReallocImpl,
&DefaultWinHeapReallocImpl, /* realloc_unchecked_function */
&DefaultWinHeapFreeImpl,
&DefaultWinHeapFreeWithSizeImpl,
&DefaultWinHeapFreeWithAlignmentImpl,
&DefaultWinHeapFreeWithSizeAndAlignmentImpl,
&DefaultWinHeapGetSizeEstimateImpl,
nullptr, /* good_size */
nullptr, /* claimed_address */
nullptr, /* batch_malloc_function */
nullptr, /* batch_free_function */
nullptr, /* free_definite_size_function */
nullptr, /* try_free_default_function */
&DefaultWinHeapAlignedMallocImpl,
&DefaultWinHeapAlignedMallocImpl, /* aligned_malloc_unchecked_function */

@@ -15,35 +15,44 @@ namespace {
void FreeFn(void* address, void* context) {}
void BatchFreeFn(void** to_be_freed, unsigned num_to_be_freed, void* context) {}
void FreeWithSizeFn(void* address, size_t size, void* context) {}
void FreeDefiniteSizeFn(void* address, size_t size, void* context) {}
void FreeWithAlignmentFn(void* address, size_t alignment, void* context) {}
void FreeWithSizeAndAlignmentFn(void* address,
size_t size,
size_t alignment,
void* context) {}
void BatchFreeFn(void** to_be_freed, unsigned num_to_be_freed, void* context) {}
void TryFreeDefaultFn(void* address, void* context) {}
static void AlignedFreeFn(void* address, void* context) {}
AllocatorDispatch allocator_dispatch = {
nullptr, // alloc_function
nullptr, // alloc_unchecked_function
nullptr, // alloc_zero_initialized_function
nullptr, // alloc_aligned_function
nullptr, // realloc_function
nullptr, // realloc_unchecked_function
FreeFn, // free_function
nullptr, // get_size_estimate_function
nullptr, // good_size_function
nullptr, // claimed_address_function
nullptr, // batch_malloc_function
BatchFreeFn, // batch_free_function
FreeDefiniteSizeFn, // free_definite_size_function
TryFreeDefaultFn, // try_free_default_function
nullptr, // aligned_malloc_function
nullptr, // aligned_malloc_unchecked_function
nullptr, // aligned_realloc_function
nullptr, // aligned_realloc_unchecked_function
AlignedFreeFn, // aligned_free_function
nullptr // next
nullptr, // alloc_function
nullptr, // alloc_unchecked_function
nullptr, // alloc_zero_initialized_function
nullptr, // alloc_aligned_function
nullptr, // realloc_function
nullptr, // realloc_unchecked_function
FreeFn, // free_function
FreeWithSizeFn, // free_with_size_function
FreeWithAlignmentFn, // free_with_alignment_function
FreeWithSizeAndAlignmentFn, // free_with_size_and_alignment_function
nullptr, // get_size_estimate_function
nullptr, // good_size_function
nullptr, // claimed_address_function
nullptr, // batch_malloc_function
BatchFreeFn, // batch_free_function
TryFreeDefaultFn, // try_free_default_function
nullptr, // aligned_malloc_function
nullptr, // aligned_malloc_unchecked_function
nullptr, // aligned_realloc_function
nullptr, // aligned_realloc_unchecked_function
AlignedFreeFn, // aligned_free_function
nullptr // next
};
} // namespace

@@ -177,7 +177,7 @@ void* MallocZoneMemalign(malloc_zone_t* zone, size_t alignment, size_t size) {
}
void MallocZoneFreeDefiniteSize(malloc_zone_t* zone, void* ptr, size_t size) {
return ShimFreeDefiniteSize(ptr, size, nullptr);
return ShimFreeWithSize(ptr, size, nullptr);
}
unsigned MallocZoneBatchMalloc(malloc_zone_t* zone,

@@ -62,7 +62,7 @@ MallocZoneFunctions MallocZoneFunctionsToReplaceDefault() {
};
new_functions.free_definite_size = [](malloc_zone_t* zone, void* ptr,
size_t size) {
ShimFreeDefiniteSize(ptr, size, zone);
ShimFreeWithSize(ptr, size, zone);
};
new_functions.try_free_default = [](malloc_zone_t* zone, void* ptr) {
ShimTryFreeDefault(ptr, zone);

@@ -102,17 +102,21 @@ SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p,
#endif
}
SHIM_CPP_SYMBOLS_EXPORT void operator delete(void* p, size_t) __THROW {
SHIM_CPP_SYMBOLS_EXPORT void operator delete(void* p, size_t size) __THROW {
#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
free(p);
#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
ShimCppDeleteWithSize(p, size);
#else
ShimCppDelete(p);
#endif
}
SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p, size_t) __THROW {
SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p, size_t size) __THROW {
#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
free(p);
#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
ShimCppDeleteWithSize(p, size);
#else
ShimCppDelete(p);
#endif
@@ -137,30 +141,36 @@ SHIM_CPP_SYMBOLS_EXPORT void* operator new(std::size_t size,
#endif
}
SHIM_CPP_SYMBOLS_EXPORT void operator delete(void* p,
std::align_val_t) __THROW {
SHIM_CPP_SYMBOLS_EXPORT void operator delete(
void* p,
std::align_val_t alignment) __THROW {
#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
free(p);
#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
ShimCppDeleteWithAlignment(p, static_cast<size_t>(alignment));
#else
ShimCppDelete(p);
#endif
}
SHIM_CPP_SYMBOLS_EXPORT void
operator delete(void* p, std::size_t size, std::align_val_t alignment) __THROW {
#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
free(p);
#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
ShimCppDeleteWithSizeAndAlignment(p, size, static_cast<size_t>(alignment));
#else
ShimCppDelete(p);
#endif
}
SHIM_CPP_SYMBOLS_EXPORT void operator delete(void* p,
std::size_t size,
std::align_val_t) __THROW {
#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
free(p);
#else
ShimCppDelete(p);
#endif
}
SHIM_CPP_SYMBOLS_EXPORT void operator delete(void* p,
std::align_val_t,
std::align_val_t alignment,
const std::nothrow_t&) __THROW {
#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
free(p);
#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
ShimCppDeleteWithAlignment(p, static_cast<size_t>(alignment));
#else
ShimCppDelete(p);
#endif
@@ -185,30 +195,38 @@ SHIM_CPP_SYMBOLS_EXPORT void* operator new[](std::size_t size,
#endif
}
SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p,
std::align_val_t) __THROW {
SHIM_CPP_SYMBOLS_EXPORT void operator delete[](
void* p,
std::align_val_t alignment) __THROW {
#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
free(p);
#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
ShimCppDeleteWithAlignment(p, static_cast<size_t>(alignment));
#else
ShimCppDelete(p);
#endif
}
SHIM_CPP_SYMBOLS_EXPORT void operator delete[](
void* p,
std::size_t size,
std::align_val_t alignment) __THROW {
#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
free(p);
#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
ShimCppDeleteWithSizeAndAlignment(p, size, static_cast<size_t>(alignment));
#else
ShimCppDelete(p);
#endif
}
SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p,
std::size_t size,
std::align_val_t) __THROW {
#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
free(p);
#else
ShimCppDelete(p);
#endif
}
SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p,
std::align_val_t,
std::align_val_t alignment,
const std::nothrow_t&) __THROW {
#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
free(p);
#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
ShimCppDeleteWithAlignment(p, static_cast<size_t>(alignment));
#else
ShimCppDelete(p);
#endif

@@ -112,6 +112,42 @@ PA_ALWAYS_INLINE void ShimCppDelete(void* address) {
return chain_head->free_function(address, context);
}
#if PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
PA_ALWAYS_INLINE void ShimCppDeleteWithSize(void* address, size_t size) {
const allocator_shim::AllocatorDispatch* const chain_head =
allocator_shim::internal::GetChainHead();
void* context = nullptr;
#if PA_BUILDFLAG(IS_APPLE) && !PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
context = malloc_default_zone();
#endif
return chain_head->free_with_size_function(address, size, context);
}
PA_ALWAYS_INLINE void ShimCppDeleteWithAlignment(void* address,
size_t alignment) {
const allocator_shim::AllocatorDispatch* const chain_head =
allocator_shim::internal::GetChainHead();
void* context = nullptr;
#if PA_BUILDFLAG(IS_APPLE) && !PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
context = malloc_default_zone();
#endif
return chain_head->free_with_alignment_function(address, alignment, context);
}
PA_ALWAYS_INLINE void ShimCppDeleteWithSizeAndAlignment(void* address,
size_t size,
size_t alignment) {
const allocator_shim::AllocatorDispatch* const chain_head =
allocator_shim::internal::GetChainHead();
void* context = nullptr;
#if PA_BUILDFLAG(IS_APPLE) && !PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
context = malloc_default_zone();
#endif
return chain_head->free_with_size_and_alignment_function(address, size,
alignment, context);
}
#endif // PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
PA_ALWAYS_INLINE void* ShimMalloc(size_t size, void* context) {
const allocator_shim::AllocatorDispatch* const chain_head =
allocator_shim::internal::GetChainHead();
@@ -213,6 +249,12 @@ PA_ALWAYS_INLINE void ShimFree(void* address, void* context) {
return chain_head->free_function(address, context);
}
PA_ALWAYS_INLINE void ShimFreeWithSize(void* ptr, size_t size, void* context) {
const allocator_shim::AllocatorDispatch* const chain_head =
allocator_shim::internal::GetChainHead();
return chain_head->free_with_size_function(ptr, size, context);
}
PA_ALWAYS_INLINE size_t ShimGetSizeEstimate(const void* address,
void* context) {
const allocator_shim::AllocatorDispatch* const chain_head =
@@ -251,14 +293,6 @@ PA_ALWAYS_INLINE void ShimBatchFree(void** to_be_freed,
return chain_head->batch_free_function(to_be_freed, num_to_be_freed, context);
}
PA_ALWAYS_INLINE void ShimFreeDefiniteSize(void* ptr,
size_t size,
void* context) {
const allocator_shim::AllocatorDispatch* const chain_head =
allocator_shim::internal::GetChainHead();
return chain_head->free_definite_size_function(ptr, size, context);
}
PA_ALWAYS_INLINE void ShimTryFreeDefault(void* ptr, void* context) {
const allocator_shim::AllocatorDispatch* const chain_head =
allocator_shim::internal::GetChainHead();

@@ -205,12 +205,14 @@ allocator_shim::AllocatorDispatch g_bad_malloc_dispatch = {
&BadRealloc, /* realloc_function */
&BadRealloc, /* realloc_unchecked_function */
&BadFree, /* free_function */
nullptr, /* free_with_size_function */
nullptr, /* free_with_alignment_function */
nullptr, /* free_with_size_and_alignment_function */
nullptr, /* get_size_estimate_function */
nullptr, /* good_size_function */
nullptr, /* claimed_address_function */
nullptr, /* batch_malloc_function */
nullptr, /* batch_free_function */
nullptr, /* free_definite_size_function */
nullptr, /* try_free_default_function */
&BadAlignedAlloc, /* aligned_malloc_function */
&BadAlignedAlloc, /* aligned_malloc_unchecked_function */

@@ -111,6 +111,8 @@ if (is_win && is_component_build && (!use_custom_libcxx || libcxx_is_shared)) {
use_allocator_shim_default = false
}
shim_supports_sized_dealloc_default = use_sized_deallocation
use_partition_alloc_as_malloc_default =
use_allocator_shim_default && _is_partition_alloc_everywhere_platform &&
!_disable_partition_alloc_everywhere

@@ -96,7 +96,7 @@ bool TryInitSlow() {
//
// This code runs only on the codepaths of deallocations (`free`, `delete`,
// etc.) and _never_ runs on the codepaths of allocations (`malloc`, `new`,
// etc.) because this allocator shim hooks only FreeFn, FreeDefiniteSizeFn,
// etc.) because this allocator shim hooks only FreeFn, FreeWithSizeFn,
// etc. So, it's safe to allocate memory here as it doesn't recurse, however,
// it's _NOT_ allowed to deallocate memory here as it _does_ recurse.
//
@@ -218,16 +218,40 @@ void FreeFn(void* address, void* context) {
MUSTTAIL return allocator_dispatch.next->free_function(address, context);
}
void FreeDefiniteSizeFn(void* address, size_t size, void* context) {
void FreeWithSizeFn(void* address, size_t size, void* context) {
if (sampling_state.Sample()) [[unlikely]] {
if (Quarantine(address)) [[likely]] {
return;
}
}
MUSTTAIL return allocator_dispatch.next->free_definite_size_function(
MUSTTAIL return allocator_dispatch.next->free_with_size_function(
address, size, context);
}
void FreeWithAlignmentFn(void* address, size_t alignment, void* context) {
if (sampling_state.Sample()) [[unlikely]] {
if (Quarantine(address)) [[likely]] {
return;
}
}
MUSTTAIL return allocator_dispatch.next->free_with_alignment_function(
address, alignment, context);
}
void FreeWithSizeAndAlignmentFn(void* address,
size_t size,
size_t alignment,
void* context) {
if (sampling_state.Sample()) [[unlikely]] {
if (Quarantine(address)) [[likely]] {
return;
}
}
MUSTTAIL return allocator_dispatch.next
->free_with_size_and_alignment_function(address, size, alignment,
context);
}
AllocatorDispatch allocator_dispatch = {
nullptr, // alloc_function
nullptr, // alloc_unchecked_function
@@ -235,17 +259,19 @@ AllocatorDispatch allocator_dispatch = {
nullptr, // alloc_aligned_function
// realloc doesn't always deallocate memory, so the Extreme LUD doesn't
// support realloc.
nullptr, // realloc_function
nullptr, // realloc_unchecked_function
FreeFn, // free_function
nullptr, // get_size_estimate_function
nullptr, // good_size_function
nullptr, // claimed_address_function
nullptr, // batch_malloc_function
nullptr, // realloc_function
nullptr, // realloc_unchecked_function
FreeFn, // free_function
FreeWithSizeFn, // free_with_size_function
FreeWithAlignmentFn, // free_with_alignment_function
FreeWithSizeAndAlignmentFn, // free_with_size_and_alignment_function
nullptr, // get_size_estimate_function
nullptr, // good_size_function
nullptr, // claimed_address_function
nullptr, // batch_malloc_function
// batch_free is rarely used, so the Extreme LUD doesn't support batch_free
// (at least for now).
nullptr, // batch_free_function
FreeDefiniteSizeFn, // free_definite_size_function
nullptr, // batch_free_function
// try_free_default is rarely used, so the Extreme LUD doesn't support
// try_free_default (at least for now).
nullptr, // try_free_default_function

@@ -62,16 +62,40 @@ void FreeFn(void* address, void* context) {
MUSTTAIL return g_allocator_dispatch.next->free_function(address, context);
}
void FreeDefiniteSizeFn(void* address, size_t size, void* context) {
void FreeWithSizeFn(void* address, size_t size, void* context) {
if (MaybeQuarantine(address, size, context,
FreeFunctionKind::kFreeDefiniteSize)) {
FreeFunctionKind::kFreeWithSize)) {
return;
}
MUSTTAIL return g_allocator_dispatch.next->free_definite_size_function(
MUSTTAIL return g_allocator_dispatch.next->free_with_size_function(
address, size, context);
}
void FreeWithAlignmentFn(void* address, size_t alignment, void* context) {
if (MaybeQuarantine(address, std::nullopt, context,
FreeFunctionKind::kFreeWithAlignment)) {
return;
}
MUSTTAIL return g_allocator_dispatch.next->free_with_alignment_function(
address, alignment, context);
}
void FreeWithSizeAndAlignmentFn(void* address,
size_t size,
size_t alignment,
void* context) {
if (MaybeQuarantine(address, size, context,
FreeFunctionKind::kFreeWithSizeAndAlignment)) {
return;
}
MUSTTAIL return g_allocator_dispatch.next
->free_with_size_and_alignment_function(address, size, alignment,
context);
}
void TryFreeDefaultFn(void* address, void* context) {
if (MaybeQuarantine(address, std::nullopt, context,
FreeFunctionKind::kTryFreeDefault)) {
@@ -93,26 +117,28 @@ static void AlignedFreeFn(void* address, void* context) {
}
AllocatorDispatch g_allocator_dispatch = {
nullptr, // alloc_function
nullptr, // alloc_unchecked_function
nullptr, // alloc_zero_initialized_function
nullptr, // alloc_aligned_function
nullptr, // realloc_function
nullptr, // realloc_unchecked_function
FreeFn, // free_function
nullptr, // get_size_estimate_function
nullptr, // good_size_function
nullptr, // claimed_address_function
nullptr, // batch_malloc_function
nullptr, // batch_free_function
FreeDefiniteSizeFn, // free_definite_size_function
TryFreeDefaultFn, // try_free_default_function
nullptr, // aligned_malloc_function
nullptr, // aligned_malloc_unchecked_function
nullptr, // aligned_realloc_function
nullptr, // aligned_realloc_unchecked_function
AlignedFreeFn, // aligned_free_function
nullptr // next
nullptr, // alloc_function
nullptr, // alloc_unchecked_function
nullptr, // alloc_zero_initialized_function
nullptr, // alloc_aligned_function
nullptr, // realloc_function
nullptr, // realloc_unchecked_function
FreeFn, // free_function
FreeWithSizeFn, // free_with_size_function
FreeWithAlignmentFn, // free_with_alignment_function
FreeWithSizeAndAlignmentFn, // free_with_size_and_alignment_function
nullptr, // get_size_estimate_function
nullptr, // good_size_function
nullptr, // claimed_address_function
nullptr, // batch_malloc_function
nullptr, // batch_free_function
TryFreeDefaultFn, // try_free_default_function
nullptr, // aligned_malloc_function
nullptr, // aligned_malloc_unchecked_function
nullptr, // aligned_realloc_function
nullptr, // aligned_realloc_unchecked_function
AlignedFreeFn, // aligned_free_function
nullptr // next
};
} // namespace
@@ -145,9 +171,19 @@ void FinishFree(const AllocationInfo& allocation) {
case FreeFunctionKind::kFree:
next->free_function(allocation.address, context);
break;
case FreeFunctionKind::kFreeDefiniteSize:
next->free_definite_size_function(allocation.address, allocation.size,
context);
case FreeFunctionKind::kFreeWithSize:
next->free_with_size_function(allocation.address, allocation.size,
context);
break;
case FreeFunctionKind::kFreeWithAlignment:
// TODO(crbug.com/412358843): Memorize and forward alignment information.
next->free_function(allocation.address, context);
break;
case FreeFunctionKind::kFreeWithSizeAndAlignment:
// TODO(crbug.com/412358843): Similar to above, forward alignment
// information. We shall not forward size information here because it can
// confuse an allocator by alignment mismatch.
next->free_function(allocation.address, context);
break;
case FreeFunctionKind::kTryFreeDefault:
next->try_free_default_function(allocation.address, context);

@@ -19,7 +19,9 @@ namespace gwp_asan::internal::lud {
enum class FreeFunctionKind : uint8_t {
kUnknown,
kFree,
kFreeDefiniteSize,
kFreeWithSize,
kFreeWithAlignment,
kFreeWithSizeAndAlignment,
kTryFreeDefault,
kAlignedFree,
};

@@ -160,6 +160,44 @@ void FreeFn(void* address, void* context) {
g_allocator_dispatch.next->free_function(address, context);
}
void FreeWithSizeFn(void* address, size_t size, void* context) {
if (gpa->PointerIsMine(address)) [[unlikely]] {
// TODO(vtsyrklevich): Perform this check in GuardedPageAllocator and report
// failed checks using the same pipeline.
CHECK_EQ(size, gpa->GetRequestedSize(address));
gpa->Deallocate(address);
return;
}
g_allocator_dispatch.next->free_with_size_function(address, size, context);
}
void FreeWithAlignmentFn(void* address, size_t alignment, void* context) {
if (gpa->PointerIsMine(address)) [[unlikely]] {
gpa->Deallocate(address);
return;
}
g_allocator_dispatch.next->free_with_alignment_function(address, alignment,
context);
}
void FreeWithSizeAndAlignmentFn(void* address,
size_t size,
size_t alignment,
void* context) {
if (gpa->PointerIsMine(address)) [[unlikely]] {
// TODO(vtsyrklevich): Perform this check in GuardedPageAllocator and report
// failed checks using the same pipeline.
CHECK_EQ(size, gpa->GetRequestedSize(address));
gpa->Deallocate(address);
return;
}
g_allocator_dispatch.next->free_with_size_and_alignment_function(
address, size, alignment, context);
}
size_t GetSizeEstimateFn(void* address, void* context) {
if (gpa->PointerIsMine(address)) [[unlikely]] {
return gpa->GetRequestedSize(address);
@@ -212,19 +250,6 @@ void BatchFreeFn(void** to_be_freed, unsigned num_to_be_freed, void* context) {
context);
}
void FreeDefiniteSizeFn(void* address, size_t size, void* context) {
if (gpa->PointerIsMine(address)) [[unlikely]] {
// TODO(vtsyrklevich): Perform this check in GuardedPageAllocator and report
// failed checks using the same pipeline.
CHECK_EQ(size, gpa->GetRequestedSize(address));
gpa->Deallocate(address);
return;
}
g_allocator_dispatch.next->free_definite_size_function(address, size,
context);
}
void TryFreeDefaultFn(void* address, void* context) {
if (gpa->PointerIsMine(address)) [[unlikely]] {
gpa->Deallocate(address);
@@ -335,12 +360,14 @@ AllocatorDispatch g_allocator_dispatch = {
&ReallocFn,
&ReallocUncheckedFn,
&FreeFn,
&FreeWithSizeFn,
&FreeWithAlignmentFn,
&FreeWithSizeAndAlignmentFn,
&GetSizeEstimateFn,
&GoodSizeFn,
&ClaimedAddressFn,
&BatchMallocFn,
&BatchFreeFn,
&FreeDefiniteSizeFn,
&TryFreeDefaultFn,
&AlignedMallocFn,
&AlignedMallocUncheckedFn,