diff --git a/base/allocator/dispatcher/internal/dispatcher_internal.h b/base/allocator/dispatcher/internal/dispatcher_internal.h
index 96463f3b966da..0321b1282e556 100644
--- a/base/allocator/dispatcher/internal/dispatcher_internal.h
+++ b/base/allocator/dispatcher/internal/dispatcher_internal.h
@@ -195,6 +195,30 @@ struct DispatcherImpl {
     MUSTTAIL return allocator_dispatch_.next->free_function(address, context);
   }
 
+  static void FreeWithSizeFn(void* address, size_t size, void* context) {
+    DoNotifyFreeForShim(address);
+    MUSTTAIL return allocator_dispatch_.next->free_with_size_function(
+        address, size, context);
+  }
+
+  static void FreeWithAlignmentFn(void* address,
+                                  size_t alignment,
+                                  void* context) {
+    DoNotifyFreeForShim(address);
+    MUSTTAIL return allocator_dispatch_.next->free_with_alignment_function(
+        address, alignment, context);
+  }
+
+  static void FreeWithSizeAndAlignmentFn(void* address,
+                                         size_t size,
+                                         size_t alignment,
+                                         void* context) {
+    DoNotifyFreeForShim(address);
+    MUSTTAIL return allocator_dispatch_.next
+        ->free_with_size_and_alignment_function(address, size, alignment,
+                                                context);
+  }
+
   static unsigned BatchMallocFn(size_t size,
                                 void** results,
                                 unsigned num_requested,
@@ -219,12 +243,6 @@ struct DispatcherImpl {
         to_be_freed, num_to_be_freed, context);
   }
 
-  static void FreeDefiniteSizeFn(void* address, size_t size, void* context) {
-    DoNotifyFreeForShim(address);
-    MUSTTAIL return allocator_dispatch_.next->free_definite_size_function(
-        address, size, context);
-  }
-
   static void TryFreeDefaultFn(void* address, void* context) {
     DoNotifyFreeForShim(address);
     MUSTTAIL return allocator_dispatch_.next->try_free_default_function(
@@ -324,26 +342,28 @@ std::tuple<ObserverTypes*...> DispatcherImpl<ObserverTypes...>::s_observers;
 #if PA_BUILDFLAG(USE_ALLOCATOR_SHIM)
 template <typename... ObserverTypes>
 AllocatorDispatch DispatcherImpl<ObserverTypes...>::allocator_dispatch_ = {
-    AllocFn,                    // alloc_function
-    AllocUncheckedFn,           // alloc_unchecked_function
-    AllocZeroInitializedFn,     // alloc_zero_initialized_function
-    AllocAlignedFn,             // alloc_aligned_function
-    ReallocFn,                  // realloc_function
-    ReallocUncheckedFn,         // realloc_unchecked_function
-    FreeFn,                     // free_function
-    nullptr,                    // get_size_estimate_function
-    nullptr,                    // good_size_function
-    nullptr,                    // claimed_address_function
-    BatchMallocFn,              // batch_malloc_function
-    BatchFreeFn,                // batch_free_function
-    FreeDefiniteSizeFn,         // free_definite_size_function
-    TryFreeDefaultFn,           // try_free_default_function
-    AlignedMallocFn,            // aligned_malloc_function
-    AlignedMallocUncheckedFn,   // aligned_malloc_unchecked_function
-    AlignedReallocFn,           // aligned_realloc_function
-    AlignedReallocUncheckedFn,  // aligned_realloc_unchecked_function
-    AlignedFreeFn,              // aligned_free_function
-    nullptr                     // next
+    AllocFn,                     // alloc_function
+    AllocUncheckedFn,            // alloc_unchecked_function
+    AllocZeroInitializedFn,      // alloc_zero_initialized_function
+    AllocAlignedFn,              // alloc_aligned_function
+    ReallocFn,                   // realloc_function
+    ReallocUncheckedFn,          // realloc_unchecked_function
+    FreeFn,                      // free_function
+    FreeWithSizeFn,              // free_with_size_function
+    FreeWithAlignmentFn,         // free_with_alignment_function
+    FreeWithSizeAndAlignmentFn,  // free_with_size_and_alignment_function
+    nullptr,                     // get_size_estimate_function
+    nullptr,                     // good_size_function
+    nullptr,                     // claimed_address_function
+    BatchMallocFn,               // batch_malloc_function
+    BatchFreeFn,                 // batch_free_function
+    TryFreeDefaultFn,            // try_free_default_function
+    AlignedMallocFn,             // aligned_malloc_function
+    AlignedMallocUncheckedFn,    // aligned_malloc_unchecked_function
+    AlignedReallocFn,            // aligned_realloc_function
+    AlignedReallocUncheckedFn,   // aligned_realloc_unchecked_function
+    AlignedFreeFn,               // aligned_free_function
+    nullptr                      // next
 };
 #endif  // PA_BUILDFLAG(USE_ALLOCATOR_SHIM)
 
diff --git a/base/allocator/dispatcher/internal/dispatcher_internal_unittest.cc b/base/allocator/dispatcher/internal/dispatcher_internal_unittest.cc
index 0da5a4442db17..85cbcdc9ad798 100644
--- a/base/allocator/dispatcher/internal/dispatcher_internal_unittest.cc
+++ b/base/allocator/dispatcher/internal/dispatcher_internal_unittest.cc
@@ -146,12 +146,14 @@ struct AllocationEventDispatcherInternalTest : public DispatcherTest {
                                            &realloc_function,
                                            &realloc_unchecked_function,
                                            [](void*, void*) {},
+                                           [](void*, size_t, void*) {},
+                                           [](void*, size_t, void*) {},
+                                           [](void*, size_t, size_t, void*) {},
                                            &get_size_estimate_function,
                                            &good_size_function,
                                            &claimed_address_function,
                                            &batch_malloc_function,
                                            [](void**, unsigned, void*) {},
-                                           [](void*, size_t, void*) {},
                                            [](void*, void*) {},
                                            &aligned_malloc_function,
                                            &aligned_malloc_unchecked_function,
@@ -259,7 +261,7 @@ TEST_F(AllocationEventDispatcherInternalTest, VerifyAllocatorShimDataIsSet) {
   EXPECT_NE(nullptr, allocator_dispatch->free_function);
   EXPECT_NE(nullptr, allocator_dispatch->batch_malloc_function);
   EXPECT_NE(nullptr, allocator_dispatch->batch_free_function);
-  EXPECT_NE(nullptr, allocator_dispatch->free_definite_size_function);
+  EXPECT_NE(nullptr, allocator_dispatch->free_with_size_function);
   EXPECT_NE(nullptr, allocator_dispatch->try_free_default_function);
   EXPECT_NE(nullptr, allocator_dispatch->aligned_malloc_function);
   EXPECT_NE(nullptr, allocator_dispatch->aligned_malloc_unchecked_function);
@@ -515,7 +517,7 @@ TEST_F(AllocationEventDispatcherInternalTest,
 }
 
 TEST_F(AllocationEventDispatcherInternalTest,
-       VerifyAllocatorShimHooksTriggerCorrectly_free_definite_size_function) {
+       VerifyAllocatorShimHooksTriggerCorrectly_free_with_size_function) {
   std::array<ObserverMock, kMaximumNumberOfObservers> observers;
 
   for (auto& mock : observers) {
@@ -530,12 +532,12 @@ TEST_F(AllocationEventDispatcherInternalTest,
       GetNotificationHooks(CreateTupleOfPointers(observers));
 
   auto* const allocator_dispatch = dispatch_data.GetAllocatorDispatch();
-  EXPECT_NE(allocator_dispatch->free_definite_size_function, nullptr);
+  EXPECT_NE(allocator_dispatch->free_with_size_function, nullptr);
 
   allocator_dispatch->next = GetNextAllocatorDispatch();
 
-  allocator_dispatch->free_definite_size_function(GetAllocatedAddress(),
-                                                  GetAllocatedSize(), nullptr);
+  allocator_dispatch->free_with_size_function(GetAllocatedAddress(),
+                                              GetAllocatedSize(), nullptr);
 }
 
 TEST_F(AllocationEventDispatcherInternalTest,
diff --git a/base/allocator/partition_allocator/build_overrides/partition_alloc.gni b/base/allocator/partition_allocator/build_overrides/partition_alloc.gni
index a3f831c70908a..00749baef8746 100644
--- a/base/allocator/partition_allocator/build_overrides/partition_alloc.gni
+++ b/base/allocator/partition_allocator/build_overrides/partition_alloc.gni
@@ -12,6 +12,7 @@ build_with_chromium = false
 # configuration.
 use_partition_alloc_as_malloc_default = false
 use_allocator_shim_default = false
+shim_supports_sized_dealloc_default = false
 enable_backup_ref_ptr_support_default = false
 enable_backup_ref_ptr_slow_checks_default = false
 enable_dangling_raw_ptr_checks_default = false
diff --git a/base/allocator/partition_allocator/partition_alloc.gni b/base/allocator/partition_allocator/partition_alloc.gni
index d3d81d208865f..8286ef3241cc9 100644
--- a/base/allocator/partition_allocator/partition_alloc.gni
+++ b/base/allocator/partition_allocator/partition_alloc.gni
@@ -142,6 +142,12 @@ declare_args() {
   # calls to PartitionAlloc, rather than some other platform allocator.
   use_partition_alloc_as_malloc = use_partition_alloc && use_allocator_shim &&
                                   use_partition_alloc_as_malloc_default
+
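+  # When true, the C++ sized and aligned operator delete overloads provided by
+  # the allocator shim forward the size/alignment information to the shim's
+  # sized free entry points instead of falling back to the plain free path.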
+  shim_supports_sized_dealloc =
+      use_allocator_shim && shim_supports_sized_dealloc_default
 }
 
 declare_args() {
diff --git a/base/allocator/partition_allocator/src/partition_alloc/BUILD.gn b/base/allocator/partition_allocator/src/partition_alloc/BUILD.gn
index d4380a2bd39d0..a06cffaf75901 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/BUILD.gn
+++ b/base/allocator/partition_allocator/src/partition_alloc/BUILD.gn
@@ -183,6 +183,7 @@ pa_buildflag_header("buildflags") {
     "RAW_PTR_ZERO_ON_MOVE=$raw_ptr_zero_on_move",
     "REALLOC_GROWTH_FACTOR_MITIGATION=$partition_alloc_realloc_growth_factor_mitigation",
     "RECORD_ALLOC_INFO=$record_alloc_info",
+    "SHIM_SUPPORTS_SIZED_DEALLOC=$shim_supports_sized_dealloc",
     "SMALLER_PARTITION_COOKIE=$smaller_partition_cookie",
     "STACK_SCAN_SUPPORTED=$stack_scan_supported",
     "USE_ALLOCATOR_SHIM=$use_allocator_shim",
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_dispatch.h b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_dispatch.h
index a83fc6e8eccca..ebde848816fed 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_dispatch.h
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_dispatch.h
@@ -31,7 +31,12 @@ struct AllocatorDispatch {
   using BatchFreeFn = void(void** to_be_freed,
                            unsigned num_to_be_freed,
                            void* context);
-  using FreeDefiniteSizeFn = void(void* ptr, size_t size, void* context);
+  using FreeWithSizeFn = void(void* ptr, size_t size, void* context);
+  using FreeWithAlignmentFn = void(void* ptr, size_t alignment, void* context);
+  using FreeWithSizeAndAlignmentFn = void(void* ptr,
+                                          size_t size,
+                                          size_t alignment,
+                                          void* context);
   using TryFreeDefaultFn = void(void* ptr, void* context);
   using AlignedMallocFn = void*(size_t size, size_t alignment, void* context);
   using AlignedMallocUncheckedFn = void*(size_t size,
@@ -54,14 +59,16 @@ struct AllocatorDispatch {
   ReallocFn* realloc_function;
   ReallocUncheckedFn* realloc_unchecked_function;
   FreeFn* free_function;
+  FreeWithSizeFn* free_with_size_function;
+  FreeWithAlignmentFn* free_with_alignment_function;
+  FreeWithSizeAndAlignmentFn* free_with_size_and_alignment_function;
   GetSizeEstimateFn* get_size_estimate_function;
   GoodSizeFn* good_size_function;
-  // claimed_address, batch_malloc, batch_free, free_definite_size and
+  // claimed_address, batch_malloc, batch_free and
   // try_free_default are specific to the OSX and iOS allocators.
   ClaimedAddressFn* claimed_address_function;
   BatchMallocFn* batch_malloc_function;
   BatchFreeFn* batch_free_function;
-  FreeDefiniteSizeFn* free_definite_size_function;
   TryFreeDefaultFn* try_free_default_function;
   // _aligned_malloc, _aligned_realloc, and _aligned_free are specific to the
   // Windows allocator.
@@ -125,12 +132,14 @@ struct AllocatorDispatch {
     COPY_IF_NULLPTR(realloc_function);
     COPY_IF_NULLPTR(realloc_unchecked_function);
     COPY_IF_NULLPTR(free_function);
+    COPY_IF_NULLPTR(free_with_size_function);
+    COPY_IF_NULLPTR(free_with_alignment_function);
+    COPY_IF_NULLPTR(free_with_size_and_alignment_function);
     COPY_IF_NULLPTR(get_size_estimate_function);
     COPY_IF_NULLPTR(good_size_function);
     COPY_IF_NULLPTR(claimed_address_function);
     COPY_IF_NULLPTR(batch_malloc_function);
     COPY_IF_NULLPTR(batch_free_function);
-    COPY_IF_NULLPTR(free_definite_size_function);
     COPY_IF_NULLPTR(try_free_default_function);
     COPY_IF_NULLPTR(aligned_malloc_function);
     COPY_IF_NULLPTR(aligned_malloc_unchecked_function);
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_apple_zoned_malloc.cc b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_apple_zoned_malloc.cc
index eb24ae6551267..65eb0b30815d2 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_apple_zoned_malloc.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_apple_zoned_malloc.cc
@@ -40,6 +40,26 @@ void FreeImpl(void* ptr, void* context) {
   functions.free(reinterpret_cast<struct _malloc_zone_t*>(context), ptr);
 }
 
+void FreeWithSizeImpl(void* ptr, size_t size, void* context) {
+  MallocZoneFunctions& functions = GetFunctionsForZone(context);
+  functions.free_definite_size(
+      reinterpret_cast<struct _malloc_zone_t*>(context), ptr, size);
+}
+
+void FreeWithAlignmentImpl(void* ptr, size_t, void* context) {
+  MallocZoneFunctions& functions = GetFunctionsForZone(context);
+  functions.free(reinterpret_cast<struct _malloc_zone_t*>(context), ptr);
+}
+
+void FreeWithSizeAndAlignmentImpl(void* ptr,
+                                  size_t size,
+                                  size_t,
+                                  void* context) {
+  MallocZoneFunctions& functions = GetFunctionsForZone(context);
+  functions.free_definite_size(
+      reinterpret_cast<struct _malloc_zone_t*>(context), ptr, size);
+}
+
 size_t GetSizeEstimateImpl(void* ptr, void* context) {
   MallocZoneFunctions& functions = GetFunctionsForZone(context);
   return functions.size(reinterpret_cast<struct _malloc_zone_t*>(context), ptr);
@@ -84,12 +104,6 @@ void BatchFreeImpl(void** to_be_freed,
                        to_be_freed, num_to_be_freed);
 }
 
-void FreeDefiniteSizeImpl(void* ptr, size_t size, void* context) {
-  MallocZoneFunctions& functions = GetFunctionsForZone(context);
-  functions.free_definite_size(
-      reinterpret_cast<struct _malloc_zone_t*>(context), ptr, size);
-}
-
 void TryFreeDefaultImpl(void* ptr, void* context) {
   MallocZoneFunctions& functions = GetFunctionsForZone(context);
   if (functions.try_free_default) {
@@ -102,26 +116,28 @@ void TryFreeDefaultImpl(void* ptr, void* context) {
 }  // namespace
 
 const AllocatorDispatch AllocatorDispatch::default_dispatch = {
-    &MallocImpl,           /* alloc_function */
-    &MallocImpl,           /* alloc_unchecked_function */
-    &CallocImpl,           /* alloc_zero_initialized_function */
-    &MemalignImpl,         /* alloc_aligned_function */
-    &ReallocImpl,          /* realloc_function */
-    &ReallocImpl,          /* realloc_unchecked_function */
-    &FreeImpl,             /* free_function */
-    &GetSizeEstimateImpl,  /* get_size_estimate_function */
-    &GoodSizeImpl,         /* good_size_function */
-    &ClaimedAddressImpl,   /* claimed_address_function */
-    &BatchMallocImpl,      /* batch_malloc_function */
-    &BatchFreeImpl,        /* batch_free_function */
-    &FreeDefiniteSizeImpl, /* free_definite_size_function */
-    &TryFreeDefaultImpl,   /* try_free_default_function */
-    nullptr,               /* aligned_malloc_function */
-    nullptr,               /* aligned_malloc_unchecked_function */
-    nullptr,               /* aligned_realloc_function */
-    nullptr,               /* aligned_realloc_unchecked_function */
-    nullptr,               /* aligned_free_function */
-    nullptr,               /* next */
+    &MallocImpl,                   /* alloc_function */
+    &MallocImpl,                   /* alloc_unchecked_function */
+    &CallocImpl,                   /* alloc_zero_initialized_function */
+    &MemalignImpl,                 /* alloc_aligned_function */
+    &ReallocImpl,                  /* realloc_function */
+    &ReallocImpl,                  /* realloc_unchecked_function */
+    &FreeImpl,                     /* free_function */
+    &FreeWithSizeImpl,             /* free_with_size_function */
+    &FreeWithAlignmentImpl,        /* free_with_alignment_function */
+    &FreeWithSizeAndAlignmentImpl, /* free_with_size_and_alignment_function */
+    &GetSizeEstimateImpl,          /* get_size_estimate_function */
+    &GoodSizeImpl,                 /* good_size_function */
+    &ClaimedAddressImpl,           /* claimed_address_function */
+    &BatchMallocImpl,              /* batch_malloc_function */
+    &BatchFreeImpl,                /* batch_free_function */
+    &TryFreeDefaultImpl,           /* try_free_default_function */
+    nullptr,                       /* aligned_malloc_function */
+    nullptr,                       /* aligned_malloc_unchecked_function */
+    nullptr,                       /* aligned_realloc_function */
+    nullptr,                       /* aligned_realloc_unchecked_function */
+    nullptr,                       /* aligned_free_function */
+    nullptr,                       /* next */
 };
 
 }  // namespace allocator_shim
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_glibc.cc b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_glibc.cc
index 93352d9f0e851..143c1d3b08d6a 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_glibc.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_glibc.cc
@@ -88,6 +88,21 @@ void GlibcFree(void* address, void* context) {
   __libc_free(address);
 }
 
+void GlibcFreeWithSize(void* address, size_t, void* context) {
+  __libc_free(address);
+}
+
+void GlibcFreeWithAlignment(void* address, size_t, void* context) {
+  __libc_free(address);
+}
+
+void GlibcFreeWithSizeAndAlignment(void* address,
+                                   size_t,
+                                   size_t,
+                                   void* context) {
+  __libc_free(address);
+}
+
 PA_NO_SANITIZE("cfi-icall")
 size_t GlibcGetSizeEstimate(void* address, void* context) {
   // glibc does not expose an alias to resolve malloc_usable_size. Dynamically
@@ -112,17 +127,19 @@ const AllocatorDispatch AllocatorDispatch::default_dispatch = {
     &GlibcRealloc,          /* realloc_function */
     &GlibcUncheckedRealloc, /* realloc_unchecked_function */
     &GlibcFree,             /* free_function */
-    &GlibcGetSizeEstimate,  /* get_size_estimate_function */
-    nullptr,                /* good_size_function */
-    nullptr,                /* claimed_address */
-    nullptr,                /* batch_malloc_function */
-    nullptr,                /* batch_free_function */
-    nullptr,                /* free_definite_size_function */
-    nullptr,                /* try_free_default_function */
-    nullptr,                /* aligned_malloc_function */
-    nullptr,                /* aligned_malloc_unchecked_function */
-    nullptr,                /* aligned_realloc_function */
-    nullptr,                /* aligned_realloc_unchecked_function */
-    nullptr,                /* aligned_free_function */
-    nullptr,                /* next */
+    &GlibcFreeWithSize,             /* free_with_size_function */
+    &GlibcFreeWithAlignment,        /* free_with_alignment_function */
+    &GlibcFreeWithSizeAndAlignment, /* free_with_size_and_alignment_function */
+    &GlibcGetSizeEstimate,          /* get_size_estimate_function */
+    nullptr,                        /* good_size_function */
+    nullptr,                        /* claimed_address */
+    nullptr,                        /* batch_malloc_function */
+    nullptr,                        /* batch_free_function */
+    nullptr,                        /* try_free_default_function */
+    nullptr,                        /* aligned_malloc_function */
+    nullptr,                        /* aligned_malloc_unchecked_function */
+    nullptr,                        /* aligned_realloc_function */
+    nullptr,                        /* aligned_realloc_unchecked_function */
+    nullptr,                        /* aligned_free_function */
+    nullptr,                        /* next */
 };
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_linker_wrapped_symbols.cc b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_linker_wrapped_symbols.cc
index 2d4ff4af2b9f4..3ef3f9170b2ab 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_linker_wrapped_symbols.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_linker_wrapped_symbols.cc
@@ -49,6 +49,21 @@ void RealFree(void* address, void* context) {
   __real_free(address);
 }
 
+void RealFreeWithSize(void* address, size_t, void* context) {
+  __real_free(address);
+}
+
+void RealFreeWithAlignment(void* address, size_t, void* context) {
+  __real_free(address);
+}
+
+void RealFreeWithSizeAndAlignment(void* address,
+                                  size_t,
+                                  size_t,
+                                  void* context) {
+  __real_free(address);
+}
+
 size_t RealSizeEstimate(void* address, void* context) {
   return __real_malloc_usable_size(address);
 }
@@ -56,24 +71,26 @@ size_t RealSizeEstimate(void* address, void* context) {
 }  // namespace
 
 const AllocatorDispatch AllocatorDispatch::default_dispatch = {
-    &RealMalloc,       /* alloc_function */
-    &RealMalloc,       /* alloc_unchecked_function */
-    &RealCalloc,       /* alloc_zero_initialized_function */
-    &RealMemalign,     /* alloc_aligned_function */
-    &RealRealloc,      /* realloc_function */
-    &RealRealloc,      /* realloc_unchecked_function */
-    &RealFree,         /* free_function */
-    &RealSizeEstimate, /* get_size_estimate_function */
-    nullptr,           /* good_size_function */
-    nullptr,           /* claimed_address */
-    nullptr,           /* batch_malloc_function */
-    nullptr,           /* batch_free_function */
-    nullptr,           /* free_definite_size_function */
-    nullptr,           /* try_free_default_function */
-    nullptr,           /* aligned_malloc_function */
-    nullptr,           /* aligned_malloc_unchecked_function */
-    nullptr,           /* aligned_realloc_function */
-    nullptr,           /* aligned_realloc_unchecked_function */
-    nullptr,           /* aligned_free_function */
-    nullptr,           /* next */
+    &RealMalloc,                   /* alloc_function */
+    &RealMalloc,                   /* alloc_unchecked_function */
+    &RealCalloc,                   /* alloc_zero_initialized_function */
+    &RealMemalign,                 /* alloc_aligned_function */
+    &RealRealloc,                  /* realloc_function */
+    &RealRealloc,                  /* realloc_unchecked_function */
+    &RealFree,                     /* free_function */
+    &RealFreeWithSize,             /* free_with_size_function */
+    &RealFreeWithAlignment,        /* free_with_alignment_function */
+    &RealFreeWithSizeAndAlignment, /* free_with_size_and_alignment_function */
+    &RealSizeEstimate,             /* get_size_estimate_function */
+    nullptr,                       /* good_size_function */
+    nullptr,                       /* claimed_address */
+    nullptr,                       /* batch_malloc_function */
+    nullptr,                       /* batch_free_function */
+    nullptr,                       /* try_free_default_function */
+    nullptr,                       /* aligned_malloc_function */
+    nullptr,                       /* aligned_malloc_unchecked_function */
+    nullptr,                       /* aligned_realloc_function */
+    nullptr,                       /* aligned_realloc_unchecked_function */
+    nullptr,                       /* aligned_free_function */
+    nullptr,                       /* next */
 };
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.cc b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.cc
index 6bd337f30105e..a9c3abfb04a25 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.cc
@@ -435,25 +435,46 @@ PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::Free(
       object);
 }
 
-#if PA_BUILDFLAG(IS_APPLE)
-// Normal free() path on Apple OSes:
-// 1. size = GetSizeEstimate(ptr);
-// 2. if (size) FreeDefiniteSize(ptr, size)
-//
-// So we don't need to re-check that the pointer is owned in Free(), and we
-// can use the size.
 // static
 template <partition_alloc::AllocFlags base_alloc_flags,
           partition_alloc::FreeFlags base_free_flags>
-void PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::
-    FreeDefiniteSize(void* address, size_t size, void* context) {
-  partition_alloc::ScopedDisallowAllocations guard{};
+PA_ALWAYS_INLINE void
+PartitionAllocFunctionsInternal<base_alloc_flags,
+                                base_free_flags>::FreeWithSize(void* object,
+                                                               size_t size,
+                                                               void* context) {
   // TODO(lizeb): Optimize PartitionAlloc to use the size information. This is
   // still useful though, as we avoid double-checking that the address is owned.
-  partition_alloc::PartitionRoot::FreeInlineInUnknownRoot<base_free_flags>(
-      address);
+  PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::Free(
+      object, context);
+}
+
+// static
+template <partition_alloc::AllocFlags base_alloc_flags,
+          partition_alloc::FreeFlags base_free_flags>
+PA_ALWAYS_INLINE void
+PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::
+    FreeWithAlignment(void* object, size_t alignment, void* context) {
+  // No size information is available here. The alignment is not needed to
+  // free the allocation, so simply forward to the regular Free() path.
+  PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::Free(
+      object, context);
+}
+
+// static
+template <partition_alloc::AllocFlags base_alloc_flags,
+          partition_alloc::FreeFlags base_free_flags>
+PA_ALWAYS_INLINE void PartitionAllocFunctionsInternal<
+    base_alloc_flags,
+    base_free_flags>::FreeWithSizeAndAlignment(void* object,
+                                               size_t size,
+                                               size_t alignment,
+                                               void* context) {
+  // TODO(lizeb): Optimize PartitionAlloc to use the size information. This is
+  // still useful though, as we avoid double-checking that the address is owned.
+  PartitionAllocFunctionsInternal<base_alloc_flags, base_free_flags>::Free(
+      object, context);
 }
-#endif  // PA_BUILDFLAG(IS_APPLE)
 
 // static
 template <partition_alloc::AllocFlags base_alloc_flags,
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.h b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.h
index eed9155b69304..29206a5030bd4 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.h
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc.h
@@ -62,9 +62,14 @@ class PartitionAllocFunctionsInternal {
 
   static void Free(void* object, void* context);
 
-#if PA_BUILDFLAG(IS_APPLE)
-  static void FreeDefiniteSize(void* address, size_t size, void* context);
-#endif  // PA_BUILDFLAG(IS_APPLE)
+  static void FreeWithSize(void* object, size_t size, void* context);
+
+  static void FreeWithAlignment(void* object, size_t alignment, void* context);
+
+  static void FreeWithSizeAndAlignment(void* object,
+                                       size_t size,
+                                       size_t alignment,
+                                       void* context);
 
   static size_t GetSizeEstimate(void* address, void* context);
 
@@ -89,14 +94,17 @@ class PartitionAllocFunctionsInternal {
 
   static constexpr AllocatorDispatch MakeDispatch() {
     return {
-        &Malloc,            // alloc_function
-        &MallocUnchecked,   // alloc_unchecked_function
-        &Calloc,            // alloc_zero_initialized_function
-        &Memalign,          // alloc_aligned_function
-        &Realloc,           // realloc_function
-        &ReallocUnchecked,  // realloc_unchecked_function
-        &Free,              // free_function
-        &GetSizeEstimate,   // get_size_estimate_function
+        &Malloc,                    // alloc_function
+        &MallocUnchecked,           // alloc_unchecked_function
+        &Calloc,                    // alloc_zero_initialized_function
+        &Memalign,                  // alloc_aligned_function
+        &Realloc,                   // realloc_function
+        &ReallocUnchecked,          // realloc_unchecked_function
+        &Free,                      // free_function
+        &FreeWithSize,              // free_with_size_function
+        &FreeWithAlignment,         // free_with_alignment_function
+        &FreeWithSizeAndAlignment,  // free_with_size_and_alignment_function
+        &GetSizeEstimate,           // get_size_estimate_function
 #if PA_BUILDFLAG(IS_APPLE)
         &GoodSize,        // good_size
         &ClaimedAddress,  // claimed_address
@@ -107,15 +115,10 @@ class PartitionAllocFunctionsInternal {
         &BatchMalloc,  // batch_malloc_function
         &BatchFree,    // batch_free_function
 #if PA_BUILDFLAG(IS_APPLE)
-        // On Apple OSes, free_definite_size() is always called from free(),
-        // since get_size_estimate() is used to determine whether an allocation
-        // belongs to the current zone. It makes sense to optimize for it.
-        &FreeDefiniteSize,
         // On Apple OSes, try_free_default() is sometimes called as an
         // optimization of free().
         &TryFreeDefault,
 #else
-        nullptr,  // free_definite_size_function
         nullptr,  // try_free_default_function
 #endif
         &AlignedAlloc,             // aligned_malloc_function
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc_with_advanced_checks.cc b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc_with_advanced_checks.cc
index 58404a56605f0..99c633799ad83 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc_with_advanced_checks.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_partition_alloc_with_advanced_checks.cc
@@ -58,6 +58,28 @@ void DelegatedFreeFn(void* address, void* context) {
   PA_MUSTTAIL return delegate->free_function(address, context);
 }
 
+void DelegatedFreeWithSizeFn(void* address, size_t size, void* context) {
+  const AllocatorDispatch* delegate = GetDelegate();
+  PA_MUSTTAIL return delegate->free_with_size_function(address, size, context);
+}
+
+void DelegatedFreeWithAlignmentFn(void* address,
+                                  size_t alignment,
+                                  void* context) {
+  const AllocatorDispatch* delegate = GetDelegate();
+  PA_MUSTTAIL return delegate->free_with_alignment_function(address, alignment,
+                                                            context);
+}
+
+void DelegatedFreeWithSizeAndAlignmentFn(void* address,
+                                         size_t size,
+                                         size_t alignment,
+                                         void* context) {
+  const AllocatorDispatch* delegate = GetDelegate();
+  PA_MUSTTAIL return delegate->free_with_size_and_alignment_function(
+      address, size, alignment, context);
+}
+
 size_t DelegatedGetSizeEstimateFn(void* address, void* context) {
   const AllocatorDispatch* delegate = GetDelegate();
   PA_MUSTTAIL return delegate->get_size_estimate_function(address, context);
@@ -90,12 +112,6 @@ void DelegatedBatchFreeFn(void** to_be_freed,
                                                    context);
 }
 
-void DelegatedFreeDefiniteSizeFn(void* address, size_t size, void* context) {
-  const AllocatorDispatch* delegate = GetDelegate();
-  PA_MUSTTAIL return delegate->free_definite_size_function(address, size,
-                                                           context);
-}
-
 void DelegatedTryFreeDefaultFn(void* address, void* context) {
   const AllocatorDispatch* delegate = GetDelegate();
   PA_MUSTTAIL return delegate->try_free_default_function(address, context);
@@ -158,7 +174,7 @@ void InstallCustomDispatch(AllocatorDispatch* dispatch) {
   PA_DCHECK(dispatch->batch_malloc_function != nullptr);
   PA_DCHECK(dispatch->batch_free_function != nullptr);
 #if PA_BUILDFLAG(IS_APPLE)
-  PA_DCHECK(dispatch->free_definite_size_function != nullptr);
+  PA_DCHECK(dispatch->free_with_size_function != nullptr);
   PA_DCHECK(dispatch->try_free_default_function != nullptr);
 #endif  // PA_BUILDFLAG(IS_APPLE)
   PA_DCHECK(dispatch->aligned_malloc_function != nullptr);
@@ -210,12 +226,15 @@ const AllocatorDispatch AllocatorDispatch::default_dispatch = {
     .realloc_function = &DelegatedReallocFn,
     .realloc_unchecked_function = &DelegatedReallocUncheckedFn,
     .free_function = &DelegatedFreeFn,
+    .free_with_size_function = &DelegatedFreeWithSizeFn,
+    .free_with_alignment_function = &DelegatedFreeWithAlignmentFn,
+    .free_with_size_and_alignment_function =
+        &DelegatedFreeWithSizeAndAlignmentFn,
     .get_size_estimate_function = &DelegatedGetSizeEstimateFn,
     .good_size_function = &DelegatedGoodSizeFn,
     .claimed_address_function = &DelegatedClaimedAddressFn,
     .batch_malloc_function = &DelegatedBatchMallocFn,
     .batch_free_function = &DelegatedBatchFreeFn,
-    .free_definite_size_function = &DelegatedFreeDefiniteSizeFn,
     .try_free_default_function = &DelegatedTryFreeDefaultFn,
     .aligned_malloc_function = &DelegatedAlignedMallocFn,
     .aligned_malloc_unchecked_function = &DelegatedAlignedMallocUncheckedFn,
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_winheap.cc b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_winheap.cc
index 97b6fee0c3fa8..f994c535940bc 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_winheap.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_default_dispatch_to_winheap.cc
@@ -43,6 +43,23 @@ void DefaultWinHeapFreeImpl(void* address, void* context) {
   allocator_shim::WinHeapFree(address);
 }
 
+void DefaultWinHeapFreeWithSizeImpl(void* address, size_t size, void* context) {
+  allocator_shim::WinHeapFree(address);
+}
+
+void DefaultWinHeapFreeWithAlignmentImpl(void* address,
+                                         size_t alignment,
+                                         void* context) {
+  allocator_shim::WinHeapFree(address);
+}
+
+void DefaultWinHeapFreeWithSizeAndAlignmentImpl(void* address,
+                                                size_t size,
+                                                size_t alignment,
+                                                void* context) {
+  allocator_shim::WinHeapFree(address);
+}
+
 size_t DefaultWinHeapGetSizeEstimateImpl(void* address, void* context) {
   return allocator_shim::WinHeapGetSizeEstimate(address);
 }
@@ -77,12 +94,14 @@ constexpr AllocatorDispatch AllocatorDispatch::default_dispatch = {
     &DefaultWinHeapReallocImpl,
     &DefaultWinHeapReallocImpl, /* realloc_unchecked_function */
     &DefaultWinHeapFreeImpl,
+    &DefaultWinHeapFreeWithSizeImpl,
+    &DefaultWinHeapFreeWithAlignmentImpl,
+    &DefaultWinHeapFreeWithSizeAndAlignmentImpl,
     &DefaultWinHeapGetSizeEstimateImpl,
     nullptr, /* good_size */
     nullptr, /* claimed_address */
     nullptr, /* batch_malloc_function */
     nullptr, /* batch_free_function */
-    nullptr, /* free_definite_size_function */
     nullptr, /* try_free_default_function */
     &DefaultWinHeapAlignedMallocImpl,
     &DefaultWinHeapAlignedMallocImpl, /* aligned_malloc_unchecked_function */
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_dispatch_to_noop_on_free.cc b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_dispatch_to_noop_on_free.cc
index 919a6dbed05de..2fa401bd45512 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_dispatch_to_noop_on_free.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_dispatch_to_noop_on_free.cc
@@ -15,35 +15,44 @@ namespace {
 
 void FreeFn(void* address, void* context) {}
 
-void BatchFreeFn(void** to_be_freed, unsigned num_to_be_freed, void* context) {}
+void FreeWithSizeFn(void* address, size_t size, void* context) {}
 
-void FreeDefiniteSizeFn(void* address, size_t size, void* context) {}
+void FreeWithAlignmentFn(void* address, size_t alignment, void* context) {}
+
+void FreeWithSizeAndAlignmentFn(void* address,
+                                size_t size,
+                                size_t alignment,
+                                void* context) {}
+
+void BatchFreeFn(void** to_be_freed, unsigned num_to_be_freed, void* context) {}
 
 void TryFreeDefaultFn(void* address, void* context) {}
 
 static void AlignedFreeFn(void* address, void* context) {}
 
 AllocatorDispatch allocator_dispatch = {
-    nullptr,             // alloc_function
-    nullptr,             // alloc_unchecked_function
-    nullptr,             // alloc_zero_initialized_function
-    nullptr,             // alloc_aligned_function
-    nullptr,             // realloc_function
-    nullptr,             // realloc_unchecked_function
-    FreeFn,              // free_function
-    nullptr,             // get_size_estimate_function
-    nullptr,             // good_size_function
-    nullptr,             // claimed_address_function
-    nullptr,             // batch_malloc_function
-    BatchFreeFn,         // batch_free_function
-    FreeDefiniteSizeFn,  // free_definite_size_function
-    TryFreeDefaultFn,    // try_free_default_function
-    nullptr,             // aligned_malloc_function
-    nullptr,             // aligned_malloc_unchecked_function
-    nullptr,             // aligned_realloc_function
-    nullptr,             // aligned_realloc_unchecked_function
-    AlignedFreeFn,       // aligned_free_function
-    nullptr              // next
+    nullptr,                     // alloc_function
+    nullptr,                     // alloc_unchecked_function
+    nullptr,                     // alloc_zero_initialized_function
+    nullptr,                     // alloc_aligned_function
+    nullptr,                     // realloc_function
+    nullptr,                     // realloc_unchecked_function
+    FreeFn,                      // free_function
+    FreeWithSizeFn,              // free_with_size_function
+    FreeWithAlignmentFn,         // free_with_alignment_function
+    FreeWithSizeAndAlignmentFn,  // free_with_size_and_alignment_function
+    nullptr,                     // get_size_estimate_function
+    nullptr,                     // good_size_function
+    nullptr,                     // claimed_address_function
+    nullptr,                     // batch_malloc_function
+    BatchFreeFn,                 // batch_free_function
+    TryFreeDefaultFn,            // try_free_default_function
+    nullptr,                     // aligned_malloc_function
+    nullptr,                     // aligned_malloc_unchecked_function
+    nullptr,                     // aligned_realloc_function
+    nullptr,                     // aligned_realloc_unchecked_function
+    AlignedFreeFn,               // aligned_free_function
+    nullptr                      // next
 };
 
 }  // namespace
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_apple_default_zone.h b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_apple_default_zone.h
index c4824d8c9daa1..51d4b74d24817 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_apple_default_zone.h
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_apple_default_zone.h
@@ -177,7 +177,7 @@ void* MallocZoneMemalign(malloc_zone_t* zone, size_t alignment, size_t size) {
 }
 
 void MallocZoneFreeDefiniteSize(malloc_zone_t* zone, void* ptr, size_t size) {
-  return ShimFreeDefiniteSize(ptr, size, nullptr);
+  return ShimFreeWithSize(ptr, size, nullptr);
 }
 
 unsigned MallocZoneBatchMalloc(malloc_zone_t* zone,
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_apple_symbols.h b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_apple_symbols.h
index 7e8ebc5e85ac4..4537d02cc620c 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_apple_symbols.h
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_apple_symbols.h
@@ -62,7 +62,7 @@ MallocZoneFunctions MallocZoneFunctionsToReplaceDefault() {
   };
   new_functions.free_definite_size = [](malloc_zone_t* zone, void* ptr,
                                         size_t size) {
-    ShimFreeDefiniteSize(ptr, size, zone);
+    ShimFreeWithSize(ptr, size, zone);
   };
   new_functions.try_free_default = [](malloc_zone_t* zone, void* ptr) {
     ShimTryFreeDefault(ptr, zone);
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_cpp_symbols.h b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_cpp_symbols.h
index 24a6c2459ecc0..1ba4cca8db9ee 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_cpp_symbols.h
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_override_cpp_symbols.h
@@ -102,17 +102,21 @@ SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p,
 #endif
 }
 
-SHIM_CPP_SYMBOLS_EXPORT void operator delete(void* p, size_t) __THROW {
+SHIM_CPP_SYMBOLS_EXPORT void operator delete(void* p, size_t size) __THROW {
 #if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
   free(p);
+#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+  ShimCppDeleteWithSize(p, size);
 #else
   ShimCppDelete(p);
 #endif
 }
 
-SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p, size_t) __THROW {
+SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p, size_t size) __THROW {
 #if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
   free(p);
+#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+  ShimCppDeleteWithSize(p, size);
 #else
   ShimCppDelete(p);
 #endif
@@ -137,30 +141,36 @@ SHIM_CPP_SYMBOLS_EXPORT void* operator new(std::size_t size,
 #endif
 }
 
-SHIM_CPP_SYMBOLS_EXPORT void operator delete(void* p,
-                                             std::align_val_t) __THROW {
+SHIM_CPP_SYMBOLS_EXPORT void operator delete(
+    void* p,
+    std::align_val_t alignment) __THROW {
 #if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
   free(p);
+#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+  ShimCppDeleteWithAlignment(p, static_cast<size_t>(alignment));
+#else
+  ShimCppDelete(p);
+#endif
+}
+
+SHIM_CPP_SYMBOLS_EXPORT void
+operator delete(void* p, std::size_t size, std::align_val_t alignment) __THROW {
+#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
+  free(p);
+#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+  ShimCppDeleteWithSizeAndAlignment(p, size, static_cast<size_t>(alignment));
 #else
   ShimCppDelete(p);
 #endif
 }
 
 SHIM_CPP_SYMBOLS_EXPORT void operator delete(void* p,
-                                             std::size_t size,
-                                             std::align_val_t) __THROW {
-#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
-  free(p);
-#else
-  ShimCppDelete(p);
-#endif
-}
-
-SHIM_CPP_SYMBOLS_EXPORT void operator delete(void* p,
-                                             std::align_val_t,
+                                             std::align_val_t alignment,
                                              const std::nothrow_t&) __THROW {
 #if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
   free(p);
+#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+  ShimCppDeleteWithAlignment(p, static_cast<size_t>(alignment));
 #else
   ShimCppDelete(p);
 #endif
@@ -185,30 +195,38 @@ SHIM_CPP_SYMBOLS_EXPORT void* operator new[](std::size_t size,
 #endif
 }
 
-SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p,
-                                               std::align_val_t) __THROW {
+SHIM_CPP_SYMBOLS_EXPORT void operator delete[](
+    void* p,
+    std::align_val_t alignment) __THROW {
 #if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
   free(p);
+#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+  ShimCppDeleteWithAlignment(p, static_cast<size_t>(alignment));
+#else
+  ShimCppDelete(p);
+#endif
+}
+
+SHIM_CPP_SYMBOLS_EXPORT void operator delete[](
+    void* p,
+    std::size_t size,
+    std::align_val_t alignment) __THROW {
+#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
+  free(p);
+#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+  ShimCppDeleteWithSizeAndAlignment(p, size, static_cast<size_t>(alignment));
 #else
   ShimCppDelete(p);
 #endif
 }
 
 SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p,
-                                               std::size_t size,
-                                               std::align_val_t) __THROW {
-#if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
-  free(p);
-#else
-  ShimCppDelete(p);
-#endif
-}
-
-SHIM_CPP_SYMBOLS_EXPORT void operator delete[](void* p,
-                                               std::align_val_t,
+                                               std::align_val_t alignment,
                                                const std::nothrow_t&) __THROW {
 #if PA_BUILDFLAG(FORWARD_THROUGH_MALLOC)
   free(p);
+#elif PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+  ShimCppDeleteWithAlignment(p, static_cast<size_t>(alignment));
 #else
   ShimCppDelete(p);
 #endif
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_unittest.cc b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_unittest.cc
index 3b7f92603bc06..dc68ddc7a9965 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_unittest.cc
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/allocator_shim_unittest.cc
@@ -11,6 +11,7 @@
 #include <memory>
 #include <new>
 #include <sstream>
+#include <type_traits>
 #include <vector>
 
 #include "base/synchronization/waitable_event.h"
@@ -101,10 +102,10 @@ class AllocatorShimTest : public testing::Test {
   static void* MockAllocAligned(size_t alignment, size_t size, void* context) {
     if (instance_) {
       if (size < MaxSizeTracked()) {
-        ++(instance_->aligned_allocs_intercepted_by_size[size]);
+        ++(instance_->allocs_intercepted_by_size[size]);
       }
       if (alignment < MaxSizeTracked()) {
-        ++(instance_->aligned_allocs_intercepted_by_alignment[alignment]);
+        ++(instance_->allocs_intercepted_by_alignment[alignment]);
       }
     }
     return g_mock_dispatch.next->alloc_aligned_function(alignment, size,
@@ -163,6 +164,45 @@ class AllocatorShimTest : public testing::Test {
     g_mock_dispatch.next->free_function(address, context);
   }
 
+  static void MockFreeWithSize(void* ptr, size_t size, void* context) {
+    if (instance_) {
+      ++instance_->frees_intercepted_by_addr[Hash(ptr)];
+      if (size < MaxSizeTracked()) {
+        ++(instance_->frees_intercepted_by_size[size]);
+      }
+    }
+    g_mock_dispatch.next->free_with_size_function(ptr, size, context);
+  }
+
+  static void MockFreeWithAlignment(void* ptr,
+                                    size_t alignment,
+                                    void* context) {
+    if (instance_) {
+      ++instance_->frees_intercepted_by_addr[Hash(ptr)];
+      if (alignment < MaxSizeTracked()) {
+        ++(instance_->frees_intercepted_by_alignment[alignment]);
+      }
+    }
+    g_mock_dispatch.next->free_with_alignment_function(ptr, alignment, context);
+  }
+
+  static void MockFreeWithSizeAndAlignment(void* ptr,
+                                           size_t size,
+                                           size_t alignment,
+                                           void* context) {
+    if (instance_) {
+      ++instance_->frees_intercepted_by_addr[Hash(ptr)];
+      if (size < MaxSizeTracked()) {
+        ++(instance_->frees_intercepted_by_size[size]);
+      }
+      if (alignment < MaxSizeTracked()) {
+        ++(instance_->frees_intercepted_by_alignment[alignment]);
+      }
+    }
+    g_mock_dispatch.next->free_with_size_and_alignment_function(
+        ptr, size, alignment, context);
+  }
+
   static size_t MockGetSizeEstimate(void* address, void* context) {
     // Special testing values for GetSizeEstimate() interception.
     if (address == kTestSizeEstimateAddress) {
@@ -205,14 +245,6 @@ class AllocatorShimTest : public testing::Test {
                                               context);
   }
 
-  static void MockFreeDefiniteSize(void* ptr, size_t size, void* context) {
-    if (instance_) {
-      ++instance_->frees_intercepted_by_addr[Hash(ptr)];
-      ++instance_->free_definite_sizes_intercepted_by_size[size];
-    }
-    g_mock_dispatch.next->free_definite_size_function(ptr, size, context);
-  }
-
   static void MockTryFreeDefault(void* ptr, void* context) {
     if (instance_) {
       ++instance_->frees_intercepted_by_addr[Hash(ptr)];
@@ -222,7 +254,10 @@ class AllocatorShimTest : public testing::Test {
 
   static void* MockAlignedMalloc(size_t size, size_t alignment, void* context) {
     if (instance_ && size < MaxSizeTracked()) {
-      ++instance_->aligned_mallocs_intercepted_by_size[size];
+      ++instance_->allocs_intercepted_by_size[size];
+    }
+    if (instance_ && alignment < MaxSizeTracked()) {
+      ++(instance_->allocs_intercepted_by_alignment[alignment]);
     }
     return g_mock_dispatch.next->aligned_malloc_function(size, alignment,
                                                          context);
@@ -232,7 +267,10 @@ class AllocatorShimTest : public testing::Test {
                                           size_t alignment,
                                           void* context) {
     if (instance_ && size < MaxSizeTracked()) {
-      ++instance_->aligned_mallocs_intercepted_by_size[size];
+      ++instance_->allocs_intercepted_by_size[size];
+    }
+    if (instance_ && alignment < MaxSizeTracked()) {
+      ++(instance_->allocs_intercepted_by_alignment[alignment]);
     }
     return g_mock_dispatch.next->aligned_malloc_unchecked_function(
         size, alignment, context);
@@ -268,7 +306,7 @@ class AllocatorShimTest : public testing::Test {
 
   static void MockAlignedFree(void* address, void* context) {
     if (instance_) {
-      ++instance_->aligned_frees_intercepted_by_addr[Hash(address)];
+      ++instance_->frees_intercepted_by_addr[Hash(address)];
     }
     g_mock_dispatch.next->aligned_free_function(address, context);
   }
@@ -286,19 +324,17 @@ class AllocatorShimTest : public testing::Test {
 
   void SetUp() override {
     allocs_intercepted_by_size.resize(MaxSizeTracked());
+    allocs_intercepted_by_alignment.resize(MaxSizeTracked());
     zero_allocs_intercepted_by_size.resize(MaxSizeTracked());
-    aligned_allocs_intercepted_by_size.resize(MaxSizeTracked());
-    aligned_allocs_intercepted_by_alignment.resize(MaxSizeTracked());
     reallocs_intercepted_by_size.resize(MaxSizeTracked());
     reallocs_intercepted_by_addr.resize(MaxSizeTracked());
     frees_intercepted_by_addr.resize(MaxSizeTracked());
+    frees_intercepted_by_size.resize(MaxSizeTracked());
+    frees_intercepted_by_alignment.resize(MaxSizeTracked());
     batch_mallocs_intercepted_by_size.resize(MaxSizeTracked());
     batch_frees_intercepted_by_addr.resize(MaxSizeTracked());
-    free_definite_sizes_intercepted_by_size.resize(MaxSizeTracked());
-    aligned_mallocs_intercepted_by_size.resize(MaxSizeTracked());
     aligned_reallocs_intercepted_by_size.resize(MaxSizeTracked());
     aligned_reallocs_intercepted_by_addr.resize(MaxSizeTracked());
-    aligned_frees_intercepted_by_addr.resize(MaxSizeTracked());
     num_new_handler_calls.store(0, std::memory_order_release);
     instance_ = this;
 
@@ -327,35 +363,23 @@ class AllocatorShimTest : public testing::Test {
 
  protected:
   std::vector<size_t> allocs_intercepted_by_size;
+  std::vector<size_t> allocs_intercepted_by_alignment;
   std::vector<size_t> zero_allocs_intercepted_by_size;
-  std::vector<size_t> aligned_allocs_intercepted_by_size;
-  std::vector<size_t> aligned_allocs_intercepted_by_alignment;
   std::vector<size_t> reallocs_intercepted_by_size;
   std::vector<size_t> reallocs_intercepted_by_addr;
   std::vector<size_t> frees_intercepted_by_addr;
+  std::vector<size_t> frees_intercepted_by_size;
+  std::vector<size_t> frees_intercepted_by_alignment;
   std::vector<size_t> batch_mallocs_intercepted_by_size;
   std::vector<size_t> batch_frees_intercepted_by_addr;
-  std::vector<size_t> free_definite_sizes_intercepted_by_size;
-  std::vector<size_t> aligned_mallocs_intercepted_by_size;
   std::vector<size_t> aligned_reallocs_intercepted_by_size;
   std::vector<size_t> aligned_reallocs_intercepted_by_addr;
-  std::vector<size_t> aligned_frees_intercepted_by_addr;
   std::atomic<uint32_t> num_new_handler_calls;
 
  private:
   static AllocatorShimTest* instance_;
 };
 
-struct TestStruct1 {
-  uint32_t ignored;
-  uint8_t ignored_2;
-};
-
-struct TestStruct2 {
-  uint64_t ignored;
-  uint8_t ignored_3;
-};
-
 class ThreadDelegateForNewHandlerTest : public base::PlatformThread::Delegate {
  public:
   explicit ThreadDelegateForNewHandlerTest(base::WaitableEvent* event)
@@ -380,16 +404,21 @@ AllocatorDispatch g_mock_dispatch = {
     &AllocatorShimTest::MockAllocZeroInit, /* alloc_zero_initialized_function */
     &AllocatorShimTest::MockAllocAligned,  /* alloc_aligned_function */
     &AllocatorShimTest::MockRealloc,       /* realloc_function */
-    &AllocatorShimTest::MockReallocUnchecked, /* realloc_unchecked_function */
-    &AllocatorShimTest::MockFree,             /* free_function */
-    &AllocatorShimTest::MockGetSizeEstimate,  /* get_size_estimate_function */
-    &AllocatorShimTest::MockGoodSize,         /* good_size */
-    &AllocatorShimTest::MockClaimedAddress,   /* claimed_address_function */
-    &AllocatorShimTest::MockBatchMalloc,      /* batch_malloc_function */
-    &AllocatorShimTest::MockBatchFree,        /* batch_free_function */
-    &AllocatorShimTest::MockFreeDefiniteSize, /* free_definite_size_function */
-    &AllocatorShimTest::MockTryFreeDefault,   /* try_free_default_function */
-    &AllocatorShimTest::MockAlignedMalloc,    /* aligned_malloc_function */
+    &AllocatorShimTest::MockReallocUnchecked,  /* realloc_unchecked_function */
+    &AllocatorShimTest::MockFree,              /* free_function */
+    &AllocatorShimTest::MockFreeWithSize,      /* free_with_size_function */
+    &AllocatorShimTest::MockFreeWithAlignment, /* free_with_alignment_function
+                                                */
+    &AllocatorShimTest::
+        MockFreeWithSizeAndAlignment, /* free_with_size_and_alignment_function
+                                       */
+    &AllocatorShimTest::MockGetSizeEstimate, /* get_size_estimate_function */
+    &AllocatorShimTest::MockGoodSize,        /* good_size */
+    &AllocatorShimTest::MockClaimedAddress,  /* claimed_address_function */
+    &AllocatorShimTest::MockBatchMalloc,     /* batch_malloc_function */
+    &AllocatorShimTest::MockBatchFree,       /* batch_free_function */
+    &AllocatorShimTest::MockTryFreeDefault,  /* try_free_default_function */
+    &AllocatorShimTest::MockAlignedMalloc,   /* aligned_malloc_function */
     &AllocatorShimTest::MockAlignedMallocUnchecked,
     /* aligned_malloc_unchecked_function */
     &AllocatorShimTest::MockAlignedRealloc, /* aligned_realloc_function */
@@ -416,8 +445,8 @@ TEST_F(AllocatorShimTest, InterceptLibcSymbols) {
   ASSERT_EQ(0, res);
   ASSERT_NE(nullptr, posix_memalign_ptr);
   ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(posix_memalign_ptr) % 256);
-  ASSERT_GE(aligned_allocs_intercepted_by_alignment[256], 1u);
-  ASSERT_GE(aligned_allocs_intercepted_by_size[59], 1u);
+  ASSERT_GE(allocs_intercepted_by_alignment[256], 1u);
+  ASSERT_GE(allocs_intercepted_by_size[59], 1u);
 
   // (p)valloc() are not defined on Android. pvalloc() is a GNU extension,
   // valloc() is not in POSIX.
@@ -426,8 +455,8 @@ TEST_F(AllocatorShimTest, InterceptLibcSymbols) {
   void* valloc_ptr = valloc(61);
   ASSERT_NE(nullptr, valloc_ptr);
   ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(valloc_ptr) % kPageSize);
-  ASSERT_GE(aligned_allocs_intercepted_by_alignment[kPageSize], 1u);
-  ASSERT_GE(aligned_allocs_intercepted_by_size[61], 1u);
+  ASSERT_GE(allocs_intercepted_by_alignment[kPageSize], 1u);
+  ASSERT_GE(allocs_intercepted_by_size[61], 1u);
 #endif  // !PA_BUILDFLAG(IS_ANDROID)
 
 #endif  // !PA_BUILDFLAG(IS_WIN)
@@ -436,16 +465,16 @@ TEST_F(AllocatorShimTest, InterceptLibcSymbols) {
   void* memalign_ptr = memalign(128, 53);
   ASSERT_NE(nullptr, memalign_ptr);
   ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(memalign_ptr) % 128);
-  ASSERT_GE(aligned_allocs_intercepted_by_alignment[128], 1u);
-  ASSERT_GE(aligned_allocs_intercepted_by_size[53], 1u);
+  ASSERT_GE(allocs_intercepted_by_alignment[128], 1u);
+  ASSERT_GE(allocs_intercepted_by_size[53], 1u);
 
 #if PA_BUILDFLAG(IS_POSIX) && !PA_BUILDFLAG(IS_ANDROID)
   void* pvalloc_ptr = pvalloc(67);
   ASSERT_NE(nullptr, pvalloc_ptr);
   ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(pvalloc_ptr) % kPageSize);
-  ASSERT_GE(aligned_allocs_intercepted_by_alignment[kPageSize], 1u);
+  ASSERT_GE(allocs_intercepted_by_alignment[kPageSize], 1u);
   // pvalloc rounds the size up to the next page.
-  ASSERT_GE(aligned_allocs_intercepted_by_size[kPageSize], 1u);
+  ASSERT_GE(allocs_intercepted_by_size[kPageSize], 1u);
 #endif  // PA_BUILDFLAG(IS_POSIX) && !PA_BUILDFLAG(IS_ANDROID)
 
 #endif  // !PA_BUILDFLAG(IS_WIN) && !PA_BUILDFLAG(IS_APPLE)
@@ -456,8 +485,8 @@ TEST_F(AllocatorShimTest, InterceptLibcSymbols) {
   void* libc_memalign_ptr = __libc_memalign(512, 56);
   ASSERT_NE(nullptr, memalign_ptr);
   ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(libc_memalign_ptr) % 512);
-  ASSERT_GE(aligned_allocs_intercepted_by_alignment[512], 1u);
-  ASSERT_GE(aligned_allocs_intercepted_by_size[56], 1u);
+  ASSERT_GE(allocs_intercepted_by_alignment[512], 1u);
+  ASSERT_GE(allocs_intercepted_by_size[56], 1u);
 #endif
 
   // TODO(crbug.com/407932921) Support Apple platforms.
@@ -467,8 +496,8 @@ TEST_F(AllocatorShimTest, InterceptLibcSymbols) {
     void* aligned_alloc_ptr = aligned_alloc(128, 32);
     ASSERT_NE(nullptr, aligned_alloc_ptr);
     ASSERT_EQ(0u, reinterpret_cast<uintptr_t>(aligned_alloc_ptr) % 128);
-    ASSERT_GE(aligned_allocs_intercepted_by_alignment[128], 1u);
-    ASSERT_GE(aligned_allocs_intercepted_by_size[32], 1u);
+    ASSERT_GE(allocs_intercepted_by_alignment[128], 1u);
+    ASSERT_GE(allocs_intercepted_by_size[32], 1u);
   }
 #endif  // !BUILDFLAG(IS_APPLE)
 
@@ -551,7 +580,7 @@ TEST_F(AllocatorShimTest, InterceptLibcSymbolsBatchMallocFree) {
   RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
 }
 
-TEST_F(AllocatorShimTest, InterceptLibcSymbolsFreeDefiniteSize) {
+TEST_F(AllocatorShimTest, InterceptLibcSymbolsFreeWithSize) {
   InsertAllocatorDispatch(&g_mock_dispatch);
 
   void* alloc_ptr = malloc(19);
@@ -561,7 +590,7 @@ TEST_F(AllocatorShimTest, InterceptLibcSymbolsFreeDefiniteSize) {
   ChromeMallocZone* default_zone =
       reinterpret_cast<ChromeMallocZone*>(malloc_default_zone());
   default_zone->free_definite_size(malloc_default_zone(), alloc_ptr, 19);
-  ASSERT_GE(free_definite_sizes_intercepted_by_size[19], 1u);
+  ASSERT_GE(frees_intercepted_by_size[19], 1u);
   RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
 }
 #endif  // PA_BUILDFLAG(IS_APPLE) &&
@@ -573,14 +602,14 @@ TEST_F(AllocatorShimTest, InterceptUcrtAlignedAllocationSymbols) {
 
   constexpr size_t kAlignment = 32;
   void* alloc_ptr = _aligned_malloc(123, kAlignment);
-  EXPECT_GE(aligned_mallocs_intercepted_by_size[123], 1u);
+  EXPECT_GE(allocs_intercepted_by_size[123], 1u);
 
   void* new_alloc_ptr = _aligned_realloc(alloc_ptr, 1234, kAlignment);
   EXPECT_GE(aligned_reallocs_intercepted_by_size[1234], 1u);
   EXPECT_GE(aligned_reallocs_intercepted_by_addr[Hash(alloc_ptr)], 1u);
 
   _aligned_free(new_alloc_ptr);
-  EXPECT_GE(aligned_frees_intercepted_by_addr[Hash(new_alloc_ptr)], 1u);
+  EXPECT_GE(frees_intercepted_by_addr[Hash(new_alloc_ptr)], 1u);
 
   RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
 }
@@ -593,40 +622,6 @@ TEST_F(AllocatorShimTest, AlignedReallocSizeZeroFrees) {
 }
 #endif  // PA_BUILDFLAG(IS_WIN)
 
-TEST_F(AllocatorShimTest, InterceptCppSymbols) {
-  InsertAllocatorDispatch(&g_mock_dispatch);
-
-  TestStruct1* new_ptr = new TestStruct1;
-  ASSERT_NE(nullptr, new_ptr);
-  ASSERT_GE(allocs_intercepted_by_size[sizeof(TestStruct1)], 1u);
-
-  TestStruct1* new_array_ptr = new TestStruct1[3];
-  ASSERT_NE(nullptr, new_array_ptr);
-  ASSERT_GE(allocs_intercepted_by_size[sizeof(TestStruct1) * 3], 1u);
-
-  TestStruct2* new_nt_ptr = new (std::nothrow) TestStruct2;
-  ASSERT_NE(nullptr, new_nt_ptr);
-  ASSERT_GE(allocs_intercepted_by_size[sizeof(TestStruct2)], 1u);
-
-  TestStruct2* new_array_nt_ptr = new TestStruct2[3];
-  ASSERT_NE(nullptr, new_array_nt_ptr);
-  ASSERT_GE(allocs_intercepted_by_size[sizeof(TestStruct2) * 3], 1u);
-
-  delete new_ptr;
-  ASSERT_GE(frees_intercepted_by_addr[Hash(new_ptr)], 1u);
-
-  delete[] new_array_ptr;
-  ASSERT_GE(frees_intercepted_by_addr[Hash(new_array_ptr)], 1u);
-
-  delete new_nt_ptr;
-  ASSERT_GE(frees_intercepted_by_addr[Hash(new_nt_ptr)], 1u);
-
-  delete[] new_array_nt_ptr;
-  ASSERT_GE(frees_intercepted_by_addr[Hash(new_array_nt_ptr)], 1u);
-
-  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
-}
-
 // PartitionAlloc disallows large allocations to avoid errors with int
 // overflows.
 #if PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
@@ -871,6 +866,659 @@ TEST_F(AllocatorShimTest, OptimizeAllocatorDispatchTable) {
   RemoveAllocatorDispatchForTesting(&non_empty_dispatch);
 }
 
+class AllocatorShimCppOperatorTest : public AllocatorShimTest {
+  template <typename T>
+  static constexpr size_t GetPaddingSize() {
+    if (std::is_array_v<T> &&
+        !std::is_trivially_destructible_v<std::remove_all_extents_t<T>>) {
+#if !PA_BUILDFLAG(IS_APPLE) || !PA_BUILDFLAG(PA_ARCH_CPU_ARM64)
+      // The Itanium C++ ABI defines a cookie, a region that stores the array
+      // length; its size is computed as follows.
+      return std::max(sizeof(size_t), std::alignment_of_v<T>);
+#else
+      // On ARM64 Apple devices, the cookie stores a pair of integers: the
+      // element size and the element count.
+      return std::max(sizeof(size_t) * 2, std::alignment_of_v<T>);
+#endif  // !PA_BUILDFLAG(IS_APPLE) || !PA_BUILDFLAG(PA_ARCH_CPU_ARM64)
+    } else {
+      // Cookie is not used.
+      return 0;
+    }
+  }
+
+  template <typename T>
+  static constexpr size_t GetAllocSize() {
+    return sizeof(T) + GetPaddingSize<T>();
+  }
+
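+  // Maps `ptr` (as returned by `new` / `new[]`) back to the address that the
+  // allocator handed out: the array cookie precedes the pointer returned by
+  // `new[]`, so subtract the padding before hashing.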
+  template <typename T>
+  static size_t Hash(const void* ptr) {
+    uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
+    addr -= GetPaddingSize<T>();
+    return addr % MaxSizeTracked();
+  }
+
+ protected:
+  // Tests `operator new()` and `operator delete()` against `T`.
+  template <typename T, bool use_nothrow>
+  void NewAndDeleteSingle() {
+    InsertAllocatorDispatch(&g_mock_dispatch);
+
+    constexpr auto kSize = GetAllocSize<T>();
+    constexpr auto kAlignment = std::alignment_of_v<T>;
+
+    T* new_ptr = use_nothrow ? new (std::nothrow) T : new T;
+    ASSERT_NE(nullptr, new_ptr);
+    ASSERT_GE(allocs_intercepted_by_size[kSize], 1u);
+    ASSERT_EQ(reinterpret_cast<uintptr_t>(new_ptr) % kAlignment, 0);
+    given_alignment_on_alloc_ = allocs_intercepted_by_alignment[kAlignment];
+
+    delete new_ptr;
+    ASSERT_GE(frees_intercepted_by_addr[Hash<T>(new_ptr)], 1u);
+    given_size_on_delete_ = frees_intercepted_by_size[kSize];
+    given_alignment_on_delete_ = frees_intercepted_by_alignment[kAlignment];
+
+    RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+  }
+
+  // Tests `operator new[]()` and `operator delete[]()` against `T[3]`.
+  template <typename T, bool use_nothrow>
+  void NewAndDeleteTriplet() {
+    InsertAllocatorDispatch(&g_mock_dispatch);
+
+    constexpr auto kSize = GetAllocSize<T[3]>();
+    constexpr auto kAlignment = std::alignment_of_v<T>;
+
+    T* new_ptr = use_nothrow ? new (std::nothrow) T[3] : new T[3];
+    ASSERT_NE(nullptr, new_ptr);
+    ASSERT_GE(allocs_intercepted_by_size[kSize], 1u);
+    ASSERT_EQ(reinterpret_cast<uintptr_t>(new_ptr) % kAlignment, 0);
+    given_alignment_on_alloc_ = allocs_intercepted_by_alignment[kAlignment];
+
+    delete[] new_ptr;
+    const auto hash = Hash<T[]>(new_ptr);
+    ASSERT_GE(frees_intercepted_by_addr[hash], 1u);
+    given_size_on_delete_ = frees_intercepted_by_size[kSize];
+    given_alignment_on_delete_ = frees_intercepted_by_alignment[kAlignment];
+
+    RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+  }
+
+  // Tests `operator new()` and `operator delete()` against `T`, but indirectly
+  // through `std::unique_ptr<T>`.
+  template <typename T>
+  void MakeUniquePtrSingle() {
+    InsertAllocatorDispatch(&g_mock_dispatch);
+
+    constexpr auto kSize = GetAllocSize<T>();
+    constexpr auto kAlignment = std::alignment_of_v<T>;
+
+    std::unique_ptr<T> new_ptr = std::make_unique<T>();
+    ASSERT_NE(nullptr, new_ptr);
+    ASSERT_GE(allocs_intercepted_by_size[kSize], 1u);
+    ASSERT_EQ(reinterpret_cast<uintptr_t>(new_ptr.get()) % kAlignment, 0);
+    given_alignment_on_alloc_ = allocs_intercepted_by_alignment[kAlignment];
+
+    const auto hash = Hash<T>(new_ptr.get());
+    new_ptr.reset();
+    ASSERT_GE(frees_intercepted_by_addr[hash], 1u);
+    given_size_on_delete_ = frees_intercepted_by_size[kSize];
+    given_alignment_on_delete_ = frees_intercepted_by_alignment[kAlignment];
+
+    RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+  }
+
+  // Tests `operator new[]()` and `operator delete[]()` against `T[3]`.
+  template <typename T>
+  void MakeUniquePtrTriplet() {
+    InsertAllocatorDispatch(&g_mock_dispatch);
+
+    constexpr auto kSize = GetAllocSize<T[3]>();
+    constexpr auto kAlignment = std::alignment_of_v<T>;
+
+    std::unique_ptr<T[]> new_ptr = std::make_unique<T[]>(3);
+    ASSERT_NE(nullptr, new_ptr);
+    ASSERT_GE(allocs_intercepted_by_size[kSize], 1u);
+    ASSERT_EQ(reinterpret_cast<uintptr_t>(new_ptr.get()) % kAlignment, 0);
+    given_alignment_on_alloc_ = allocs_intercepted_by_alignment[kAlignment];
+
+    const auto hash = Hash<T[]>(new_ptr.get());
+    new_ptr.reset();
+    ASSERT_GE(frees_intercepted_by_addr[hash], 1u);
+    given_size_on_delete_ = frees_intercepted_by_size[kSize];
+    given_alignment_on_delete_ = frees_intercepted_by_alignment[kAlignment];
+
+    RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+  }
+
+  // Tests `operator new[]()` and `operator delete[]()` against
+  // `std::vector<T>`. The allocation is made through `std::allocator<T>`.
+  template <typename T>
+  void MakeVectorTriplet() {
+    InsertAllocatorDispatch(&g_mock_dispatch);
+
+    constexpr auto kSize = sizeof(T) * 3;
+    constexpr auto kAlignment = std::alignment_of_v<T>;
+
+    std::vector<T> vec(3);
+    ASSERT_NE(nullptr, vec.data());
+    ASSERT_GE(allocs_intercepted_by_size[kSize], 1u);
+    ASSERT_EQ(reinterpret_cast<uintptr_t>(vec.data()) % kAlignment, 0);
+    given_alignment_on_alloc_ = allocs_intercepted_by_alignment[kAlignment];
+
+    const auto hash = Hash<T>(vec.data());
+    vec.clear();
+    vec.shrink_to_fit();
+    ASSERT_GE(frees_intercepted_by_addr[hash], 1u);
+    given_size_on_delete_ = frees_intercepted_by_size[kSize];
+    given_alignment_on_delete_ = frees_intercepted_by_alignment[kAlignment];
+
+    RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+  }
+
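+  // Set by the helpers above: whether the allocation hook observed an
+  // explicit alignment, and whether the free hook observed an explicit size
+  // and/or alignment.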
+  bool given_alignment_on_alloc_;
+  bool given_size_on_delete_;
+  bool given_alignment_on_delete_;
+};
+
+// `ASSERT_TRUE` when sized deallocation is in use. Otherwise, `ASSERT_FALSE`.
+#if PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+#define ASSERT_TRUE_IFF_SIZED(a) ASSERT_TRUE(a)
+#else
+#define ASSERT_TRUE_IFF_SIZED(a) ASSERT_FALSE(a)
+#endif
+
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteGlobalOperator) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+
+  void* new_ptr = ::operator new(kSize);
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+
+  ::operator delete(new_ptr);
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_FALSE(frees_intercepted_by_size[kSize]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteGlobalOperatorNoThrow) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+
+  void* new_ptr = ::operator new(kSize, std::nothrow);
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+
+  ::operator delete(new_ptr, std::nothrow);
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_FALSE(frees_intercepted_by_size[kSize]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteGlobalOperatorAligned) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+  constexpr auto kAlignment = 32;
+
+  void* new_ptr = ::operator new(kSize, std::align_val_t(kAlignment));
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+  ASSERT_TRUE(allocs_intercepted_by_alignment[kAlignment]);
+
+  ::operator delete(new_ptr, std::align_val_t(kAlignment));
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_FALSE(frees_intercepted_by_size[kSize]);
+  ASSERT_TRUE_IFF_SIZED(frees_intercepted_by_alignment[kAlignment]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteGlobalOperatorAlignedNoThrow) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+  constexpr auto kAlignment = 32;
+
+  void* new_ptr =
+      ::operator new(kSize, std::align_val_t(kAlignment), std::nothrow);
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+  ASSERT_TRUE(allocs_intercepted_by_alignment[kAlignment]);
+
+  ::operator delete(new_ptr, std::align_val_t(kAlignment), std::nothrow);
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_FALSE(frees_intercepted_by_size[kSize]);
+  ASSERT_TRUE_IFF_SIZED(frees_intercepted_by_alignment[kAlignment]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+
+// These overloads of `operator delete()` exist only if `-fsized-deallocation`
+// is in use.
+#if PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteGlobalOperatorSized) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+
+  void* new_ptr = ::operator new(kSize);
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+
+  ::operator delete(new_ptr, kSize);
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_TRUE(frees_intercepted_by_size[kSize]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeleteGlobalOperatorSizedAndAligned) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+  constexpr auto kAlignment = 32;
+
+  void* new_ptr = ::operator new(kSize, std::align_val_t(kAlignment));
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+  ASSERT_TRUE(allocs_intercepted_by_alignment[kAlignment]);
+
+  ::operator delete(new_ptr, kSize, std::align_val_t(kAlignment));
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_TRUE(frees_intercepted_by_size[kSize]);
+  ASSERT_TRUE(frees_intercepted_by_alignment[kAlignment]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+#endif  // PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteArrayGlobalOperator) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+
+  void* new_ptr = ::operator new[](kSize);
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+
+  ::operator delete[](new_ptr);
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_FALSE(frees_intercepted_by_size[kSize]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteArrayGlobalOperatorNoThrow) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+
+  void* new_ptr = ::operator new[](kSize, std::nothrow);
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+
+  ::operator delete[](new_ptr, std::nothrow);
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_FALSE(frees_intercepted_by_size[kSize]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteArrayGlobalOperatorAligned) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+  constexpr auto kAlignment = 32;
+
+  void* new_ptr = ::operator new[](kSize, std::align_val_t(kAlignment));
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+  ASSERT_TRUE(allocs_intercepted_by_alignment[kAlignment]);
+
+  ::operator delete[](new_ptr, std::align_val_t(kAlignment));
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_FALSE(frees_intercepted_by_size[kSize]);
+  ASSERT_TRUE_IFF_SIZED(frees_intercepted_by_alignment[kAlignment]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeleteArrayGlobalOperatorAlignedNoThrow) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+  constexpr auto kAlignment = 32;
+
+  void* new_ptr =
+      ::operator new[](kSize, std::align_val_t(kAlignment), std::nothrow);
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+  ASSERT_TRUE(allocs_intercepted_by_alignment[kAlignment]);
+
+  ::operator delete[](new_ptr, std::align_val_t(kAlignment), std::nothrow);
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_FALSE(frees_intercepted_by_size[kSize]);
+  ASSERT_TRUE_IFF_SIZED(frees_intercepted_by_alignment[kAlignment]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+
+// These overloads of `operator delete[]()` exist only if
+// `-fsized-deallocation` is in use.
+#if PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteArrayGlobalOperatorSized) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+
+  void* new_ptr = ::operator new[](kSize);
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+
+  ::operator delete[](new_ptr, kSize);
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_TRUE(frees_intercepted_by_size[kSize]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeleteArrayGlobalOperatorSizedAndAligned) {
+  InsertAllocatorDispatch(&g_mock_dispatch);
+
+  constexpr auto kSize = 10;
+  constexpr auto kAlignment = 32;
+
+  void* new_ptr = ::operator new[](kSize, std::align_val_t(kAlignment));
+  ASSERT_NE(nullptr, new_ptr);
+  ASSERT_TRUE(allocs_intercepted_by_size[kSize]);
+  ASSERT_TRUE(allocs_intercepted_by_alignment[kAlignment]);
+
+  ::operator delete[](new_ptr, kSize, std::align_val_t(kAlignment));
+  ASSERT_TRUE(frees_intercepted_by_addr[AllocatorShimTest::Hash(new_ptr)]);
+  ASSERT_TRUE(frees_intercepted_by_size[kSize]);
+  ASSERT_TRUE(frees_intercepted_by_alignment[kAlignment]);
+
+  RemoveAllocatorDispatchForTesting(&g_mock_dispatch);
+}
+#endif  // PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+
+struct BasicStruct {
+  uint32_t ignored;
+  uint8_t ignored_2;
+};
+static_assert(std::is_trivially_destructible_v<BasicStruct>);
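+// Being trivially destructible, arrays of `BasicStruct` need no array cookie,
+// so `GetPaddingSize()` is 0 for them.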
+
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteBasicStruct) {
+  NewAndDeleteSingle<BasicStruct, false>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteBasicStructNoThrow) {
+  NewAndDeleteSingle<BasicStruct, true>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, MakeUniquePtrBasicStruct) {
+  MakeUniquePtrSingle<BasicStruct>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteBasicStructArray) {
+  NewAndDeleteTriplet<BasicStruct, false>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_FALSE(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteBasicStructArrayNoThrow) {
+  NewAndDeleteTriplet<BasicStruct, true>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_FALSE(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, MakeUniquePtrBasicStructArray) {
+  MakeUniquePtrTriplet<BasicStruct>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_FALSE(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, MakeVectorBasicStruct) {
+  MakeVectorTriplet<BasicStruct>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+
+// Aligned structs can get routed to different operator new/delete, with
+// `std::align_val_t` parameters.
+struct alignas(32) AlignedStruct {
+  char ignored[999];
+};
+static_assert(std::alignment_of_v<AlignedStruct> == 32);
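+// 32 exceeds `__STDCPP_DEFAULT_NEW_ALIGNMENT__` (typically 16 here), so the
+// compiler selects the `std::align_val_t` overloads of `operator new` and
+// `operator delete` for this type.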
+
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteAlignedStruct) {
+  NewAndDeleteSingle<AlignedStruct, false>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteAlignedStructNoThrow) {
+  NewAndDeleteSingle<AlignedStruct, true>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, MakeUniquePtrAlignedStruct) {
+  MakeUniquePtrSingle<AlignedStruct>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteAlignedStructArray) {
+  NewAndDeleteTriplet<AlignedStruct, false>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_FALSE(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeleteAlignedStructArrayNoThrow) {
+  NewAndDeleteTriplet<AlignedStruct, true>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_FALSE(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, MakeUniquePtrAlignedStructArray) {
+  MakeUniquePtrTriplet<AlignedStruct>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_FALSE(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, MakeVectorAlignedStruct) {
+  MakeVectorTriplet<AlignedStruct>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+
+// Clang behaves differently for array allocations of non-trivially
+// destructible types: it allocates extra space (an array cookie) to remember
+// the array length so that it can run the element destructors.
+struct NonTriviallyDestructibleStruct {
+  ~NonTriviallyDestructibleStruct() {}  // NOLINT(modernize-use-equals-default)
+  uint64_t ignored;
+};
+static_assert(
+    !std::is_trivially_destructible_v<NonTriviallyDestructibleStruct>);
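+// For example, `new NonTriviallyDestructibleStruct[3]` allocates
+// 3 * sizeof(NonTriviallyDestructibleStruct) bytes plus a cookie (see
+// `GetPaddingSize()` above) so that `delete[]` knows how many destructors to
+// run.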
+
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeleteNonTriviallyDestructibleStruct) {
+  NewAndDeleteSingle<NonTriviallyDestructibleStruct, false>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeleteNonTriviallyDestructibleStructNoThrow) {
+  NewAndDeleteSingle<NonTriviallyDestructibleStruct, true>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       MakeUniquePtrNonTriviallyDestructibleStruct) {
+  MakeUniquePtrSingle<NonTriviallyDestructibleStruct>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeleteNonTriviallyDestructibleStructArray) {
+  NewAndDeleteTriplet<NonTriviallyDestructibleStruct, false>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeleteNonTriviallyDestructibleStructArrayNoThrow) {
+  NewAndDeleteTriplet<NonTriviallyDestructibleStruct, true>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       MakeUniquePtrNonTriviallyDestructibleStructArray) {
+  MakeUniquePtrTriplet<NonTriviallyDestructibleStruct>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, MakeVectorNonTriviallyDestructibleStruct) {
+  MakeVectorTriplet<NonTriviallyDestructibleStruct>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+
+// The padding (array cookie) size is larger for an over-aligned struct.
+struct alignas(128) NonTriviallyDestructibleAlignedStruct {
+  // NOLINTNEXTLINE(modernize-use-equals-default)
+  ~NonTriviallyDestructibleAlignedStruct() {}
+  char ignored;
+};
+static_assert(std::alignment_of_v<NonTriviallyDestructibleAlignedStruct> ==
+              128);
+static_assert(
+    !std::is_trivially_destructible_v<NonTriviallyDestructibleAlignedStruct>);
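+// With a 128-byte alignment, the array cookie itself is padded out to 128
+// bytes, as computed by `GetPaddingSize()`.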
+
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeleteNonTriviallyDestructibleAlignedStruct) {
+  NewAndDeleteSingle<NonTriviallyDestructibleAlignedStruct, false>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeleteNonTriviallyDestructibleAlignedStructNoThrow) {
+  NewAndDeleteSingle<NonTriviallyDestructibleAlignedStruct, true>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       MakeUniquePtrNonTriviallyDestructibleAlignedStruct) {
+  MakeUniquePtrSingle<NonTriviallyDestructibleAlignedStruct>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeleteNonTriviallyDestructibleAlignedStructArray) {
+  NewAndDeleteTriplet<NonTriviallyDestructibleAlignedStruct, false>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeleteNonTriviallyDestructibleAlignedStructArrayNoThrow) {
+  NewAndDeleteTriplet<NonTriviallyDestructibleAlignedStruct, true>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       MakeUniquePtrNonTriviallyDestructibleAlignedStructArray) {
+  MakeUniquePtrTriplet<NonTriviallyDestructibleAlignedStruct>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       MakeVectorNonTriviallyDestructibleAlignedStruct) {
+  MakeVectorTriplet<NonTriviallyDestructibleAlignedStruct>();
+  ASSERT_TRUE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_TRUE_IFF_SIZED(given_alignment_on_delete_);
+}
+
+// A class with a virtual destructor is deleted through its deleting
+// destructor.
+struct PolymorphicStruct {
+  virtual ~PolymorphicStruct() {}  // NOLINT(modernize-use-equals-default)
+  uint64_t ignored;
+};
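+// `delete p` on such a type dispatches to the most-derived class's deleting
+// destructor, which is what ultimately calls `operator delete` (with the
+// object's size when sized deallocation is enabled).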
+
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeletePolymorphicStruct) {
+  NewAndDeleteSingle<PolymorphicStruct, false>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeletePolymorphicStructNoThrow) {
+  NewAndDeleteSingle<PolymorphicStruct, true>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, MakeUniquePtrPolymorphicStruct) {
+  MakeUniquePtrSingle<PolymorphicStruct>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, NewAndDeletePolymorphicStructArray) {
+  NewAndDeleteTriplet<PolymorphicStruct, false>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest,
+       NewAndDeletePolymorphicStructArrayNoThrow) {
+  NewAndDeleteTriplet<PolymorphicStruct, true>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, MakeUniquePtrPolymorphicStructArray) {
+  MakeUniquePtrTriplet<PolymorphicStruct>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+TEST_F(AllocatorShimCppOperatorTest, MakeVectorPolymorphicStruct) {
+  MakeVectorTriplet<PolymorphicStruct>();
+  ASSERT_FALSE(given_alignment_on_alloc_);
+  ASSERT_TRUE_IFF_SIZED(given_size_on_delete_);
+  ASSERT_FALSE(given_alignment_on_delete_);
+}
+
 #if PA_BUILDFLAG( \
     ENABLE_ALLOCATOR_SHIM_PARTITION_ALLOC_DISPATCH_WITH_ADVANCED_CHECKS_SUPPORT)
 
@@ -888,6 +1536,15 @@ void* MockReallocUncheckedWithAdvancedChecks(void*, size_t, void*);
 
 void MockFreeWithAdvancedChecks(void*, void*);
 
+void MockFreeWithSizeWithAdvancedChecks(void*, size_t, void*);
+
+void MockFreeWithAlignmentWithAdvancedChecks(void*, size_t, void*);
+
+void MockFreeWithSizeAndAlignmentWithAdvancedChecks(void*,
+                                                    size_t,
+                                                    size_t,
+                                                    void*);
+
 size_t MockGetSizeEstimateWithAdvancedChecks(void*, void*);
 
 size_t MockGoodSizeWithAdvancedChecks(size_t, void*);
@@ -898,8 +1555,6 @@ unsigned MockBatchMallocWithAdvancedChecks(size_t, void**, unsigned, void*);
 
 void MockBatchFreeWithAdvancedChecks(void**, unsigned, void*);
 
-void MockFreeDefiniteSizeWithAdvancedChecks(void*, size_t, void*);
-
 void MockTryFreeDefaultWithAdvancedChecks(void*, void*);
 
 void* MockAlignedMallocWithAdvancedChecks(size_t, size_t, void*);
@@ -926,12 +1581,15 @@ AllocatorDispatch g_mock_dispatch_for_advanced_checks = {
     .realloc_function = &MockReallocWithAdvancedChecks,
     .realloc_unchecked_function = &MockReallocUncheckedWithAdvancedChecks,
     .free_function = &MockFreeWithAdvancedChecks,
+    .free_with_size_function = &MockFreeWithSizeWithAdvancedChecks,
+    .free_with_alignment_function = &MockFreeWithAlignmentWithAdvancedChecks,
+    .free_with_size_and_alignment_function =
+        &MockFreeWithSizeAndAlignmentWithAdvancedChecks,
     .get_size_estimate_function = &MockGetSizeEstimateWithAdvancedChecks,
     .good_size_function = &MockGoodSizeWithAdvancedChecks,
     .claimed_address_function = &MockClaimedAddressWithAdvancedChecks,
     .batch_malloc_function = &MockBatchMallocWithAdvancedChecks,
     .batch_free_function = &MockBatchFreeWithAdvancedChecks,
-    .free_definite_size_function = &MockFreeDefiniteSizeWithAdvancedChecks,
     .try_free_default_function = &MockTryFreeDefaultWithAdvancedChecks,
     .aligned_malloc_function = &MockAlignedMallocWithAdvancedChecks,
     .aligned_malloc_unchecked_function =
@@ -990,6 +1648,32 @@ void MockFreeWithAdvancedChecks(void* address, void* context) {
   g_mock_dispatch_for_advanced_checks.next->free_function(address, context);
 }
 
+void MockFreeWithSizeWithAdvancedChecks(void* address,
+                                        size_t size,
+                                        void* context) {
+  g_mock_free_with_advanced_checks_count++;
+  g_mock_dispatch_for_advanced_checks.next->free_with_size_function(
+      address, size, context);
+}
+
+void MockFreeWithAlignmentWithAdvancedChecks(void* address,
+                                             size_t alignment,
+                                             void* context) {
+  g_mock_free_with_advanced_checks_count++;
+  g_mock_dispatch_for_advanced_checks.next->free_with_alignment_function(
+      address, alignment, context);
+}
+
+void MockFreeWithSizeAndAlignmentWithAdvancedChecks(void* address,
+                                                    size_t size,
+                                                    size_t alignment,
+                                                    void* context) {
+  g_mock_free_with_advanced_checks_count++;
+  g_mock_dispatch_for_advanced_checks.next
+      ->free_with_size_and_alignment_function(address, size, alignment,
+                                              context);
+}
+
 size_t MockGetSizeEstimateWithAdvancedChecks(void* address, void* context) {
   // no-op.
   return g_mock_dispatch_for_advanced_checks.next->get_size_estimate_function(
@@ -1025,14 +1709,6 @@ void MockBatchFreeWithAdvancedChecks(void** to_be_freed,
       to_be_freed, num_to_be_freed, context);
 }
 
-void MockFreeDefiniteSizeWithAdvancedChecks(void* address,
-                                            size_t size,
-                                            void* context) {
-  g_mock_free_with_advanced_checks_count++;
-  g_mock_dispatch_for_advanced_checks.next->free_definite_size_function(
-      address, size, context);
-}
-
 void MockTryFreeDefaultWithAdvancedChecks(void* address, void* context) {
   // no-op.
   g_mock_dispatch_for_advanced_checks.next->try_free_default_function(address,
diff --git a/base/allocator/partition_allocator/src/partition_alloc/shim/shim_alloc_functions.h b/base/allocator/partition_allocator/src/partition_alloc/shim/shim_alloc_functions.h
index b183df83a851c..7ae82b01a6672 100644
--- a/base/allocator/partition_allocator/src/partition_alloc/shim/shim_alloc_functions.h
+++ b/base/allocator/partition_allocator/src/partition_alloc/shim/shim_alloc_functions.h
@@ -112,6 +112,42 @@ PA_ALWAYS_INLINE void ShimCppDelete(void* address) {
   return chain_head->free_function(address, context);
 }
 
+#if PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+PA_ALWAYS_INLINE void ShimCppDeleteWithSize(void* address, size_t size) {
+  const allocator_shim::AllocatorDispatch* const chain_head =
+      allocator_shim::internal::GetChainHead();
+  void* context = nullptr;
+#if PA_BUILDFLAG(IS_APPLE) && !PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
+  context = malloc_default_zone();
+#endif
+  return chain_head->free_with_size_function(address, size, context);
+}
+
+PA_ALWAYS_INLINE void ShimCppDeleteWithAlignment(void* address,
+                                                 size_t alignment) {
+  const allocator_shim::AllocatorDispatch* const chain_head =
+      allocator_shim::internal::GetChainHead();
+  void* context = nullptr;
+#if PA_BUILDFLAG(IS_APPLE) && !PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
+  context = malloc_default_zone();
+#endif
+  return chain_head->free_with_alignment_function(address, alignment, context);
+}
+
+PA_ALWAYS_INLINE void ShimCppDeleteWithSizeAndAlignment(void* address,
+                                                        size_t size,
+                                                        size_t alignment) {
+  const allocator_shim::AllocatorDispatch* const chain_head =
+      allocator_shim::internal::GetChainHead();
+  void* context = nullptr;
+#if PA_BUILDFLAG(IS_APPLE) && !PA_BUILDFLAG(USE_PARTITION_ALLOC_AS_MALLOC)
+  context = malloc_default_zone();
+#endif
+  return chain_head->free_with_size_and_alignment_function(address, size,
+                                                           alignment, context);
+}
+#endif  // PA_BUILDFLAG(SHIM_SUPPORTS_SIZED_DEALLOC)
+
 PA_ALWAYS_INLINE void* ShimMalloc(size_t size, void* context) {
   const allocator_shim::AllocatorDispatch* const chain_head =
       allocator_shim::internal::GetChainHead();
@@ -213,6 +249,12 @@ PA_ALWAYS_INLINE void ShimFree(void* address, void* context) {
   return chain_head->free_function(address, context);
 }
 
+PA_ALWAYS_INLINE void ShimFreeWithSize(void* ptr, size_t size, void* context) {
+  const allocator_shim::AllocatorDispatch* const chain_head =
+      allocator_shim::internal::GetChainHead();
+  return chain_head->free_with_size_function(ptr, size, context);
+}
+
 PA_ALWAYS_INLINE size_t ShimGetSizeEstimate(const void* address,
                                             void* context) {
   const allocator_shim::AllocatorDispatch* const chain_head =
@@ -251,14 +293,6 @@ PA_ALWAYS_INLINE void ShimBatchFree(void** to_be_freed,
   return chain_head->batch_free_function(to_be_freed, num_to_be_freed, context);
 }
 
-PA_ALWAYS_INLINE void ShimFreeDefiniteSize(void* ptr,
-                                           size_t size,
-                                           void* context) {
-  const allocator_shim::AllocatorDispatch* const chain_head =
-      allocator_shim::internal::GetChainHead();
-  return chain_head->free_definite_size_function(ptr, size, context);
-}
-
 PA_ALWAYS_INLINE void ShimTryFreeDefault(void* ptr, void* context) {
   const allocator_shim::AllocatorDispatch* const chain_head =
       allocator_shim::internal::GetChainHead();
diff --git a/base/debug/stack_trace_unittest.cc b/base/debug/stack_trace_unittest.cc
index b2b4105f0b914..15918b7488da0 100644
--- a/base/debug/stack_trace_unittest.cc
+++ b/base/debug/stack_trace_unittest.cc
@@ -205,12 +205,14 @@ allocator_shim::AllocatorDispatch g_bad_malloc_dispatch = {
     &BadRealloc,        /* realloc_function */
     &BadRealloc,        /* realloc_unchecked_function */
     &BadFree,           /* free_function */
+    nullptr,            /* free_with_size_function */
+    nullptr,            /* free_with_alignment_function */
+    nullptr,            /* free_with_size_and_alignment_function */
     nullptr,            /* get_size_estimate_function */
     nullptr,            /* good_size_function */
     nullptr,            /* claimed_address_function */
     nullptr,            /* batch_malloc_function */
     nullptr,            /* batch_free_function */
-    nullptr,            /* free_definite_size_function */
     nullptr,            /* try_free_default_function */
     &BadAlignedAlloc,   /* aligned_malloc_function */
     &BadAlignedAlloc,   /* aligned_malloc_unchecked_function */
diff --git a/build_overrides/partition_alloc.gni b/build_overrides/partition_alloc.gni
index 4d135093e1bea..ae7040d2d2b95 100644
--- a/build_overrides/partition_alloc.gni
+++ b/build_overrides/partition_alloc.gni
@@ -111,6 +111,8 @@ if (is_win && is_component_build && (!use_custom_libcxx || libcxx_is_shared)) {
   use_allocator_shim_default = false
 }
 
+shim_supports_sized_dealloc_default = use_sized_deallocation
+
 use_partition_alloc_as_malloc_default =
     use_allocator_shim_default && _is_partition_alloc_everywhere_platform &&
     !_disable_partition_alloc_everywhere
diff --git a/components/gwp_asan/client/extreme_lightweight_detector_malloc_shims.cc b/components/gwp_asan/client/extreme_lightweight_detector_malloc_shims.cc
index 02d6fc6268451..f473d9ceed66d 100644
--- a/components/gwp_asan/client/extreme_lightweight_detector_malloc_shims.cc
+++ b/components/gwp_asan/client/extreme_lightweight_detector_malloc_shims.cc
@@ -96,7 +96,7 @@ bool TryInitSlow() {
   //
   // This code runs only on the codepaths of deallocations (`free`, `delete`,
   // etc.) and _never_ runs on the codepaths of allocations (`malloc`, `new`,
-  // etc.) because this allocator shim hooks only FreeFn, FreeDefiniteSizeFn,
+  // etc.) because this allocator shim hooks only FreeFn, FreeWithSizeFn,
   // etc. So, it's safe to allocate memory here as it doesn't recurse, however,
   // it's _NOT_ allowed to deallocate memory here as it _does_ recurse.
   //
@@ -218,16 +218,40 @@ void FreeFn(void* address, void* context) {
   MUSTTAIL return allocator_dispatch.next->free_function(address, context);
 }
 
-void FreeDefiniteSizeFn(void* address, size_t size, void* context) {
+void FreeWithSizeFn(void* address, size_t size, void* context) {
   if (sampling_state.Sample()) [[unlikely]] {
     if (Quarantine(address)) [[likely]] {
       return;
     }
   }
-  MUSTTAIL return allocator_dispatch.next->free_definite_size_function(
+  MUSTTAIL return allocator_dispatch.next->free_with_size_function(
       address, size, context);
 }
 
+void FreeWithAlignmentFn(void* address, size_t alignment, void* context) {
+  if (sampling_state.Sample()) [[unlikely]] {
+    if (Quarantine(address)) [[likely]] {
+      return;
+    }
+  }
+  MUSTTAIL return allocator_dispatch.next->free_with_alignment_function(
+      address, alignment, context);
+}
+
+void FreeWithSizeAndAlignmentFn(void* address,
+                                size_t size,
+                                size_t alignment,
+                                void* context) {
+  if (sampling_state.Sample()) [[unlikely]] {
+    if (Quarantine(address)) [[likely]] {
+      return;
+    }
+  }
+  MUSTTAIL return allocator_dispatch.next
+      ->free_with_size_and_alignment_function(address, size, alignment,
+                                              context);
+}
+
 AllocatorDispatch allocator_dispatch = {
     nullptr,  // alloc_function
     nullptr,  // alloc_unchecked_function
@@ -235,17 +259,19 @@ AllocatorDispatch allocator_dispatch = {
     nullptr,  // alloc_aligned_function
     // realloc doesn't always deallocate memory, so the Extreme LUD doesn't
     // support realloc.
-    nullptr,  // realloc_function
-    nullptr,  // realloc_unchecked_function
-    FreeFn,   // free_function
-    nullptr,  // get_size_estimate_function
-    nullptr,  // good_size_function
-    nullptr,  // claimed_address_function
-    nullptr,  // batch_malloc_function
+    nullptr,                     // realloc_function
+    nullptr,                     // realloc_unchecked_function
+    FreeFn,                      // free_function
+    FreeWithSizeFn,              // free_with_size_function
+    FreeWithAlignmentFn,         // free_with_alignment_function
+    FreeWithSizeAndAlignmentFn,  // free_with_size_and_alignment_function
+    nullptr,                     // get_size_estimate_function
+    nullptr,                     // good_size_function
+    nullptr,                     // claimed_address_function
+    nullptr,                     // batch_malloc_function
     // batch_free is rarely used, so the Extreme LUD doesn't support batch_free
     // (at least for now).
-    nullptr,             // batch_free_function
-    FreeDefiniteSizeFn,  // free_definite_size_function
+    nullptr,  // batch_free_function
     // try_free_default is rarely used, so the Extreme LUD doesn't support
     // try_free_default (at least for now).
     nullptr,  // try_free_default_function
diff --git a/components/gwp_asan/client/lightweight_detector/malloc_shims.cc b/components/gwp_asan/client/lightweight_detector/malloc_shims.cc
index 1d19ad5a40246..ef0959d5f76f2 100644
--- a/components/gwp_asan/client/lightweight_detector/malloc_shims.cc
+++ b/components/gwp_asan/client/lightweight_detector/malloc_shims.cc
@@ -62,16 +62,40 @@ void FreeFn(void* address, void* context) {
   MUSTTAIL return g_allocator_dispatch.next->free_function(address, context);
 }
 
-void FreeDefiniteSizeFn(void* address, size_t size, void* context) {
+void FreeWithSizeFn(void* address, size_t size, void* context) {
   if (MaybeQuarantine(address, size, context,
-                      FreeFunctionKind::kFreeDefiniteSize)) {
+                      FreeFunctionKind::kFreeWithSize)) {
     return;
   }
 
-  MUSTTAIL return g_allocator_dispatch.next->free_definite_size_function(
+  MUSTTAIL return g_allocator_dispatch.next->free_with_size_function(
       address, size, context);
 }
 
+void FreeWithAlignmentFn(void* address, size_t alignment, void* context) {
+  if (MaybeQuarantine(address, std::nullopt, context,
+                      FreeFunctionKind::kFreeWithAlignment)) {
+    return;
+  }
+
+  MUSTTAIL return g_allocator_dispatch.next->free_with_alignment_function(
+      address, alignment, context);
+}
+
+void FreeWithSizeAndAlignmentFn(void* address,
+                                size_t size,
+                                size_t alignment,
+                                void* context) {
+  if (MaybeQuarantine(address, size, context,
+                      FreeFunctionKind::kFreeWithSizeAndAlignment)) {
+    return;
+  }
+
+  MUSTTAIL return g_allocator_dispatch.next
+      ->free_with_size_and_alignment_function(address, size, alignment,
+                                              context);
+}
+
 void TryFreeDefaultFn(void* address, void* context) {
   if (MaybeQuarantine(address, std::nullopt, context,
                       FreeFunctionKind::kTryFreeDefault)) {
@@ -93,26 +117,28 @@ static void AlignedFreeFn(void* address, void* context) {
 }
 
 AllocatorDispatch g_allocator_dispatch = {
-    nullptr,             // alloc_function
-    nullptr,             // alloc_unchecked_function
-    nullptr,             // alloc_zero_initialized_function
-    nullptr,             // alloc_aligned_function
-    nullptr,             // realloc_function
-    nullptr,             // realloc_unchecked_function
-    FreeFn,              // free_function
-    nullptr,             // get_size_estimate_function
-    nullptr,             // good_size_function
-    nullptr,             // claimed_address_function
-    nullptr,             // batch_malloc_function
-    nullptr,             // batch_free_function
-    FreeDefiniteSizeFn,  // free_definite_size_function
-    TryFreeDefaultFn,    // try_free_default_function
-    nullptr,             // aligned_malloc_function
-    nullptr,             // aligned_malloc_unchecked_function
-    nullptr,             // aligned_realloc_function
-    nullptr,             // aligned_realloc_unchecked_function
-    AlignedFreeFn,       // aligned_free_function
-    nullptr              // next
+    nullptr,                     // alloc_function
+    nullptr,                     // alloc_unchecked_function
+    nullptr,                     // alloc_zero_initialized_function
+    nullptr,                     // alloc_aligned_function
+    nullptr,                     // realloc_function
+    nullptr,                     // realloc_unchecked_function
+    FreeFn,                      // free_function
+    FreeWithSizeFn,              // free_with_size_function
+    FreeWithAlignmentFn,         // free_with_alignment_function
+    FreeWithSizeAndAlignmentFn,  // free_with_size_and_alignment_function
+    nullptr,                     // get_size_estimate_function
+    nullptr,                     // good_size_function
+    nullptr,                     // claimed_address_function
+    nullptr,                     // batch_malloc_function
+    nullptr,                     // batch_free_function
+    TryFreeDefaultFn,            // try_free_default_function
+    nullptr,                     // aligned_malloc_function
+    nullptr,                     // aligned_malloc_unchecked_function
+    nullptr,                     // aligned_realloc_function
+    nullptr,                     // aligned_realloc_unchecked_function
+    AlignedFreeFn,               // aligned_free_function
+    nullptr                      // next
 };
 
 }  // namespace
@@ -145,9 +171,19 @@ void FinishFree(const AllocationInfo& allocation) {
     case FreeFunctionKind::kFree:
       next->free_function(allocation.address, context);
       break;
-    case FreeFunctionKind::kFreeDefiniteSize:
-      next->free_definite_size_function(allocation.address, allocation.size,
-                                        context);
+    case FreeFunctionKind::kFreeWithSize:
+      next->free_with_size_function(allocation.address, allocation.size,
+                                    context);
+      break;
+    case FreeFunctionKind::kFreeWithAlignment:
+      // TODO(crbug.com/412358843): Record and forward alignment information.
+      next->free_function(allocation.address, context);
+      break;
+    case FreeFunctionKind::kFreeWithSizeAndAlignment:
+      // TODO(crbug.com/412358843): As above, forward the alignment
+      // information. Do not forward the size here: a size without the
+      // matching alignment can confuse the allocator.
+      next->free_function(allocation.address, context);
       break;
     case FreeFunctionKind::kTryFreeDefault:
       next->try_free_default_function(allocation.address, context);
diff --git a/components/gwp_asan/client/lightweight_detector/malloc_shims.h b/components/gwp_asan/client/lightweight_detector/malloc_shims.h
index 8e316793530d0..255383ea4569b 100644
--- a/components/gwp_asan/client/lightweight_detector/malloc_shims.h
+++ b/components/gwp_asan/client/lightweight_detector/malloc_shims.h
@@ -19,7 +19,9 @@ namespace gwp_asan::internal::lud {
 enum class FreeFunctionKind : uint8_t {
   kUnknown,
   kFree,
-  kFreeDefiniteSize,
+  kFreeWithSize,
+  kFreeWithAlignment,
+  kFreeWithSizeAndAlignment,
   kTryFreeDefault,
   kAlignedFree,
 };
diff --git a/components/gwp_asan/client/sampling_malloc_shims.cc b/components/gwp_asan/client/sampling_malloc_shims.cc
index 08457abc4ad89..9c3cae5d9f05c 100644
--- a/components/gwp_asan/client/sampling_malloc_shims.cc
+++ b/components/gwp_asan/client/sampling_malloc_shims.cc
@@ -160,6 +160,44 @@ void FreeFn(void* address, void* context) {
   g_allocator_dispatch.next->free_function(address, context);
 }
 
+void FreeWithSizeFn(void* address, size_t size, void* context) {
+  if (gpa->PointerIsMine(address)) [[unlikely]] {
+    // TODO(vtsyrklevich): Perform this check in GuardedPageAllocator and report
+    // failed checks using the same pipeline.
+    CHECK_EQ(size, gpa->GetRequestedSize(address));
+    gpa->Deallocate(address);
+    return;
+  }
+
+  g_allocator_dispatch.next->free_with_size_function(address, size, context);
+}
+
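+// Unlike the size-carrying variants, there is no requested size to validate
+// here, so GWP-ASan-owned allocations are simply handed back to the
+// GuardedPageAllocator.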
+void FreeWithAlignmentFn(void* address, size_t alignment, void* context) {
+  if (gpa->PointerIsMine(address)) [[unlikely]] {
+    gpa->Deallocate(address);
+    return;
+  }
+
+  g_allocator_dispatch.next->free_with_alignment_function(address, alignment,
+                                                          context);
+}
+
+void FreeWithSizeAndAlignmentFn(void* address,
+                                size_t size,
+                                size_t alignment,
+                                void* context) {
+  if (gpa->PointerIsMine(address)) [[unlikely]] {
+    // TODO(vtsyrklevich): Perform this check in GuardedPageAllocator and report
+    // failed checks using the same pipeline.
+    CHECK_EQ(size, gpa->GetRequestedSize(address));
+    gpa->Deallocate(address);
+    return;
+  }
+
+  g_allocator_dispatch.next->free_with_size_and_alignment_function(
+      address, size, alignment, context);
+}
+
 size_t GetSizeEstimateFn(void* address, void* context) {
   if (gpa->PointerIsMine(address)) [[unlikely]] {
     return gpa->GetRequestedSize(address);
@@ -212,19 +250,6 @@ void BatchFreeFn(void** to_be_freed, unsigned num_to_be_freed, void* context) {
                                                  context);
 }
 
-void FreeDefiniteSizeFn(void* address, size_t size, void* context) {
-  if (gpa->PointerIsMine(address)) [[unlikely]] {
-    // TODO(vtsyrklevich): Perform this check in GuardedPageAllocator and report
-    // failed checks using the same pipeline.
-    CHECK_EQ(size, gpa->GetRequestedSize(address));
-    gpa->Deallocate(address);
-    return;
-  }
-
-  g_allocator_dispatch.next->free_definite_size_function(address, size,
-                                                         context);
-}
-
 void TryFreeDefaultFn(void* address, void* context) {
   if (gpa->PointerIsMine(address)) [[unlikely]] {
     gpa->Deallocate(address);
@@ -335,12 +360,14 @@ AllocatorDispatch g_allocator_dispatch = {
     &ReallocFn,
     &ReallocUncheckedFn,
     &FreeFn,
+    &FreeWithSizeFn,
+    &FreeWithAlignmentFn,
+    &FreeWithSizeAndAlignmentFn,
     &GetSizeEstimateFn,
     &GoodSizeFn,
     &ClaimedAddressFn,
     &BatchMallocFn,
     &BatchFreeFn,
-    &FreeDefiniteSizeFn,
     &TryFreeDefaultFn,
     &AlignedMallocFn,
     &AlignedMallocUncheckedFn,