DanglingPtr: fix dangling ptr for UnretainedWrapper
Removing `DanglingUntriaged` from UnretainedWrapper is hard, because letting a
pointer dangle is actually a feature for developers using `Unretained`. To keep
the benefits of dangling pointer detection (DPD) on those pointers, we chose to
make them pointers that are not allowed to dangle at invocation time only.

This CL increases binary size, which is due to adding `ReportPointeeAlive`.

Binary-Size: Size increase is unavoidable (see above).
Bug: 1291138
Change-Id: Ib794c3b85af6630ef4141c80ef32757a515170a8
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/3829315
Reviewed-by: Sergei Glazunov <glazunov@google.com>
Owners-Override: Daniel Cheng <dcheng@chromium.org>
Reviewed-by: danakj <danakj@chromium.org>
Reviewed-by: Arthur Sonzogni <arthursonzogni@chromium.org>
Commit-Queue: Paul Semel <paulsemel@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Daniel Cheng <dcheng@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1050647}
Committed by: Chromium LUCI CQ
Parent: 86a0865c11
Commit: 21c70d1075
Changed paths:
  ash/system/holding_space
  base
    allocator
      partition_alloc_features.cc
      partition_alloc_features.h
      partition_alloc_support.cc
      partition_alloc_support.h
      partition_allocator
    functional
    memory
    observer_list_threadsafe.h
    win
  chrome/browser
    chromeos/policy
    media/router
    ui
  components
    download/internal
    history/core/browser
    performance_manager/freezing
    spellcheck/browser
  content
  ipc
  net
  third_party/blink/renderer/platform/loader
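Before the diff, a minimal sketch of the behavior this change introduces. It is modeled on the `BindUnretainedDanglingDeathTest` cases added in the bind unit tests below and assumes a BackupRefPtr-enabled build (`USE_BACKUP_REF_PTR`) with the `PartitionAllocUnretainedDanglingPtr` feature enabled; `Object`, `ReadValue()` and `Example()` are illustrative names, not part of this CL.

#include <utility>

#include "base/bind.h"
#include "base/callback.h"

// Illustrative type and functor; any raw_ptr-supported type behaves the same.
struct Object {
  int value = 3;
};

int ReadValue(Object* obj) {
  return obj->value;
}

void Example() {
  Object* obj = new Object();

  // Binding with Unretained() still allows the pointer to dangle while the
  // callback is merely stored...
  base::OnceCallback<int()> cb =
      base::BindOnce(&ReadValue, base::Unretained(obj));
  delete obj;

  // ...but invoking it now reports the dangling pointer before the functor
  // runs: a crash or a DumpWithoutCrashing() report, depending on the
  // feature's "mode" parameter.
  std::move(cb).Run();
}

UnsafeDangling() and UnsafeDanglingUntriaged() opt out of this check, and a callback whose WeakPtr receiver has already been invalidated is cancelled before the check runs, so it never reports.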
@@ -148,7 +148,8 @@ class HoldingSpaceAnimationRegistry::ProgressIndicatorAnimationDelegate
     auto animation = ProgressRingAnimation::CreateOfType(type);
     animation->AddUnsafeAnimationUpdatedCallback(base::BindRepeating(
         &ProgressIndicatorAnimationDelegate::OnRingAnimationUpdatedForKey,
-        base::Unretained(this), key, animation.get()));
+        base::Unretained(this), base::UnsafeDanglingUntriaged(key),
+        animation.get()));

     registry_->SetProgressRingAnimationForKey(key, std::move(animation))
         ->Start();
@@ -287,7 +288,8 @@ class HoldingSpaceAnimationRegistry::ProgressIndicatorAnimationDelegate
           if (registry->GetProgressRingAnimationForKey(key) == animation)
             registry->SetProgressRingAnimationForKey(key, nullptr);
         },
-        weak_factory_.GetWeakPtr(), key, animation));
+        weak_factory_.GetWeakPtr(), base::UnsafeDanglingUntriaged(key),
+        base::UnsafeDanglingUntriaged(animation)));
   }

   ProgressIndicatorAnimationRegistry* const registry_;
@@ -11,6 +11,24 @@
 namespace base {
 namespace features {

+BASE_FEATURE(kPartitionAllocUnretainedDanglingPtr,
+             "PartitionAllocUnretainedDanglingPtr",
+             FEATURE_DISABLED_BY_DEFAULT);
+
+constexpr FeatureParam<UnretainedDanglingPtrMode>::Option
+    kUnretainedDanglingPtrModeOption[] = {
+        {UnretainedDanglingPtrMode::kCrash, "crash"},
+        {UnretainedDanglingPtrMode::kDumpWithoutCrashing,
+         "dump_without_crashing"},
+};
+const base::FeatureParam<UnretainedDanglingPtrMode>
+    kUnretainedDanglingPtrModeParam = {
+        &kPartitionAllocUnretainedDanglingPtr,
+        "mode",
+        UnretainedDanglingPtrMode::kDumpWithoutCrashing,
+        &kUnretainedDanglingPtrModeOption,
+};
+
 BASE_FEATURE(kPartitionAllocDanglingPtr,
              "PartitionAllocDanglingPtr",
              FEATURE_DISABLED_BY_DEFAULT);
@@ -15,6 +15,14 @@
 namespace base {
 namespace features {

+extern const BASE_EXPORT Feature kPartitionAllocUnretainedDanglingPtr;
+enum class UnretainedDanglingPtrMode {
+  kCrash,
+  kDumpWithoutCrashing,
+};
+extern const BASE_EXPORT base::FeatureParam<UnretainedDanglingPtrMode>
+    kUnretainedDanglingPtrModeParam;
+
 // See /docs/dangling_ptr.md
 //
 // Usage:
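The two hunks above define the `PartitionAllocUnretainedDanglingPtr` feature together with its `mode` parameter (`crash` or `dump_without_crashing`, defaulting to the latter). Assuming Chromium's usual `--enable-features` parameter syntax, a local run could opt into the crashing mode with a command line along these lines (illustrative, not part of this CL):

--enable-features=PartitionAllocUnretainedDanglingPtr:mode/crash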
@@ -14,6 +14,7 @@
 #include "base/allocator/partition_allocator/allocation_guard.h"
+#include "base/allocator/partition_allocator/dangling_raw_ptr_checks.h"
 #include "base/allocator/partition_allocator/memory_reclaimer.h"
 #include "base/allocator/partition_allocator/partition_alloc_base/debug/alias.h"
 #include "base/allocator/partition_allocator/partition_alloc_buildflags.h"
 #include "base/allocator/partition_allocator/partition_alloc_check.h"
 #include "base/allocator/partition_allocator/partition_alloc_config.h"
@@ -22,7 +23,9 @@
 #include "base/bind.h"
 #include "base/callback.h"
 #include "base/check.h"
+#include "base/debug/dump_without_crashing.h"
 #include "base/debug/stack_trace.h"
 #include "base/debug/task_trace.h"
 #include "base/feature_list.h"
 #include "base/immediate_crash.h"
 #include "base/metrics/histogram_functions.h"
@@ -543,5 +546,48 @@ void InstallDanglingRawPtrChecks() {
 void InstallDanglingRawPtrChecks() {}
 #endif  // BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)

+void UnretainedDanglingRawPtrDetectedDumpWithoutCrashing(uintptr_t id) {
+  PA_NO_CODE_FOLDING();
+  debug::DumpWithoutCrashing();
+}
+
+void UnretainedDanglingRawPtrDetectedCrash(uintptr_t id) {
+  debug::TaskTrace task_trace;
+  debug::StackTrace stack_trace;
+  if (!task_trace.empty()) {
+    LOG(ERROR) << "Detected dangling raw_ptr in unretained with id="
+               << StringPrintf("0x%016" PRIxPTR, id) << ":\n\n"
+               << task_trace << ":\n Stack trace:\n"
+               << stack_trace;
+  } else {
+    LOG(ERROR) << "Detected dangling raw_ptr in unretained with id="
+               << StringPrintf("0x%016" PRIxPTR, id) << ":\n\n"
+               << "Stack trace:\n"
+               << stack_trace;
+  }
+  IMMEDIATE_CRASH();
+}
+
+void InstallUnretainedDanglingRawPtrChecks() {
+  if (!FeatureList::IsEnabled(features::kPartitionAllocUnretainedDanglingPtr)) {
+    partition_alloc::SetUnretainedDanglingRawPtrDetectedFn([](uintptr_t) {});
+    partition_alloc::SetUnretainedDanglingRawPtrCheckEnabled(/*enabled=*/false);
+    return;
+  }
+
+  partition_alloc::SetUnretainedDanglingRawPtrCheckEnabled(/*enabled=*/true);
+  switch (features::kUnretainedDanglingPtrModeParam.Get()) {
+    case features::UnretainedDanglingPtrMode::kCrash:
+      partition_alloc::SetUnretainedDanglingRawPtrDetectedFn(
+          &UnretainedDanglingRawPtrDetectedCrash);
+      break;
+
+    case features::UnretainedDanglingPtrMode::kDumpWithoutCrashing:
+      partition_alloc::SetUnretainedDanglingRawPtrDetectedFn(
+          &UnretainedDanglingRawPtrDetectedDumpWithoutCrashing);
+      break;
+  }
+}
+
 }  // namespace allocator
 }  // namespace base
@@ -35,6 +35,7 @@ BASE_EXPORT std::map<std::string, std::string> ProposeSyntheticFinchTrials();
 // This is currently effective, only when compiled with
 // `enable_dangling_raw_ptr_checks` build flag.
 BASE_EXPORT void InstallDanglingRawPtrChecks();
+BASE_EXPORT void InstallUnretainedDanglingRawPtrChecks();

 }  // namespace allocator
 }  // namespace base
@@ -12,6 +12,9 @@ namespace partition_alloc {
 namespace {
 DanglingRawPtrDetectedFn* g_dangling_raw_ptr_detected_fn = [](uintptr_t) {};
 DanglingRawPtrReleasedFn* g_dangling_raw_ptr_released_fn = [](uintptr_t) {};
+DanglingRawPtrDetectedFn* g_unretained_dangling_raw_ptr_detected_fn =
+    [](uintptr_t) {};
+bool g_unretained_dangling_raw_ptr_check_enabled = false;
 }  // namespace

 DanglingRawPtrDetectedFn* GetDanglingRawPtrDetectedFn() {
@@ -34,6 +37,21 @@ void SetDanglingRawPtrReleasedFn(DanglingRawPtrReleasedFn fn) {
   g_dangling_raw_ptr_released_fn = fn;
 }

+DanglingRawPtrDetectedFn* GetUnretainedDanglingRawPtrDetectedFn() {
+  return g_unretained_dangling_raw_ptr_detected_fn;
+}
+
+void SetUnretainedDanglingRawPtrDetectedFn(DanglingRawPtrDetectedFn* fn) {
+  PA_DCHECK(fn);
+  g_unretained_dangling_raw_ptr_detected_fn = fn;
+}
+
+bool SetUnretainedDanglingRawPtrCheckEnabled(bool enabled) {
+  bool old = g_unretained_dangling_raw_ptr_check_enabled;
+  g_unretained_dangling_raw_ptr_check_enabled = enabled;
+  return old;
+}
+
 namespace internal {

 PA_COMPONENT_EXPORT(PARTITION_ALLOC) void DanglingRawPtrDetected(uintptr_t id) {
@@ -43,5 +61,15 @@ PA_COMPONENT_EXPORT(PARTITION_ALLOC) void DanglingRawPtrReleased(uintptr_t id) {
   g_dangling_raw_ptr_released_fn(id);
 }

+PA_COMPONENT_EXPORT(PARTITION_ALLOC)
+void UnretainedDanglingRawPtrDetected(uintptr_t id) {
+  g_unretained_dangling_raw_ptr_detected_fn(id);
+}
+
+PA_COMPONENT_EXPORT(PARTITION_ALLOC)
+bool IsUnretainedDanglingRawPtrCheckEnabled() {
+  return g_unretained_dangling_raw_ptr_check_enabled;
+}
+
 }  // namespace internal
 }  // namespace partition_alloc
@@ -35,6 +35,13 @@ DanglingRawPtrDetectedFn* GetDanglingRawPtrDetectedFn();
 PA_COMPONENT_EXPORT(PARTITION_ALLOC)
 void SetDanglingRawPtrDetectedFn(DanglingRawPtrDetectedFn);

+PA_COMPONENT_EXPORT(PARTITION_ALLOC)
+DanglingRawPtrDetectedFn* GetUnretainedDanglingRawPtrDetectedFn();
+PA_COMPONENT_EXPORT(PARTITION_ALLOC)
+void SetUnretainedDanglingRawPtrDetectedFn(DanglingRawPtrDetectedFn*);
+PA_COMPONENT_EXPORT(PARTITION_ALLOC)
+bool SetUnretainedDanglingRawPtrCheckEnabled(bool enabled);
+
 // DanglingRawPtrReleased: Called after DanglingRawPtrDetected(id), once the
 // last dangling raw_ptr stops referencing the memory region.
 //
@@ -49,6 +56,10 @@ namespace internal {

 PA_COMPONENT_EXPORT(PARTITION_ALLOC) void DanglingRawPtrDetected(uintptr_t id);
 PA_COMPONENT_EXPORT(PARTITION_ALLOC) void DanglingRawPtrReleased(uintptr_t id);
+PA_COMPONENT_EXPORT(PARTITION_ALLOC)
+void UnretainedDanglingRawPtrDetected(uintptr_t id);
+PA_COMPONENT_EXPORT(PARTITION_ALLOC)
+bool IsUnretainedDanglingRawPtrCheckEnabled();

 }  // namespace internal
 }  // namespace partition_alloc
@@ -3846,6 +3846,71 @@ TEST_P(PartitionAllocTest, RefCountRealloc) {
   }
 }

+int g_unretained_dangling_raw_ptr_detected_count = 0;
+
+class UnretainedDanglingRawPtrTest : public PartitionAllocTest {
+ public:
+  void SetUp() override {
+    PartitionAllocTest::SetUp();
+    g_unretained_dangling_raw_ptr_detected_count = 0;
+    old_detected_fn_ = partition_alloc::GetUnretainedDanglingRawPtrDetectedFn();
+
+    partition_alloc::SetUnretainedDanglingRawPtrDetectedFn(
+        &UnretainedDanglingRawPtrTest::DanglingRawPtrDetected);
+    old_unretained_dangling_ptr_enabled_ =
+        partition_alloc::SetUnretainedDanglingRawPtrCheckEnabled(true);
+  }
+  void TearDown() override {
+    partition_alloc::SetUnretainedDanglingRawPtrDetectedFn(old_detected_fn_);
+    partition_alloc::SetUnretainedDanglingRawPtrCheckEnabled(
+        old_unretained_dangling_ptr_enabled_);
+    PartitionAllocTest::TearDown();
+  }
+
+ private:
+  static void DanglingRawPtrDetected(uintptr_t) {
+    g_unretained_dangling_raw_ptr_detected_count++;
+  }
+
+  partition_alloc::DanglingRawPtrDetectedFn* old_detected_fn_;
+  bool old_unretained_dangling_ptr_enabled_;
+};
+
+INSTANTIATE_TEST_SUITE_P(AlternateBucketDistribution,
+                         UnretainedDanglingRawPtrTest,
+                         testing::Values(false, true));
+
+TEST_P(UnretainedDanglingRawPtrTest, UnretainedDanglingPtrNoReport) {
+  void* ptr = allocator.root()->Alloc(kTestAllocSize, type_name);
+  EXPECT_TRUE(ptr);
+  auto* ref_count =
+      PartitionRefCountPointer(allocator.root()->ObjectToSlotStart(ptr));
+  ref_count->Acquire();
+  EXPECT_TRUE(ref_count->IsAlive());
+  // Allocation is still live, so calling ReportIfDangling() should not result
+  // in any detections.
+  ref_count->ReportIfDangling();
+  EXPECT_EQ(g_unretained_dangling_raw_ptr_detected_count, 0);
+  EXPECT_FALSE(ref_count->Release());
+  allocator.root()->Free(ptr);
+}
+
+TEST_P(UnretainedDanglingRawPtrTest, UnretainedDanglingPtrShouldReport) {
+  void* ptr = allocator.root()->Alloc(kTestAllocSize, type_name);
+  EXPECT_TRUE(ptr);
+  auto* ref_count =
+      PartitionRefCountPointer(allocator.root()->ObjectToSlotStart(ptr));
+  ref_count->Acquire();
+  EXPECT_TRUE(ref_count->IsAlive());
+  allocator.root()->Free(ptr);
+  // At this point, memory shouldn't be alive...
+  EXPECT_FALSE(ref_count->IsAlive());
+  // ...and we should report the ptr as dangling.
+  ref_count->ReportIfDangling();
+  EXPECT_EQ(g_unretained_dangling_raw_ptr_detected_count, 1);
+  EXPECT_TRUE(ref_count->Release());
+}
+
 #endif  // BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)

 #if BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)
@@ -8,6 +8,7 @@
 #include <atomic>
 #include <cstdint>

+#include "base/allocator/partition_allocator/dangling_raw_ptr_checks.h"
 #include "base/allocator/partition_allocator/partition_alloc_base/compiler_specific.h"
 #include "base/allocator/partition_allocator/partition_alloc_base/component_export.h"
 #include "base/allocator/partition_allocator/partition_alloc_base/debug/debugging_buildflags.h"
@@ -20,10 +21,6 @@
 #include "base/allocator/partition_allocator/tagging.h"
 #include "build/build_config.h"

-#if BUILDFLAG(ENABLE_DANGLING_RAW_PTR_CHECKS)
-#include "base/allocator/partition_allocator/dangling_raw_ptr_checks.h"
-#endif
-
 namespace partition_alloc::internal {

 #if BUILDFLAG(ENABLE_BACKUP_REF_PTR_SUPPORT)
@@ -219,6 +216,18 @@ class PA_COMPONENT_EXPORT(PARTITION_ALLOC) PartitionRefCount {
     return alive;
   }

+  // Called when a raw_ptr is not banning dangling ptrs, but the user still
+  // wants to ensure the pointer is not currently dangling. This is currently
+  // used in UnretainedWrapper to make sure callbacks are not invoked with
+  // dangling pointers. If such a raw_ptr exists but the allocation is no longer
+  // alive, then we have a dangling pointer to a dead object.
+  PA_ALWAYS_INLINE void ReportIfDangling() {
+    if (!IsAlive()) {
+      partition_alloc::internal::UnretainedDanglingRawPtrDetected(
+          reinterpret_cast<uintptr_t>(this));
+    }
+  }
+
   // GWP-ASan slots are assigned an extra reference (note `kPtrInc` below) to
   // make sure the `raw_ptr<T>` release operation will never attempt to call the
   // PA `free` on such a slot. GWP-ASan takes the extra reference into account
@@ -116,10 +116,20 @@ struct BindFailedCheckPreviousErrors {};
 BindFailedCheckPreviousErrors BindOnce(...);
 BindFailedCheckPreviousErrors BindRepeating(...);

-// Unretained() allows binding a non-refcounted class, and to disable
-// refcounting on arguments that are refcounted objects.
+// Unretained(), UnsafeDangling() and UnsafeDanglingUntriaged() allow binding a
+// non-refcounted class, and to disable refcounting on arguments that are
+// refcounted. The main difference is whether or not the raw pointers will be
+// checked for dangling references (e.g. a pointer that points to an already
+// destroyed object) when the callback is run.
 //
-// EXAMPLE OF Unretained():
+// It is _required_ to use one of Unretained(), UnsafeDangling() or
+// UnsafeDanglingUntriaged() for raw pointer receivers now. For other arguments,
+// it remains optional. If not specified, default behavior is Unretained().
+
+// Unretained() pointers will be checked for dangling pointers when the
+// callback is run, *if* the callback has not been cancelled.
+//
+// Example of Unretained() usage:
 //
 // class Foo {
 //  public:
@@ -134,6 +144,31 @@ BindFailedCheckPreviousErrors BindRepeating(...);
 //
 // Without the Unretained() wrapper on |&foo|, the above call would fail
 // to compile because Foo does not support the AddRef() and Release() methods.
 //
+// Unretained() does not allow dangling pointers, e.g.:
+// class MyClass {
+//  public:
+//   OnError(int error);
+//  private:
+//   scoped_refptr<base::TaskRunner> runner_;
+//   std::unique_ptr<AnotherClass> obj_;
+// };
+//
+// void MyClass::OnError(int error) {
+//   // the pointer (which is also the receiver here) to `AnotherClass`
+//   // might dangle depending on when the task is invoked.
+//   runner_->PostTask(FROM_HERE, base::BindOnce(&AnotherClass::OnError,
+//                     base::Unretained(obj_.get()), error));
+//   // one of the way to solve this issue here would be:
+//   // runner_->PostTask(FROM_HERE,
+//   //                   base::BindOnce(&AnotherClass::OnError,
+//   //                                  base::Owned(std::move(obj_)), error));
+//   delete this;
+// }
+//
+// the above example is a BAD USAGE of Unretained(), which might result in a
+// use-after-free, as `AnotherClass::OnError` might be invoked with a dangling
+// pointer as receiver.
 template <typename T>
 inline internal::UnretainedWrapper<T> Unretained(T* o) {
   return internal::UnretainedWrapper<T>(o);
@@ -159,6 +194,81 @@ inline auto Unretained(raw_ref<T, I>&& o) {
   return internal::UnretainedRefWrapper(std::move(o));
 }

+// Similar to `Unretained()`, but allows dangling pointers, e.g.:
+//
+// class MyClass {
+//  public:
+//   DoSomething(HandlerClass* handler);
+//  private:
+//   void MyClass::DoSomethingInternal(HandlerClass::Id id,
+//                                     HandlerClass* handler);
+//
+//   std::unordered_map<HandlerClass::Id, HandlerClass*> handlers_;
+//   scoped_refptr<base::SequencedTaskRunner> runner_;
+//   base::Lock lock_;
+// };
+// void MyClass::DoSomething(HandlerClass* handler) {
+//   runner_->PostTask(FROM_HERE,
+//                     base::BindOnce(&MyClass::DoSomethingInternal,
+//                                    base::Unretained(this),
+//                                    handler->id(),
+//                                    base::Unretained(handler)));
+// }
+// void MyClass::DoSomethingInternal(HandlerClass::Id id,
+//                                   HandlerClass* handler) {
+//   base::AutoLock locker(lock_);
+//   if (handlers_.find(id) == std::end(handlers_)) return;
+//   // Now we can use `handler`.
+// }
+//
+// As `DoSomethingInternal` is run on a sequence (and we can imagine
+// `handlers_` being modified on it as well), we protect the function from
+// using a dangling `handler` by making sure it is still contained in the
+// map.
+//
+// Strongly prefer `Unretained()`. This is useful in limited situations such as
+// the one above.
+template <typename T>
+inline internal::UnretainedWrapper<T, DisableDanglingPtrDetection>
+UnsafeDangling(T* o) {
+  return internal::UnretainedWrapper<T, DisableDanglingPtrDetection>(o);
+}
+
+template <typename T, typename I>
+internal::UnretainedWrapper<T, DisableDanglingPtrDetection> UnsafeDangling(
+    const raw_ptr<T, I>& o) {
+  return internal::UnretainedWrapper<T, DisableDanglingPtrDetection>(o);
+}
+
+template <typename T, typename I>
+internal::UnretainedWrapper<T, DisableDanglingPtrDetection> UnsafeDangling(
+    raw_ptr<T, I>&& o) {
+  return internal::UnretainedWrapper<T, DisableDanglingPtrDetection>(
+      std::move(o));
+}
+
+// Like `UnsafeDangling()`, but used to annotate places that still need to be
+// triaged and either migrated to `Unretained()` and safer ownership patterns
+// (preferred) or `UnsafeDangling()` if the correct pattern to use is the one
+// in the `UnsafeDangling()` example above for example.
+template <typename T>
+inline internal::UnretainedWrapper<T, DanglingUntriaged>
+UnsafeDanglingUntriaged(T* o) {
+  return internal::UnretainedWrapper<T, DanglingUntriaged>(o);
+}
+
+template <typename T, typename I>
+internal::UnretainedWrapper<T, DanglingUntriaged> UnsafeDanglingUntriaged(
+    const raw_ptr<T, I>& o) {
+  return internal::UnretainedWrapper<T, DanglingUntriaged>(o);
+}
+
+template <typename T, typename I>
+internal::UnretainedWrapper<T, DanglingUntriaged> UnsafeDanglingUntriaged(
+    raw_ptr<T, I>&& o) {
+  return internal::UnretainedWrapper<T, DanglingUntriaged>(std::move(o));
+}
+
 // RetainedRef() accepts a ref counted object and retains a reference to it.
 // When the callback is called, the object is passed as a raw pointer.
 //
@@ -88,7 +88,7 @@ namespace internal {
 template <typename Functor, typename SFINAE = void>
 struct FunctorTraits;

-template <typename T>
+template <typename T, typename RawPtrType = base::RawPtrBanDanglingIfSupported>
 class UnretainedWrapper {
  public:
   explicit UnretainedWrapper(T* o) : ptr_(o) {}
@@ -103,7 +103,16 @@ class UnretainedWrapper {
   template <typename U = T, typename I>
   explicit UnretainedWrapper(raw_ptr<U, I>&& o) : ptr_(std::move(o)) {}

-  T* get() const { return ptr_; }
+  T* get() const {
+    // `ptr_` is either a `raw_ptr` (if `T` is a supported type) or a regular
+    // C++ pointer otherwise.
+    if constexpr (std::is_same_v<RawPtrType,
+                                 base::RawPtrBanDanglingIfSupported> &&
+                  !std::is_same_v<ImplType, T*>) {
+      ptr_.ReportIfDangling();
+    }
+    return ptr_;
+  }

  private:
 #if defined(PA_ENABLE_MTE_CHECKED_PTR_SUPPORT_WITH_64_BITS_POINTERS)
@@ -118,8 +127,16 @@ class UnretainedWrapper {
   // than `raw_ptr`) when `raw_ptr` is `MTECheckedPtr`.
   using ImplType = T*;
 #else
+  // `Unretained()` arguments often dangle by design (common design patterns
+  // consists of managing objects lifetime inside the callbacks themselves using
+  // stateful information), so disable direct dangling pointer detection of
+  // `ptr_`.
+  //
+  // If the callback is invoked, dangling pointer detection will be triggered
+  // before invoking the bound functor (unless stated other wise, see
+  // `UnsafeDangling()`), when retrieving the pointer value via `get()` above.
   using ImplType = std::conditional_t<raw_ptr_traits::IsSupportedType<T>::value,
-                                      raw_ptr<T, DanglingUntriaged>,
+                                      raw_ptr<T, DisableDanglingPtrDetection>,
                                       T*>;
 #endif  // defined(PA_ENABLE_MTE_CHECKED_PTR_SUPPORT_WITH_64_BITS_POINTERS)
   ImplType ptr_;
@@ -790,37 +807,42 @@ using MakeStorageType = typename StorageTraits<std::decay_t<T>>::Type;
 //
 // WeakCalls need special syntax that is applied to the first argument to check
 // if they should no-op themselves.
-template <bool is_weak_call, typename ReturnType>
+template <bool is_weak_call, typename ReturnType, size_t... indices>
 struct InvokeHelper;

-template <typename ReturnType>
-struct InvokeHelper<false, ReturnType> {
-  template <typename Functor, typename... RunArgs>
-  static inline ReturnType MakeItSo(Functor&& functor, RunArgs&&... args) {
+template <typename ReturnType, size_t... indices>
+struct InvokeHelper<false, ReturnType, indices...> {
+  template <typename Functor, typename BoundArgsTuple, typename... RunArgs>
+  static inline ReturnType MakeItSo(Functor&& functor,
+                                    BoundArgsTuple&& bound,
+                                    RunArgs&&... args) {
     using Traits = MakeFunctorTraits<Functor>;
-    return Traits::Invoke(std::forward<Functor>(functor),
-                          std::forward<RunArgs>(args)...);
+    return Traits::Invoke(
+        std::forward<Functor>(functor),
+        Unwrap(std::get<indices>(std::forward<BoundArgsTuple>(bound)))...,
+        std::forward<RunArgs>(args)...);
   }
 };

-template <typename ReturnType>
-struct InvokeHelper<true, ReturnType> {
+template <typename ReturnType, size_t... indices>
+struct InvokeHelper<true, ReturnType, indices...> {
   // WeakCalls are only supported for functions with a void return type.
   // Otherwise, the function result would be undefined if the WeakPtr<>
   // is invalidated.
   static_assert(std::is_void_v<ReturnType>,
                 "weak_ptrs can only bind to methods without return values");

-  template <typename Functor, typename BoundWeakPtr, typename... RunArgs>
+  template <typename Functor, typename BoundArgsTuple, typename... RunArgs>
   static inline void MakeItSo(Functor&& functor,
-                              BoundWeakPtr&& weak_ptr,
+                              BoundArgsTuple&& bound,
                               RunArgs&&... args) {
-    if (!weak_ptr)
+    if (!std::get<0>(bound))
       return;
     using Traits = MakeFunctorTraits<Functor>;
-    Traits::Invoke(std::forward<Functor>(functor),
-                   std::forward<BoundWeakPtr>(weak_ptr),
-                   std::forward<RunArgs>(args)...);
+    Traits::Invoke(
+        std::forward<Functor>(functor),
+        Unwrap(std::get<indices>(std::forward<BoundArgsTuple>(bound)))...,
+        std::forward<RunArgs>(args)...);
   }
 };
@@ -862,7 +884,7 @@ struct Invoker<StorageType, R(UnboundArgs...)> {
   template <typename Functor, typename BoundArgsTuple, size_t... indices>
   static inline R RunImpl(Functor&& functor,
                           BoundArgsTuple&& bound,
-                          std::index_sequence<indices...>,
+                          std::index_sequence<indices...> seq,
                           UnboundArgs&&... unbound_args) {
     static constexpr bool is_method = MakeFunctorTraits<Functor>::is_method;
@@ -879,9 +901,18 @@ struct Invoker<StorageType, R(UnboundArgs...)> {
         IsWeakMethod<is_method,
                      std::tuple_element_t<indices, DecayedArgsTuple>...>();

-    return InvokeHelper<is_weak_call, R>::MakeItSo(
-        std::forward<Functor>(functor),
-        Unwrap(std::get<indices>(std::forward<BoundArgsTuple>(bound)))...,
+    // Do not `Unwrap()` here, as that immediately triggers dangling pointer
+    // detection. Dangling pointer detection should only be triggered if the
+    // callback is not cancelled, but cancellation status is not determined
+    // until later inside the InvokeHelper::MakeItSo specialization for weak
+    // calls.
+    //
+    // Dangling pointers when invoking a cancelled callback are not considered
+    // a memory safety error because protecting raw pointers usage with weak
+    // receivers (where the weak receiver usually own the pointed objects) is a
+    // common and broadly used pattern in the codebase.
+    return InvokeHelper<is_weak_call, R, indices...>::MakeItSo(
+        std::forward<Functor>(functor), std::forward<BoundArgsTuple>(bound),
         std::forward<UnboundArgs>(unbound_args)...);
   }
 };
@@ -1492,9 +1523,11 @@ struct BindUnwrapTraits {
   }
 };

-template <typename T>
-struct BindUnwrapTraits<internal::UnretainedWrapper<T>> {
-  static T* Unwrap(const internal::UnretainedWrapper<T>& o) { return o.get(); }
+template <typename T, typename ImplType>
+struct BindUnwrapTraits<internal::UnretainedWrapper<T, ImplType>> {
+  static T* Unwrap(const internal::UnretainedWrapper<T, ImplType>& o) {
+    return o.get();
+  }
 };

 template <typename T>
@@ -9,6 +9,10 @@
 #include <utility>
 #include <vector>

+#include "base/allocator/partition_alloc_features.h"
+#include "base/allocator/partition_alloc_support.h"
+#include "base/allocator/partition_allocator/dangling_raw_ptr_checks.h"
+#include "base/allocator/partition_allocator/partition_alloc.h"
 #include "base/functional/callback.h"
 #include "base/memory/ptr_util.h"
 #include "base/memory/raw_ptr.h"
@@ -18,6 +22,7 @@
 #include "base/strings/string_number_conversions.h"
 #include "base/test/bind.h"
 #include "base/test/gtest_util.h"
+#include "base/test/scoped_feature_list.h"
 #include "build/build_config.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"
@@ -1793,5 +1798,143 @@ TEST(BindDeathTest, BanFirstOwnerOfRefCountedType) {
   });
 }

+#if BUILDFLAG(USE_BACKUP_REF_PTR)
+
+void HandleOOM(size_t unused_size) {
+  LOG(FATAL) << "Out of memory";
+}
+
+// Basic set of options to mostly only enable `BackupRefPtr::kEnabled`.
+// This avoids the boilerplate of having too much options enabled for simple
+// testing purpose.
+static constexpr partition_alloc::PartitionOptions kOpts = {
+    partition_alloc::PartitionOptions::AlignedAlloc::kDisallowed,
+    partition_alloc::PartitionOptions::ThreadCache::kDisabled,
+    partition_alloc::PartitionOptions::Quarantine::kDisallowed,
+    partition_alloc::PartitionOptions::Cookie::kAllowed,
+    partition_alloc::PartitionOptions::BackupRefPtr::kEnabled,
+    partition_alloc::PartitionOptions::BackupRefPtrZapping::kEnabled,
+    partition_alloc::PartitionOptions::UseConfigurablePool::kNo,
+};
+
+class BindUnretainedDanglingInternalFixture : public BindTest {
+ public:
+  void SetUp() override {
+    partition_alloc::PartitionAllocGlobalInit(HandleOOM);
+    allocator_.init(kOpts);
+    enabled_feature_list_.InitWithFeaturesAndParameters(
+        {{features::kPartitionAllocUnretainedDanglingPtr, {{"mode", "crash"}}}},
+        {/* disabled_features */});
+    allocator::InstallUnretainedDanglingRawPtrChecks();
+  }
+  void TearDown() override {
+    enabled_feature_list_.Reset();
+    allocator::InstallUnretainedDanglingRawPtrChecks();
+    allocator_.root()->PurgeMemory(
+        partition_alloc::PurgeFlags::kDecommitEmptySlotSpans |
+        partition_alloc::PurgeFlags::kDiscardUnusedSystemPages);
+    partition_alloc::PartitionAllocGlobalUninitForTesting();
+  }
+
+  // In unit tests, allocations being tested need to live in a separate PA
+  // root so the test code doesn't interfere with various counters. Following
+  // methods are helpers for managing allocations inside the separate allocator
+  // root.
+  template <typename T, typename... Args>
+  raw_ptr<T> Alloc(Args&&... args) {
+    void* ptr = allocator_.root()->Alloc(sizeof(T), "");
+    T* p = new (reinterpret_cast<T*>(ptr)) T(std::forward<Args>(args)...);
+    return raw_ptr<T>(p);
+  }
+  template <typename T>
+  void Free(raw_ptr<T>& ptr) {
+    allocator_.root()->Free(ptr);
+  }
+
+ private:
+  test::ScopedFeatureList enabled_feature_list_;
+  partition_alloc::PartitionAllocator allocator_;
+};
+
+class BindUnretainedDanglingTest
+    : public BindUnretainedDanglingInternalFixture {};
+class BindUnretainedDanglingDeathTest
+    : public BindUnretainedDanglingInternalFixture {};
+
+bool PtrCheckFn(int* p) {
+  return p != nullptr;
+}
+
+class ClassWithWeakPtr {
+ public:
+  ClassWithWeakPtr() = default;
+  void RawPtrArg(int* p) { *p = 123; }
+  WeakPtr<ClassWithWeakPtr> GetWeakPtr() { return weak_factory_.GetWeakPtr(); }
+
+ private:
+  WeakPtrFactory<ClassWithWeakPtr> weak_factory_{this};
+};
+
+TEST_F(BindUnretainedDanglingTest, UnretainedNoDanglingPtr) {
+  raw_ptr<int> p = Alloc<int>(3);
+  auto callback = base::BindOnce(PingPong, base::Unretained(p));
+  EXPECT_EQ(std::move(callback).Run(), 3);
+  Free(p);
+}
+
+TEST_F(BindUnretainedDanglingTest, UnsafeDanglingPtr) {
+  raw_ptr<int> p = Alloc<int>(3);
+  auto callback = base::BindOnce(PtrCheckFn, base::UnsafeDangling(p));
+  Free(p);
+  EXPECT_EQ(std::move(callback).Run(), true);
+}
+
+TEST_F(BindUnretainedDanglingTest, UnsafeDanglingUntriagedPtr) {
+  raw_ptr<int> p = Alloc<int>(3);
+  auto callback = base::BindOnce(PtrCheckFn, base::UnsafeDanglingUntriaged(p));
+  Free(p);
+  EXPECT_EQ(std::move(callback).Run(), true);
+}
+
+TEST_F(BindUnretainedDanglingTest, UnretainedWeakReceiverValidNoDangling) {
+  raw_ptr<int> p = Alloc<int>(3);
+  std::unique_ptr<ClassWithWeakPtr> r = std::make_unique<ClassWithWeakPtr>();
+  auto callback = base::BindOnce(&ClassWithWeakPtr::RawPtrArg, r->GetWeakPtr(),
+                                 base::Unretained(p));
+  std::move(callback).Run();
+  EXPECT_EQ(*p, 123);
+  Free(p);
+}
+
+TEST_F(BindUnretainedDanglingTest, UnretainedWeakReceiverInvalidNoDangling) {
+  raw_ptr<int> p = Alloc<int>(3);
+  std::unique_ptr<ClassWithWeakPtr> r = std::make_unique<ClassWithWeakPtr>();
+  auto callback = base::BindOnce(&ClassWithWeakPtr::RawPtrArg, r->GetWeakPtr(),
+                                 base::Unretained(p));
+  r.reset();
+  Free(p);
+  std::move(callback).Run();
+  // Should reach this point without crashing; there is a dangling pointer, but
+  // the callback is cancelled because the WeakPtr is already invalidated.
+}
+
+TEST_F(BindUnretainedDanglingDeathTest, UnretainedDanglingPtr) {
+  raw_ptr<int> p = Alloc<int>(3);
+  auto callback = base::BindOnce(PingPong, base::Unretained(p));
+  Free(p);
+  EXPECT_DEATH(std::move(callback).Run(), "");
+}
+
+TEST_F(BindUnretainedDanglingDeathTest, UnretainedWeakReceiverDangling) {
+  raw_ptr<int> p = Alloc<int>(3);
+  std::unique_ptr<ClassWithWeakPtr> r = std::make_unique<ClassWithWeakPtr>();
+  auto callback = base::BindOnce(&ClassWithWeakPtr::RawPtrArg, r->GetWeakPtr(),
+                                 base::Unretained(p));
+  Free(p);
+  EXPECT_DEATH(std::move(callback).Run(), "");
+}
+
+#endif  // BUILDFLAG(USE_BACKUP_REF_PTR)
+
 }  // namespace
 }  // namespace base
@@ -6,6 +6,7 @@
 #include <cstdint>

 #include "base/allocator/buildflags.h"
+#include "base/allocator/partition_allocator/dangling_raw_ptr_checks.h"
 #include "base/process/process.h"

 // USE_BACKUP_REF_PTR implies USE_PARTITION_ALLOC, needed for code under
@@ -53,6 +54,19 @@ void BackupRefPtrImpl<AllowDangling>::ReleaseInternal(uintptr_t address) {
   }
 }

+template <bool AllowDangling>
+void BackupRefPtrImpl<AllowDangling>::ReportIfDanglingInternal(
+    uintptr_t address) {
+  if (partition_alloc::internal::IsUnretainedDanglingRawPtrCheckEnabled()) {
+    if (IsSupportedAndNotNull(address)) {
+      uintptr_t slot_start =
+          partition_alloc::PartitionAllocGetSlotStartInBRPPool(address);
+      partition_alloc::internal::PartitionRefCountPointer(slot_start)
+          ->ReportIfDangling();
+    }
+  }
+}
+
 template <bool AllowDangling>
 bool BackupRefPtrImpl<AllowDangling>::IsPointeeAlive(uintptr_t address) {
 #if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
@@ -60,6 +60,25 @@ namespace base {
 // NOTE: All methods should be `ALWAYS_INLINE`. raw_ptr is meant to be a
 // lightweight replacement of a raw pointer, hence performance is critical.

+// The following types are the different RawPtrType template option possible for
+// a `raw_ptr`:
+// - RawPtrMayDangle disables dangling pointers check when the object is
+//   released.
+// - RawPtrBanDanglingIfSupported may enable dangling pointers check on object
+//   destruction.
+//
+// We describe those types here so that they can be used outside of `raw_ptr` as
+// object markers, and their meaning might vary depending on where those markers
+// are being used. For instance, we are using those in `UnretainedWrapper` to
+// change behavior depending on RawPtrType.
+struct RawPtrMayDangle {};
+struct RawPtrBanDanglingIfSupported {};
+
+namespace raw_ptr_traits {
+template <typename T>
+struct RawPtrTypeToImpl;
+}
+
 namespace internal {
 // These classes/structures are part of the raw_ptr implementation.
 // DO NOT USE THESE CLASSES DIRECTLY YOURSELF.
@@ -553,6 +572,12 @@ struct BackupRefPtrImpl {
     return WrapRawPtr(wrapped_ptr);
   }

+  // Report the current wrapped pointer if pointee isn't alive anymore.
+  template <typename T>
+  static ALWAYS_INLINE void ReportIfDangling(T* wrapped_ptr) {
+    ReportIfDanglingInternal(partition_alloc::UntagPtr(wrapped_ptr));
+  }
+
   // This is for accounting only, used by unit tests.
   static ALWAYS_INLINE void IncrementSwapCountForTest() {}
   static ALWAYS_INLINE void IncrementLessCountForTest() {}
@@ -568,6 +593,7 @@ struct BackupRefPtrImpl {
   static BASE_EXPORT NOINLINE void AcquireInternal(uintptr_t address);
   static BASE_EXPORT NOINLINE void ReleaseInternal(uintptr_t address);
   static BASE_EXPORT NOINLINE bool IsPointeeAlive(uintptr_t address);
+  static BASE_EXPORT NOINLINE void ReportIfDanglingInternal(uintptr_t address);
   template <typename Z, typename = std::enable_if_t<offset_type<Z>, void>>
   static ALWAYS_INLINE bool IsValidDelta(uintptr_t address, Z delta_in_bytes) {
     if constexpr (std::is_signed_v<Z>)
@@ -665,35 +691,37 @@ struct AsanBackupRefPtrImpl {
 };

 template <class Super>
-struct RawPtrCountingImplWrapperForTest : public Super {
+struct RawPtrCountingImplWrapperForTest
+    : public raw_ptr_traits::RawPtrTypeToImpl<Super>::Impl {
+  using SuperImpl = typename raw_ptr_traits::RawPtrTypeToImpl<Super>::Impl;
   template <typename T>
   static ALWAYS_INLINE T* WrapRawPtr(T* ptr) {
     ++wrap_raw_ptr_cnt;
-    return Super::WrapRawPtr(ptr);
+    return SuperImpl::WrapRawPtr(ptr);
   }

   template <typename T>
   static ALWAYS_INLINE void ReleaseWrappedPtr(T* ptr) {
     ++release_wrapped_ptr_cnt;
-    Super::ReleaseWrappedPtr(ptr);
+    SuperImpl::ReleaseWrappedPtr(ptr);
   }

   template <typename T>
   static ALWAYS_INLINE T* SafelyUnwrapPtrForDereference(T* wrapped_ptr) {
     ++get_for_dereference_cnt;
-    return Super::SafelyUnwrapPtrForDereference(wrapped_ptr);
+    return SuperImpl::SafelyUnwrapPtrForDereference(wrapped_ptr);
   }

   template <typename T>
   static ALWAYS_INLINE T* SafelyUnwrapPtrForExtraction(T* wrapped_ptr) {
     ++get_for_extraction_cnt;
-    return Super::SafelyUnwrapPtrForExtraction(wrapped_ptr);
+    return SuperImpl::SafelyUnwrapPtrForExtraction(wrapped_ptr);
   }

   template <typename T>
   static ALWAYS_INLINE T* UnsafelyUnwrapPtrForComparison(T* wrapped_ptr) {
     ++get_for_comparison_cnt;
-    return Super::UnsafelyUnwrapPtrForComparison(wrapped_ptr);
+    return SuperImpl::UnsafelyUnwrapPtrForComparison(wrapped_ptr);
   }

   static ALWAYS_INLINE void IncrementSwapCountForTest() {
@@ -816,6 +844,42 @@ struct IsSupportedType<T,
 #undef CHROME_WINDOWS_HANDLE_TYPE
 #endif

+template <typename T>
+struct RawPtrTypeToImpl {};
+
+template <typename T>
+struct RawPtrTypeToImpl<internal::RawPtrCountingImplWrapperForTest<T>> {
+  using Impl = internal::RawPtrCountingImplWrapperForTest<T>;
+};
+
+template <>
+struct RawPtrTypeToImpl<RawPtrMayDangle> {
+#if BUILDFLAG(USE_BACKUP_REF_PTR)
+  using Impl = internal::BackupRefPtrImpl</*AllowDangling=*/true>;
+#elif BUILDFLAG(USE_ASAN_BACKUP_REF_PTR)
+  using Impl = internal::AsanBackupRefPtrImpl;
+#elif defined(PA_ENABLE_MTE_CHECKED_PTR_SUPPORT_WITH_64_BITS_POINTERS)
+  using Impl = internal::MTECheckedPtrImpl<
+      internal::MTECheckedPtrImplPartitionAllocSupport>;
+#else
+  using Impl = internal::RawPtrNoOpImpl;
+#endif
+};
+
+template <>
+struct RawPtrTypeToImpl<RawPtrBanDanglingIfSupported> {
+#if BUILDFLAG(USE_BACKUP_REF_PTR)
+  using Impl = internal::BackupRefPtrImpl</*AllowDangling=*/false>;
+#elif BUILDFLAG(USE_ASAN_BACKUP_REF_PTR)
+  using Impl = internal::AsanBackupRefPtrImpl;
+#elif defined(PA_ENABLE_MTE_CHECKED_PTR_SUPPORT_WITH_64_BITS_POINTERS)
+  using Impl = internal::MTECheckedPtrImpl<
+      internal::MTECheckedPtrImplPartitionAllocSupport>;
+#else
+  using Impl = internal::RawPtrNoOpImpl;
+#endif
+};
+
 }  // namespace raw_ptr_traits

 // `raw_ptr<T>` is a non-owning smart pointer that has improved memory-safety
@@ -844,27 +908,12 @@ struct IsSupportedType<T,
 // non-default move constructor/assignment. Thus, it's possible to get an error
 // where the pointer is not actually dangling, and have to work around the
 // compiler. We have not managed to construct such an example in Chromium yet.
-#if BUILDFLAG(USE_BACKUP_REF_PTR)
-using RawPtrMayDangle = internal::BackupRefPtrImpl</*AllowDangling=*/true>;
-using RawPtrBanDanglingIfSupported =
-    internal::BackupRefPtrImpl</*AllowDangling=*/false>;
-#elif BUILDFLAG(USE_ASAN_BACKUP_REF_PTR)
-using RawPtrMayDangle = internal::AsanBackupRefPtrImpl;
-using RawPtrBanDanglingIfSupported = internal::AsanBackupRefPtrImpl;
-#elif defined(PA_ENABLE_MTE_CHECKED_PTR_SUPPORT_WITH_64_BITS_POINTERS)
-using RawPtrMayDangle = internal::MTECheckedPtrImpl<
-    internal::MTECheckedPtrImplPartitionAllocSupport>;
-using RawPtrBanDanglingIfSupported = internal::MTECheckedPtrImpl<
-    internal::MTECheckedPtrImplPartitionAllocSupport>;
-#else
-using RawPtrMayDangle = internal::RawPtrNoOpImpl;
-using RawPtrBanDanglingIfSupported = internal::RawPtrNoOpImpl;
-#endif

-using DefaultRawPtrImpl = RawPtrBanDanglingIfSupported;
+using DefaultRawPtrType = RawPtrBanDanglingIfSupported;

-template <typename T, typename Impl = DefaultRawPtrImpl>
+template <typename T, typename RawPtrType = DefaultRawPtrType>
 class TRIVIAL_ABI GSL_POINTER raw_ptr {
+  using Impl = typename raw_ptr_traits::RawPtrTypeToImpl<RawPtrType>::Impl;
   using DanglingRawPtr = std::conditional_t<
       raw_ptr_traits::IsRawPtrCountingImpl<Impl>::value,
       raw_ptr<T, internal::RawPtrCountingImplWrapperForTest<RawPtrMayDangle>>,
@@ -955,7 +1004,7 @@ class TRIVIAL_ABI GSL_POINTER raw_ptr {
                 std::is_convertible<U*, T*>::value &&
                 !std::is_void<typename std::remove_cv<T>::type>::value>>
   // NOLINTNEXTLINE(google-explicit-constructor)
-  ALWAYS_INLINE raw_ptr(const raw_ptr<U, Impl>& ptr) noexcept
+  ALWAYS_INLINE raw_ptr(const raw_ptr<U, RawPtrType>& ptr) noexcept
       : wrapped_ptr_(
             Impl::Duplicate(Impl::template Upcast<T, U>(ptr.wrapped_ptr_))) {}
   // Deliberately implicit in order to support implicit upcast.
@@ -964,7 +1013,7 @@ class TRIVIAL_ABI GSL_POINTER raw_ptr {
                 std::is_convertible<U*, T*>::value &&
                 !std::is_void<typename std::remove_cv<T>::type>::value>>
   // NOLINTNEXTLINE(google-explicit-constructor)
-  ALWAYS_INLINE raw_ptr(raw_ptr<U, Impl>&& ptr) noexcept
+  ALWAYS_INLINE raw_ptr(raw_ptr<U, RawPtrType>&& ptr) noexcept
       : wrapped_ptr_(Impl::template Upcast<T, U>(ptr.wrapped_ptr_)) {
 #if BUILDFLAG(USE_BACKUP_REF_PTR)
     ptr.wrapped_ptr_ = nullptr;
@@ -987,7 +1036,7 @@ class TRIVIAL_ABI GSL_POINTER raw_ptr {
             typename Unused = std::enable_if_t<
                 std::is_convertible<U*, T*>::value &&
                 !std::is_void<typename std::remove_cv<T>::type>::value>>
-  ALWAYS_INLINE raw_ptr& operator=(const raw_ptr<U, Impl>& ptr) noexcept {
+  ALWAYS_INLINE raw_ptr& operator=(const raw_ptr<U, RawPtrType>& ptr) noexcept {
     // Make sure that pointer isn't assigned to itself (look at pointer address,
     // not its value).
 #if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
@@ -1003,7 +1052,7 @@ class TRIVIAL_ABI GSL_POINTER raw_ptr {
             typename Unused = std::enable_if_t<
                 std::is_convertible<U*, T*>::value &&
                 !std::is_void<typename std::remove_cv<T>::type>::value>>
-  ALWAYS_INLINE raw_ptr& operator=(raw_ptr<U, Impl>&& ptr) noexcept {
+  ALWAYS_INLINE raw_ptr& operator=(raw_ptr<U, RawPtrType>&& ptr) noexcept {
     // Make sure that pointer isn't assigned to itself (look at pointer address,
     // not its value).
 #if DCHECK_IS_ON() || BUILDFLAG(ENABLE_BACKUP_REF_PTR_SLOW_CHECKS)
@@ -1253,6 +1302,12 @@ class TRIVIAL_ABI GSL_POINTER raw_ptr {
     perfetto::WriteIntoTracedValue(std::move(context), get());
   }

+  ALWAYS_INLINE void ReportIfDangling() const noexcept {
+#if BUILDFLAG(USE_BACKUP_REF_PTR)
+    Impl::ReportIfDangling(wrapped_ptr_);
+#endif
+  }
+
  private:
   // This getter is meant for situations where the pointer is meant to be
   // dereferenced. It is allowed to crash on nullptr (it may or may not),
@@ -1415,22 +1470,24 @@ namespace std {

 // Override so set/map lookups do not create extra raw_ptr. This also allows
 // dangling pointers to be used for lookup.
-template <typename T, typename Impl>
-struct less<raw_ptr<T, Impl>> {
+template <typename T, typename RawPtrType>
+struct less<raw_ptr<T, RawPtrType>> {
+  using Impl =
+      typename base::raw_ptr_traits::RawPtrTypeToImpl<RawPtrType>::Impl;
   using is_transparent = void;

-  bool operator()(const raw_ptr<T, Impl>& lhs,
-                  const raw_ptr<T, Impl>& rhs) const {
+  bool operator()(const raw_ptr<T, RawPtrType>& lhs,
+                  const raw_ptr<T, RawPtrType>& rhs) const {
     Impl::IncrementLessCountForTest();
     return lhs < rhs;
   }

-  bool operator()(T* lhs, const raw_ptr<T, Impl>& rhs) const {
+  bool operator()(T* lhs, const raw_ptr<T, RawPtrType>& rhs) const {
     Impl::IncrementLessCountForTest();
     return lhs < rhs;
   }

-  bool operator()(const raw_ptr<T, Impl>& lhs, T* rhs) const {
+  bool operator()(const raw_ptr<T, RawPtrType>& lhs, T* rhs) const {
     Impl::IncrementLessCountForTest();
     return lhs < rhs;
   }
@@ -22,7 +22,7 @@ namespace internal {
 template <typename, typename>
 struct Invoker;

-template <typename T>
+template <typename T, typename Impl>
 class UnretainedWrapper;

 template <typename T, bool>
@@ -69,8 +69,8 @@ class BASE_EXPORT RawPtrAsanBoundArgTracker {

   // When argument is base::Unretained, add the argument to the set of
   // arguments protected in this scope.
-  template <typename T>
-  void AddArg(const internal::UnretainedWrapper<T>& arg) {
+  template <typename T, typename Impl>
+  void AddArg(const internal::UnretainedWrapper<T, Impl>& arg) {
     if constexpr (raw_ptr_traits::IsSupportedType<T>::value) {
       Add(reinterpret_cast<uintptr_t>(arg.get()));
     }
@@ -92,7 +92,7 @@ static_assert(
 namespace {

 using RawPtrCountingImpl =
-    base::internal::RawPtrCountingImplWrapperForTest<base::DefaultRawPtrImpl>;
+    base::internal::RawPtrCountingImplWrapperForTest<base::DefaultRawPtrType>;
 using RawPtrCountingMayDangleImpl =
     base::internal::RawPtrCountingImplWrapperForTest<base::RawPtrMayDangle>;

@@ -15,7 +15,7 @@

 namespace base {

-template <class T, class Impl>
+template <class T, class RawPtrType>
 class raw_ref;

 namespace internal {
@@ -51,9 +51,10 @@ constexpr inline bool is_raw_ref_v = is_raw_ref<T>::value;
 // Unlike a native `T&` reference, a mutable `raw_ref<T>` can be changed
 // independent of the underlying `T`, similar to `std::reference_wrapper`. That
 // means the reference inside it can be moved and reassigned.
-template <class T, class Impl = DefaultRawPtrImpl>
+template <class T, class RawPtrType = DefaultRawPtrType>
 class TRIVIAL_ABI GSL_POINTER raw_ref {
-  using Inner = raw_ptr<T, Impl>;
+  using Inner = raw_ptr<T, RawPtrType>;
+  using Impl = typename raw_ptr_traits::RawPtrTypeToImpl<RawPtrType>::Impl;
   // These impls do not clear on move, which produces an inconsistent behaviour.
   // We want consistent behaviour such that using a raw_ref after move is caught
   // and aborts. Failure to clear would be indicated by the related death tests
@@ -106,13 +107,14 @@ class TRIVIAL_ABI GSL_POINTER raw_ref {
   // Deliberately implicit in order to support implicit upcast.
   template <class U, class = std::enable_if_t<std::is_convertible_v<U&, T&>>>
   // NOLINTNEXTLINE(google-explicit-constructor)
-  ALWAYS_INLINE raw_ref(const raw_ref<U, Impl>& p) noexcept : inner_(p.inner_) {
+  ALWAYS_INLINE raw_ref(const raw_ref<U, RawPtrType>& p) noexcept
+      : inner_(p.inner_) {
     CHECK(inner_.get());  // Catch use-after-move.
   }
   // Deliberately implicit in order to support implicit upcast.
   template <class U, class = std::enable_if_t<std::is_convertible_v<U&, T&>>>
   // NOLINTNEXTLINE(google-explicit-constructor)
-  ALWAYS_INLINE raw_ref(raw_ref<U, Impl>&& p) noexcept
+  ALWAYS_INLINE raw_ref(raw_ref<U, RawPtrType>&& p) noexcept
       : inner_(std::move(p.inner_)) {
     CHECK(inner_.get());  // Catch use-after-move.
     if constexpr (need_clear_after_move)
@@ -121,13 +123,13 @@ class TRIVIAL_ABI GSL_POINTER raw_ref {

   // Upcast assignment
   template <class U, class = std::enable_if_t<std::is_convertible_v<U&, T&>>>
-  ALWAYS_INLINE raw_ref& operator=(const raw_ref<U, Impl>& p) noexcept {
+  ALWAYS_INLINE raw_ref& operator=(const raw_ref<U, RawPtrType>& p) noexcept {
     CHECK(p.inner_.get());  // Catch use-after-move.
     inner_.operator=(p.inner_);
     return *this;
   }
   template <class U, class = std::enable_if_t<std::is_convertible_v<U&, T&>>>
-  ALWAYS_INLINE raw_ref& operator=(raw_ref<U, Impl>&& p) noexcept {
+  ALWAYS_INLINE raw_ref& operator=(raw_ref<U, RawPtrType>&& p) noexcept {
     CHECK(p.inner_.get());  // Catch use-after-move.
     inner_.operator=(std::move(p.inner_));
     if constexpr (need_clear_after_move)
@@ -176,42 +178,42 @@ class TRIVIAL_ABI GSL_POINTER raw_ref {

   template <class U>
   friend ALWAYS_INLINE bool operator==(const raw_ref& lhs,
-                                       const raw_ref<U, Impl>& rhs) {
+                                       const raw_ref<U, RawPtrType>& rhs) {
     CHECK(lhs.inner_.get());  // Catch use-after-move.
     CHECK(rhs.inner_.get());  // Catch use-after-move.
     return lhs.inner_ == rhs.inner_;
   }
   template <class U>
   friend ALWAYS_INLINE bool operator!=(const raw_ref& lhs,
-                                       const raw_ref<U, Impl>& rhs) {
+                                       const raw_ref<U, RawPtrType>& rhs) {
     CHECK(lhs.inner_.get());  // Catch use-after-move.
     CHECK(rhs.inner_.get());  // Catch use-after-move.
     return lhs.inner_ != rhs.inner_;
   }
   template <class U>
   friend ALWAYS_INLINE bool operator<(const raw_ref& lhs,
-                                      const raw_ref<U, Impl>& rhs) {
+                                      const raw_ref<U, RawPtrType>& rhs) {
     CHECK(lhs.inner_.get());  // Catch use-after-move.
     CHECK(rhs.inner_.get());  // Catch use-after-move.
     return lhs.inner_ < rhs.inner_;
   }
   template <class U>
   friend ALWAYS_INLINE bool operator>(const raw_ref& lhs,
-                                      const raw_ref<U, Impl>& rhs) {
+                                      const raw_ref<U, RawPtrType>& rhs) {
     CHECK(lhs.inner_.get());  // Catch use-after-move.
     CHECK(rhs.inner_.get());  // Catch use-after-move.
     return lhs.inner_ > rhs.inner_;
   }
   template <class U>
   friend ALWAYS_INLINE bool operator<=(const raw_ref& lhs,
-                                       const raw_ref<U, Impl>& rhs) {
+                                       const raw_ref<U, RawPtrType>& rhs) {
     CHECK(lhs.inner_.get());  // Catch use-after-move.
     CHECK(rhs.inner_.get());  // Catch use-after-move.
     return lhs.inner_ <= rhs.inner_;
   }
   template <class U>
   friend ALWAYS_INLINE bool operator>=(const raw_ref& lhs,
-                                       const raw_ref<U, Impl>& rhs) {
+                                       const raw_ref<U, RawPtrType>& rhs) {
     CHECK(lhs.inner_.get());  // Catch use-after-move.
     CHECK(rhs.inner_.get());  // Catch use-after-move.
     return lhs.inner_ >= rhs.inner_;
@@ -321,22 +323,24 @@ namespace std {

 // Override so set/map lookups do not create extra raw_ref. This also
 // allows C++ references to be used for lookup.
-template <typename T, typename Impl>
-struct less<raw_ref<T, Impl>> {
+template <typename T, typename RawPtrType>
+struct less<raw_ref<T, RawPtrType>> {
+  using Impl =
+      typename base::raw_ptr_traits::RawPtrTypeToImpl<RawPtrType>::Impl;
   using is_transparent = void;

-  bool operator()(const raw_ref<T, Impl>& lhs,
-                  const raw_ref<T, Impl>& rhs) const {
+  bool operator()(const raw_ref<T, RawPtrType>& lhs,
+                  const raw_ref<T, RawPtrType>& rhs) const {
     Impl::IncrementLessCountForTest();
     return lhs < rhs;
   }

-  bool operator()(T& lhs, const raw_ref<T, Impl>& rhs) const {
+  bool operator()(T& lhs, const raw_ref<T, RawPtrType>& rhs) const {
     Impl::IncrementLessCountForTest();
     return lhs < rhs;
   }

-  bool operator()(const raw_ref<T, Impl>& lhs, T& rhs) const {
+  bool operator()(const raw_ref<T, RawPtrType>& lhs, T& rhs) const {
     Impl::IncrementLessCountForTest();
     return lhs < rhs;
   }
@@ -715,7 +715,7 @@ TEST(RawRef, CTAD) {
 }

 using RawPtrCountingImpl =
-    base::internal::RawPtrCountingImplWrapperForTest<base::DefaultRawPtrImpl>;
+    base::internal::RawPtrCountingImplWrapperForTest<base::DefaultRawPtrType>;

 template <typename T>
 using CountingRawRef = raw_ref<T, RawPtrCountingImpl>;
@@ -154,7 +154,7 @@ class ObserverListThreadSafe : public internal::ObserverListThreadSafeBase {
       task_runner->PostTask(
           current_notification->from_here,
           BindOnce(&ObserverListThreadSafe<ObserverType>::NotifyWrapper, this,
-                   observer,
+                   UnsafeDanglingUntriaged(observer),
                    NotificationData(this, observer_id,
                                     current_notification->from_here,
                                     notification_data->method)));
@@ -200,7 +200,7 @@ class ObserverListThreadSafe : public internal::ObserverListThreadSafeBase {
       observer.second.task_runner->PostTask(
           from_here,
           BindOnce(&ObserverListThreadSafe<ObserverType>::NotifyWrapper, this,
-                   observer.first,
+                   base::UnsafeDanglingUntriaged(observer.first),
                    NotificationData(this, observer.second.observer_id,
                                     from_here, method)));
     }
@@ -93,7 +93,7 @@ bool ObjectWatcher::StartWatchingInternal(HANDLE object,
   // DoneWaiting can be synchronously called from RegisterWaitForSingleObject,
   // so set up all state now.
   callback_ = BindRepeating(&ObjectWatcher::Signal, weak_factory_.GetWeakPtr(),
-                            delegate);
+                            base::UnsafeDanglingUntriaged(delegate));
   object_ = object;

   if (!RegisterWaitForSingleObject(&wait_object_, object, DoneWaiting, this,
@@ -207,11 +207,12 @@ void DlpDataTransferNotifier::ResizeAndShowWidget(const gfx::Size& bubble_size,

   widget_closing_timer_.Start(
       FROM_HERE, base::Milliseconds(timeout_duration_ms),
-      base::BindOnce(&DlpDataTransferNotifier::CloseWidget,
-                     base::Unretained(this),
-                     widget_.get(),  // Safe as DlpClipboardNotificationHelper
-                                     // owns `widget_` and outlives it.
-                     views::Widget::ClosedReason::kUnspecified));
+      base::BindOnce(
+          &DlpDataTransferNotifier::CloseWidget, base::Unretained(this),
+          base::UnsafeDanglingUntriaged(
+              widget_.get()),  // Safe as DlpClipboardNotificationHelper
+                               // owns `widget_` and outlives it.
+          views::Widget::ClosedReason::kUnspecified));
 }

 }  // namespace policy
@ -580,7 +580,8 @@ void MediaRouterMojoImpl::RegisterMediaRoutesObserver(
    content::GetUIThreadTaskRunner({})->PostTask(
        FROM_HERE,
        base::BindOnce(&MediaRouterMojoImpl::NotifyOfExistingRoutesIfRegistered,
                       weak_factory_.GetWeakPtr(), observer));
                       weak_factory_.GetWeakPtr(),
                       base::UnsafeDanglingUntriaged(observer)));
  }
}

@ -921,10 +921,11 @@ bool BrowserCommandController::ExecuteCommandWithDisposition(
      content::WebContents* const web_contents =
          browser_->tab_strip_model()->GetActiveWebContents();
      if (web_contents) {
        ShowPageInfoDialog(web_contents,
                           base::BindOnce(&AppInfoDialogClosedCallback,
                                          base::Unretained(web_contents)),
                           bubble_anchor_util::kAppMenuButton);
        ShowPageInfoDialog(
            web_contents,
            base::BindOnce(&AppInfoDialogClosedCallback,
                           base::UnsafeDanglingUntriaged(web_contents)),
            bubble_anchor_util::kAppMenuButton);
      }
      break;
    }
@ -220,7 +220,7 @@ void ExtensionsToolbarContainer::ShowWidgetForExtension(
  UpdateIconVisibility(extension_id);
  GetAnimatingLayoutManager()->PostOrQueueAction(base::BindOnce(
      &ExtensionsToolbarContainer::AnchorAndShowWidgetImmediately,
      weak_ptr_factory_.GetWeakPtr(), widget));
      weak_ptr_factory_.GetWeakPtr(), base::UnsafeDanglingUntriaged(widget)));
}

views::Widget*
@ -465,7 +465,8 @@ void DownloadItemObserver::OnDownloadUpdated(DownloadItem* download) {
      // restarted. Holding on to the reservation now would prevent the name
      // from being used for a subsequent retry attempt.
      DownloadPathReservationTracker::GetTaskRunner()->PostTask(
          FROM_HERE, base::BindOnce(&RevokeReservation, download));
          FROM_HERE, base::BindOnce(&RevokeReservation,
                                    base::UnsafeDanglingUntriaged(download)));
      download->RemoveObserver(this);
      download->RemoveUserData(&kUserDataKey);
      break;
@ -480,7 +481,8 @@ void DownloadItemObserver::OnDownloadDestroyed(DownloadItem* download) {
  // Items should be COMPLETE/INTERRUPTED/CANCELLED before being destroyed.
  NOTREACHED();
  DownloadPathReservationTracker::GetTaskRunner()->PostTask(
      FROM_HERE, base::BindOnce(&RevokeReservation, download));
      FROM_HERE, base::BindOnce(&RevokeReservation,
                                base::UnsafeDanglingUntriaged(download)));
}

// static
@ -236,7 +236,8 @@ void InProgressDownloadManager::OnUrlDownloadStarted(
  StartDownload(std::move(download_create_info), std::move(input_stream),
                std::move(url_loader_factory_provider),
                base::BindOnce(&InProgressDownloadManager::CancelUrlDownload,
                               weak_factory_.GetWeakPtr(), downloader),
                               weak_factory_.GetWeakPtr(),
                               base::UnsafeDanglingUntriaged(downloader)),
                std::move(callback));
}

@ -286,7 +286,7 @@ void ResourceDownloader::Destroy() {
  delegate_task_runner_->PostTask(
      FROM_HERE,
      base::BindOnce(&UrlDownloadHandler::Delegate::OnUrlDownloadStopped,
                     delegate_, this));
                     delegate_, base::UnsafeDanglingUntriaged(this)));
}

void ResourceDownloader::RequestWakeLock(
@ -195,7 +195,8 @@ void HistoryService::ClearCachedDataForContextID(ContextID context_id) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  ScheduleTask(PRIORITY_NORMAL,
               base::BindOnce(&HistoryBackend::ClearCachedDataForContextID,
                              history_backend_, context_id));
                              history_backend_,
                              base::UnsafeDanglingUntriaged(context_id)));
}

void HistoryService::ClearAllOnDemandFavicons() {
@ -471,7 +472,8 @@ void HistoryService::UpdateWithPageEndTime(ContextID context_id,
  ScheduleTask(
      PRIORITY_NORMAL,
      base::BindOnce(&HistoryBackend::UpdateWithPageEndTime, history_backend_,
                     context_id, nav_entry_id, url, end_ts));
                     base::UnsafeDanglingUntriaged(context_id), nav_entry_id,
                     url, end_ts));
}

void HistoryService::SetBrowsingTopicsAllowed(ContextID context_id,
@ -205,7 +205,7 @@ void FreezingVoteTokenPMRegistry::UnregisterVote(FreezingVoteTokenImpl* token) {
            FreezingVoteTokenPMRegistry::GetOrCreateInstance(graph);
        registry->UnregisterVoteOnPMSequence(token);
      },
      token));
      base::UnsafeDanglingUntriaged(token)));
}

void FreezingVoteTokenPMRegistry::RegisterVoteOnPMSequence(
@ -583,7 +583,7 @@ void WindowsSpellChecker::RecordChromeLocalesStats(
      base::BindOnce(
          &windows_spell_checker::BackgroundHelper::RecordChromeLocalesStats,
          base::Unretained(background_helper_.get()), std::move(chrome_locales),
          metrics));
          base::UnsafeDanglingUntriaged(metrics)));
}

void WindowsSpellChecker::RecordSpellcheckLocalesStats(
@ -106,7 +106,7 @@ void HistogramController::InsertChildHistogramFetcherInterface(
  // the number of known processes
  child_histogram_fetcher.set_disconnect_handler(base::BindOnce(
      &HistogramController::RemoveChildHistogramFetcherInterface<T>,
      base::Unretained(this), base::Unretained(host)));
      base::Unretained(this), base::UnsafeDanglingUntriaged(host)));
  GetChildHistogramFetcherMap<T>()[host] = std::move(child_histogram_fetcher);
}

@ -186,6 +186,7 @@ void PartitionAllocSupport::ReconfigureAfterZygoteFork(
void PartitionAllocSupport::ReconfigureAfterFeatureListInit(
    const std::string& process_type) {
  base::allocator::InstallDanglingRawPtrChecks();
  base::allocator::InstallUnretainedDanglingRawPtrChecks();
  {
    base::AutoLock scoped_lock(lock_);
    // Avoid initializing more than once.
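The new install call above pairs with the kPartitionAllocUnretainedDanglingPtr feature and its "mode" param declared in partition_alloc_features.h/.cc. A hypothetical sketch of how such gating can look (this is not the CL's actual implementation; the function name and the commented-out handler steps are invented):

#include "base/allocator/partition_alloc_features.h"
#include "base/feature_list.h"

void MaybeInstallUnretainedDanglingPtrChecksSketch() {
  if (!base::FeatureList::IsEnabled(
          base::features::kPartitionAllocUnretainedDanglingPtr)) {
    return;  // Checks stay disabled unless the feature is turned on.
  }
  switch (base::features::kUnretainedDanglingPtrModeParam.Get()) {
    case base::features::UnretainedDanglingPtrMode::kCrash:
      // A handler that crashes on a dangling Unretained pointer would be
      // installed here.
      break;
    case base::features::UnretainedDanglingPtrMode::kDumpWithoutCrashing:
      // A handler that reports via base::debug::DumpWithoutCrashing() would
      // be installed here instead.
      break;
  }
}

For local testing, such a feature/param combination is typically enabled with the standard syntax --enable-features=PartitionAllocUnretainedDanglingPtr:mode/crash.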
@ -817,10 +817,10 @@ class ChannelAssociatedGroupController
      client->NotifyError(reason);
    } else {
      endpoint->task_runner()->PostTask(
          FROM_HERE,
          base::BindOnce(&ChannelAssociatedGroupController::
                             NotifyEndpointOfErrorOnEndpointThread,
                         this, endpoint->id(), base::Unretained(endpoint)));
          FROM_HERE, base::BindOnce(&ChannelAssociatedGroupController::
                                        NotifyEndpointOfErrorOnEndpointThread,
                                    this, endpoint->id(),
                                    base::UnsafeDanglingUntriaged(endpoint)));
    }
  }

@ -1108,8 +1108,9 @@ void HttpCache::ProcessQueuedTransactions(ActiveEntry* entry) {
  // Post a task instead of invoking the io callback of another transaction here
  // to avoid re-entrancy.
  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE, base::BindOnce(&HttpCache::OnProcessQueuedTransactions,
                                GetWeakPtr(), entry));
      FROM_HERE,
      base::BindOnce(&HttpCache::OnProcessQueuedTransactions, GetWeakPtr(),
                     base::UnsafeDanglingUntriaged(entry)));
}

void HttpCache::ProcessAddToEntryQueue(ActiveEntry* entry) {
@ -903,7 +903,8 @@ void NetworkQualityEstimator::AddEffectiveConnectionTypeObserver(
      FROM_HERE,
      base::BindOnce(&NetworkQualityEstimator::
                         NotifyEffectiveConnectionTypeObserverIfPresent,
                     weak_ptr_factory_.GetWeakPtr(), observer));
                     weak_ptr_factory_.GetWeakPtr(),
                     base::UnsafeDanglingUntriaged(observer)));
}

void NetworkQualityEstimator::RemoveEffectiveConnectionTypeObserver(
@ -924,7 +925,8 @@ void NetworkQualityEstimator::AddPeerToPeerConnectionsCountObserver(
      FROM_HERE,
      base::BindOnce(&NetworkQualityEstimator::
                         NotifyPeerToPeerConnectionsCountObserverIfPresent,
                     weak_ptr_factory_.GetWeakPtr(), observer));
                     weak_ptr_factory_.GetWeakPtr(),
                     base::UnsafeDanglingUntriaged(observer)));
}

void NetworkQualityEstimator::RemovePeerToPeerConnectionsCountObserver(
@ -1391,7 +1391,8 @@ void TransportClientSocketPool::InvokeUserCallbackLater(
  }
  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE, base::BindOnce(&TransportClientSocketPool::InvokeUserCallback,
                                weak_factory_.GetWeakPtr(), handle));
                                weak_factory_.GetWeakPtr(),
                                base::UnsafeDanglingUntriaged(handle)));
}

void TransportClientSocketPool::InvokeUserCallback(ClientSocketHandle* handle) {
@ -105,7 +105,7 @@ void TrackedChildURLLoaderFactoryBundle::AddObserverOnMainThread() {
      FROM_HERE,
      base::BindOnce(
          &HostChildURLLoaderFactoryBundle::AddObserver,
          main_thread_host_bundle_->first, base::Unretained(this),
          main_thread_host_bundle_->first, base::UnsafeDanglingUntriaged(this),
          std::make_unique<
              HostChildURLLoaderFactoryBundle::ObserverPtrAndTaskRunner>(
              AsWeakPtr(), base::SequencedTaskRunnerHandle::Get())));
@ -117,7 +117,8 @@ void TrackedChildURLLoaderFactoryBundle::RemoveObserverOnMainThread() {
  main_thread_host_bundle_->second->PostTask(
      FROM_HERE,
      base::BindOnce(&HostChildURLLoaderFactoryBundle::RemoveObserver,
                     main_thread_host_bundle_->first, base::Unretained(this)));
                     main_thread_host_bundle_->first,
                     base::UnsafeDanglingUntriaged(this)));
}

void TrackedChildURLLoaderFactoryBundle::OnUpdate(