
Some refactoring and fixes for small issues in PaintOpBuffer

1. Refactor ShrinkToFit() and MoveRetainingBufferIfPossible() to share
   ReallocIfNeededToFit() for the common shrinking logic. Update the
   functions' comments and add unit tests.

2. Let the move constructor and the move assignment operator leave the
   moved-from PaintOpBuffer in a consistent state, without requiring an
   explicit Reset(). Previously has_draw_ops_ etc. were stale after a
   move.

3. Let the destructor destroy paint ops only, without resetting the
   fields.

4. Let operator== check more fields before comparing the paint ops
   themselves, to improve performance, and stop checking
   subrecord_bytes_used_, which should not affect equality of
   PaintOpBuffers. The latter change doesn't change behavior because we
   always shrink PaintRecords to fit, so equal records end up with equal
   subrecord_bytes_used_ anyway.
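
   A minimal usage sketch of the reuse cycle behind items 1 and 2
   (RecordFrame and RecordTwoFrames are hypothetical callers, not part
   of this change; the push/MoveRetainingBufferIfPossible calls mirror
   the new unit tests):

   #include "cc/paint/paint_op_buffer.h"

   sk_sp<cc::PaintOpBuffer> RecordFrame(cc::PaintOpBuffer& buffer) {
     // Record some ops; the reserved capacity typically grows past used_.
     buffer.push<cc::DrawColorOp>(SkColors::kRed, SkBlendMode::kSrc);
     // The returned record is shrunk to fit, and `buffer` keeps the larger
     // original allocation for future recording when a realloc was needed.
     return buffer.MoveRetainingBufferIfPossible();
   }

   void RecordTwoFrames() {
     cc::PaintOpBuffer buffer;
     sk_sp<cc::PaintOpBuffer> frame1 = RecordFrame(buffer);
     // After the move, `buffer` is back in a consistent, empty state
     // (item 2 above), so it can be reused without an explicit Reset().
     sk_sp<cc::PaintOpBuffer> frame2 = RecordFrame(buffer);
   }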

Bug: 1385848
Change-Id: I280a5308e627935e8df8322b2265f4644f5f4c0c
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/4063832
Commit-Queue: Xianzhu Wang <wangxianzhu@chromium.org>
Reviewed-by: Vladimir Levin <vmpstr@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1079962}
Author: Xianzhu Wang, 2022-12-06 20:57:22 +00:00
Committed by: Chromium LUCI CQ
Parent: ad11ff27fe
Commit: 71db0f9f4a
3 changed files with 143 additions and 48 deletions

@@ -2929,11 +2929,12 @@ PaintOpBuffer::PaintOpBuffer(PaintOpBuffer&& other) {
}
PaintOpBuffer::~PaintOpBuffer() {
Reset();
DestroyOps();
}
PaintOpBuffer& PaintOpBuffer::operator=(PaintOpBuffer&& other) {
data_ = std::move(other.data_);
DCHECK(!other.data_);
used_ = other.used_;
reserved_ = other.reserved_;
op_count_ = other.op_count_;
@@ -2950,22 +2951,28 @@ PaintOpBuffer& PaintOpBuffer::operator=(PaintOpBuffer&& other) {
other.has_effects_preventing_lcd_text_for_save_layer_alpha_;
are_ops_destroyed_ = other.are_ops_destroyed_;
// Make sure the other pob can destruct safely.
other.used_ = 0;
other.op_count_ = 0;
// Make sure the other pob can destruct safely or is ready for reuse.
other.reserved_ = 0;
other.ResetRetainingBuffer();
return *this;
}
void PaintOpBuffer::Reset() {
void PaintOpBuffer::DestroyOps() {
if (!are_ops_destroyed_) {
for (PaintOp& op : Iterator(this)) {
op.DestroyThis();
}
}
}
// Leave data_ allocated, reserved_ unchanged. ShrinkToFit will take care of
// that if called.
void PaintOpBuffer::Reset() {
DestroyOps();
// Leave data_ allocated, reserved_ unchanged. ShrinkToFit() will take care
// of that if called.
ResetRetainingBuffer();
}
void PaintOpBuffer::ResetRetainingBuffer() {
used_ = 0;
op_count_ = 0;
num_slow_paths_up_to_min_for_MSAA_ = 0;
@@ -3099,21 +3106,14 @@ const PaintOp* PaintOpBuffer::PlaybackFoldingIterator::NextUnfoldedOp() {
}
sk_sp<PaintRecord> PaintOpBuffer::MoveRetainingBufferIfPossible() {
if (!data_ || used_ == reserved_)
return sk_make_sp<PaintOpBuffer>(std::move(*this));
const size_t old_reserved = reserved_;
BufferDataPtr old;
if (!used_) {
old = std::move(data_);
reserved_ = 0;
} else {
old = ReallocBuffer(used_);
sk_sp<PaintRecord> result = sk_make_sp<PaintRecord>(std::move(*this));
if (BufferDataPtr old_data = result->ReallocIfNeededToFit()) {
// Reuse the original buffer for future recording.
data_ = std::move(old_data);
reserved_ = old_reserved;
}
sk_sp<PaintRecord> copy = sk_make_sp<PaintOpBuffer>(std::move(*this));
data_ = std::move(old);
reserved_ = old_reserved;
return copy;
return result;
}
void PaintOpBuffer::Playback(SkCanvas* canvas,
@@ -3285,7 +3285,7 @@ void* PaintOpBuffer::AllocatePaintOp(size_t skip) {
DCHECK_LT(skip, PaintOp::kMaxSkip);
if (used_ + skip > reserved_) {
// Start reserved_ at kInitialBufferSize and then double.
// ShrinkToFit can make this smaller afterwards.
// ShrinkToFit() can make this smaller afterwards.
size_t new_size = reserved_ ? reserved_ : kInitialBufferSize;
while (used_ + skip > new_size)
new_size *= 2;
@@ -3300,30 +3300,36 @@ void* PaintOpBuffer::AllocatePaintOp(size_t skip) {
}
void PaintOpBuffer::ShrinkToFit() {
if (used_ == reserved_)
return;
ReallocIfNeededToFit();
}
PaintOpBuffer::BufferDataPtr PaintOpBuffer::ReallocIfNeededToFit() {
if (used_ == reserved_) {
return nullptr;
}
if (!used_) {
reserved_ = 0;
data_.reset();
} else {
ReallocBuffer(used_);
return std::move(data_);
}
return ReallocBuffer(used_);
}
bool PaintOpBuffer::operator==(const PaintOpBuffer& other) const {
if (op_count_ != other.op_count_)
return false;
if (num_slow_paths_up_to_min_for_MSAA_ !=
other.num_slow_paths_up_to_min_for_MSAA_)
return false;
if (subrecord_bytes_used_ != other.subrecord_bytes_used_)
return false;
if (subrecord_op_count_ != other.subrecord_op_count_)
return false;
if (has_non_aa_paint_ != other.has_non_aa_paint_)
return false;
if (has_discardable_images_ != other.has_discardable_images_)
// Check status fields first, which is faster than checking equality of
// paint operations. This doesn't need to be complete, and should not check
// data buffer capacity related fields because they don't affect equality.
if (op_count_ != other.op_count_ || used_ != other.used_ ||
num_slow_paths_up_to_min_for_MSAA_ !=
other.num_slow_paths_up_to_min_for_MSAA_ ||
subrecord_op_count_ != other.subrecord_op_count_ ||
has_draw_ops_ != other.has_draw_ops_ ||
has_draw_text_ops_ != other.has_draw_text_ops_ ||
has_effects_preventing_lcd_text_for_save_layer_alpha_ !=
other.has_effects_preventing_lcd_text_for_save_layer_alpha_ ||
has_non_aa_paint_ != other.has_non_aa_paint_ ||
has_discardable_images_ != other.has_discardable_images_) {
return false;
}
Iterator left_iter(this);
Iterator right_iter(&other);

@@ -1158,12 +1158,16 @@ class CC_PAINT_EXPORT PaintOpBuffer : public SkRefCnt {
PaintOpBuffer();
PaintOpBuffer(const PaintOpBuffer&) = delete;
// `other` will be reset to the initial state.
PaintOpBuffer(PaintOpBuffer&& other);
~PaintOpBuffer() override;
PaintOpBuffer& operator=(const PaintOpBuffer&) = delete;
// `other` will be reset to the initial state.
PaintOpBuffer& operator=(PaintOpBuffer&& other);
// Resets the PaintOpBuffer to the initial state, except that the current
// data buffer is retained.
void Reset();
// Replays the paint op buffer into the canvas.
@@ -1222,8 +1226,12 @@ class CC_PAINT_EXPORT PaintOpBuffer : public SkRefCnt {
bool NeedsAdditionalInvalidationForLCDText(
const PaintOpBuffer& old_buffer) const;
// Moves the contents of this to the returned object, retaining the buffer if
// possible.
// Resize the PaintOpBuffer to exactly fit the current amount of used space.
void ShrinkToFit();
// Takes the contents of this. The result is shrunk to fit. If the
// shrinking-to-fit allocates a new data buffer, this PaintOpBuffer retains
// the original data buffer for future use.
sk_sp<PaintOpBuffer> MoveRetainingBufferIfPossible();
bool operator==(const PaintOpBuffer& other) const;
@@ -1231,9 +1239,6 @@ class CC_PAINT_EXPORT PaintOpBuffer : public SkRefCnt {
return !(*this == other);
}
// Resize the PaintOpBuffer to exactly fit the current amount of used space.
void ShrinkToFit();
const PaintOp& GetFirstOp() const {
return reinterpret_cast<const PaintOp&>(*data_);
}
@@ -1528,6 +1533,8 @@ class CC_PAINT_EXPORT PaintOpBuffer : public SkRefCnt {
friend class SolidColorAnalyzer;
using BufferDataPtr = std::unique_ptr<char, base::AlignedFreeDeleter>;
void DestroyOps();
// Replays the paint op buffer into the canvas. If |indices| is specified, it
// contains indices in an increasing order and only the indices specified in
// the vector will be replayed.
@@ -1539,9 +1546,15 @@ class CC_PAINT_EXPORT PaintOpBuffer : public SkRefCnt {
// the old exists). Returns the old buffer.
BufferDataPtr ReallocBuffer(size_t new_size);
// Shrinks the buffer to fit `used_`. Returns the old buffer if this
// allocated a new buffer, or nullptr.
BufferDataPtr ReallocIfNeededToFit();
// Returns the allocated op.
void* AllocatePaintOp(size_t skip);
void ResetRetainingBuffer();
BufferDataPtr data_;
size_t used_ = 0;
size_t reserved_ = 0;

@@ -135,6 +135,8 @@ class PaintOpAppendTest : public ::testing::Test {
void VerifyOps(PaintOpBuffer* buffer) {
EXPECT_EQ(buffer->size(), 4u);
EXPECT_TRUE(buffer->has_draw_ops());
EXPECT_TRUE(buffer->has_save_layer_ops());
PaintOpBuffer::Iterator iter(buffer);
ASSERT_EQ(iter->GetType(), PaintOpType::SaveLayer);
@@ -158,6 +160,19 @@ class PaintOpAppendTest : public ::testing::Test {
EXPECT_FALSE(iter);
}
void CheckInitialState(PaintOpBuffer* buffer, bool expect_empty_buffer) {
EXPECT_FALSE(buffer->has_draw_ops());
EXPECT_FALSE(buffer->has_save_layer_ops());
EXPECT_EQ(0u, buffer->size());
EXPECT_EQ(0u, buffer->total_op_count());
EXPECT_EQ(0u, buffer->next_op_offset());
EXPECT_FALSE(PaintOpBuffer::Iterator(buffer));
if (expect_empty_buffer) {
EXPECT_EQ(0u, buffer->paint_ops_size());
EXPECT_EQ(sizeof(PaintOpBuffer), buffer->bytes_used());
}
}
private:
SkRect rect_;
PaintFlags flags_;
@@ -171,6 +186,7 @@ TEST_F(PaintOpAppendTest, SimpleAppend) {
VerifyOps(&buffer);
buffer.Reset();
CheckInitialState(&buffer, false);
PushOps(&buffer);
VerifyOps(&buffer);
}
@@ -184,8 +200,7 @@ TEST_F(PaintOpAppendTest, MoveThenDestruct) {
VerifyOps(&destination);
// Original should be empty, and safe to destruct.
EXPECT_EQ(original.size(), 0u);
EXPECT_EQ(original.bytes_used(), sizeof(PaintOpBuffer));
CheckInitialState(&original, true);
}
TEST_F(PaintOpAppendTest, MoveThenDestructOperatorEq) {
@@ -198,9 +213,7 @@ TEST_F(PaintOpAppendTest, MoveThenDestructOperatorEq) {
VerifyOps(&destination);
// Original should be empty, and safe to destruct.
EXPECT_EQ(original.size(), 0u);
EXPECT_EQ(original.bytes_used(), sizeof(PaintOpBuffer));
EXPECT_FALSE(PaintOpBuffer::Iterator(&original));
CheckInitialState(&original, true);
}
TEST_F(PaintOpAppendTest, MoveThenReappend) {
@@ -4252,6 +4265,69 @@ TEST(PaintOpBufferTest, PathCaching) {
path.getGenerationID(), &cached_path));
}
TEST(PaintOpBufferTest, ShrinkToFit) {
PaintOpBuffer buffer;
EXPECT_EQ(sizeof(PaintOpBuffer), buffer.bytes_used());
EXPECT_FALSE(&buffer.GetFirstOp());
buffer.ShrinkToFit();
EXPECT_EQ(sizeof(PaintOpBuffer), buffer.bytes_used());
EXPECT_FALSE(&buffer.GetFirstOp());
buffer.push<DrawColorOp>(SkColors::kRed, SkBlendMode::kSrc);
EXPECT_GT(buffer.bytes_used(), sizeof(PaintOpBuffer) + sizeof(DrawColorOp));
const PaintOp* first_op = &buffer.GetFirstOp();
ASSERT_TRUE(first_op);
buffer.ShrinkToFit();
EXPECT_EQ(sizeof(PaintOpBuffer) + sizeof(DrawColorOp), buffer.bytes_used());
EXPECT_NE(first_op, &buffer.GetFirstOp());
first_op = &buffer.GetFirstOp();
buffer.ShrinkToFit();
EXPECT_EQ(sizeof(PaintOpBuffer) + sizeof(DrawColorOp), buffer.bytes_used());
EXPECT_EQ(first_op, &buffer.GetFirstOp());
}
TEST(PaintOpBufferTest, MoveRetainingBufferIfPossible) {
PaintOpBuffer buffer;
sk_sp<PaintOpBuffer> move_to = buffer.MoveRetainingBufferIfPossible();
EXPECT_EQ(sizeof(PaintOpBuffer), move_to->bytes_used());
EXPECT_FALSE(&move_to->GetFirstOp());
EXPECT_EQ(sizeof(PaintOpBuffer), buffer.bytes_used());
EXPECT_FALSE(&buffer.GetFirstOp());
// `buffer` has more reserved than used.
buffer.push<DrawColorOp>(SkColors::kRed, SkBlendMode::kSrc);
size_t old_bytes_used = buffer.bytes_used();
EXPECT_GT(old_bytes_used, sizeof(PaintOpBuffer) + sizeof(DrawColorOp));
const PaintOp* first_op = &buffer.GetFirstOp();
ASSERT_TRUE(first_op);
EXPECT_EQ(1u, buffer.size());
// `move_to` should allocate a new data buffer that fits, and `buffer` should
// retain the old data buffer.
move_to = buffer.MoveRetainingBufferIfPossible();
EXPECT_EQ(1u, move_to->size());
EXPECT_EQ(sizeof(PaintOpBuffer) + sizeof(DrawColorOp), move_to->bytes_used());
EXPECT_NE(first_op, &move_to->GetFirstOp());
EXPECT_EQ(first_op, &buffer.GetFirstOp());
EXPECT_EQ(old_bytes_used, buffer.bytes_used());
// `buffer` now fits.
buffer = std::move(*move_to);
old_bytes_used = buffer.bytes_used();
EXPECT_EQ(old_bytes_used, sizeof(PaintOpBuffer) + sizeof(DrawColorOp));
first_op = &buffer.GetFirstOp();
ASSERT_TRUE(first_op);
// `move_to` takes the data buffer of `buffer`.
move_to = buffer.MoveRetainingBufferIfPossible();
EXPECT_EQ(1u, move_to->size());
EXPECT_EQ(sizeof(PaintOpBuffer) + sizeof(DrawColorOp), move_to->bytes_used());
EXPECT_EQ(first_op, &move_to->GetFirstOp());
EXPECT_FALSE(&buffer.GetFirstOp());
EXPECT_EQ(sizeof(PaintOpBuffer), buffer.bytes_used());
}
TEST(IteratorTest, IterationTest) {
PaintOpBuffer buffer;
buffer.push<SaveOp>();