0

Creates VideoFrameSubmitter.

This CL allows for a switch-enabled rerouting of VideoFrames into the
VideoFrameSubmitter, which commits frames to the CompositorFrameSink.
This allows video frames to be rendered onto a Surface.

BUG: 746182, 739854
Cq-Include-Trybots: master.tryserver.chromium.linux:linux_layout_tests_slimming_paint_v2
Change-Id: Ic4bd3f135e351c8a94c38524c69d5f59f296a0f3
Reviewed-on: https://chromium-review.googlesource.com/592398
Commit-Queue: CJ DiMeglio <lethalantidote@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Dimitri Glazkov <dglazkov@chromium.org>
Reviewed-by: apacible <apacible@chromium.org>
Reviewed-by: Dale Curtis <dalecurtis@chromium.org>
Reviewed-by: Justin Novosad <junov@chromium.org>
Reviewed-by: Mounir Lamouri <mlamouri@chromium.org>
Cr-Commit-Position: refs/heads/master@{#498788}
This commit is contained in:
CJ DiMeglio
2017-08-31 08:38:04 +00:00
committed by Commit Bot
parent 6008e9d038
commit 2302d2015a
45 changed files with 986 additions and 165 deletions

@ -35,6 +35,8 @@
#include "services/service_manager/public/cpp/connect.h"
#include "services/service_manager/public/cpp/interface_provider.h"
#include "services/ui/public/cpp/gpu/context_provider_command_buffer.h"
#include "third_party/WebKit/public/platform/WebSurfaceLayerBridge.h"
#include "third_party/WebKit/public/platform/WebVideoFrameSubmitter.h"
#include "third_party/WebKit/public/web/WebKit.h"
#include "third_party/WebKit/public/web/WebLocalFrame.h"
#include "url/origin.h"
@ -174,7 +176,8 @@ blink::WebMediaPlayer* MediaFactory::CreateMediaPlayer(
blink::WebMediaPlayerClient* client,
blink::WebMediaPlayerEncryptedMediaClient* encrypted_client,
blink::WebContentDecryptionModule* initial_cdm,
const blink::WebString& sink_id) {
const blink::WebString& sink_id,
blink::WebLayerTreeView* layer_tree_view) {
blink::WebLocalFrame* web_frame = render_frame_->GetWebFrame();
blink::WebSecurityOrigin security_origin =
render_frame_->GetWebFrame()->GetSecurityOrigin();
@ -265,6 +268,7 @@ blink::WebMediaPlayer* MediaFactory::CreateMediaPlayer(
mojo::MakeRequest(&watch_time_recorder_provider_));
}
DCHECK(layer_tree_view);
std::unique_ptr<media::WebMediaPlayerParams> params(
new media::WebMediaPlayerParams(
std::move(media_log),
@ -283,7 +287,8 @@ blink::WebMediaPlayer* MediaFactory::CreateMediaPlayer(
enable_instant_source_buffer_gc, embedded_media_experience_enabled,
watch_time_recorder_provider_.get(),
base::Bind(&MediaFactory::CreateVideoDecodeStatsRecorder,
base::Unretained(this))));
base::Unretained(this)),
base::Bind(&blink::WebSurfaceLayerBridge::Create, layer_tree_view)));
media::WebMediaPlayerImpl* media_player = new media::WebMediaPlayerImpl(
web_frame, client, encrypted_client, GetWebMediaPlayerDelegate(),

@ -29,6 +29,7 @@
namespace blink {
class WebContentDecryptionModule;
class WebEncryptedMediaClient;
class WebLayerTreeView;
class WebLocalFrame;
class WebMediaPlayer;
class WebMediaPlayerClient;
@ -91,12 +92,15 @@ class MediaFactory {
// to a ContentDecryptionModule if MediaKeys have been provided to the
// |encrypted_client| (otherwise null). |sink_id|, when not empty, identifies
// the audio sink to use for this player (see HTMLMediaElement.sinkId).
// The |layer_tree_view| will be used to generate the correct FrameSinkId for
// the Surface containing the corresponding HTMLMediaElement.
blink::WebMediaPlayer* CreateMediaPlayer(
const blink::WebMediaPlayerSource& source,
blink::WebMediaPlayerClient* client,
blink::WebMediaPlayerEncryptedMediaClient* encrypted_client,
blink::WebContentDecryptionModule* initial_cdm,
const blink::WebString& sink_id);
const blink::WebString& sink_id,
blink::WebLayerTreeView* layer_tree_view);
// Provides an EncryptedMediaClient to connect blink's EME layer to media's
// implementation of requestMediaKeySystemAccess. Will always return the same

@ -264,6 +264,7 @@ using blink::WebFrameSerializer;
using blink::WebFrameSerializerClient;
using blink::WebHistoryItem;
using blink::WebHTTPBody;
using blink::WebLayerTreeView;
using blink::WebLocalFrame;
using blink::WebMediaPlayer;
using blink::WebMediaPlayerClient;
@ -2969,9 +2970,10 @@ blink::WebMediaPlayer* RenderFrameImpl::CreateMediaPlayer(
WebMediaPlayerClient* client,
WebMediaPlayerEncryptedMediaClient* encrypted_client,
WebContentDecryptionModule* initial_cdm,
const blink::WebString& sink_id) {
return media_factory_.CreateMediaPlayer(source, client, encrypted_client,
initial_cdm, sink_id);
const blink::WebString& sink_id,
blink::WebLayerTreeView* layer_tree_view) {
return media_factory_.CreateMediaPlayer(
source, client, encrypted_client, initial_cdm, sink_id, layer_tree_view);
}
std::unique_ptr<blink::WebApplicationCacheHost>

@ -107,6 +107,7 @@ struct WebImeTextSpan;
struct WebContextMenuData;
struct WebCursorInfo;
struct WebFindOptions;
class WebLayerTreeView;
class WebRelatedAppsFetcher;
} // namespace blink
@ -498,7 +499,8 @@ class CONTENT_EXPORT RenderFrameImpl
blink::WebMediaPlayerClient* client,
blink::WebMediaPlayerEncryptedMediaClient* encrypted_client,
blink::WebContentDecryptionModule* initial_cdm,
const blink::WebString& sink_id) override;
const blink::WebString& sink_id,
blink::WebLayerTreeView* layer_tree_view) override;
std::unique_ptr<blink::WebApplicationCacheHost> CreateApplicationCacheHost(
blink::WebApplicationCacheHostClient* client) override;
std::unique_ptr<blink::WebContentSettingsClient>

@ -3,6 +3,7 @@ include_rules = [
"+cc/layers/video_frame_provider.h",
"+cc/layers/video_layer.h",
"+components/scheduler", # Only allowed in tests.
"+components/viz/common/surfaces/frame_sink_id.h",
"+gin",
"+media",
"+mojo/edk/embedder",

@ -10,7 +10,9 @@
#include "base/time/default_tick_clock.h"
#include "base/trace_event/auto_open_close_event.h"
#include "base/trace_event/trace_event.h"
#include "media/base/media_switches.h"
#include "media/base/video_frame.h"
#include "third_party/WebKit/public/platform/WebVideoFrameSubmitter.h"
namespace media {
@ -19,8 +21,8 @@ namespace media {
const int kBackgroundRenderingTimeoutMs = 250;
VideoFrameCompositor::VideoFrameCompositor(
const scoped_refptr<base::SingleThreadTaskRunner>& compositor_task_runner)
: compositor_task_runner_(compositor_task_runner),
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner)
: task_runner_(task_runner),
tick_clock_(new base::DefaultTickClock()),
background_rendering_enabled_(true),
background_rendering_timer_(
@ -37,20 +39,38 @@ VideoFrameCompositor::VideoFrameCompositor(
new_background_frame_(false),
// Assume 60Hz before the first UpdateCurrentFrame() call.
last_interval_(base::TimeDelta::FromSecondsD(1.0 / 60)),
callback_(nullptr) {
background_rendering_timer_.SetTaskRunner(compositor_task_runner_);
callback_(nullptr),
surface_layer_for_video_enabled_(
base::FeatureList::IsEnabled(media::kUseSurfaceLayerForVideo)) {
background_rendering_timer_.SetTaskRunner(task_runner_);
if (surface_layer_for_video_enabled_)
submitter_ = blink::WebVideoFrameSubmitter::Create(this);
}
VideoFrameCompositor::~VideoFrameCompositor() {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(!callback_);
DCHECK(!rendering_);
if (client_)
client_->StopUsingProvider();
if (submitter_)
submitter_->StopUsingProvider();
}
void VideoFrameCompositor::EnableSubmission(const viz::FrameSinkId& id) {
DCHECK(task_runner_->BelongsToCurrentThread());
submitter_->StartSubmitting(id);
}
// Reports whether an endpoint for video frames currently exists: the
// cc::VideoFrameProvider client on the legacy VideoLayer path, or the
// WebVideoFrameSubmitter when surface layers for video are enabled.
bool VideoFrameCompositor::IsClientSinkAvailable() {
  DCHECK(task_runner_->BelongsToCurrentThread());
  if (surface_layer_for_video_enabled_)
    return submitter_ != nullptr;
  return client_ != nullptr;
}
void VideoFrameCompositor::OnRendererStateUpdate(bool new_state) {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_NE(rendering_, new_state);
rendering_ = new_state;
@ -77,18 +97,25 @@ void VideoFrameCompositor::OnRendererStateUpdate(bool new_state) {
DCHECK(!background_rendering_timer_.IsRunning());
}
if (!client_)
if (!IsClientSinkAvailable())
return;
if (rendering_)
client_->StartRendering();
else
client_->StopRendering();
if (surface_layer_for_video_enabled_) {
if (rendering_)
submitter_->StartRendering();
else
submitter_->StopRendering();
} else {
if (rendering_)
client_->StartRendering();
else
client_->StopRendering();
}
}
void VideoFrameCompositor::SetVideoFrameProviderClient(
cc::VideoFrameProvider::Client* client) {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
DCHECK(task_runner_->BelongsToCurrentThread());
if (client_)
client_->StopUsingProvider();
client_ = client;
@ -99,23 +126,23 @@ void VideoFrameCompositor::SetVideoFrameProviderClient(
}
scoped_refptr<VideoFrame> VideoFrameCompositor::GetCurrentFrame() {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
DCHECK(task_runner_->BelongsToCurrentThread());
return current_frame_;
}
void VideoFrameCompositor::PutCurrentFrame() {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
DCHECK(task_runner_->BelongsToCurrentThread());
rendered_last_frame_ = true;
}
bool VideoFrameCompositor::UpdateCurrentFrame(base::TimeTicks deadline_min,
base::TimeTicks deadline_max) {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
DCHECK(task_runner_->BelongsToCurrentThread());
return CallRender(deadline_min, deadline_max, false);
}
bool VideoFrameCompositor::HasCurrentFrame() {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
DCHECK(task_runner_->BelongsToCurrentThread());
return static_cast<bool>(current_frame_);
}
@ -125,7 +152,7 @@ void VideoFrameCompositor::Start(RenderCallback* callback) {
base::AutoLock lock(callback_lock_);
DCHECK(!callback_);
callback_ = callback;
compositor_task_runner_->PostTask(
task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoFrameCompositor::OnRendererStateUpdate,
base::Unretained(this), true));
}
@ -137,7 +164,7 @@ void VideoFrameCompositor::Stop() {
base::AutoLock lock(callback_lock_);
DCHECK(callback_);
callback_ = nullptr;
compositor_task_runner_->PostTask(
task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoFrameCompositor::OnRendererStateUpdate,
base::Unretained(this), false));
}
@ -145,22 +172,27 @@ void VideoFrameCompositor::Stop() {
void VideoFrameCompositor::PaintSingleFrame(
const scoped_refptr<VideoFrame>& frame,
bool repaint_duplicate_frame) {
if (!compositor_task_runner_->BelongsToCurrentThread()) {
compositor_task_runner_->PostTask(
if (!task_runner_->BelongsToCurrentThread()) {
task_runner_->PostTask(
FROM_HERE,
base::Bind(&VideoFrameCompositor::PaintSingleFrame,
base::Unretained(this), frame, repaint_duplicate_frame));
return;
}
if (ProcessNewFrame(frame, repaint_duplicate_frame) && client_)
client_->DidReceiveFrame();
if (ProcessNewFrame(frame, repaint_duplicate_frame) &&
IsClientSinkAvailable()) {
if (!surface_layer_for_video_enabled_)
client_->DidReceiveFrame();
else
submitter_->DidReceiveFrame();
}
}
scoped_refptr<VideoFrame>
VideoFrameCompositor::GetCurrentFrameAndUpdateIfStale() {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
if (client_ || !rendering_ || !is_background_rendering_)
DCHECK(task_runner_->BelongsToCurrentThread());
if (IsClientSinkAvailable() || !rendering_ || !is_background_rendering_)
return current_frame_;
DCHECK(!last_background_render_.is_null());
@ -192,14 +224,14 @@ base::TimeDelta VideoFrameCompositor::GetCurrentFrameTimestamp() const {
void VideoFrameCompositor::SetOnNewProcessedFrameCallback(
const OnNewProcessedFrameCB& cb) {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
DCHECK(task_runner_->BelongsToCurrentThread());
new_processed_frame_cb_ = cb;
}
bool VideoFrameCompositor::ProcessNewFrame(
const scoped_refptr<VideoFrame>& frame,
bool repaint_duplicate_frame) {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
DCHECK(task_runner_->BelongsToCurrentThread());
if (frame && current_frame_ && !repaint_duplicate_frame &&
frame->unique_id() == current_frame_->unique_id()) {
@ -219,20 +251,24 @@ bool VideoFrameCompositor::ProcessNewFrame(
}
void VideoFrameCompositor::BackgroundRender() {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
DCHECK(task_runner_->BelongsToCurrentThread());
const base::TimeTicks now = tick_clock_->NowTicks();
last_background_render_ = now;
bool new_frame = CallRender(now, now + last_interval_, true);
if (new_frame && client_)
client_->DidReceiveFrame();
if (new_frame && IsClientSinkAvailable()) {
if (!surface_layer_for_video_enabled_)
client_->DidReceiveFrame();
else
submitter_->DidReceiveFrame();
}
}
bool VideoFrameCompositor::CallRender(base::TimeTicks deadline_min,
base::TimeTicks deadline_max,
bool background_rendering) {
DCHECK(compositor_task_runner_->BelongsToCurrentThread());
DCHECK(task_runner_->BelongsToCurrentThread());
base::AutoLock lock(callback_lock_);
if (!callback_) {
// Even if we no longer have a callback, return true if we have a frame
// which |client_| hasn't seen before.

@ -18,6 +18,7 @@
#include "cc/layers/video_frame_provider.h"
#include "media/base/video_renderer_sink.h"
#include "media/blink/media_blink_export.h"
#include "third_party/WebKit/public/platform/WebVideoFrameSubmitter.h"
#include "ui/gfx/geometry/size.h"
namespace base {
@ -26,6 +27,10 @@ class AutoOpenCloseEvent;
}
}
namespace viz {
class FrameSinkId;
}
namespace media {
class VideoFrame;
@ -60,18 +65,21 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
// Used to report back the time when the new frame has been processed.
using OnNewProcessedFrameCB = base::Callback<void(base::TimeTicks)>;
// |compositor_task_runner| is the task runner on which this class will live,
// |task_runner| is the task runner on which this class will live,
// though it may be constructed on any thread.
explicit VideoFrameCompositor(
const scoped_refptr<base::SingleThreadTaskRunner>&
compositor_task_runner);
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner);
// Destruction must happen on the compositor thread; Stop() must have been
// called before destruction starts.
~VideoFrameCompositor() override;
// Signals the VideoFrameSubmitter to prepare to receive BeginFrames and
// submit video frames given by VideoFrameCompositor.
void EnableSubmission(const viz::FrameSinkId& id);
// cc::VideoFrameProvider implementation. These methods must be called on the
// |compositor_task_runner_|.
// |task_runner_|.
void SetVideoFrameProviderClient(
cc::VideoFrameProvider::Client* client) override;
bool UpdateCurrentFrame(base::TimeTicks deadline_min,
@ -125,7 +133,17 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
background_rendering_enabled_ = enabled;
}
  // Test-only: replaces |submitter_| with an injected (typically mock)
  // blink::WebVideoFrameSubmitter, taking ownership of it.
  void set_submitter_for_test(
      std::unique_ptr<blink::WebVideoFrameSubmitter> submitter) {
    submitter_ = std::move(submitter);
  }
private:
// Indicates whether the endpoint for the VideoFrame exists.
// TODO(lethalantidote): Update this function to read creation/destruction
// signals of the SurfaceLayerImpl.
bool IsClientSinkAvailable();
// Called on the compositor thread in response to Start() or Stop() calls;
// must be used to change |rendering_| state.
void OnRendererStateUpdate(bool new_state);
@ -146,14 +164,18 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
base::TimeTicks deadline_max,
bool background_rendering);
scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner_;
// This will run tasks on the compositor thread. If
// kEnableSurfaceLayerForVideo is enabled, it will instead run tasks on the
// media thread.
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
std::unique_ptr<base::TickClock> tick_clock_;
// Allows tests to disable the background rendering task.
bool background_rendering_enabled_;
// Manages UpdateCurrentFrame() callbacks if |client_| has stopped sending
// them for various reasons. Runs on |compositor_task_runner_| and is reset
// them for various reasons. Runs on |task_runner_| and is reset
// after each successful UpdateCurrentFrame() call.
base::Timer background_rendering_timer_;
@ -177,6 +199,10 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
// AutoOpenCloseEvent for begin/end events.
std::unique_ptr<base::trace_event::AutoOpenCloseEvent> auto_open_close_;
std::unique_ptr<blink::WebVideoFrameSubmitter> submitter_;
// Whether the use of a surface layer instead of a video layer is enabled.
bool surface_layer_for_video_enabled_ = false;
DISALLOW_COPY_AND_ASSIGN(VideoFrameCompositor);
};

@ -2,33 +2,70 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/blink/video_frame_compositor.h"
#include "base/bind.h"
#include "base/macros.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/test/scoped_feature_list.h"
#include "base/test/simple_test_tick_clock.h"
#include "cc/layers/video_frame_provider.h"
#include "components/viz/common/surfaces/frame_sink_id.h"
#include "media/base/gmock_callback_support.h"
#include "media/base/video_frame.h"
#include "media/blink/video_frame_compositor.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/WebKit/public/platform/WebVideoFrameSubmitter.h"
using testing::_;
using testing::DoAll;
using testing::Return;
using testing::StrictMock;
namespace media {
class VideoFrameCompositorTest : public testing::Test,
public cc::VideoFrameProvider::Client,
public VideoRendererSink::RenderCallback {
// GMock double for blink::WebVideoFrameSubmitter. DidReceiveFrame() is
// implemented by hand rather than mocked so tests can simply count how many
// frames the compositor delivered.
class MockWebVideoFrameSubmitter : public blink::WebVideoFrameSubmitter {
 public:
  // blink::WebVideoFrameSubmitter implementation.
  void StopUsingProvider() override {}
  MOCK_METHOD1(StartSubmitting, void(const viz::FrameSinkId&));
  MOCK_METHOD0(StartRendering, void());
  MOCK_METHOD0(StopRendering, void());
  void DidReceiveFrame() override { ++did_receive_frame_count_; }

  // Number of DidReceiveFrame() calls observed so far.
  int did_receive_frame_count() { return did_receive_frame_count_; }

 private:
  int did_receive_frame_count_ = 0;
};
class VideoFrameCompositorTest : public VideoRendererSink::RenderCallback,
public ::testing::TestWithParam<bool> {
public:
VideoFrameCompositorTest()
: tick_clock_(new base::SimpleTestTickClock()),
compositor_(new VideoFrameCompositor(message_loop.task_runner())),
did_receive_frame_count_(0) {
compositor_->SetVideoFrameProviderClient(this);
client_(new StrictMock<MockWebVideoFrameSubmitter>()) {}
void SetUp() {
if (IsSurfaceLayerForVideoEnabled()) {
feature_list_.InitFromCommandLine("UseSurfaceLayerForVideo", "");
// When SurfaceLayerForVideo is enabled, |compositor_| owns the
// |submitter_|. Otherwise, the |compositor_| treats the |submitter_| as if
// it were a VideoFrameProviderClient in the VideoLayer code path, holding
// only a bare pointer.
}
submitter_ = client_.get();
compositor_ =
base::MakeUnique<VideoFrameCompositor>(message_loop.task_runner());
if (!IsSurfaceLayerForVideoEnabled()) {
compositor_->SetVideoFrameProviderClient(client_.get());
} else {
EXPECT_CALL(*submitter_, StartSubmitting(_));
compositor_->set_submitter_for_test(std::move(client_));
compositor_->EnableSubmission(viz::FrameSinkId(1, 1));
}
compositor_->set_tick_clock_for_testing(
std::unique_ptr<base::TickClock>(tick_clock_));
// Disable background rendering by default.
@ -46,14 +83,9 @@ class VideoFrameCompositorTest : public testing::Test,
}
VideoFrameCompositor* compositor() { return compositor_.get(); }
int did_receive_frame_count() { return did_receive_frame_count_; }
protected:
// cc::VideoFrameProvider::Client implementation.
void StopUsingProvider() override {}
MOCK_METHOD0(StartRendering, void());
MOCK_METHOD0(StopRendering, void());
void DidReceiveFrame() override { ++did_receive_frame_count_; }
bool IsSurfaceLayerForVideoEnabled() { return GetParam(); }
// VideoRendererSink::RenderCallback implementation.
MOCK_METHOD3(Render,
@ -63,7 +95,7 @@ class VideoFrameCompositorTest : public testing::Test,
MOCK_METHOD0(OnFrameDropped, void());
void StartVideoRendererSink() {
EXPECT_CALL(*this, StartRendering());
EXPECT_CALL(*submitter_, StartRendering());
const bool had_current_frame = !!compositor_->GetCurrentFrame();
compositor()->Start(this);
// If we previously had a frame, we should still have one now.
@ -73,7 +105,7 @@ class VideoFrameCompositorTest : public testing::Test,
void StopVideoRendererSink(bool have_client) {
if (have_client)
EXPECT_CALL(*this, StopRendering());
EXPECT_CALL(*submitter_, StopRendering());
const bool had_current_frame = !!compositor_->GetCurrentFrame();
compositor()->Stop();
// If we previously had a frame, we should still have one now.
@ -88,29 +120,32 @@ class VideoFrameCompositorTest : public testing::Test,
base::MessageLoop message_loop;
base::SimpleTestTickClock* tick_clock_; // Owned by |compositor_|
StrictMock<MockWebVideoFrameSubmitter>* submitter_;
std::unique_ptr<StrictMock<MockWebVideoFrameSubmitter>> client_;
std::unique_ptr<VideoFrameCompositor> compositor_;
int did_receive_frame_count_;
private:
base::test::ScopedFeatureList feature_list_;
DISALLOW_COPY_AND_ASSIGN(VideoFrameCompositorTest);
};
TEST_F(VideoFrameCompositorTest, InitialValues) {
// A freshly-constructed compositor must not yet report a current frame.
TEST_P(VideoFrameCompositorTest, InitialValues) {
  EXPECT_FALSE(compositor()->GetCurrentFrame().get());
}
TEST_F(VideoFrameCompositorTest, PaintSingleFrame) {
TEST_P(VideoFrameCompositorTest, PaintSingleFrame) {
scoped_refptr<VideoFrame> expected = VideoFrame::CreateEOSFrame();
// Should notify compositor synchronously.
EXPECT_EQ(0, did_receive_frame_count());
EXPECT_EQ(0, submitter_->did_receive_frame_count());
compositor()->PaintSingleFrame(expected);
scoped_refptr<VideoFrame> actual = compositor()->GetCurrentFrame();
EXPECT_EQ(expected, actual);
EXPECT_EQ(1, did_receive_frame_count());
EXPECT_EQ(1, submitter_->did_receive_frame_count());
}
TEST_F(VideoFrameCompositorTest, VideoRendererSinkFrameDropped) {
TEST_P(VideoFrameCompositorTest, VideoRendererSinkFrameDropped) {
scoped_refptr<VideoFrame> opaque_frame = CreateOpaqueFrame();
EXPECT_CALL(*this, Render(_, _, _)).WillRepeatedly(Return(opaque_frame));
@ -144,21 +179,23 @@ TEST_F(VideoFrameCompositorTest, VideoRendererSinkFrameDropped) {
StopVideoRendererSink(true);
}
TEST_F(VideoFrameCompositorTest, VideoLayerShutdownWhileRendering) {
EXPECT_CALL(*this, Render(_, _, true)).WillOnce(Return(nullptr));
StartVideoRendererSink();
compositor_->SetVideoFrameProviderClient(nullptr);
StopVideoRendererSink(false);
TEST_P(VideoFrameCompositorTest, VideoLayerShutdownWhileRendering) {
if (!IsSurfaceLayerForVideoEnabled()) {
EXPECT_CALL(*this, Render(_, _, true)).WillOnce(Return(nullptr));
StartVideoRendererSink();
compositor_->SetVideoFrameProviderClient(nullptr);
StopVideoRendererSink(false);
}
}
TEST_F(VideoFrameCompositorTest, StartFiresBackgroundRender) {
TEST_P(VideoFrameCompositorTest, StartFiresBackgroundRender) {
scoped_refptr<VideoFrame> opaque_frame = CreateOpaqueFrame();
EXPECT_CALL(*this, Render(_, _, true)).WillRepeatedly(Return(opaque_frame));
StartVideoRendererSink();
StopVideoRendererSink(true);
}
TEST_F(VideoFrameCompositorTest, BackgroundRenderTicks) {
TEST_P(VideoFrameCompositorTest, BackgroundRenderTicks) {
scoped_refptr<VideoFrame> opaque_frame = CreateOpaqueFrame();
compositor_->set_background_rendering_for_testing(true);
@ -179,7 +216,7 @@ TEST_F(VideoFrameCompositorTest, BackgroundRenderTicks) {
StopVideoRendererSink(true);
}
TEST_F(VideoFrameCompositorTest,
TEST_P(VideoFrameCompositorTest,
UpdateCurrentFrameWorksWhenBackgroundRendered) {
scoped_refptr<VideoFrame> opaque_frame = CreateOpaqueFrame();
compositor_->set_background_rendering_for_testing(true);
@ -207,7 +244,7 @@ TEST_F(VideoFrameCompositorTest,
StopVideoRendererSink(true);
}
TEST_F(VideoFrameCompositorTest, GetCurrentFrameAndUpdateIfStale) {
TEST_P(VideoFrameCompositorTest, GetCurrentFrameAndUpdateIfStale) {
scoped_refptr<VideoFrame> opaque_frame_1 = CreateOpaqueFrame();
scoped_refptr<VideoFrame> opaque_frame_2 = CreateOpaqueFrame();
compositor_->set_background_rendering_for_testing(true);
@ -235,8 +272,12 @@ TEST_F(VideoFrameCompositorTest, GetCurrentFrameAndUpdateIfStale) {
testing::Mock::VerifyAndClearExpectations(this);
// Clear our client, which means no mock function calls for Client.
compositor()->SetVideoFrameProviderClient(nullptr);
if (IsSurfaceLayerForVideoEnabled()) {
compositor()->set_submitter_for_test(nullptr);
} else {
// Clear our client, which means no mock function calls for Client.
compositor()->SetVideoFrameProviderClient(nullptr);
}
// This call should still not call background render, because we aren't in the
// background rendering state yet.
@ -262,4 +303,8 @@ TEST_F(VideoFrameCompositorTest, GetCurrentFrameAndUpdateIfStale) {
StopVideoRendererSink(false);
}
INSTANTIATE_TEST_CASE_P(SubmitterEnabled,
VideoFrameCompositorTest,
::testing::Bool());
} // namespace media

@ -226,11 +226,6 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
base::Bind(&WebMediaPlayerImpl::OnProgress, AsWeakPtr()),
tick_clock_.get()),
url_index_(url_index),
// Threaded compositing isn't enabled universally yet.
compositor_task_runner_(params->compositor_task_runner()
? params->compositor_task_runner()
: base::ThreadTaskRunnerHandle::Get()),
compositor_(new VideoFrameCompositor(compositor_task_runner_)),
#if defined(OS_ANDROID) // WMPI_CAST
cast_impl_(this, client_, params->context_3d_cb()),
#endif
@ -264,8 +259,25 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
DCHECK(client_);
DCHECK(delegate_);
if (surface_layer_for_video_enabled_)
bridge_ = base::WrapUnique(blink::WebSurfaceLayerBridge::Create());
if (surface_layer_for_video_enabled_) {
bridge_ = params->create_bridge_callback().Run(this);
// TODO(lethalantidote): Use a separate task_runner. https://crbug.com/753605.
vfc_task_runner_ = media_task_runner_;
} else {
// Threaded compositing isn't enabled universally yet.
vfc_task_runner_ = params->compositor_task_runner()
? params->compositor_task_runner()
: base::ThreadTaskRunnerHandle::Get();
}
compositor_ = base::MakeUnique<VideoFrameCompositor>(vfc_task_runner_);
if (surface_layer_for_video_enabled_) {
vfc_task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoFrameCompositor::EnableSubmission,
base::Unretained(compositor_.get()),
bridge_->GetFrameSinkId()));
}
// If we're supposed to force video overlays, then make sure that they're
// enabled all the time.
@ -329,12 +341,12 @@ WebMediaPlayerImpl::~WebMediaPlayerImpl() {
// Destruct compositor resources in the proper order.
client_->SetWebLayer(nullptr);
if (!surface_layer_for_video_enabled_ && video_weblayer_) {
static_cast<cc::VideoLayer*>(video_weblayer_->layer())->StopUsingProvider();
}
// TODO(lethalantidote): Handle destruction of compositor for surface layer.
// https://crbug/739854.
compositor_task_runner_->DeleteSoon(FROM_HERE, compositor_);
vfc_task_runner_->DeleteSoon(FROM_HERE, std::move(compositor_));
media_log_->AddEvent(
media_log_->CreateEvent(MediaLogEvent::WEBMEDIAPLAYER_DESTROYED));
@ -357,6 +369,17 @@ void WebMediaPlayerImpl::Load(LoadType load_type,
DoLoad(load_type, url, cors_mode);
}
// WebSurfaceLayerBridgeObserver callback: the bridge created or replaced its
// WebLayer, so re-apply the cached opacity and hand the new layer to the
// client for compositing.
void WebMediaPlayerImpl::OnWebLayerReplaced() {
  DCHECK(bridge_);
  bridge_->GetWebLayer()->CcLayer()->SetContentsOpaque(opaque_);
  // TODO(lethalantidote): Figure out how to persist the opaque setting
  // without calling WebLayerImpl's SetContentsOpaqueIsFixed();
  // https://crbug.com/739859.
  // TODO(lethalantidote): Figure out how to pass along rotation information.
  // https://crbug.com/750313.
  client_->SetWebLayer(bridge_->GetWebLayer());
}
bool WebMediaPlayerImpl::SupportsOverlayFullscreenVideo() {
#if defined(OS_ANDROID)
return !using_media_player_renderer_ &&
@ -1421,16 +1444,10 @@ void WebMediaPlayerImpl::OnMetadata(PipelineMetadata metadata) {
if (!surface_layer_for_video_enabled_) {
DCHECK(!video_weblayer_);
video_weblayer_.reset(new cc_blink::WebLayerImpl(cc::VideoLayer::Create(
compositor_, pipeline_metadata_.video_rotation)));
compositor_.get(), pipeline_metadata_.video_rotation)));
video_weblayer_->layer()->SetContentsOpaque(opaque_);
video_weblayer_->SetContentsOpaqueIsFixed(true);
client_->SetWebLayer(video_weblayer_.get());
} else if (bridge_->GetWebLayer()) {
bridge_->GetWebLayer()->CcLayer()->SetContentsOpaque(opaque_);
// TODO(lethalantidote): Figure out how to persist opaque setting
// without calling WebLayerImpl's SetContentsOpaueIsFixed;
// https://crbug/739859.
client_->SetWebLayer(bridge_->GetWebLayer());
}
}
@ -1759,10 +1776,10 @@ void WebMediaPlayerImpl::OnFrameShown() {
frame_time_report_cb_.Reset(
base::Bind(&WebMediaPlayerImpl::ReportTimeFromForegroundToFirstFrame,
AsWeakPtr(), base::TimeTicks::Now()));
compositor_task_runner_->PostTask(
vfc_task_runner_->PostTask(
FROM_HERE,
base::Bind(&VideoFrameCompositor::SetOnNewProcessedFrameCallback,
base::Unretained(compositor_),
base::Unretained(compositor_.get()),
BindToCurrentLoop(frame_time_report_cb_.callback())));
}
@ -2051,7 +2068,7 @@ std::unique_ptr<Renderer> WebMediaPlayerImpl::CreateRenderer() {
#endif
return renderer_factory_selector_->GetCurrentFactory()->CreateRenderer(
media_task_runner_, worker_task_runner_, audio_source_provider_.get(),
compositor_, request_overlay_info_cb, client_->TargetColorSpace());
compositor_.get(), request_overlay_info_cb, client_->TargetColorSpace());
}
void WebMediaPlayerImpl::StartPipeline() {
@ -2173,9 +2190,9 @@ scoped_refptr<VideoFrame> WebMediaPlayerImpl::GetCurrentFrameFromCompositor()
DCHECK(main_task_runner_->BelongsToCurrentThread());
TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor");
// Needed when the |main_task_runner_| and |compositor_task_runner_| are the
// Needed when the |main_task_runner_| and |vfc_task_runner_| are the
// same to avoid deadlock in the Wait() below.
if (compositor_task_runner_->BelongsToCurrentThread()) {
if (vfc_task_runner_->BelongsToCurrentThread()) {
scoped_refptr<VideoFrame> video_frame =
compositor_->GetCurrentFrameAndUpdateIfStale();
if (!video_frame) {
@ -2191,9 +2208,9 @@ scoped_refptr<VideoFrame> WebMediaPlayerImpl::GetCurrentFrameFromCompositor()
scoped_refptr<VideoFrame> video_frame;
base::WaitableEvent event(base::WaitableEvent::ResetPolicy::AUTOMATIC,
base::WaitableEvent::InitialState::NOT_SIGNALED);
compositor_task_runner_->PostTask(
vfc_task_runner_->PostTask(
FROM_HERE,
base::Bind(&GetCurrentFrameAndSignal, base::Unretained(compositor_),
base::Bind(&GetCurrentFrameAndSignal, base::Unretained(compositor_.get()),
&video_frame, &event));
event.Wait();

@ -92,6 +92,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
public WebMediaPlayerDelegate::Observer,
public Pipeline::Client,
public MediaObserverClient,
public blink::WebSurfaceLayerBridgeObserver,
public base::SupportsWeakPtr<WebMediaPlayerImpl> {
public:
// Constructs a WebMediaPlayer implementation using Chromium's media stack.
@ -106,6 +107,9 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
std::unique_ptr<WebMediaPlayerParams> params);
~WebMediaPlayerImpl() override;
// WebSurfaceLayerBridgeObserver implementation.
void OnWebLayerReplaced() override;
void Load(LoadType load_type,
const blink::WebMediaPlayerSource& source,
CORSMode cors_mode) override;
@ -639,8 +643,12 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
UrlIndex* url_index_;
// Video rendering members.
scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner_;
VideoFrameCompositor* compositor_; // Deleted on |compositor_task_runner_|.
// The |compositor_| runs on the compositor thread, or if
// kEnableSurfaceLayerForVideo is enabled, the media thread. This task runner
// posts tasks for the |compositor_| on the correct thread.
scoped_refptr<base::SingleThreadTaskRunner> vfc_task_runner_;
std::unique_ptr<VideoFrameCompositor>
compositor_; // Deleted on |vfc_task_runner_|.
SkCanvasVideoRenderer skcanvas_video_renderer_;
// The compositor layer for displaying the video content when using composited

@ -20,6 +20,7 @@
#include "base/test/simple_test_tick_clock.h"
#include "base/threading/thread.h"
#include "base/threading/thread_task_runner_handle.h"
#include "cc/blink/web_layer_impl.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/test_helpers.h"
@ -34,6 +35,7 @@
#include "third_party/WebKit/public/platform/WebMediaPlayerClient.h"
#include "third_party/WebKit/public/platform/WebSecurityOrigin.h"
#include "third_party/WebKit/public/platform/WebSize.h"
#include "third_party/WebKit/public/platform/WebSurfaceLayerBridge.h"
#include "third_party/WebKit/public/web/WebFrameClient.h"
#include "third_party/WebKit/public/web/WebLocalFrame.h"
#include "third_party/WebKit/public/web/WebScopedUserGesture.h"
@ -43,6 +45,8 @@
using ::testing::AnyNumber;
using ::testing::InSequence;
using ::testing::Return;
using ::testing::ReturnRef;
using ::testing::StrictMock;
using ::testing::_;
namespace media {
@ -218,6 +222,12 @@ class MockWebMediaPlayerDelegate : public WebMediaPlayerDelegate {
bool is_closed_ = false;
};
class MockSurfaceLayerBridge : public blink::WebSurfaceLayerBridge {
public:
MOCK_CONST_METHOD0(GetWebLayer, blink::WebLayer*());
MOCK_CONST_METHOD0(GetFrameSinkId, const viz::FrameSinkId&());
};
class WebMediaPlayerImplTest : public testing::Test {
public:
WebMediaPlayerImplTest()
@ -261,8 +271,11 @@ class WebMediaPlayerImplTest : public testing::Test {
RequestRoutingTokenCallback(), nullptr,
kMaxKeyframeDistanceToDisableBackgroundVideo,
kMaxKeyframeDistanceToDisableBackgroundVideoMSE, false, false,
provider_.get(), base::Bind(&CreateCapabilitiesRecorder)));
}
provider_.get(),
base::Bind(&CreateCapabilitiesRecorder),
base::Bind(&WebMediaPlayerImplTest::CreateMockSurfaceLayerBridge,
base::Unretained(this))));
}
~WebMediaPlayerImplTest() override {
// Destruct WebMediaPlayerImpl and pump the message loop to ensure that
@ -271,15 +284,23 @@ class WebMediaPlayerImplTest : public testing::Test {
// NOTE: This should be done before any other member variables are
// destructed since WMPI may reference them during destruction.
wmpi_.reset();
base::RunLoop().RunUntilIdle();
web_view_->Close();
}
protected:
std::unique_ptr<blink::WebSurfaceLayerBridge> CreateMockSurfaceLayerBridge(
blink::WebSurfaceLayerBridgeObserver*) {
return base::WrapUnique<blink::WebSurfaceLayerBridge>(
surface_layer_bridge_);
}
void SetNetworkState(blink::WebMediaPlayer::NetworkState state) {
wmpi_->SetNetworkState(state);
}
void SetReadyState(blink::WebMediaPlayer::ReadyState state) {
wmpi_->SetReadyState(state);
}
@ -437,6 +458,8 @@ class WebMediaPlayerImplTest : public testing::Test {
mojom::WatchTimeRecorderProviderPtr provider_;
StrictMock<MockSurfaceLayerBridge>* surface_layer_bridge_ = nullptr;
// The WebMediaPlayerImpl instance under test.
std::unique_ptr<WebMediaPlayerImpl> wmpi_;
@ -925,6 +948,26 @@ TEST_F(WebMediaPlayerImplTest, InfiniteDuration) {
EXPECT_EQ(base::TimeDelta(), GetCurrentTimeInternal());
}
// TODO(lethalantidote): Once |client_| is converted from a dummy to a mock,
// test that |web_layer| is actually used by |client_|.
// http://crbug/755880.
TEST_F(WebMediaPlayerImplTest, OnWebLayerReplacedGetsWebLayerFromBridge) {
base::test::ScopedFeatureList feature_list;
feature_list.InitFromCommandLine("UseSurfaceLayerForVideo", "");
surface_layer_bridge_ = new StrictMock<MockSurfaceLayerBridge>();
viz::FrameSinkId id = viz::FrameSinkId(1, 1);
EXPECT_CALL(*surface_layer_bridge_, GetFrameSinkId()).WillOnce(ReturnRef(id));
InitializeWebMediaPlayerImpl();
std::unique_ptr<cc_blink::WebLayerImpl> web_layer =
base::MakeUnique<cc_blink::WebLayerImpl>();
EXPECT_CALL(*surface_layer_bridge_, GetWebLayer())
.WillRepeatedly(Return(web_layer.get()));
wmpi_->OnWebLayerReplaced();
}
class WebMediaPlayerImplBackgroundBehaviorTest
: public WebMediaPlayerImplTest,
public ::testing::WithParamInterface<
@ -941,7 +984,6 @@ class WebMediaPlayerImplBackgroundBehaviorTest
void SetUp() override {
WebMediaPlayerImplTest::SetUp();
SetUpMediaSuspend(IsMediaSuspendOn());
std::string enabled_features;

@ -28,7 +28,9 @@ WebMediaPlayerParams::WebMediaPlayerParams(
bool enable_instant_source_buffer_gc,
bool embedded_media_experience_enabled,
mojom::WatchTimeRecorderProvider* provider,
CreateCapabilitiesRecorderCB create_capabilities_recorder_cb)
CreateCapabilitiesRecorderCB create_capabilities_recorder_cb,
base::Callback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
blink::WebSurfaceLayerBridgeObserver*)> create_bridge_callback)
: defer_load_cb_(defer_load_cb),
audio_renderer_sink_(audio_renderer_sink),
media_log_(std::move(media_log)),
@ -49,7 +51,8 @@ WebMediaPlayerParams::WebMediaPlayerParams(
embedded_media_experience_enabled_(embedded_media_experience_enabled),
watch_time_recorder_provider_(provider),
create_capabilities_recorder_cb_(
std::move(create_capabilities_recorder_cb)) {}
std::move(create_capabilities_recorder_cb)),
create_bridge_callback_(create_bridge_callback) {}
WebMediaPlayerParams::~WebMediaPlayerParams() {}

@ -26,7 +26,9 @@ class TaskRunner;
namespace blink {
class WebContentDecryptionModule;
}
class WebSurfaceLayerBridge;
class WebSurfaceLayerBridgeObserver;
} // namespace blink
namespace media {
@ -73,7 +75,9 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
bool enable_instant_source_buffer_gc,
bool embedded_media_experience_enabled,
mojom::WatchTimeRecorderProvider* provider,
CreateCapabilitiesRecorderCB create_capabilities_recorder_cb);
CreateCapabilitiesRecorderCB create_capabilities_recorder_cb,
base::Callback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
blink::WebSurfaceLayerBridgeObserver*)> bridge_callback);
~WebMediaPlayerParams();
@ -140,6 +144,11 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
return watch_time_recorder_provider_;
}
const base::Callback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
blink::WebSurfaceLayerBridgeObserver*)>& create_bridge_callback() const {
return create_bridge_callback_;
}
CreateCapabilitiesRecorderCB create_capabilities_recorder_cb() const {
return create_capabilities_recorder_cb_;
}
@ -164,6 +173,9 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
const bool embedded_media_experience_enabled_;
mojom::WatchTimeRecorderProvider* watch_time_recorder_provider_;
CreateCapabilitiesRecorderCB create_capabilities_recorder_cb_;
base::Callback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
blink::WebSurfaceLayerBridgeObserver*)>
create_bridge_callback_;
DISALLOW_IMPLICIT_CONSTRUCTORS(WebMediaPlayerParams);
};

@ -50,6 +50,7 @@ class Settings;
class ShadowRoot;
class WebCredentialManagerClient;
class WebFrameClient;
class WebLayerTreeView;
class WebMediaPlayer;
class WebMediaPlayerClient;
class WebMediaPlayerSource;
@ -102,7 +103,8 @@ class CORE_EXPORT CoreInitializer {
WebFrameClient*,
HTMLMediaElement&,
const WebMediaPlayerSource&,
WebMediaPlayerClient*) const = 0;
WebMediaPlayerClient*,
WebLayerTreeView*) const = 0;
virtual WebRemotePlaybackClient* CreateWebRemotePlaybackClient(
HTMLMediaElement&) const = 0;

@ -814,7 +814,8 @@ PluginView* LocalFrameClientImpl::CreatePlugin(
std::unique_ptr<WebMediaPlayer> LocalFrameClientImpl::CreateWebMediaPlayer(
HTMLMediaElement& html_media_element,
const WebMediaPlayerSource& source,
WebMediaPlayerClient* client) {
WebMediaPlayerClient* client,
WebLayerTreeView* layer_tree_view) {
WebLocalFrameImpl* web_frame =
WebLocalFrameImpl::FromFrame(html_media_element.GetDocument().GetFrame());
@ -822,7 +823,7 @@ std::unique_ptr<WebMediaPlayer> LocalFrameClientImpl::CreateWebMediaPlayer(
return nullptr;
return CoreInitializer::GetInstance().CreateWebMediaPlayer(
web_frame->Client(), html_media_element, source, client);
web_frame->Client(), html_media_element, source, client, layer_tree_view);
}
WebRemotePlaybackClient* LocalFrameClientImpl::CreateWebRemotePlaybackClient(

@ -166,7 +166,8 @@ class LocalFrameClientImpl final : public LocalFrameClient {
std::unique_ptr<WebMediaPlayer> CreateWebMediaPlayer(
HTMLMediaElement&,
const WebMediaPlayerSource&,
WebMediaPlayerClient*) override;
WebMediaPlayerClient*,
WebLayerTreeView*) override;
WebRemotePlaybackClient* CreateWebRemotePlaybackClient(
HTMLMediaElement&) override;
void DidChangeScrollOffset() override;

@ -89,6 +89,7 @@ class WebApplicationCacheHost;
class WebApplicationCacheHostClient;
class WebCookieJar;
class WebFrame;
class WebLayerTreeView;
class WebMediaPlayer;
class WebMediaPlayerClient;
class WebMediaPlayerSource;
@ -231,7 +232,8 @@ class CORE_EXPORT LocalFrameClient : public FrameClient {
virtual std::unique_ptr<WebMediaPlayer> CreateWebMediaPlayer(
HTMLMediaElement&,
const WebMediaPlayerSource&,
WebMediaPlayerClient*) = 0;
WebMediaPlayerClient*,
WebLayerTreeView*) = 0;
virtual WebRemotePlaybackClient* CreateWebRemotePlaybackClient(
HTMLMediaElement&) = 0;

@ -72,6 +72,7 @@
#include "platform/RuntimeEnabledFeatures.h"
#include "platform/graphics/CanvasHeuristicParameters.h"
#include "platform/graphics/CanvasMetrics.h"
#include "platform/graphics/GraphicsLayer.h"
#include "platform/graphics/ImageBuffer.h"
#include "platform/graphics/RecordingImageBufferSurface.h"
#include "platform/graphics/UnacceleratedImageBufferSurface.h"
@ -140,7 +141,8 @@ inline HTMLCanvasElement::HTMLCanvasElement(Document& document)
externally_allocated_memory_(0),
origin_clean_(true),
did_fail_to_create_image_buffer_(false),
image_buffer_is_clear_(false) {
image_buffer_is_clear_(false),
surface_layer_bridge_(nullptr) {
CanvasMetrics::CountCanvasContextUsage(CanvasMetrics::kCanvasCreated);
UseCounter::Count(document, WebFeature::kHTMLCanvasElement);
}
@ -148,6 +150,10 @@ inline HTMLCanvasElement::HTMLCanvasElement(Document& document)
DEFINE_NODE_FACTORY(HTMLCanvasElement)
HTMLCanvasElement::~HTMLCanvasElement() {
if (surface_layer_bridge_ && surface_layer_bridge_->GetWebLayer()) {
GraphicsLayer::UnregisterContentsLayer(
surface_layer_bridge_->GetWebLayer());
}
v8::Isolate::GetCurrent()->AdjustAmountOfExternalAllocatedMemory(
-externally_allocated_memory_);
}
@ -1467,13 +1473,14 @@ void HTMLCanvasElement::CreateLayer() {
layer_tree_view =
frame->GetPage()->GetChromeClient().GetWebLayerTreeView(frame);
surface_layer_bridge_ =
WTF::MakeUnique<::blink::SurfaceLayerBridge>(this, layer_tree_view);
WTF::MakeUnique<::blink::SurfaceLayerBridge>(layer_tree_view, this);
// Creates a placeholder layer first before Surface is created.
surface_layer_bridge_->CreateSolidColorLayer();
}
}
void HTMLCanvasElement::OnWebLayerReplaced() {
GraphicsLayer::RegisterContentsLayer(surface_layer_bridge_->GetWebLayer());
SetNeedsCompositingUpdate();
}

@ -75,15 +75,16 @@ class
typedef CanvasRenderingContext2DOrWebGLRenderingContextOrWebGL2RenderingContextOrImageBitmapRenderingContext
RenderingContext;
class CORE_EXPORT HTMLCanvasElement final : public HTMLElement,
public ContextLifecycleObserver,
public PageVisibilityObserver,
public CanvasImageSource,
public CanvasRenderingContextHost,
public SurfaceLayerBridgeObserver,
public ImageBufferClient,
public ImageBitmapSource,
public OffscreenCanvasPlaceholder {
class CORE_EXPORT HTMLCanvasElement final
: public HTMLElement,
public ContextLifecycleObserver,
public PageVisibilityObserver,
public CanvasImageSource,
public CanvasRenderingContextHost,
public WebSurfaceLayerBridgeObserver,
public ImageBufferClient,
public ImageBitmapSource,
public OffscreenCanvasPlaceholder {
DEFINE_WRAPPERTYPEINFO();
USING_GARBAGE_COLLECTED_MIXIN(HTMLCanvasElement);
USING_PRE_FINALIZER(HTMLCanvasElement, Dispose);

@ -1233,8 +1233,10 @@ void HTMLMediaElement::StartPlayerLoad() {
return;
}
web_media_player_ =
frame->Client()->CreateWebMediaPlayer(*this, source, this);
web_media_player_ = frame->Client()->CreateWebMediaPlayer(
*this, source, this,
frame->GetPage()->GetChromeClient().GetWebLayerTreeView(frame));
if (!web_media_player_) {
MediaLoadingFailed(WebMediaPlayer::kNetworkStateFormatError,
BuildElementErrorMessage(

@ -35,7 +35,8 @@ class MediaStubLocalFrameClient : public EmptyLocalFrameClient {
std::unique_ptr<WebMediaPlayer> CreateWebMediaPlayer(
HTMLMediaElement&,
const WebMediaPlayerSource&,
WebMediaPlayerClient*) override {
WebMediaPlayerClient*,
WebLayerTreeView*) override {
return WTF::WrapUnique(new MockWebMediaPlayer());
}
};

@ -190,7 +190,8 @@ PluginView* EmptyLocalFrameClient::CreatePlugin(HTMLPlugInElement&,
std::unique_ptr<WebMediaPlayer> EmptyLocalFrameClient::CreateWebMediaPlayer(
HTMLMediaElement&,
const WebMediaPlayerSource&,
WebMediaPlayerClient*) {
WebMediaPlayerClient*,
WebLayerTreeView*) {
return nullptr;
}

@ -328,7 +328,8 @@ class CORE_EXPORT EmptyLocalFrameClient : public LocalFrameClient {
std::unique_ptr<WebMediaPlayer> CreateWebMediaPlayer(
HTMLMediaElement&,
const WebMediaPlayerSource&,
WebMediaPlayerClient*) override;
WebMediaPlayerClient*,
WebLayerTreeView*) override;
WebRemotePlaybackClient* CreateWebRemotePlaybackClient(
HTMLMediaElement&) override;

@ -55,7 +55,8 @@ class VideoStubLocalFrameClient : public EmptyLocalFrameClient {
std::unique_ptr<WebMediaPlayer> CreateWebMediaPlayer(
HTMLMediaElement&,
const WebMediaPlayerSource&,
WebMediaPlayerClient* client) override {
WebMediaPlayerClient* client,
WebLayerTreeView* view) override {
return WTF::MakeUnique<StubWebMediaPlayer>(client);
}
};

@ -238,14 +238,15 @@ std::unique_ptr<WebMediaPlayer> ModulesInitializer::CreateWebMediaPlayer(
WebFrameClient* web_frame_client,
HTMLMediaElement& html_media_element,
const WebMediaPlayerSource& source,
WebMediaPlayerClient* media_player_client) const {
WebMediaPlayerClient* media_player_client,
WebLayerTreeView* view) const {
HTMLMediaElementEncryptedMedia& encrypted_media =
HTMLMediaElementEncryptedMedia::From(html_media_element);
WebString sink_id(
HTMLMediaElementAudioOutputDevice::sinkId(html_media_element));
return WTF::WrapUnique(web_frame_client->CreateMediaPlayer(
source, media_player_client, &encrypted_media,
encrypted_media.ContentDecryptionModule(), sink_id));
encrypted_media.ContentDecryptionModule(), sink_id, view));
}
WebRemotePlaybackClient* ModulesInitializer::CreateWebRemotePlaybackClient(

@ -34,7 +34,8 @@ class MODULES_EXPORT ModulesInitializer : public CoreInitializer {
WebFrameClient*,
HTMLMediaElement&,
const WebMediaPlayerSource&,
WebMediaPlayerClient*) const override;
WebMediaPlayerClient*,
WebLayerTreeView*) const override;
WebRemotePlaybackClient* CreateWebRemotePlaybackClient(
HTMLMediaElement&) const override;

@ -105,7 +105,8 @@ class StubLocalFrameClientForImpl : public EmptyLocalFrameClient {
std::unique_ptr<WebMediaPlayer> CreateWebMediaPlayer(
HTMLMediaElement&,
const WebMediaPlayerSource&,
WebMediaPlayerClient*) override {
WebMediaPlayerClient*,
WebLayerTreeView*) override {
return WTF::WrapUnique(new MockWebMediaPlayerForImpl);
}

@ -119,7 +119,8 @@ class StubLocalFrameClientForOrientationLockDelegate final
std::unique_ptr<WebMediaPlayer> CreateWebMediaPlayer(
HTMLMediaElement&,
const WebMediaPlayerSource&,
WebMediaPlayerClient*) override {
WebMediaPlayerClient*,
WebLayerTreeView*) override {
return WTF::MakeUnique<MockWebMediaPlayerForOrientationLockDelegate>();
}
};

@ -81,7 +81,8 @@ class StubLocalFrameClient : public EmptyLocalFrameClient {
std::unique_ptr<WebMediaPlayer> CreateWebMediaPlayer(
HTMLMediaElement&,
const WebMediaPlayerSource&,
WebMediaPlayerClient*) override {
WebMediaPlayerClient*,
WebLayerTreeView*) override {
return WTF::MakeUnique<MockVideoWebMediaPlayer>();
}
};

@ -672,6 +672,7 @@ component("platform") {
"exported/WebURLLoaderTestDelegate.cpp",
"exported/WebURLRequest.cpp",
"exported/WebURLResponse.cpp",
"exported/WebVideoFrameSubmitter.cpp",
"exported/WrappedResourceRequest.h",
"exported/WrappedResourceResponse.h",
"exported/linux/WebFontRenderStyle.cpp",
@ -1000,6 +1001,10 @@ component("platform") {
"graphics/UnacceleratedImageBufferSurface.h",
"graphics/UnacceleratedStaticBitmapImage.cpp",
"graphics/UnacceleratedStaticBitmapImage.h",
"graphics/VideoFrameResourceProvider.cpp",
"graphics/VideoFrameResourceProvider.h",
"graphics/VideoFrameSubmitter.cpp",
"graphics/VideoFrameSubmitter.h",
"graphics/WebGraphicsContext3DProviderWrapper.h",
"graphics/compositing/ContentLayerClientImpl.cpp",
"graphics/compositing/ContentLayerClientImpl.h",
@ -1804,6 +1809,7 @@ test("blink_platform_unittests") {
"graphics/ImageBufferTest.cpp",
"graphics/PaintInvalidationReasonTest.cpp",
"graphics/RecordingImageBufferSurfaceTest.cpp",
"graphics/VideoFrameSubmitterTest.cpp",
"graphics/compositing/ContentLayerClientImplTest.cpp",
"graphics/compositing/PaintArtifactCompositorTest.cpp",
"graphics/compositing/PaintChunksToCcLayerTest.cpp",
@ -1917,6 +1923,8 @@ test("blink_platform_unittests") {
"//mojo/common:test_common_custom_types_blink",
"//mojo/edk/system",
"//mojo/public/cpp/bindings/tests:for_blink_tests",
"//services/viz/public/interfaces",
"//services/viz/public/interfaces:interfaces_blink",
"//skia",
"//testing/gmock",
"//testing/gtest",

@ -43,6 +43,7 @@ include_rules = [
"+public/platform",
#TODO(nverne): remove this
"+public/web/WebKit.h",
"+services/viz/public/interfaces/compositing/compositor_frame_sink.mojom-blink.h",
"+skia/ext",
"+third_party/ced/src/compact_enc_det/compact_enc_det.h",
"+third_party/khronos",

@ -8,10 +8,12 @@
namespace blink {
WebSurfaceLayerBridge::~WebSurfaceLayerBridge() {}
WebSurfaceLayerBridge* WebSurfaceLayerBridge::Create() {
return new SurfaceLayerBridge(nullptr, nullptr);
std::unique_ptr<WebSurfaceLayerBridge> WebSurfaceLayerBridge::Create(
WebLayerTreeView* layer_tree_view,
WebSurfaceLayerBridgeObserver* observer) {
return base::MakeUnique<SurfaceLayerBridge>(layer_tree_view, observer);
}
WebSurfaceLayerBridge::~WebSurfaceLayerBridge() {}
} // namespace blink

@ -0,0 +1,20 @@
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "public/platform/WebVideoFrameSubmitter.h"
#include "third_party/WebKit/Source/platform/graphics/VideoFrameSubmitter.h"
namespace cc {
class VideoFrameProvider;
} // namespace cc
namespace blink {
std::unique_ptr<WebVideoFrameSubmitter> WebVideoFrameSubmitter::Create(
cc::VideoFrameProvider* provider) {
return base::MakeUnique<VideoFrameSubmitter>(provider);
}
} // namespace blink

@ -5,15 +5,19 @@ include_rules = [
"+base/callback.h",
"+base/compiler_specific.h",
"+base/message_loop",
"+base/threading/thread.h",
"+base/threading/thread_checker.h",
"+cc",
"-cc/blink",
"+components/viz/common",
"+components/viz/test/fake_external_begin_frame_source.h",
"+gpu/command_buffer/client/gles2_interface.h",
"+gpu/command_buffer/client/gpu_memory_buffer_manager.h",
"+gpu/command_buffer/common/capabilities.h",
"+gpu/command_buffer/common/mailbox.h",
"+gpu/command_buffer/common/sync_token.h",
"+media/base/media_switches.h",
"+media/base/video_frame.h",
"+services/viz/public/interfaces",
"+public/web/WebSettings.h",
]

@ -4,6 +4,7 @@
#include "platform/graphics/SurfaceLayerBridge.h"
#include "base/feature_list.h"
#include "cc/layers/layer.h"
#include "cc/layers/solid_color_layer.h"
#include "cc/layers/surface_layer.h"
@ -11,6 +12,7 @@
#include "components/viz/common/surfaces/surface_id.h"
#include "components/viz/common/surfaces/surface_info.h"
#include "components/viz/common/surfaces/surface_sequence.h"
#include "media/base/media_switches.h"
#include "platform/graphics/GraphicsLayer.h"
#include "platform/mojo/MojoHelper.h"
#include "platform/wtf/Functional.h"
@ -53,8 +55,8 @@ class SequenceSurfaceReferenceFactoryImpl
} // namespace
SurfaceLayerBridge::SurfaceLayerBridge(SurfaceLayerBridgeObserver* observer,
WebLayerTreeView* layer_tree_view)
SurfaceLayerBridge::SurfaceLayerBridge(WebLayerTreeView* layer_tree_view,
WebSurfaceLayerBridgeObserver* observer)
: weak_factory_(this),
observer_(observer),
binding_(this),
@ -80,9 +82,6 @@ SurfaceLayerBridge::SurfaceLayerBridge(SurfaceLayerBridgeObserver* observer,
SurfaceLayerBridge::~SurfaceLayerBridge() {
observer_ = nullptr;
if (web_layer_) {
GraphicsLayer::UnregisterContentsLayer(web_layer_.get());
}
}
void SurfaceLayerBridge::SatisfyCallback(const viz::SurfaceSequence& sequence) {
@ -100,6 +99,7 @@ void SurfaceLayerBridge::CreateSolidColorLayer() {
web_layer_ = Platform::Current()->CompositorSupport()->CreateLayerFromCCLayer(
cc_layer_.get());
GraphicsLayer::RegisterContentsLayer(web_layer_.get());
}
@ -108,20 +108,22 @@ void SurfaceLayerBridge::OnFirstSurfaceActivation(
if (!current_surface_id_.is_valid() && surface_info.is_valid()) {
// First time a SurfaceId is received
current_surface_id_ = surface_info.id();
GraphicsLayer::UnregisterContentsLayer(web_layer_.get());
web_layer_->RemoveFromParent();
if (web_layer_) {
GraphicsLayer::UnregisterContentsLayer(web_layer_.get());
web_layer_->RemoveFromParent();
}
scoped_refptr<cc::SurfaceLayer> surface_layer =
cc::SurfaceLayer::Create(ref_factory_);
surface_layer->SetPrimarySurfaceInfo(surface_info);
surface_layer->SetFallbackSurfaceInfo(surface_info);
surface_layer->SetStretchContentToFillBounds(true);
surface_layer->SetIsDrawable(true);
cc_layer_ = surface_layer;
web_layer_ =
Platform::Current()->CompositorSupport()->CreateLayerFromCCLayer(
cc_layer_.get());
GraphicsLayer::RegisterContentsLayer(web_layer_.get());
} else if (current_surface_id_ != surface_info.id()) {
// A different SurfaceId is received, prompting change to existing
// SurfaceLayer

@ -28,20 +28,14 @@ namespace blink {
class WebLayer;
class WebLayerTreeView;
class PLATFORM_EXPORT SurfaceLayerBridgeObserver {
public:
SurfaceLayerBridgeObserver() {}
virtual ~SurfaceLayerBridgeObserver() {}
virtual void OnWebLayerReplaced() = 0;
};
// The SurfaceLayerBridge facilitates communication about changes to a Surface
// between the Render and Browser processes.
class PLATFORM_EXPORT SurfaceLayerBridge
: public blink::mojom::blink::OffscreenCanvasSurfaceClient,
public WebSurfaceLayerBridge {
public:
SurfaceLayerBridge(SurfaceLayerBridgeObserver*, WebLayerTreeView*);
~SurfaceLayerBridge() override;
SurfaceLayerBridge(WebLayerTreeView*, WebSurfaceLayerBridgeObserver*);
virtual ~SurfaceLayerBridge();
void CreateSolidColorLayer();
@ -53,7 +47,9 @@ class PLATFORM_EXPORT SurfaceLayerBridge
// Implementation of WebSurfaceLayerBridge.
WebLayer* GetWebLayer() const override { return web_layer_.get(); }
const viz::FrameSinkId& GetFrameSinkId() const { return frame_sink_id_; }
const viz::FrameSinkId& GetFrameSinkId() const override {
return frame_sink_id_;
}
private:
mojom::blink::OffscreenCanvasSurfacePtr service_;
@ -64,7 +60,7 @@ class PLATFORM_EXPORT SurfaceLayerBridge
scoped_refptr<viz::SurfaceReferenceFactory> ref_factory_;
base::WeakPtrFactory<SurfaceLayerBridge> weak_factory_;
SurfaceLayerBridgeObserver* observer_;
WebSurfaceLayerBridgeObserver* observer_;
mojo::Binding<blink::mojom::blink::OffscreenCanvasSurfaceClient> binding_;

@ -0,0 +1,34 @@
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "platform/graphics/VideoFrameResourceProvider.h"
#include "cc/quads/render_pass.h"
#include "cc/quads/solid_color_draw_quad.h"
namespace blink {
VideoFrameResourceProvider::VideoFrameResourceProvider() = default;
void VideoFrameResourceProvider::AppendQuads(cc::RenderPass& render_pass) {
gfx::Rect rect(0, 0, 10000, 10000);
gfx::Rect visible_rect(0, 0, 10000, 10000);
viz::SharedQuadState* shared_state =
render_pass.CreateAndAppendSharedQuadState();
shared_state->SetAll(gfx::Transform(), rect, rect, rect, false, 1,
SkBlendMode::kSrcOver, 0);
cc::SolidColorDrawQuad* solid_color_quad =
render_pass.CreateAndAppendDrawQuad<cc::SolidColorDrawQuad>();
// Fluxuate colors for placeholder testing.
static int r = 0;
static int g = 0;
static int b = 0;
r++;
g += 2;
b += 3;
solid_color_quad->SetNew(shared_state, rect, visible_rect,
SkColorSetRGB(r % 255, g % 255, b % 255), false);
}
} // namespace blink

@ -0,0 +1,21 @@
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
namespace cc {
class RenderPass;
}
namespace blink {
// Placeholder class, to be implemented in full in later CL.
// VideoFrameResourceProvider obtains required GPU resources for the video
// frame.
class VideoFrameResourceProvider {
public:
VideoFrameResourceProvider();
void AppendQuads(cc::RenderPass&);
};
} // namespace blink

@ -0,0 +1,119 @@
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "platform/graphics/VideoFrameSubmitter.h"
#include "cc/base/filter_operations.h"
#include "cc/scheduler/video_frame_controller.h"
#include "components/viz/common/surfaces/local_surface_id_allocator.h"
#include "media/base/video_frame.h"
#include "public/platform/InterfaceProvider.h"
#include "public/platform/Platform.h"
#include "public/platform/modules/offscreencanvas/offscreen_canvas_surface.mojom-blink.h"
#include "services/viz/public/interfaces/compositing/compositor_frame_sink.mojom-blink.h"
namespace blink {
VideoFrameSubmitter::VideoFrameSubmitter(cc::VideoFrameProvider* provider)
: provider_(provider), binding_(this), is_rendering_(false) {
current_local_surface_id_ = local_surface_id_allocator_.GenerateId();
}
VideoFrameSubmitter::~VideoFrameSubmitter() {}
void VideoFrameSubmitter::StopUsingProvider() {
if (is_rendering_)
StopRendering();
provider_ = nullptr;
}
void VideoFrameSubmitter::StopRendering() {
DCHECK(is_rendering_);
viz::BeginFrameAck current_begin_frame_ack =
viz::BeginFrameAck::CreateManualAckWithDamage();
SubmitFrame(current_begin_frame_ack);
is_rendering_ = false;
compositor_frame_sink_->SetNeedsBeginFrame(false);
}
void VideoFrameSubmitter::DidReceiveFrame() {
if (!is_rendering_) {
viz::BeginFrameAck current_begin_frame_ack =
viz::BeginFrameAck::CreateManualAckWithDamage();
SubmitFrame(current_begin_frame_ack);
}
}
void VideoFrameSubmitter::StartRendering() {
DCHECK(!is_rendering_);
compositor_frame_sink_->SetNeedsBeginFrame(true);
is_rendering_ = true;
}
void VideoFrameSubmitter::StartSubmitting(const viz::FrameSinkId& id) {
DCHECK(id.is_valid());
// Class to be renamed.
mojom::blink::OffscreenCanvasProviderPtr canvas_provider;
Platform::Current()->GetInterfaceProvider()->GetInterface(
mojo::MakeRequest(&canvas_provider));
viz::mojom::blink::CompositorFrameSinkClientPtr client;
binding_.Bind(mojo::MakeRequest(&client));
canvas_provider->CreateCompositorFrameSink(
id, std::move(client), mojo::MakeRequest(&compositor_frame_sink_));
}
void VideoFrameSubmitter::SubmitFrame(viz::BeginFrameAck begin_frame_ack) {
DCHECK(compositor_frame_sink_);
if (!provider_)
return;
cc::CompositorFrame compositor_frame;
scoped_refptr<media::VideoFrame> video_frame = provider_->GetCurrentFrame();
std::unique_ptr<cc::RenderPass> render_pass = cc::RenderPass::Create();
// TODO(lethalantidote): Replace with true size. Current is just for test.
gfx::Size viewport_size(10000, 10000);
render_pass->SetNew(50, gfx::Rect(viewport_size), gfx::Rect(viewport_size),
gfx::Transform());
render_pass->filters = cc::FilterOperations();
resource_provider_.AppendQuads(*render_pass);
compositor_frame.render_pass_list.push_back(std::move(render_pass));
compositor_frame.metadata.begin_frame_ack = begin_frame_ack;
compositor_frame.metadata.device_scale_factor = 1;
compositor_frame.metadata.may_contain_video = true;
// TODO(lethalantidote): Address third/fourth arg in SubmitCompositorFrame.
compositor_frame_sink_->SubmitCompositorFrame(
current_local_surface_id_, std::move(compositor_frame), nullptr, 0);
provider_->PutCurrentFrame();
}
void VideoFrameSubmitter::OnBeginFrame(const viz::BeginFrameArgs& args) {
viz::BeginFrameAck current_begin_frame_ack =
viz::BeginFrameAck(args.source_id, args.sequence_number, false);
if (args.type == viz::BeginFrameArgs::MISSED) {
compositor_frame_sink_->DidNotProduceFrame(current_begin_frame_ack);
return;
}
current_begin_frame_ack.has_damage = true;
if (!provider_ ||
!provider_->UpdateCurrentFrame(args.frame_time + args.interval,
args.frame_time + 2 * args.interval) ||
!is_rendering_) {
compositor_frame_sink_->DidNotProduceFrame(current_begin_frame_ack);
return;
}
SubmitFrame(current_begin_frame_ack);
}
void VideoFrameSubmitter::DidReceiveCompositorFrameAck(
const WTF::Vector<viz::ReturnedResource>& resources) {}
} // namespace blink

@ -0,0 +1,76 @@
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/threading/thread_checker.h"
#include "components/viz/common/surfaces/local_surface_id_allocator.h"
#include "mojo/public/cpp/bindings/binding.h"
#include "platform/PlatformExport.h"
#include "platform/graphics/VideoFrameResourceProvider.h"
#include "platform/wtf/Functional.h"
#include "public/platform/WebVideoFrameSubmitter.h"
#include "services/viz/public/interfaces/compositing/compositor_frame_sink.mojom-blink.h"
namespace blink {
// This class facilitates the communication between the media thread and the
// mojo thread, providing commpositor frames containing video frames to the
// |compositor_frame_sink_|. This class should be created from a
// SequencedTaskRunner, and any methods that call for mojo communication should
// also be ran from a SequencedTaskRunner.
class PLATFORM_EXPORT VideoFrameSubmitter
: public WebVideoFrameSubmitter,
public viz::mojom::blink::CompositorFrameSinkClient {
public:
explicit VideoFrameSubmitter(cc::VideoFrameProvider*);
~VideoFrameSubmitter() override;
static void CreateCompositorFrameSink(
const viz::FrameSinkId,
mojo::Binding<viz::mojom::blink::CompositorFrameSinkClient>*,
viz::mojom::blink::CompositorFrameSinkPtr*);
void SubmitFrame(viz::BeginFrameAck);
bool Rendering() { return is_rendering_; };
cc::VideoFrameProvider* Provider() { return provider_; }
mojo::Binding<viz::mojom::blink::CompositorFrameSinkClient>* Binding() {
return &binding_;
}
void SetSink(viz::mojom::blink::CompositorFrameSinkPtr* sink) {
compositor_frame_sink_ = std::move(*sink);
}
// VideoFrameProvider::Client implementation.
void StopUsingProvider() override;
void StartRendering() override;
void StopRendering() override;
void DidReceiveFrame() override;
// WebVideoFrameSubmitter implementation.
void StartSubmitting(const viz::FrameSinkId&) override;
// cc::mojom::CompositorFrameSinkClient implementation.
void DidReceiveCompositorFrameAck(
const WTF::Vector<viz::ReturnedResource>& resources) override;
void OnBeginFrame(const viz::BeginFrameArgs&) override;
void OnBeginFramePausedChanged(bool paused) override {}
void ReclaimResources(
const WTF::Vector<viz::ReturnedResource>& resources) override {}
private:
cc::VideoFrameProvider* provider_;
viz::mojom::blink::CompositorFrameSinkPtr compositor_frame_sink_;
mojo::Binding<viz::mojom::blink::CompositorFrameSinkClient> binding_;
viz::LocalSurfaceIdAllocator local_surface_id_allocator_;
viz::LocalSurfaceId current_local_surface_id_;
VideoFrameResourceProvider resource_provider_;
bool is_rendering_;
THREAD_CHECKER(thread_checker_);
DISALLOW_COPY_AND_ASSIGN(VideoFrameSubmitter);
};
} // namespace blink

@ -0,0 +1,263 @@
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "platform/graphics/VideoFrameSubmitter.h"
#include "base/test/simple_test_tick_clock.h"
#include "base/threading/thread.h"
#include "cc/layers/video_frame_provider.h"
#include "cc/test/layer_test_common.h"
#include "cc/trees/task_runner_provider.h"
#include "components/viz/test/fake_external_begin_frame_source.h"
#include "media/base/video_frame.h"
#include "mojo/public/cpp/bindings/interface_request.h"
#include "platform/wtf/Functional.h"
#include "services/viz/public/interfaces/compositing/compositor_frame_sink.mojom-blink.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using testing::_;
using testing::Return;
using testing::StrictMock;
namespace blink {
namespace {
class MockVideoFrameProvider : public cc::VideoFrameProvider {
public:
MockVideoFrameProvider() {}
~MockVideoFrameProvider() {}
MOCK_METHOD1(SetVideoFrameProviderClient, void(Client*));
MOCK_METHOD2(UpdateCurrentFrame, bool(base::TimeTicks, base::TimeTicks));
MOCK_METHOD0(HasCurrentFrame, bool());
MOCK_METHOD0(GetCurrentFrame, scoped_refptr<media::VideoFrame>());
MOCK_METHOD0(PutCurrentFrame, void());
};
class MockCompositorFrameSink : public viz::mojom::blink::CompositorFrameSink {
public:
MockCompositorFrameSink(
viz::mojom::blink::CompositorFrameSinkRequest* request)
: binding_(this, std::move(*request)) {}
~MockCompositorFrameSink() {}
MOCK_METHOD1(SetNeedsBeginFrame, void(bool));
void SubmitCompositorFrame(
const viz::LocalSurfaceId& id,
cc::CompositorFrame frame,
::viz::mojom::blink::HitTestRegionListPtr hit_test_region_list,
uint64_t submit_time) {
DoSubmitCompositorFrame(id, &frame);
}
MOCK_METHOD2(DoSubmitCompositorFrame,
void(const viz::LocalSurfaceId&, cc::CompositorFrame*));
MOCK_METHOD1(DidNotProduceFrame, void(const viz::BeginFrameAck&));
private:
mojo::Binding<viz::mojom::blink::CompositorFrameSink> binding_;
};
} // namespace
// Fixture that runs a VideoFrameSubmitter on a dedicated base::Thread,
// mirroring production where submission happens off the main thread. The
// StrictMock provider/sink fail any test on an unexpected call.
class VideoFrameSubmitterTest : public ::testing::Test {
 public:
  VideoFrameSubmitterTest()
      : thread_("ThreadForTest"),
        now_src_(new base::SimpleTestTickClock()),
        begin_frame_source_(new viz::FakeExternalBeginFrameSource(0.f, false)),
        provider_(new StrictMock<MockVideoFrameProvider>()) {}

  // Constructs the submitter on |thread_| (so any thread affinity binds
  // there); FlushForTesting() blocks until MakeSubmitter has run.
  void SetUp() override {
    thread_.StartAndWaitForTesting();
    thread_.task_runner()->PostTask(
        FROM_HERE, base::Bind(&VideoFrameSubmitterTest::MakeSubmitter,
                              base::Unretained(this)));
    thread_.FlushForTesting();
  }

  // Destroys the submitter and the mock sink on the thread that created
  // them, matching where their mojo endpoints were bound.
  void TearDown() override {
    thread_.task_runner()->DeleteSoon(FROM_HERE, std::move(submitter_));
    thread_.task_runner()->DeleteSoon(FROM_HERE, std::move(sink_));
  }

  // Wires submitter -> sink over a mojo pipe: MakeRequest produces the
  // request end (bound by the mock sink) and the InterfacePtr end handed to
  // the submitter. Runs on |thread_|; see SetUp().
  void MakeSubmitter() {
    submitter_ = base::MakeUnique<VideoFrameSubmitter>(provider_.get());
    viz::mojom::blink::CompositorFrameSinkPtr submitter_sink;
    viz::mojom::blink::CompositorFrameSinkRequest request =
        mojo::MakeRequest(&submitter_sink);
    sink_ = base::MakeUnique<StrictMock<MockCompositorFrameSink>>(&request);
    submitter_->SetSink(&submitter_sink);
  }

 protected:
  base::Thread thread_;
  std::unique_ptr<base::SimpleTestTickClock> now_src_;
  std::unique_ptr<viz::FakeExternalBeginFrameSource> begin_frame_source_;
  std::unique_ptr<StrictMock<MockCompositorFrameSink>> sink_;
  std::unique_ptr<StrictMock<MockVideoFrameProvider>> provider_;
  std::unique_ptr<VideoFrameSubmitter> submitter_;
};
// StartRendering() turns on begin-frame observation on the sink and flips
// the submitter into the rendering state.
// Renamed from the typo "StatRenderingFlipsBits".
TEST_F(VideoFrameSubmitterTest, StartRenderingFlipsBits) {
  EXPECT_FALSE(submitter_->Rendering());

  // Expectation must be in place before the posted task runs on |thread_|.
  EXPECT_CALL(*sink_, SetNeedsBeginFrame(true));

  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::StartRendering,
                            base::Unretained(submitter_.get())));
  thread_.FlushForTesting();

  EXPECT_TRUE(submitter_->Rendering());
}
// StopUsingProvider() while not rendering simply clears the provider
// pointer; the StrictMock sink would flag any unexpected submission.
TEST_F(VideoFrameSubmitterTest, StopUsingProviderNullsProvider) {
  EXPECT_FALSE(submitter_->Rendering());
  EXPECT_EQ(provider_.get(), submitter_->Provider());

  submitter_->StopUsingProvider();

  EXPECT_EQ(nullptr, submitter_->Provider());
}
// Stopping the provider while rendering flushes one final frame to the sink
// (GetCurrentFrame -> submit -> PutCurrentFrame) and then turns begin-frame
// observation off.
TEST_F(VideoFrameSubmitterTest,
       StopUsingProviderSubmitsFrameAndStopsRendering) {
  // First get into the rendering state.
  EXPECT_CALL(*sink_, SetNeedsBeginFrame(true));
  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::StartRendering,
                            base::Unretained(submitter_.get())));
  thread_.FlushForTesting();
  EXPECT_TRUE(submitter_->Rendering());

  // All expectations must be set before the StopUsingProvider task runs.
  EXPECT_CALL(*provider_, GetCurrentFrame());
  EXPECT_CALL(*sink_, DoSubmitCompositorFrame(_, _));
  EXPECT_CALL(*provider_, PutCurrentFrame());
  EXPECT_CALL(*sink_, SetNeedsBeginFrame(false));

  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::StopUsingProvider,
                            base::Unretained(submitter_.get())));
  thread_.FlushForTesting();

  EXPECT_FALSE(submitter_->Rendering());
}
// While rendering, DidReceiveFrame() must not trigger a submission of its
// own (frames are driven by OnBeginFrame; see OnBeginFrameSubmitsFrame).
// The StrictMock provider/sink fail the test on any unexpected call, which
// is how "does nothing" is verified.
TEST_F(VideoFrameSubmitterTest, DidReceiveFrameDoesNothingIfRendering) {
  EXPECT_CALL(*sink_, SetNeedsBeginFrame(true));
  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::StartRendering,
                            base::Unretained(submitter_.get())));
  thread_.FlushForTesting();
  EXPECT_TRUE(submitter_->Rendering());

  // No EXPECT_CALLs here on purpose: any mock activity fails the test.
  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::DidReceiveFrame,
                            base::Unretained(submitter_.get())));
  thread_.FlushForTesting();
}
// When not rendering, DidReceiveFrame() submits the current frame directly
// (pull from provider, submit to sink, return the frame).
TEST_F(VideoFrameSubmitterTest, DidReceiveFrameSubmitsFrame) {
  EXPECT_FALSE(submitter_->Rendering());

  // Expectations must be set before the posted task runs on |thread_|.
  EXPECT_CALL(*provider_, GetCurrentFrame());
  EXPECT_CALL(*sink_, DoSubmitCompositorFrame(_, _));
  EXPECT_CALL(*provider_, PutCurrentFrame());

  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::DidReceiveFrame,
                            base::Unretained(submitter_.get())));
  thread_.FlushForTesting();
}
// SubmitFrame() must early-return once the provider has been detached; the
// StrictMock sink would fail the test if any frame reached it.
TEST_F(VideoFrameSubmitterTest, SubmitFrameWithoutProviderReturns) {
  submitter_->StopUsingProvider();

  const viz::BeginFrameAck ack =
      viz::BeginFrameAck::CreateManualAckWithDamage();

  // No EXPECT_CALLs: any mock activity fails the test.
  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::SubmitFrame,
                            base::Unretained(submitter_.get()), ack));
  thread_.FlushForTesting();
}
// Happy path: once rendering, an OnBeginFrame whose UpdateCurrentFrame
// reports a new frame leads to a compositor-frame submission.
TEST_F(VideoFrameSubmitterTest, OnBeginFrameSubmitsFrame) {
  EXPECT_CALL(*sink_, SetNeedsBeginFrame(true));
  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::StartRendering,
                            base::Unretained(submitter_.get())));
  thread_.FlushForTesting();

  // Returning true from UpdateCurrentFrame signals a new frame is available.
  EXPECT_CALL(*provider_, UpdateCurrentFrame(_, _)).WillOnce(Return(true));
  EXPECT_CALL(*provider_, GetCurrentFrame());
  EXPECT_CALL(*sink_, DoSubmitCompositorFrame(_, _));
  EXPECT_CALL(*provider_, PutCurrentFrame());

  viz::BeginFrameArgs args = begin_frame_source_->CreateBeginFrameArgs(
      BEGINFRAME_FROM_HERE, now_src_.get());
  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::OnBeginFrame,
                            base::Unretained(submitter_.get()), args));
  thread_.FlushForTesting();
}
// A BeginFrame of type MISSED must be acknowledged with DidNotProduceFrame
// instead of a compositor frame.
TEST_F(VideoFrameSubmitterTest, MissedFrameArgDoesNotProduceFrame) {
  viz::BeginFrameArgs missed_args = begin_frame_source_->CreateBeginFrameArgs(
      BEGINFRAME_FROM_HERE, now_src_.get());
  missed_args.type = viz::BeginFrameArgs::MISSED;

  // Expectation set before the OnBeginFrame task runs on |thread_|.
  EXPECT_CALL(*sink_, DidNotProduceFrame(_));

  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::OnBeginFrame,
                            base::Unretained(submitter_.get()), missed_args));
  thread_.FlushForTesting();
}
// Without a provider there is nothing to submit, so a BeginFrame must be
// answered with DidNotProduceFrame.
TEST_F(VideoFrameSubmitterTest, MissingProviderDoesNotProduceFrame) {
  submitter_->StopUsingProvider();

  viz::BeginFrameArgs frame_args = begin_frame_source_->CreateBeginFrameArgs(
      BEGINFRAME_FROM_HERE, now_src_.get());

  // Expectation set before the OnBeginFrame task runs on |thread_|.
  EXPECT_CALL(*sink_, DidNotProduceFrame(_));

  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::OnBeginFrame,
                            base::Unretained(submitter_.get()), frame_args));
  thread_.FlushForTesting();
}
// If the provider reports no new frame for this interval
// (UpdateCurrentFrame returns false), the submitter must ack with
// DidNotProduceFrame rather than submit.
TEST_F(VideoFrameSubmitterTest, NoUpdateOnFrameDoesNotProduceFrame) {
  viz::BeginFrameArgs frame_args = begin_frame_source_->CreateBeginFrameArgs(
      BEGINFRAME_FROM_HERE, now_src_.get());

  // Expectations set before the OnBeginFrame task runs on |thread_|.
  EXPECT_CALL(*provider_, UpdateCurrentFrame(_, _)).WillOnce(Return(false));
  EXPECT_CALL(*sink_, DidNotProduceFrame(_));

  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::OnBeginFrame,
                            base::Unretained(submitter_.get()), frame_args));
  thread_.FlushForTesting();
}
// Even when the provider has a new frame, OnBeginFrame must not submit
// while the submitter is not in the rendering state; it acks with
// DidNotProduceFrame instead.
TEST_F(VideoFrameSubmitterTest, NotRenderingDoesNotProduceFrame) {
  viz::BeginFrameArgs frame_args = begin_frame_source_->CreateBeginFrameArgs(
      BEGINFRAME_FROM_HERE, now_src_.get());

  // Expectations set before the OnBeginFrame task runs on |thread_|.
  EXPECT_CALL(*provider_, UpdateCurrentFrame(_, _)).WillOnce(Return(true));
  EXPECT_CALL(*sink_, DidNotProduceFrame(_));

  thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&VideoFrameSubmitter::OnBeginFrame,
                            base::Unretained(submitter_.get()), frame_args));
  thread_.FlushForTesting();
}
} // namespace blink

@ -369,6 +369,7 @@ source_set("blink_headers") {
"platform/WebURLRequest.h",
"platform/WebURLResponse.h",
"platform/WebVector.h",
"platform/WebVideoFrameSubmitter.h",
"platform/WebViewportStyle.h",
"platform/WebWorkerFetchContext.h",
"platform/linux/WebFallbackFont.h",

@ -7,15 +7,29 @@
#include "WebCommon.h"
#include "WebLayer.h"
#include "WebLayerTreeView.h"
namespace viz {
class FrameSinkId;
}
namespace blink {
// Listens for updates made on the WebLayer by the WebSurfaceLayerBridge.
// NOTE(review): this polymorphic interface has no virtual destructor;
// deleting an implementation through this base would be UB — confirm
// observers are never owned/deleted via this type.
class BLINK_PLATFORM_EXPORT WebSurfaceLayerBridgeObserver {
 public:
  // Invoked after the bridge has swapped in a new WebLayer, so the embedder
  // can re-register the layer it displays.
  virtual void OnWebLayerReplaced() = 0;
};
// Maintains and exposes the SurfaceLayer.
class BLINK_PLATFORM_EXPORT WebSurfaceLayerBridge {
 public:
  // NOTE(review): two Create declarations appear here; the raw-pointer
  // overload looks like the pre-change line left visible by the diff render,
  // replaced in this change by the unique_ptr factory below — confirm only
  // one survives in the actual header.
  static WebSurfaceLayerBridge* Create();
  // Factory returning an owned bridge; the observer is notified of WebLayer
  // replacements (see WebSurfaceLayerBridgeObserver).
  static std::unique_ptr<WebSurfaceLayerBridge> Create(
      WebLayerTreeView*,
      WebSurfaceLayerBridgeObserver*);
  virtual ~WebSurfaceLayerBridge();
  // The layer to be placed in the embedder's layer tree.
  virtual WebLayer* GetWebLayer() const = 0;
  // Identifies the frame sink that frames for this surface are submitted to.
  virtual const viz::FrameSinkId& GetFrameSinkId() const = 0;
};
} // namespace blink

@ -0,0 +1,30 @@
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef WebVideoFrameSubmitter_h
#define WebVideoFrameSubmitter_h
#include "WebCommon.h"
#include "cc/layers/video_frame_provider.h"
namespace viz {
class FrameSinkId;
} // namespace viz
namespace blink {
// Exposes the VideoFrameSubmitter, which submits CompositorFrames containing
// information from VideoFrames.
class BLINK_PLATFORM_EXPORT WebVideoFrameSubmitter
    : public cc::VideoFrameProvider::Client {
 public:
  // Creates a submitter observing the given provider. The provider is held
  // by raw pointer, so it presumably must outlive the submitter (or
  // StopUsingProvider() be called first) — confirm against implementation.
  static std::unique_ptr<WebVideoFrameSubmitter> Create(
      cc::VideoFrameProvider*);
  virtual ~WebVideoFrameSubmitter() = default;

  // Starts submitting frames to the CompositorFrameSink identified by the
  // given FrameSinkId.
  virtual void StartSubmitting(const viz::FrameSinkId&) = 0;
};
} // namespace blink
#endif // WebVideoFrameSubmitter_h

@ -101,6 +101,7 @@ class WebEncryptedMediaClient;
class WebExternalPopupMenu;
class WebExternalPopupMenuClient;
class WebFileChooserCompletion;
class WebLayerTreeView;
class WebLocalFrame;
class WebMediaPlayer;
class WebMediaPlayerClient;
@ -147,7 +148,8 @@ class BLINK_EXPORT WebFrameClient {
WebMediaPlayerClient*,
WebMediaPlayerEncryptedMediaClient*,
WebContentDecryptionModule*,
const WebString& sink_id) {
const WebString& sink_id,
WebLayerTreeView*) {
return nullptr;
}