0

[Cast Streaming] Cleanup and modernize CastEnvironment

This patch just cleans up and modernizes the CastEnvironment
class, removing some unused functions, improving some checks,
and moving to `enum class` instead of `enum`.

As part of improving this class, some threading concerns are alleviated,
related classes are refactored, and a new test helper,
TestWithCastEnvironment, is added to handle some of the complexity
of working with the new QuitClosures combined with the CastEnvironment
class.

Low-Coverage-Reason: TESTS_IN_SEPARATE_CL — coverage is improved in a separate patch.
Change-Id: I4fcc47fd531bac0d3c308cb486275576eb8fff58
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6120584
Reviewed-by: Mark Foltz <mfoltz@chromium.org>
Commit-Queue: Jordan Bayles <jophba@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1407636}
This commit is contained in:
Jordan Bayles
2025-01-16 16:09:02 -08:00
committed by Chromium LUCI CQ
parent 10d10bb178
commit 552828b3cc
44 changed files with 996 additions and 2375 deletions

@ -17,7 +17,7 @@
#include "components/openscreen_platform/task_runner.h"
#include "media/base/audio_codecs.h"
#include "media/base/video_codecs.h"
#include "media/cast/cast_environment.h"
#include "media/cast/test/test_with_cast_environment.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "testing/gmock/include/gmock/gmock.h"
@ -145,7 +145,7 @@ RemotingSinkMetadata DefaultSinkMetadata() {
class MediaRemoterTest : public mojom::CastMessageChannel,
public MediaRemoter::Client,
public ::testing::Test {
public media::cast::TestWithCastEnvironment {
public:
MediaRemoterTest() : sink_metadata_(DefaultSinkMetadata()) {}
@ -153,7 +153,7 @@ class MediaRemoterTest : public mojom::CastMessageChannel,
MediaRemoterTest(const MediaRemoterTest&) = delete;
MediaRemoterTest& operator=(const MediaRemoterTest&) = delete;
MediaRemoterTest& operator=(MediaRemoterTest&&) = delete;
~MediaRemoterTest() override { task_environment_.RunUntilIdle(); }
~MediaRemoterTest() override = default;
protected:
// mojom::CastMessageChannel mock implementation (inbound messages).
@ -178,7 +178,7 @@ class MediaRemoterTest : public mojom::CastMessageChannel,
EXPECT_CALL(remoting_source_, OnSinkAvailable(_));
media_remoter_ =
std::make_unique<MediaRemoter>(*this, sink_metadata_, rpc_dispatcher_);
task_environment_.RunUntilIdle();
RunUntilIdle();
Mock::VerifyAndClear(this);
Mock::VerifyAndClear(&remoting_source_);
}
@ -188,7 +188,7 @@ class MediaRemoterTest : public mojom::CastMessageChannel,
ASSERT_TRUE(remoter_);
EXPECT_CALL(*this, RequestRemotingStreaming());
remoter_->Start();
task_environment_.RunUntilIdle();
RunUntilIdle();
Mock::VerifyAndClear(this);
}
@ -200,7 +200,7 @@ class MediaRemoterTest : public mojom::CastMessageChannel,
EXPECT_CALL(rpc_dispatcher_, Unsubscribe());
EXPECT_CALL(*this, RestartMirroringStreaming());
remoter_->Stop(media::mojom::RemotingStopReason::USER_DISABLED);
task_environment_.RunUntilIdle();
RunUntilIdle();
Mock::VerifyAndClear(this);
Mock::VerifyAndClear(&remoting_source_);
}
@ -208,20 +208,14 @@ class MediaRemoterTest : public mojom::CastMessageChannel,
// Signals that a remoting streaming session starts successfully.
void RemotingStreamingStarted() {
ASSERT_TRUE(media_remoter_);
scoped_refptr<media::cast::CastEnvironment> cast_environment =
new media::cast::CastEnvironment(
base::DefaultTickClock::GetInstance(),
task_environment_.GetMainThreadTaskRunner(),
task_environment_.GetMainThreadTaskRunner(),
task_environment_.GetMainThreadTaskRunner());
openscreen_test_senders_ = std::make_unique<OpenscreenTestSenders>();
media_remoter_->StartRpcMessaging(
cast_environment, std::move(openscreen_test_senders_->audio_sender),
cast_environment(), std::move(openscreen_test_senders_->audio_sender),
std::move(openscreen_test_senders_->video_sender),
MirrorSettings::GetDefaultAudioConfig(media::AudioCodec::kUnknown),
MirrorSettings::GetDefaultVideoConfig(media::VideoCodec::kUnknown));
task_environment_.RunUntilIdle();
RunUntilIdle();
Mock::VerifyAndClear(&remoting_source_);
}
@ -232,7 +226,7 @@ class MediaRemoterTest : public mojom::CastMessageChannel,
EXPECT_CALL(remoting_source_, OnSinkAvailable(_))
.Times(is_remoting_disabled ? 0 : 1);
media_remoter_->OnMirroringResumed();
task_environment_.RunUntilIdle();
RunUntilIdle();
Mock::VerifyAndClear(&remoting_source_);
}
@ -243,7 +237,7 @@ class MediaRemoterTest : public mojom::CastMessageChannel,
EXPECT_CALL(remoting_source_, OnSinkGone());
EXPECT_CALL(*this, RestartMirroringStreaming());
media_remoter_->OnRemotingFailed();
task_environment_.RunUntilIdle();
RunUntilIdle();
Mock::VerifyAndClear(this);
Mock::VerifyAndClear(&remoting_source_);
}
@ -289,7 +283,6 @@ class MediaRemoterTest : public mojom::CastMessageChannel,
}
private:
base::test::TaskEnvironment task_environment_;
base::test::ScopedFeatureList feature_list_;
mojo::Receiver<mojom::CastMessageChannel> receiver_{this};
mojo::Remote<mojom::CastMessageChannel> inbound_channel_;

@ -65,7 +65,7 @@ MirroringGpuFactoriesFactory::GetInstance() {
// NOTE: this Unretained is safe because `this` is deleted on the VIDEO
// thread.
cast_environment_->PostTask(
CastEnvironment::VIDEO, FROM_HERE,
CastEnvironment::ThreadId::kVideo, FROM_HERE,
base::BindOnce(&MirroringGpuFactoriesFactory::BindOnVideoThread,
base::Unretained(this)));
@ -75,15 +75,15 @@ MirroringGpuFactoriesFactory::GetInstance() {
vea_provider.InitWithNewPipeAndPassReceiver());
auto codec_factory = std::make_unique<media::MojoCodecFactoryDefault>(
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::VIDEO),
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::kVideo),
context_provider_,
/*enable_video_decode_accelerator=*/false,
/*enable_video_encode_accelerator=*/true, std::move(vea_provider));
instance_ = media::MojoGpuVideoAcceleratorFactories::Create(
std::move(gpu_channel_host),
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::MAIN),
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::VIDEO),
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::kMain),
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::kVideo),
context_provider_, std::move(codec_factory),
gpu_->gpu_memory_buffer_manager(),
/*enable_video_gpu_memory_buffers=*/true,
@ -95,7 +95,7 @@ MirroringGpuFactoriesFactory::GetInstance() {
}
void MirroringGpuFactoriesFactory::BindOnVideoThread() {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::VIDEO));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kVideo));
CHECK(context_provider_);
if (context_provider_->BindToCurrentSequence() !=
gpu::ContextResult::kSuccess) {
@ -107,7 +107,7 @@ void MirroringGpuFactoriesFactory::BindOnVideoThread() {
void MirroringGpuFactoriesFactory::OnContextLost() {
cast_environment_->PostTask(
CastEnvironment::VIDEO, FROM_HERE,
CastEnvironment::ThreadId::kVideo, FROM_HERE,
base::BindOnce(&media::MojoGpuVideoAcceleratorFactories::DestroyContext,
base::Unretained(instance_.get())));
@ -119,7 +119,7 @@ void MirroringGpuFactoriesFactory::DestroyInstance() {
CHECK(instance_);
// The GPU factories object, after construction, must only be accessed on the
// video encoding thread (including for deletion).
cast_environment_->GetTaskRunner(CastEnvironment::VIDEO)
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::kVideo)
->DeleteSoon(FROM_HERE, std::move(instance_));
}

@ -8,6 +8,7 @@
#include "base/functional/bind.h"
#include "base/functional/callback.h"
#include "base/functional/callback_helpers.h"
#include "base/task/single_thread_task_runner.h"
#include "base/values.h"
#include "components/mirroring/service/openscreen_session_host.h"
@ -40,7 +41,7 @@ void MirroringService::Start(
session_host_ = std::make_unique<OpenscreenSessionHost>(
std::move(params), max_resolution, std::move(observer),
std::move(resource_provider), std::move(outbound_channel),
std::move(inbound_channel), io_task_runner_);
std::move(inbound_channel), io_task_runner_, base::DoNothing());
session_host_->AsyncInitialize();
}

@ -13,6 +13,7 @@
#include "base/cpu.h"
#include "base/functional/bind.h"
#include "base/functional/callback_forward.h"
#include "base/logging.h"
#include "base/memory/raw_ptr.h"
#include "base/metrics/histogram_functions.h"
@ -322,7 +323,8 @@ OpenscreenSessionHost::OpenscreenSessionHost(
mojo::PendingRemote<mojom::ResourceProvider> resource_provider,
mojo::PendingRemote<mojom::CastMessageChannel> outbound_channel,
mojo::PendingReceiver<mojom::CastMessageChannel> inbound_channel,
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner)
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
base::OnceClosure deletion_cb)
: session_params_(*session_params),
observer_(std::move(observer)),
resource_provider_(std::move(resource_provider)),
@ -330,7 +332,8 @@ OpenscreenSessionHost::OpenscreenSessionHost(
session_params_.destination_id,
std::move(outbound_channel),
std::move(inbound_channel)),
logger_(kLogPrefix, observer_) {
logger_(kLogPrefix, observer_),
deletion_cb_(std::move(deletion_cb)) {
CHECK(resource_provider_);
openscreen_platform::EventTraceLoggingPlatform::EnsureInstance();
@ -399,6 +402,11 @@ OpenscreenSessionHost::~OpenscreenSessionHost() {
if (set_network_context_proxy_) {
openscreen_platform::ClearNetworkContextGetter();
}
if (deletion_cb_) {
CHECK(!cast_environment_);
std::move(deletion_cb_).Run();
}
}
void OpenscreenSessionHost::AsyncInitialize(
@ -460,11 +468,11 @@ void OpenscreenSessionHost::OnNegotiated(
NumberOfEncodeThreads();
}
// NOTE: the audio and video encode threads are reused across negotiations
// and should only be instantiated once each.
const bool initially_starting_session =
!audio_encode_thread_ && !video_encode_thread_;
// NOTE: the CastEnvironment and its associated threads should only be
// instantiated once.
const bool initially_starting_session = !cast_environment_;
if (initially_starting_session) {
CHECK(!audio_encode_thread_ && !video_encode_thread_);
audio_encode_thread_ = base::ThreadPool::CreateSingleThreadTaskRunner(
{base::TaskPriority::USER_BLOCKING,
base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN},
@ -474,13 +482,12 @@ void OpenscreenSessionHost::OnNegotiated(
base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN,
base::WithBaseSyncPrimitives(), base::MayBlock()},
base::SingleThreadTaskRunnerThreadMode::DEDICATED);
cast_environment_ = base::MakeRefCounted<media::cast::CastEnvironment>(
*base::DefaultTickClock::GetInstance(),
base::SingleThreadTaskRunner::GetCurrentDefault(), audio_encode_thread_,
video_encode_thread_, std::move(deletion_cb_));
}
cast_environment_ = base::MakeRefCounted<media::cast::CastEnvironment>(
base::DefaultTickClock::GetInstance(),
base::SingleThreadTaskRunner::GetCurrentDefault(), audio_encode_thread_,
video_encode_thread_);
if (state_ == State::kRemoting) {
CHECK(media_remoter_);
CHECK(!audio_config || audio_config->is_remoting());
@ -807,7 +814,8 @@ void OpenscreenSessionHost::StopStreaming() {
logger_.LogInfo(
base::StrCat({"stopped streaming. state=",
base::NumberToString(static_cast<int>(state_))}));
if (!cast_environment_) {
if (!session_) {
return;
}
@ -815,16 +823,13 @@ void OpenscreenSessionHost::StopStreaming() {
PauseCapturingVideo();
audio_stream_.reset();
video_stream_.reset();
gpu_factories_factory_.reset();
// The factory should be deleted on the VIDEO thread to ensure it is not
// deleted before BindOnVideoThread() can be called.
video_encode_thread_->DeleteSoon(FROM_HERE,
std::move(gpu_factories_factory_));
// Since the environment and its properties are ref-counted, this call to
// release it may not immediately close any of its resources.
cast_environment_ = nullptr;
if (gpu_factories_factory_) {
video_encode_thread_->DeleteSoon(FROM_HERE,
std::move(gpu_factories_factory_));
}
}
void OpenscreenSessionHost::StopSession() {
@ -1188,7 +1193,7 @@ void OpenscreenSessionHost::StartCapturingAudio() {
void OpenscreenSessionHost::StopCapturingAudio() {
if (audio_input_device_) {
audio_input_device_->Stop();
audio_input_device_ = nullptr;
audio_input_device_.reset();
}
audio_capturing_callback_.reset();
}

@ -6,6 +6,7 @@
#define COMPONENTS_MIRRORING_SERVICE_OPENSCREEN_SESSION_HOST_H_
#include "base/component_export.h"
#include "base/functional/callback_forward.h"
#include "base/gtest_prod_util.h"
#include "base/logging.h"
#include "base/memory/weak_ptr.h"
@ -94,7 +95,8 @@ class COMPONENT_EXPORT(MIRRORING_SERVICE) OpenscreenSessionHost final
mojo::PendingRemote<mojom::ResourceProvider> resource_provider,
mojo::PendingRemote<mojom::CastMessageChannel> outbound_channel,
mojo::PendingReceiver<mojom::CastMessageChannel> inbound_channel,
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner);
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
base::OnceClosure deletion_cb);
~OpenscreenSessionHost() override;
@ -304,6 +306,9 @@ class COMPONENT_EXPORT(MIRRORING_SERVICE) OpenscreenSessionHost final
// Manages the clock and thread proxies for the audio sender, video sender,
// and media remoter.
//
// NOTE: this is lazy initialized on the first session negotiation, and then
// destructed only on the destruction of this class.
scoped_refptr<media::cast::CastEnvironment> cast_environment_;
// Task runners used specifically for audio, video encoding.
@ -360,6 +365,9 @@ class COMPONENT_EXPORT(MIRRORING_SERVICE) OpenscreenSessionHost final
// casting session.
std::unique_ptr<OpenscreenStatsClient> stats_client_;
// Callback invoked once this instance and all of its resources are released.
base::OnceClosure deletion_cb_;
// Used in callbacks executed on task runners, such as by RtpStream.
// TODO(crbug.com/40238714): determine if weak pointers can be removed.
base::WeakPtrFactory<OpenscreenSessionHost> weak_factory_{this};

@ -12,6 +12,7 @@
#include "base/functional/bind.h"
#include "base/functional/callback.h"
#include "base/functional/callback_forward.h"
#include "base/run_loop.h"
#include "base/test/scoped_feature_list.h"
#include "base/test/task_environment.h"
@ -169,7 +170,19 @@ class OpenscreenSessionHostTest : public mojom::ResourceProvider,
media::cast::encoding_support::ClearHardwareCodecDenyListForTesting();
}
~OpenscreenSessionHostTest() override { task_environment_.RunUntilIdle(); }
void OnSessionHostDeletion() {
ASSERT_TRUE(session_host_deletion_cb_);
if (session_host_deletion_cb_) {
std::move(session_host_deletion_cb_).Run();
}
}
~OpenscreenSessionHostTest() override {
// We may have already deleted the session host if the session was stopped.
if (session_host_) {
DeleteSessionHost();
}
}
protected:
// mojom::SessionObserver implementation.
@ -342,7 +355,11 @@ class OpenscreenSessionHostTest : public mojom::ResourceProvider,
std::move(session_params), gfx::Size(1920, 1080),
std::move(session_observer_remote), std::move(resource_provider_remote),
std::move(outbound_channel_remote),
inbound_channel_.BindNewPipeAndPassReceiver(), nullptr);
inbound_channel_.BindNewPipeAndPassReceiver(), nullptr,
// NOTE: unretained used is safe since we wait for this task to complete
// before deleting `this`.
base::BindOnce(&OpenscreenSessionHostTest::OnSessionHostDeletion,
base::Unretained(this)));
session_host_->AsyncInitialize(MakeOnInitializedCallback());
task_environment_.RunUntilIdle();
Mock::VerifyAndClear(this);
@ -378,12 +395,19 @@ class OpenscreenSessionHostTest : public mojom::ResourceProvider,
// Negotiate mirroring.
void NegotiateMirroring() { session_host_->NegotiateMirroring(); }
void DeleteSessionHost() {
ASSERT_TRUE(session_host_);
session_host_deletion_cb_ = task_environment_.QuitClosure();
session_host_.reset();
task_environment_.RunUntilQuit();
}
void StopSession() {
if (video_host_) {
EXPECT_CALL(*video_host_, OnStopped());
}
EXPECT_CALL(*this, DidStop());
session_host_.reset();
DeleteSessionHost();
task_environment_.RunUntilIdle();
Mock::VerifyAndClear(this);
}
@ -695,6 +719,7 @@ class OpenscreenSessionHostTest : public mojom::ResourceProvider,
bool force_letterboxing_{false};
std::unique_ptr<OpenscreenSessionHost> session_host_;
base::OnceClosure session_host_deletion_cb_;
std::unique_ptr<MockNetworkContext> network_context_;
std::unique_ptr<openscreen::cast::Answer> answer_;

@ -111,7 +111,7 @@ class COMPONENT_EXPORT(MIRRORING_SERVICE) RemotingSender final
// The backing frame sender implementation.
std::unique_ptr<media::cast::FrameSender> frame_sender_;
raw_ptr<const base::TickClock> clock_;
raw_ref<const base::TickClock> clock_;
// Callback that is run to notify when a fatal error occurs.
base::OnceClosure error_callback_;

@ -19,6 +19,7 @@
#include "media/cast/constants.h"
#include "media/cast/openscreen/remoting_proto_utils.h"
#include "media/cast/sender/frame_sender.h"
#include "media/cast/test/test_with_cast_environment.h"
#include "media/cast/test/utility/default_config.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/common/mojo_data_pipe_read_write.h"
@ -148,15 +149,9 @@ class MojoSenderWrapper {
} // namespace
class RemotingSenderTest : public ::testing::Test {
class RemotingSenderTest : public media::cast::TestWithCastEnvironment {
public:
RemotingSenderTest()
: task_environment_(base::test::TaskEnvironment::TimeSource::MOCK_TIME),
cast_environment_(new media::cast::CastEnvironment(
task_environment_.GetMockTickClock(),
task_environment_.GetMainThreadTaskRunner(),
task_environment_.GetMainThreadTaskRunner(),
task_environment_.GetMainThreadTaskRunner())) {
RemotingSenderTest() {
media::cast::FrameSenderConfig video_config =
media::cast::GetDefaultVideoSenderConfig();
std::unique_ptr<testing::StrictMock<FakeSender>> fake_sender =
@ -173,7 +168,7 @@ class RemotingSenderTest : public ::testing::Test {
producer_end, consumer_end));
remoting_sender_ = base::WrapUnique(new RemotingSender(
cast_environment_, std::move(fake_sender), video_config,
cast_environment(), std::move(fake_sender), video_config,
std::move(consumer_end), sender.InitWithNewPipeAndPassReceiver(),
base::BindOnce(
[](bool expecting_error_callback_run) {
@ -203,24 +198,14 @@ class RemotingSenderTest : public ::testing::Test {
third_buffer_->set_is_key_frame(true);
}
void TearDown() final {
remoting_sender_.reset();
// Allow any pending tasks to run before destruction.
RunPendingTasks();
}
protected:
// Allow pending tasks, such as Mojo method calls, to execute.
void RunPendingTasks() { task_environment_.RunUntilIdle(); }
void RunPendingTasks() { RunUntilIdle(); }
void SendFrameCancelled(media::cast::FrameId id) {
remoting_sender_->OnFrameCanceled(id);
}
base::test::TaskEnvironment task_environment_;
const scoped_refptr<media::cast::CastEnvironment> cast_environment_;
raw_ptr<testing::StrictMock<FakeSender>, DanglingUntriaged> sender_;
bool expecting_error_callback_run_ = false;
@ -246,7 +231,7 @@ TEST_F(RemotingSenderTest, SendsFramesViaMojoDataPipe) {
EXPECT_CALL(*sender_, GetUnacknowledgedFrameCount).WillOnce(Return(0));
EXPECT_CALL(*sender_, EnqueueFrame(_))
.WillOnce(AreEqualFirstFrame(first_buffer_, &task_environment_));
.WillOnce(AreEqualFirstFrame(first_buffer_, &task_environment()));
mojo_sender_wrapper_->SendFrame(first_buffer_);
EXPECT_TRUE(mojo_sender_wrapper_->is_frame_in_flight());
RunPendingTasks();
@ -254,7 +239,7 @@ TEST_F(RemotingSenderTest, SendsFramesViaMojoDataPipe) {
EXPECT_CALL(*sender_, GetUnacknowledgedFrameCount).WillOnce(Return(0));
EXPECT_CALL(*sender_, EnqueueFrame(_))
.WillOnce(AreEqualNotFirstFrame(second_buffer_, &task_environment_));
.WillOnce(AreEqualNotFirstFrame(second_buffer_, &task_environment()));
mojo_sender_wrapper_->SendFrame(second_buffer_);
EXPECT_TRUE(mojo_sender_wrapper_->is_frame_in_flight());
RunPendingTasks();
@ -262,7 +247,7 @@ TEST_F(RemotingSenderTest, SendsFramesViaMojoDataPipe) {
EXPECT_CALL(*sender_, GetUnacknowledgedFrameCount).WillOnce(Return(0));
EXPECT_CALL(*sender_, EnqueueFrame(_))
.WillOnce(AreEqualNotFirstFrame(third_buffer_, &task_environment_));
.WillOnce(AreEqualNotFirstFrame(third_buffer_, &task_environment()));
mojo_sender_wrapper_->SendFrame(third_buffer_);
EXPECT_TRUE(mojo_sender_wrapper_->is_frame_in_flight());
RunPendingTasks();
@ -295,7 +280,7 @@ TEST_F(RemotingSenderTest, CancelsOrAcksFramesInFlight) {
EXPECT_TRUE(mojo_sender_wrapper_->is_frame_in_flight());
EXPECT_CALL(*sender_, EnqueueFrame(_))
.WillOnce(AreEqualFirstFrame(first_buffer_, &task_environment_));
.WillOnce(AreEqualFirstFrame(first_buffer_, &task_environment()));
EXPECT_CALL(*sender_, GetUnacknowledgedFrameCount)
.WillOnce(Return(media::cast::kMaxUnackedFrames - 1));
SendFrameCancelled(first_frame_id_);
@ -315,7 +300,7 @@ TEST_F(RemotingSenderTest, FramesWaitWhenEnqueueFails) {
EXPECT_CALL(*sender_, GetUnacknowledgedFrameCount).WillOnce(Return(0));
EXPECT_CALL(*sender_, EnqueueFrame(_))
.WillOnce(AreEqualFirstFrame(first_buffer_, &task_environment_));
.WillOnce(AreEqualFirstFrame(first_buffer_, &task_environment()));
SendFrameCancelled(first_frame_id_);
RunPendingTasks();
EXPECT_FALSE(mojo_sender_wrapper_->is_frame_in_flight());

@ -15,7 +15,6 @@
#include "media/base/mock_filters.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/sender/audio_sender.h"
#include "media/cast/sender/video_sender.h"
#include "media/cast/test/utility/audio_utility.h"
@ -111,13 +110,7 @@ class MockAudioSender : public media::cast::AudioSender {
class RtpStreamTest : public ::testing::Test {
public:
RtpStreamTest()
: cast_environment_(new media::cast::CastEnvironment(
&testing_clock_,
task_environment_.GetMainThreadTaskRunner(),
task_environment_.GetMainThreadTaskRunner(),
task_environment_.GetMainThreadTaskRunner())),
client_(&testing_clock_) {
RtpStreamTest() : client_(&testing_clock_) {
testing_clock_.Advance(base::TimeTicks::Now() - base::TimeTicks());
}
@ -138,7 +131,6 @@ class RtpStreamTest : public ::testing::Test {
base::test::TaskEnvironment task_environment_{
base::test::TaskEnvironment::TimeSource::MOCK_TIME};
base::SimpleTestTickClock testing_clock_;
const scoped_refptr<media::cast::CastEnvironment> cast_environment_;
StreamClient client_;
};

@ -31,8 +31,6 @@ source_set("common") {
"constants.h",
# NOTE: Logging code is pulled in by CastEnvironment.
"logging/encoding_event_subscriber.cc",
"logging/encoding_event_subscriber.h",
"logging/log_event_dispatcher.cc",
"logging/log_event_dispatcher.h",
"logging/logging_defines.cc",
@ -40,13 +38,9 @@ source_set("common") {
"logging/proto/proto_utils.cc",
"logging/proto/proto_utils.h",
"logging/raw_event_subscriber.h",
"logging/raw_event_subscriber_bundle.cc",
"logging/raw_event_subscriber_bundle.h",
"logging/receiver_time_offset_estimator.h",
"logging/receiver_time_offset_estimator_impl.cc",
"logging/receiver_time_offset_estimator_impl.h",
"logging/simple_event_subscriber.cc",
"logging/simple_event_subscriber.h",
"logging/stats_event_subscriber.cc",
"logging/stats_event_subscriber.h",
]
@ -167,6 +161,8 @@ static_library("test_support") {
"test/fake_video_encode_accelerator_factory.h",
"test/mock_openscreen_environment.cc",
"test/mock_openscreen_environment.h",
"test/test_with_cast_environment.cc",
"test/test_with_cast_environment.h",
"test/utility/audio_utility.cc",
"test/utility/audio_utility.h",
"test/utility/default_config.cc",
@ -198,9 +194,8 @@ test("cast_unittests") {
"encoding/external_video_encoder_unittest.cc",
"encoding/video_encoder_unittest.cc",
"encoding/vpx_quantizer_parser_unittest.cc",
"logging/encoding_event_subscriber_unittest.cc",
"logging/log_event_dispatcher_unittest.cc",
"logging/receiver_time_offset_estimator_impl_unittest.cc",
"logging/simple_event_subscriber_unittest.cc",
"logging/stats_event_subscriber_unittest.cc",
"sender/audio_sender_unittest.cc",
"sender/openscreen_frame_sender_unittest.cc",

@ -7,26 +7,29 @@
#include <utility>
#include "base/functional/bind.h"
#include "base/functional/callback.h"
#include "base/location.h"
#include "base/notreached.h"
#include "base/task/single_thread_task_runner.h"
#include "base/time/tick_clock.h"
using base::SingleThreadTaskRunner;
namespace media {
namespace cast {
namespace media::cast {
CastEnvironment::CastEnvironment(
const base::TickClock* clock,
scoped_refptr<SingleThreadTaskRunner> main_thread_proxy,
scoped_refptr<SingleThreadTaskRunner> audio_thread_proxy,
scoped_refptr<SingleThreadTaskRunner> video_thread_proxy)
const base::TickClock& clock,
scoped_refptr<base::SingleThreadTaskRunner> main_thread_proxy,
scoped_refptr<base::SingleThreadTaskRunner> audio_thread_proxy,
scoped_refptr<base::SingleThreadTaskRunner> video_thread_proxy,
base::OnceClosure deletion_cb)
: main_thread_proxy_(main_thread_proxy),
audio_thread_proxy_(audio_thread_proxy),
video_thread_proxy_(video_thread_proxy),
clock_(clock),
logger_(this) {}
logger_(main_thread_proxy, std::move(deletion_cb)) {
CHECK(main_thread_proxy);
CHECK(audio_thread_proxy);
CHECK(video_thread_proxy);
}
CastEnvironment::~CastEnvironment() = default;
bool CastEnvironment::PostTask(ThreadId identifier,
@ -43,35 +46,24 @@ bool CastEnvironment::PostDelayedTask(ThreadId identifier,
->PostDelayedTask(from_here, std::move(task), delay);
}
scoped_refptr<SingleThreadTaskRunner> CastEnvironment::GetTaskRunner(
base::TimeTicks CastEnvironment::NowTicks() const {
return Clock().NowTicks();
}
scoped_refptr<base::SingleThreadTaskRunner> CastEnvironment::GetTaskRunner(
ThreadId identifier) const {
switch (identifier) {
case CastEnvironment::MAIN:
case ThreadId::kMain:
return main_thread_proxy_;
case CastEnvironment::AUDIO:
case ThreadId::kAudio:
return audio_thread_proxy_;
case CastEnvironment::VIDEO:
case ThreadId::kVideo:
return video_thread_proxy_;
default:
NOTREACHED() << "Invalid Thread identifier";
}
}
bool CastEnvironment::CurrentlyOn(ThreadId identifier) {
switch (identifier) {
case CastEnvironment::MAIN:
return main_thread_proxy_.get() &&
main_thread_proxy_->RunsTasksInCurrentSequence();
case CastEnvironment::AUDIO:
return audio_thread_proxy_.get() &&
audio_thread_proxy_->RunsTasksInCurrentSequence();
case CastEnvironment::VIDEO:
return video_thread_proxy_.get() &&
video_thread_proxy_->RunsTasksInCurrentSequence();
default:
NOTREACHED() << "Invalid thread identifier";
}
bool CastEnvironment::CurrentlyOn(ThreadId identifier) const {
return GetTaskRunner(identifier)->RunsTasksInCurrentSequence();
}
} // namespace cast
} // namespace media
} // namespace media::cast

@ -5,83 +5,85 @@
#ifndef MEDIA_CAST_CAST_ENVIRONMENT_H_
#define MEDIA_CAST_CAST_ENVIRONMENT_H_
#include "base/functional/callback_forward.h"
#include "base/memory/raw_ptr.h"
#include "base/memory/ref_counted.h"
#include "base/task/single_thread_task_runner.h"
#include "base/time/tick_clock.h"
#include "base/time/time.h"
#include "media/cast/logging/log_event_dispatcher.h"
namespace media {
namespace cast {
namespace base {
class SingleThreadTaskRunner;
class TimeDelta;
class TimeTicks;
class TickClock;
} // namespace base
namespace media::cast {
class CastEnvironment : public base::RefCountedThreadSafe<CastEnvironment> {
public:
// An enumeration of the cast threads.
enum ThreadId {
enum class ThreadId {
// The main thread is where the cast system is configured and where timers
// and network IO is performed.
MAIN,
kMain,
// The audio thread is where all send side audio processing is done,
// primarily encoding / decoding but also re-sampling.
AUDIO,
kAudio,
// The video encoder thread is where the video processing is done.
VIDEO,
kVideo,
};
CastEnvironment(
const base::TickClock* clock,
const base::TickClock& clock,
scoped_refptr<base::SingleThreadTaskRunner> main_thread_proxy,
scoped_refptr<base::SingleThreadTaskRunner> audio_thread_proxy,
scoped_refptr<base::SingleThreadTaskRunner> video_thread_proxy);
scoped_refptr<base::SingleThreadTaskRunner> video_thread_proxy,
base::OnceClosure deletion_cb);
CastEnvironment(const CastEnvironment&) = delete;
CastEnvironment(CastEnvironment&&) = delete;
CastEnvironment& operator=(const CastEnvironment&) = delete;
CastEnvironment& operator=(CastEnvironment&&) = delete;
// These are the same methods in message_loop.h, but are guaranteed to either
// get posted to the MessageLoop if it's still alive, or be deleted otherwise.
// They return true iff the thread existed and the task was posted. Note that
// even if the task is posted, there's no guarantee that it will run, since
// the target thread may already have a Quit message in its queue.
// Convenience methods for posting tasks to the task runner associated with
// `identifier`. They return true iff the thread existed and the task was
// posted. Note that even if the task is posted, there's no guarantee that it
// will run, since the target thread may already have a Quit message in its
// queue.
bool PostTask(ThreadId identifier,
const base::Location& from_here,
const base::Location& location,
base::OnceClosure task);
bool PostDelayedTask(ThreadId identifier,
const base::Location& from_here,
const base::Location& location,
base::OnceClosure task,
base::TimeDelta delay);
bool CurrentlyOn(ThreadId identifier);
[[nodiscard]] bool CurrentlyOn(ThreadId identifier) const;
// All of the media::cast implementation must use this TickClock.
const base::TickClock* Clock() const { return clock_; }
const base::TickClock& Clock() const { return *clock_; }
// Convenience method for accessing the current time from the clock.
base::TimeTicks NowTicks() const;
// Thread-safe log event dispatcher.
LogEventDispatcher* logger() { return &logger_; }
LogEventDispatcher& logger() { return logger_; }
scoped_refptr<base::SingleThreadTaskRunner> GetTaskRunner(
ThreadId identifier) const;
bool HasAudioThread() { return audio_thread_proxy_.get() ? true : false; }
bool HasVideoThread() { return video_thread_proxy_.get() ? true : false; }
protected:
virtual ~CastEnvironment();
// Subclasses may final these.
scoped_refptr<base::SingleThreadTaskRunner> main_thread_proxy_;
scoped_refptr<base::SingleThreadTaskRunner> audio_thread_proxy_;
scoped_refptr<base::SingleThreadTaskRunner> video_thread_proxy_;
raw_ptr<const base::TickClock> clock_;
raw_ref<const base::TickClock> clock_;
LogEventDispatcher logger_;
private:
friend class base::RefCountedThreadSafe<CastEnvironment>;
};
} // namespace cast
} // namespace media
} // namespace media::cast
#endif // MEDIA_CAST_CAST_ENVIRONMENT_H_

@ -21,6 +21,7 @@
#include "base/memory/raw_span.h"
#include "base/numerics/byte_conversions.h"
#include "base/numerics/safe_conversions.h"
#include "base/time/tick_clock.h"
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
@ -181,10 +182,9 @@ class AudioEncoder::ImplBase
"cast.stream", "Audio Encode", TRACE_ID_LOCAL(audio_frame.get()),
"encoder_utilization", audio_frame->encoder_utilization);
audio_frame->encode_completion_time =
cast_environment_->Clock()->NowTicks();
audio_frame->encode_completion_time = cast_environment_->NowTicks();
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(callback_, std::move(audio_frame),
samples_dropped_from_buffer_));
samples_dropped_from_buffer_ = 0;
@ -787,7 +787,7 @@ void AudioEncoder::InsertAudio(std::unique_ptr<AudioBus> audio_bus,
DCHECK(audio_bus.get());
CHECK_EQ(InitializationResult(), STATUS_INITIALIZED);
cast_environment_->PostTask(
CastEnvironment::AUDIO, FROM_HERE,
CastEnvironment::ThreadId::kAudio, FROM_HERE,
base::BindOnce(&AudioEncoder::ImplBase::EncodeAudio, impl_,
std::move(audio_bus), recorded_time));
}

@ -19,19 +19,17 @@
#include "build/build_config.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_codecs.h"
#include "media/base/fake_single_thread_task_runner.h"
#include "media/base/media.h"
#include "media/base/video_codecs.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/common/rtp_time.h"
#include "media/cast/common/sender_encoded_frame.h"
#include "media/cast/test/test_with_cast_environment.h"
#include "media/cast/test/utility/audio_utility.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/openscreen/src/cast/streaming/public/encoded_frame.h"
namespace media {
namespace cast {
namespace media::cast {
static const int kNumChannels = 2;
@ -39,8 +37,7 @@ namespace {
class TestEncodedAudioFrameReceiver {
public:
TestEncodedAudioFrameReceiver() : frames_received_(0) {}
TestEncodedAudioFrameReceiver() = default;
TestEncodedAudioFrameReceiver(const TestEncodedAudioFrameReceiver&) = delete;
TestEncodedAudioFrameReceiver& operator=(
const TestEncodedAudioFrameReceiver&) = delete;
@ -83,9 +80,9 @@ class TestEncodedAudioFrameReceiver {
}
private:
int frames_received_;
int frames_received_ = 0;
RtpTimeTicks rtp_lower_bound_;
int samples_per_frame_;
int samples_per_frame_ = 0;
base::TimeTicks lower_bound_;
base::TimeTicks upper_bound_;
};
@ -111,23 +108,17 @@ struct TestScenario {
} // namespace
class AudioEncoderTest : public ::testing::TestWithParam<TestScenario> {
class AudioEncoderTest : public ::testing::TestWithParam<TestScenario>,
public WithCastEnvironment {
public:
AudioEncoderTest() {
InitializeMediaLibrary();
testing_clock_.Advance(base::TimeTicks::Now() - base::TimeTicks());
}
void SetUp() final {
task_runner_ = new FakeSingleThreadTaskRunner(&testing_clock_);
cast_environment_ = new CastEnvironment(&testing_clock_, task_runner_,
task_runner_, task_runner_);
}
AudioEncoderTest(const AudioEncoderTest&) = delete;
AudioEncoderTest(AudioEncoderTest&&) = delete;
AudioEncoderTest& operator=(const AudioEncoderTest&) = delete;
virtual ~AudioEncoderTest() = default;
AudioEncoderTest& operator=(AudioEncoderTest&&) = delete;
void RunTestForCodec(AudioCodec codec) {
const TestScenario& scenario = GetParam();
@ -141,18 +132,14 @@ class AudioEncoderTest : public ::testing::TestWithParam<TestScenario> {
const bool simulate_missing_data = scenario.durations_in_ms[i] < 0;
const base::TimeDelta duration =
base::Milliseconds(std::abs(scenario.durations_in_ms[i]));
receiver_->SetCaptureTimeBounds(
testing_clock_.NowTicks() - frame_duration,
testing_clock_.NowTicks() + duration);
if (simulate_missing_data) {
task_runner_->RunTasks();
testing_clock_.Advance(duration);
} else {
receiver_->SetCaptureTimeBounds(NowTicks() - frame_duration,
NowTicks() + duration);
if (!simulate_missing_data) {
audio_encoder_->InsertAudio(audio_bus_factory_->NextAudioBus(duration),
testing_clock_.NowTicks());
task_runner_->RunTasks();
testing_clock_.Advance(duration);
NowTicks());
}
RunUntilIdle();
AdvanceClock(duration);
if (codec == AudioCodec::kOpus) {
const int bitrate = audio_encoder_->GetBitrate();
@ -173,14 +160,14 @@ class AudioEncoderTest : public ::testing::TestWithParam<TestScenario> {
private:
void CreateObjectsForCodec(AudioCodec codec) {
audio_bus_factory_.reset(
new TestAudioBusFactory(kNumChannels, kDefaultAudioSamplingRate,
TestAudioBusFactory::kMiddleANoteFreq, 0.5f));
audio_bus_factory_ = std::make_unique<TestAudioBusFactory>(
kNumChannels, kDefaultAudioSamplingRate,
TestAudioBusFactory::kMiddleANoteFreq, 0.5f);
receiver_.reset(new TestEncodedAudioFrameReceiver());
receiver_ = std::make_unique<TestEncodedAudioFrameReceiver>();
audio_encoder_ = std::make_unique<AudioEncoder>(
cast_environment_, kNumChannels, kDefaultAudioSamplingRate,
cast_environment(), kNumChannels, kDefaultAudioSamplingRate,
kDefaultAudioEncoderBitrate, codec,
base::BindRepeating(&TestEncodedAudioFrameReceiver::FrameEncoded,
base::Unretained(receiver_.get())));
@ -188,12 +175,9 @@ class AudioEncoderTest : public ::testing::TestWithParam<TestScenario> {
receiver_->SetSamplesPerFrame(audio_encoder_->GetSamplesPerFrame());
}
base::SimpleTestTickClock testing_clock_;
scoped_refptr<FakeSingleThreadTaskRunner> task_runner_;
std::unique_ptr<TestAudioBusFactory> audio_bus_factory_;
std::unique_ptr<TestEncodedAudioFrameReceiver> receiver_;
std::unique_ptr<AudioEncoder> audio_encoder_;
scoped_refptr<CastEnvironment> cast_environment_;
};
TEST_P(AudioEncoderTest, EncodeOpus) {
@ -264,5 +248,4 @@ INSTANTIATE_TEST_SUITE_P(
TestScenario(kTwoBigUnderruns, std::size(kTwoBigUnderruns)),
TestScenario(kMixedUnderruns, std::size(kMixedUnderruns))));
} // namespace cast
} // namespace media
} // namespace media::cast

@ -171,7 +171,7 @@ class ExternalVideoEncoder::VEAClientImpl final
encoder_active_);
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(
status_change_cb_,
encoder_active_ ? media::EncoderStatus::Codes::kOk
@ -283,7 +283,7 @@ class ExternalVideoEncoder::VEAClientImpl final
encoder_active_ = false;
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(status_change_cb_, status, STATUS_CODEC_RUNTIME_ERROR));
// Flush all in progress frames to avoid any getting stuck.
@ -350,7 +350,7 @@ class ExternalVideoEncoder::VEAClientImpl final
if (metadata.dropped_frame()) {
CHECK(key_frame_encountered_);
// The encoder drops a frame.
cast_environment_->PostTask(CastEnvironment::MAIN, FROM_HERE,
cast_environment_->PostTask(CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(output_cb_, nullptr));
in_progress_frame_encodes_.pop_front();
if (encoder_active_) {
@ -481,10 +481,9 @@ class ExternalVideoEncoder::VEAClientImpl final
quantizer_estimator_.Reset();
}
encoded_frame->encode_completion_time =
cast_environment_->Clock()->NowTicks();
encoded_frame->encode_completion_time = cast_environment_->NowTicks();
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(output_cb_, std::move(encoded_frame)));
in_progress_frame_encodes_.pop_front();
@ -527,7 +526,7 @@ class ExternalVideoEncoder::VEAClientImpl final
std::unique_ptr<SenderEncodedFrame> no_result(nullptr);
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(output_cb_, std::move(no_result)));
in_progress_frame_encodes_.pop_back();
}
@ -665,7 +664,7 @@ ExternalVideoEncoder::ExternalVideoEncoder(
output_cb_(std::move(output_cb)),
frame_size_(frame_size),
bit_rate_(video_config.start_bitrate) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
DCHECK_GT(video_config.max_frame_rate, 0);
DCHECK(!frame_size_.IsEmpty());
DCHECK(status_change_cb);
@ -679,12 +678,12 @@ ExternalVideoEncoder::ExternalVideoEncoder(
}
ExternalVideoEncoder::~ExternalVideoEncoder() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
DestroyClientSoon();
}
void ExternalVideoEncoder::DestroyClientSoon() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
// Ensure |client_| is destroyed from the encoder task runner by dropping the
// reference to it within an encoder task.
if (client_) {
@ -695,14 +694,14 @@ void ExternalVideoEncoder::DestroyClientSoon() {
void ExternalVideoEncoder::SetErrorToMetricsProvider(
const media::EncoderStatus& encoder_status) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
metrics_provider_->SetError(encoder_status);
}
bool ExternalVideoEncoder::EncodeVideoFrame(
scoped_refptr<media::VideoFrame> video_frame,
base::TimeTicks reference_time) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
if (!client_ || video_frame->visible_rect().size() != frame_size_) {
return false;
@ -717,7 +716,7 @@ bool ExternalVideoEncoder::EncodeVideoFrame(
}
void ExternalVideoEncoder::SetBitRate(int new_bit_rate) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
DCHECK_GT(new_bit_rate, 0);
bit_rate_ = new_bit_rate;
@ -730,7 +729,7 @@ void ExternalVideoEncoder::SetBitRate(int new_bit_rate) {
}
void ExternalVideoEncoder::GenerateKeyFrame() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
key_frame_requested_ = true;
}
@ -740,14 +739,14 @@ void ExternalVideoEncoder::OnCreateVideoEncodeAccelerator(
const StatusChangeCallback& status_change_cb,
scoped_refptr<base::SingleThreadTaskRunner> encoder_task_runner,
std::unique_ptr<media::VideoEncodeAccelerator> vea) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
// The callback will be invoked with null pointers in the case where the
// system does not support or lacks the resources to provide GPU-accelerated
// video encoding.
if (!encoder_task_runner || !vea) {
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(status_change_cb, STATUS_CODEC_INIT_FAILED));
return;
}
@ -767,7 +766,7 @@ void ExternalVideoEncoder::OnCreateVideoEncodeAccelerator(
NOTREACHED() << "Fake software video encoder cannot be external";
default:
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(status_change_cb, STATUS_UNSUPPORTED_CODEC));
return;
}

@ -179,8 +179,7 @@ MediaVideoEncoderWrapper::MediaVideoEncoderWrapper(
gpu_factories_(gpu_factories),
is_hardware_encoder_(video_config.use_hardware_encoder),
codec_(video_config.video_codec()) {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->HasVideoThread());
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
CHECK(metrics_provider_);
CHECK(status_change_cb_);
CHECK(output_cb_);
@ -196,10 +195,10 @@ MediaVideoEncoderWrapper::MediaVideoEncoderWrapper(
}
MediaVideoEncoderWrapper::~MediaVideoEncoderWrapper() {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
if (encoder_) {
cast_environment_->GetTaskRunner(CastEnvironment::VIDEO)
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::kVideo)
->DeleteSoon(FROM_HERE, encoder_.release());
}
weak_factory_.InvalidateWeakPtrs();
@ -208,7 +207,7 @@ MediaVideoEncoderWrapper::~MediaVideoEncoderWrapper() {
bool MediaVideoEncoderWrapper::EncodeVideoFrame(
scoped_refptr<media::VideoFrame> video_frame,
base::TimeTicks reference_time) {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
CHECK(!video_frame->visible_rect().IsEmpty());
// TODO(crbug.com/282984511): consider adding optimization to store frames
@ -254,7 +253,7 @@ bool MediaVideoEncoderWrapper::EncodeVideoFrame(
// Inform the encoder about the new target bit rate.
void MediaVideoEncoderWrapper::SetBitRate(int new_bit_rate) {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
options_.bitrate =
Bitrate::ConstantBitrate(base::checked_cast<uint32_t>(new_bit_rate));
@ -268,14 +267,14 @@ void MediaVideoEncoderWrapper::SetBitRate(int new_bit_rate) {
// Inform the encoder to encode the next frame as a key frame.
void MediaVideoEncoderWrapper::GenerateKeyFrame() {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
encode_options_.key_frame = true;
}
void MediaVideoEncoderWrapper::OnEncodedFrame(
VideoEncoderOutput output,
std::optional<media::VideoEncoder::CodecDescription> description) {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
const CachedMetadata& metadata = recent_metadata_.front();
auto encoded_frame = std::make_unique<SenderEncodedFrame>();
@ -287,8 +286,7 @@ void MediaVideoEncoderWrapper::OnEncodedFrame(
encoded_frame->rtp_timestamp = metadata.rtp_timestamp;
encoded_frame->reference_time = metadata.reference_time;
encoded_frame->encode_completion_time =
cast_environment_->Clock()->NowTicks();
encoded_frame->encode_completion_time = cast_environment_->NowTicks();
// TODO(crbug.com/282984511): generalize logic for encoder related metrics.
// This is based heavily on the logic in media/cast/encoding/vpx_encoder.cc.
@ -313,7 +311,7 @@ void MediaVideoEncoderWrapper::OnEncodedFrame(
}
void MediaVideoEncoderWrapper::OnEncoderStatus(EncoderStatus error) {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
if (!last_recorded_status_ || error != last_recorded_status_.value()) {
last_recorded_status_ = error;
@ -330,20 +328,20 @@ void MediaVideoEncoderWrapper::OnEncoderStatus(EncoderStatus error) {
void MediaVideoEncoderWrapper::OnEncoderInfo(
const VideoEncoderInfo& encoder_info) {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
// TODO(crbug.com/282984511): support handling `supports_frame_size_change`
// property.
}
void MediaVideoEncoderWrapper::ConstructEncoder() {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
// TODO(crbug.com/282984511): consider adding a fake software encoder for
// testing.
if (is_hardware_encoder_) {
CHECK(gpu_factories_);
encoder_ = CreateHardwareEncoder(
*gpu_factories_,
cast_environment_->GetTaskRunner(CastEnvironment::MAIN));
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::kMain));
} else {
encoder_ = CreateSoftwareEncoder(codec_);
}
@ -377,7 +375,7 @@ base::TimeDelta MediaVideoEncoderWrapper::GetFrameDuration(
// The VideoEncoder API requires you to Flush() and wait before calling
// ChangeOptions(). This results in this annoying set of nested callbacks.
void MediaVideoEncoderWrapper::UpdateEncoderOptions() {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
CHECK(!is_updating_options_);
is_updating_options_ = true;
@ -396,7 +394,7 @@ void MediaVideoEncoderWrapper::UpdateEncoderOptions() {
void MediaVideoEncoderWrapper::CallEncoderOnCorrectThread(
base::OnceClosure closure) {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
// If hardware, let the encoder do the post to the accelerator / VIDEO thread
// on its own.
@ -405,37 +403,37 @@ void MediaVideoEncoderWrapper::CallEncoderOnCorrectThread(
// If software, manually post it to the VIDEO thread to not block MAIN.
} else {
cast_environment_->PostTask(CastEnvironment::VIDEO, FROM_HERE,
cast_environment_->PostTask(CastEnvironment::ThreadId::kVideo, FROM_HERE,
std::move(closure));
}
}
media::VideoEncoder::EncoderInfoCB MediaVideoEncoderWrapper::GetInfoCB() {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
return base::BindPostTask(
cast_environment_->GetTaskRunner(CastEnvironment::MAIN),
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::kMain),
base::BindRepeating(&MediaVideoEncoderWrapper::OnEncoderInfo,
weak_factory_.GetWeakPtr()));
}
media::VideoEncoder::OutputCB MediaVideoEncoderWrapper::GetOutputCB() {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
return base::BindPostTask(
cast_environment_->GetTaskRunner(CastEnvironment::MAIN),
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::kMain),
base::BindRepeating(&MediaVideoEncoderWrapper::OnEncodedFrame,
weak_factory_.GetWeakPtr()));
}
media::VideoEncoder::EncoderStatusCB MediaVideoEncoderWrapper::GetDoneCB() {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
return base::BindPostTask(
cast_environment_->GetTaskRunner(CastEnvironment::MAIN),
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::kMain),
base::BindOnce(&MediaVideoEncoderWrapper::OnEncoderStatus,
weak_factory_.GetWeakPtr()));
}
void MediaVideoEncoderWrapper::OnOptionsUpdated(EncoderStatus status) {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
// Now that we are done updating options we can begin encoding frames
// again.
is_updating_options_ = false;
@ -446,9 +444,9 @@ void MediaVideoEncoderWrapper::OnOptionsUpdated(EncoderStatus status) {
media::VideoEncoder::EncoderStatusCB
MediaVideoEncoderWrapper::GetOptionsUpdateDoneCB() {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
return base::BindPostTask(
cast_environment_->GetTaskRunner(CastEnvironment::MAIN),
cast_environment_->GetTaskRunner(CastEnvironment::ThreadId::kMain),
base::BindOnce(&MediaVideoEncoderWrapper::OnOptionsUpdated,
weak_factory_.GetWeakPtr()));
}

@ -31,7 +31,7 @@ SizeAdaptableVideoEncoderBase::SizeAdaptableVideoEncoderBase(
next_frame_id_(FrameId::first()) {
CHECK(output_cb_);
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(status_change_cb_, STATUS_INITIALIZED));
}
@ -42,7 +42,7 @@ SizeAdaptableVideoEncoderBase::~SizeAdaptableVideoEncoderBase() {
bool SizeAdaptableVideoEncoderBase::EncodeVideoFrame(
scoped_refptr<media::VideoFrame> video_frame,
base::TimeTicks reference_time) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
const gfx::Size frame_size = video_frame->visible_rect().size();
if (frame_size.IsEmpty()) {
@ -70,7 +70,7 @@ bool SizeAdaptableVideoEncoderBase::EncodeVideoFrame(
}
void SizeAdaptableVideoEncoderBase::SetBitRate(int new_bit_rate) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
video_config_.start_bitrate = new_bit_rate;
if (encoder_) {
encoder_->SetBitRate(new_bit_rate);
@ -78,7 +78,7 @@ void SizeAdaptableVideoEncoderBase::SetBitRate(int new_bit_rate) {
}
void SizeAdaptableVideoEncoderBase::GenerateKeyFrame() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
if (encoder_) {
encoder_->GenerateKeyFrame();
}
@ -86,7 +86,7 @@ void SizeAdaptableVideoEncoderBase::GenerateKeyFrame() {
StatusChangeCallback
SizeAdaptableVideoEncoderBase::CreateEncoderStatusChangeCallback() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
return base::BindRepeating(
&SizeAdaptableVideoEncoderBase::OnEncoderStatusChange,
weak_factory_.GetWeakPtr());
@ -94,7 +94,7 @@ SizeAdaptableVideoEncoderBase::CreateEncoderStatusChangeCallback() {
VideoEncoder::FrameEncodedCallback
SizeAdaptableVideoEncoderBase::CreateFrameEncodedCallback() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
return base::BindRepeating(
&SizeAdaptableVideoEncoderBase::OnEncodedVideoFrame,
weak_factory_.GetWeakPtr());
@ -104,7 +104,7 @@ void SizeAdaptableVideoEncoderBase::OnEncoderReplaced(
VideoEncoder* replacement_encoder) {}
void SizeAdaptableVideoEncoderBase::DestroyEncoder() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
// The weak pointers are invalidated to prevent future calls back to |this|.
// This effectively cancels any of |encoder_|'s posted tasks that have not yet
// run.
@ -114,7 +114,7 @@ void SizeAdaptableVideoEncoderBase::DestroyEncoder() {
void SizeAdaptableVideoEncoderBase::TrySpawningReplacementEncoder(
const gfx::Size& size_needed) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
// If prior frames are still encoding in the current encoder, let them finish
// first.
@ -138,7 +138,7 @@ void SizeAdaptableVideoEncoderBase::TrySpawningReplacementEncoder(
void SizeAdaptableVideoEncoderBase::OnEncoderStatusChange(
OperationalStatus status) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
if (frames_in_encoder_ == kEncoderIsInitializing &&
status == STATUS_INITIALIZED) {
// Begin using the replacement encoder.
@ -150,7 +150,7 @@ void SizeAdaptableVideoEncoderBase::OnEncoderStatusChange(
void SizeAdaptableVideoEncoderBase::OnEncodedVideoFrame(
std::unique_ptr<SenderEncodedFrame> encoded_frame) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
--frames_in_encoder_;
DCHECK_GE(frames_in_encoder_, 0);

@ -29,7 +29,7 @@ namespace {
void InitializeEncoderOnEncoderThread(
const scoped_refptr<CastEnvironment>& environment,
SoftwareVideoEncoder* encoder) {
DCHECK(environment->CurrentlyOn(CastEnvironment::VIDEO));
DCHECK(environment->CurrentlyOn(CastEnvironment::ThreadId::kVideo));
encoder->Initialize();
}
@ -40,7 +40,7 @@ void EncodeVideoFrameOnEncoderThread(
base::TimeTicks reference_time,
const VideoEncoderImpl::CodecDynamicConfig& dynamic_config,
VideoEncoderImpl::FrameEncodedCallback output_cb) {
DCHECK(environment->CurrentlyOn(CastEnvironment::VIDEO));
DCHECK(environment->CurrentlyOn(CastEnvironment::ThreadId::kVideo));
if (dynamic_config.key_frame_requested) {
encoder->GenerateKeyFrame();
}
@ -51,8 +51,8 @@ void EncodeVideoFrameOnEncoderThread(
video_frame->metadata().capture_begin_time;
encoded_frame->capture_end_time = video_frame->metadata().capture_end_time;
encoder->Encode(std::move(video_frame), reference_time, encoded_frame.get());
encoded_frame->encode_completion_time = environment->Clock()->NowTicks();
environment->PostTask(CastEnvironment::MAIN, FROM_HERE,
encoded_frame->encode_completion_time = environment->NowTicks();
environment->PostTask(CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(output_cb, std::move(encoded_frame)));
}
} // namespace
@ -64,7 +64,6 @@ VideoEncoderImpl::VideoEncoderImpl(
StatusChangeCallback status_change_cb,
FrameEncodedCallback output_cb)
: cast_environment_(cast_environment) {
CHECK(cast_environment_->HasVideoThread());
CHECK(status_change_cb);
CHECK(output_cb);
@ -74,7 +73,7 @@ VideoEncoderImpl::VideoEncoderImpl(
encoder_ =
std::make_unique<VpxEncoder>(video_config, std::move(metrics_provider));
cast_environment_->PostTask(
CastEnvironment::VIDEO, FROM_HERE,
CastEnvironment::ThreadId::kVideo, FROM_HERE,
base::BindOnce(&InitializeEncoderOnEncoderThread, cast_environment,
encoder_.get()));
} else if (codec == VideoCodec::kUnknown &&
@ -87,7 +86,7 @@ VideoEncoderImpl::VideoEncoderImpl(
encoder_ =
std::make_unique<Av1Encoder>(video_config, std::move(metrics_provider));
cast_environment_->PostTask(
CastEnvironment::VIDEO, FROM_HERE,
CastEnvironment::ThreadId::kVideo, FROM_HERE,
base::BindOnce(&InitializeEncoderOnEncoderThread, cast_environment,
encoder_.get()));
#endif
@ -99,17 +98,17 @@ VideoEncoderImpl::VideoEncoderImpl(
dynamic_config_.bit_rate = video_config.start_bitrate;
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(
std::move(status_change_cb),
encoder_.get() ? STATUS_INITIALIZED : STATUS_UNSUPPORTED_CODEC));
}
VideoEncoderImpl::~VideoEncoderImpl() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
if (encoder_) {
cast_environment_->PostTask(
CastEnvironment::VIDEO, FROM_HERE,
CastEnvironment::ThreadId::kVideo, FROM_HERE,
base::BindOnce(&base::DeletePointer<SoftwareVideoEncoder>,
encoder_.release()));
}
@ -118,11 +117,11 @@ VideoEncoderImpl::~VideoEncoderImpl() {
bool VideoEncoderImpl::EncodeVideoFrame(
scoped_refptr<media::VideoFrame> video_frame,
base::TimeTicks reference_time) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
DCHECK(!video_frame->visible_rect().IsEmpty());
cast_environment_->PostTask(
CastEnvironment::VIDEO, FROM_HERE,
CastEnvironment::ThreadId::kVideo, FROM_HERE,
base::BindOnce(&EncodeVideoFrameOnEncoderThread, cast_environment_,
encoder_.get(), std::move(video_frame), reference_time,
dynamic_config_, output_cb_));

@ -21,7 +21,6 @@
#include "base/task/single_thread_task_runner.h"
#include "base/task/thread_pool.h"
#include "base/test/scoped_feature_list.h"
#include "base/test/task_environment.h"
#include "base/time/time.h"
#include "build/build_config.h"
#include "gpu/command_buffer/client/test_shared_image_interface.h"
@ -35,6 +34,7 @@
#include "media/cast/common/rtp_time.h"
#include "media/cast/common/sender_encoded_frame.h"
#include "media/cast/test/fake_video_encode_accelerator_factory.h"
#include "media/cast/test/test_with_cast_environment.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/video_utility.h"
#include "media/video/mock_gpu_video_accelerator_factories.h"
@ -80,28 +80,22 @@ struct VideoEncoderTestParam {
bool enable_media_encoder_feature;
};
class VideoEncoderTest
: public ::testing::TestWithParam<VideoEncoderTestParam> {
class VideoEncoderTest : public ::testing::TestWithParam<VideoEncoderTestParam>,
public WithCastEnvironment {
public:
VideoEncoderTest(const VideoEncoderTest&) = delete;
VideoEncoderTest& operator=(const VideoEncoderTest&) = delete;
protected:
VideoEncoderTest()
: task_environment_(base::test::TaskEnvironment::TimeSource::MOCK_TIME),
video_config_(GetDefaultVideoSenderConfig()),
: video_config_(GetDefaultVideoSenderConfig()),
codec_params_(video_config_.video_codec_params.value()) {
task_runner_ = task_environment_.GetMainThreadTaskRunner();
accelerator_task_runner_ = base::ThreadPool::CreateSingleThreadTaskRunner(
{base::TaskPriority::USER_BLOCKING,
base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN},
base::SingleThreadTaskRunnerThreadMode::DEDICATED);
cast_environment_ = base::MakeRefCounted<CastEnvironment>(
task_environment_.GetMockTickClock(), task_runner_, task_runner_,
task_runner_);
task_environment_.AdvanceClock(base::TimeTicks::Now() - base::TimeTicks());
first_frame_time_ = Now();
first_frame_time_ = NowTicks();
if (GetParam().use_hardware_encoder) {
vea_factory_ = std::make_unique<FakeVideoEncodeAcceleratorFactory>(
@ -161,7 +155,7 @@ class VideoEncoderTest
.Times(testing::AtLeast(expected_frames));
video_encoder_ = VideoEncoder::Create(
cast_environment_, video_config_, std::move(metrics_provider),
cast_environment(), video_config_, std::move(metrics_provider),
base::BindRepeating(&VideoEncoderTest::OnOperationalStatusChange,
base::Unretained(this)),
std::move(output_cb),
@ -190,25 +184,20 @@ class VideoEncoderTest
void DestroyEncoder() { video_encoder_.reset(); }
base::TimeTicks Now() {
return task_environment_.GetMockTickClock()->NowTicks();
}
void RunTasksAndAdvanceClock() {
CHECK_GT(video_config_.max_frame_rate, 0);
const base::TimeDelta frame_duration =
base::Microseconds(1000000.0 / video_config_.max_frame_rate);
task_environment_.AdvanceClock(frame_duration);
accelerator_task_runner_->PostTask(FROM_HERE,
task_environment_.QuitClosure());
task_environment_.RunUntilQuit();
task_runner_->PostTask(FROM_HERE, task_environment_.QuitClosure());
task_environment_.RunUntilQuit();
AdvanceClock(frame_duration);
accelerator_task_runner_->PostTask(FROM_HERE, QuitClosure());
RunUntilQuit();
GetMainThreadTaskRunner()->PostTask(FROM_HERE, QuitClosure());
RunUntilQuit();
}
// Creates a new VideoFrame of the given |size|, filled with a test pattern.
scoped_refptr<media::VideoFrame> CreateTestVideoFrame(const gfx::Size& size) {
const base::TimeDelta timestamp = Now() - first_frame_time_;
const base::TimeDelta timestamp = NowTicks() - first_frame_time_;
scoped_refptr<media::VideoFrame> frame = media::VideoFrame::CreateFrame(
PIXEL_FORMAT_I420, size, gfx::Rect(size), size, timestamp);
PopulateVideoFrame(frame.get(), 123);
@ -241,11 +230,7 @@ class VideoEncoderTest
operational_status_ == STATUS_INITIALIZED);
}
base::test::TaskEnvironment task_environment_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
scoped_refptr<base::SingleThreadTaskRunner> accelerator_task_runner_;
scoped_refptr<CastEnvironment> cast_environment_;
FrameSenderConfig video_config_;
raw_ref<VideoCodecParams> codec_params_;
std::unique_ptr<FakeVideoEncodeAcceleratorFactory> vea_factory_;
@ -312,7 +297,7 @@ TEST_P(VideoEncoderTest, EncodesVariedFrameSizes) {
// successfully encoded.
while (encoded_frames.size() <= kNumFramesExpected ||
AnyOfLastFramesAreEmpty(encoded_frames, kNumFramesExpected)) {
const auto reference_time = Now();
const auto reference_time = NowTicks();
auto video_frame = CreateTestVideoFrame(frame_size);
expectations.emplace(
reference_time,
@ -413,7 +398,7 @@ TEST_P(VideoEncoderTest, CanBeDestroyedBeforeVEAIsCreated) {
// Send a frame to spawn creation of the ExternalVideoEncoder instance.
const bool encode_result = video_encoder()->EncodeVideoFrame(
CreateTestVideoFrame(gfx::Size(128, 72)), Now());
CreateTestVideoFrame(gfx::Size(128, 72)), NowTicks());
// Hardware encoders should fail to encode at this point, since the VEA has
// not responded yet. Since software encoders don't use VEA, they should

@ -1,368 +0,0 @@
// Copyright 2014 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/logging/encoding_event_subscriber.h"
#include <stdint.h>
#include <algorithm>
#include <cstring>
#include <utility>
#include "base/logging.h"
#include "base/not_fatal_until.h"
#include "base/numerics/safe_conversions.h"
#include "media/cast/logging/proto/proto_utils.h"
using google::protobuf::RepeatedPtrField;
using media::cast::proto::AggregatedFrameEvent;
using media::cast::proto::AggregatedPacketEvent;
using media::cast::proto::BasePacketEvent;
using media::cast::proto::LogMetadata;
namespace {
// A size limit on maps to keep lookups fast.
const size_t kMaxMapSize = 200;
// The smallest (ordered by RTP timestamp) |kNumMapEntriesToTransfer| entries
// will be moved when the map size reaches |kMaxMapSize|.
// Must be smaller than |kMaxMapSize|.
const size_t kNumMapEntriesToTransfer = 100;
// Strict weak ordering for (smart or raw) pointers to event protos: compares
// the two pointees by their relative RTP timestamp.
template <typename ProtoPtr>
bool IsRtpTimestampLessThan(const ProtoPtr& first, const ProtoPtr& second) {
  const auto first_timestamp = first->relative_rtp_timestamp();
  const auto second_timestamp = second->relative_rtp_timestamp();
  return first_timestamp < second_timestamp;
}
// Appends a new BasePacketEvent to `event_proto`, fills in its packet id and
// size fields, and returns a pointer to the freshly added entry. The returned
// object remains owned by `event_proto`.
BasePacketEvent* GetNewBasePacketEvent(AggregatedPacketEvent* event_proto,
                                       int packet_id,
                                       int size) {
  BasePacketEvent* const new_event = event_proto->add_base_packet_event();
  new_event->set_packet_id(packet_id);
  new_event->set_size(size);
  return new_event;
}
} // namespace
namespace media {
namespace cast {
// Constructs a subscriber that records only events whose media type matches
// `event_media_type`, keeping at most `max_frames` frames' worth of events.
EncodingEventSubscriber::EncodingEventSubscriber(
    EventMediaType event_media_type,
    size_t max_frames)
    : event_media_type_(event_media_type), max_frames_(max_frames) {
  // Reset() (defined elsewhere in this file) establishes the initial
  // recording state.
  Reset();
}
EncodingEventSubscriber::~EncodingEventSubscriber() {
  // This class is thread-affine: destruction must occur on the same thread
  // the thread checker was bound to.
  DCHECK(thread_checker_.CalledOnValidThread());
}
// Aggregates a single frame event into the per-frame AggregatedFrameEvent
// proto keyed by the frame's RTP timestamp relative to the first event seen.
// Enforces three limits: kMaxEventsPerProto events per proto,
// kMaxProtosPerFrame protos per frame, and kMaxMapSize map entries.
void EncodingEventSubscriber::OnReceiveFrameEvent(
    const FrameEvent& frame_event) {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Ignore events for the other media type (the logger dispatches both audio
  // and video events to every subscriber).
  if (event_media_type_ != frame_event.media_type) {
    return;
  }
  const RtpTimeDelta relative_rtp_timestamp =
      GetRelativeRtpTimestamp(frame_event.rtp_timestamp);
  const uint32_t lower_32_bits = relative_rtp_timestamp.lower_32_bits();
  AggregatedFrameEvent* event_proto_ptr = nullptr;
  // Look up existing entry. If not found, create a new entry and add to map.
  auto it = frame_event_map_.find(relative_rtp_timestamp);
  if (it == frame_event_map_.end()) {
    // Drop the event entirely if this frame already stores the maximum
    // number of protos.
    if (!ShouldCreateNewProto(lower_32_bits)) {
      return;
    }
    IncrementStoredProtoCount(lower_32_bits);
    auto event_proto = std::make_unique<AggregatedFrameEvent>();
    event_proto->set_relative_rtp_timestamp(lower_32_bits);
    event_proto_ptr = event_proto.get();
    frame_event_map_.insert(
        std::make_pair(relative_rtp_timestamp, std::move(event_proto)));
  } else {
    // The existing proto is full; spill it to storage and start a new one
    // for the same frame (subject to the per-frame proto limit).
    if (it->second->event_type_size() >= kMaxEventsPerProto) {
      DVLOG(2) << "Too many events in frame " << frame_event.rtp_timestamp
               << ". Using new frame event proto.";
      AddFrameEventToStorage(std::move(it->second));
      if (!ShouldCreateNewProto(lower_32_bits)) {
        frame_event_map_.erase(it);
        return;
      }
      IncrementStoredProtoCount(lower_32_bits);
      it->second = std::make_unique<AggregatedFrameEvent>();
      it->second->set_relative_rtp_timestamp(lower_32_bits);
    }
    event_proto_ptr = it->second.get();
  }
  event_proto_ptr->add_event_type(ToProtoEventType(frame_event.type));
  event_proto_ptr->add_event_timestamp_ms(
      (frame_event.timestamp - base::TimeTicks()).InMilliseconds());
  // Record the event-type-specific payload fields.
  if (frame_event.type == FRAME_CAPTURE_END) {
    if (frame_event.media_type == VIDEO_EVENT && frame_event.width > 0 &&
        frame_event.height > 0) {
      event_proto_ptr->set_width(frame_event.width);
      event_proto_ptr->set_height(frame_event.height);
    }
  } else if (frame_event.type == FRAME_ENCODED) {
    event_proto_ptr->set_encoded_frame_size(frame_event.size);
    // Utilization values are stored as integer percentages, rounded to
    // nearest; negative values mean "not available" and are skipped.
    if (frame_event.encoder_cpu_utilization >= 0.0) {
      event_proto_ptr->set_encoder_cpu_percent_utilized(
          base::saturated_cast<int32_t>(
              frame_event.encoder_cpu_utilization * 100.0 + 0.5));
    }
    if (frame_event.idealized_bitrate_utilization >= 0.0) {
      event_proto_ptr->set_idealized_bitrate_percent_utilized(
          base::saturated_cast<int32_t>(
              frame_event.idealized_bitrate_utilization * 100.0 + 0.5));
    }
    if (frame_event.media_type == VIDEO_EVENT) {
      event_proto_ptr->set_key_frame(frame_event.key_frame);
      event_proto_ptr->set_target_bitrate(frame_event.target_bitrate);
    }
  } else if (frame_event.type == FRAME_PLAYOUT) {
    event_proto_ptr->set_delay_millis(frame_event.delay_delta.InMilliseconds());
  }
  // Keep the lookup map small by spilling the oldest entries to storage.
  if (frame_event_map_.size() > kMaxMapSize) {
    TransferFrameEvents(kNumMapEntriesToTransfer);
  }
  DCHECK(frame_event_map_.size() <= kMaxMapSize);
  DCHECK(frame_event_storage_.size() <= max_frames_);
}
// Aggregates a single packet event into the AggregatedPacketEvent proto for
// its frame, merging into an existing BasePacketEvent with a matching packet
// ID when one exists. Enforces kMaxPacketsPerFrame packets per proto,
// kMaxEventsPerProto events per packet, kMaxProtosPerFrame protos per frame,
// and kMaxMapSize map entries.
void EncodingEventSubscriber::OnReceivePacketEvent(
    const PacketEvent& packet_event) {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Ignore events for the other media type.
  if (event_media_type_ != packet_event.media_type) {
    return;
  }
  const RtpTimeDelta relative_rtp_timestamp =
      GetRelativeRtpTimestamp(packet_event.rtp_timestamp);
  uint32_t lower_32_bits = relative_rtp_timestamp.lower_32_bits();
  auto it = packet_event_map_.find(relative_rtp_timestamp);
  BasePacketEvent* base_packet_event_proto = nullptr;
  // Look up existing entry. If not found, create a new entry and add to map.
  if (it == packet_event_map_.end()) {
    // Drop the event if this frame already stores the maximum number of
    // protos.
    if (!ShouldCreateNewProto(lower_32_bits)) {
      return;
    }
    IncrementStoredProtoCount(lower_32_bits);
    auto event_proto = std::make_unique<AggregatedPacketEvent>();
    event_proto->set_relative_rtp_timestamp(lower_32_bits);
    base_packet_event_proto = GetNewBasePacketEvent(
        event_proto.get(), packet_event.packet_id, packet_event.size);
    packet_event_map_.insert(
        std::make_pair(relative_rtp_timestamp, std::move(event_proto)));
  } else {
    // Found existing entry, now look up existing BasePacketEvent using packet
    // ID. If not found, create a new entry and add to proto.
    RepeatedPtrField<BasePacketEvent>* field =
        it->second->mutable_base_packet_event();
    for (RepeatedPtrField<BasePacketEvent>::pointer_iterator base_it =
             field->pointer_begin();
         base_it != field->pointer_end(); ++base_it) {
      if ((*base_it)->packet_id() == packet_event.packet_id) {
        base_packet_event_proto = *base_it;
        break;
      }
    }
    if (!base_packet_event_proto) {
      // No entry for this packet yet. If the proto already holds the maximum
      // number of packets, spill it to storage and start a new one (subject
      // to the per-frame proto limit).
      if (it->second->base_packet_event_size() >= kMaxPacketsPerFrame) {
        DVLOG(3) << "Too many packets in AggregatedPacketEvent "
                 << packet_event.rtp_timestamp << ". "
                 << "Using new packet event proto.";
        AddPacketEventToStorage(std::move(it->second));
        if (!ShouldCreateNewProto(lower_32_bits)) {
          packet_event_map_.erase(it);
          return;
        }
        IncrementStoredProtoCount(lower_32_bits);
        it->second = std::make_unique<AggregatedPacketEvent>();
        it->second->set_relative_rtp_timestamp(lower_32_bits);
      }
      base_packet_event_proto = GetNewBasePacketEvent(
          it->second.get(), packet_event.packet_id, packet_event.size);
    } else if (base_packet_event_proto->event_type_size() >=
               kMaxEventsPerProto) {
      // The matching BasePacketEvent is full of events; spill the whole
      // proto to storage and begin a fresh one holding this packet.
      DVLOG(3) << "Too many events in packet " << packet_event.rtp_timestamp
               << ", " << packet_event.packet_id
               << ". Using new packet event proto.";
      AddPacketEventToStorage(std::move(it->second));
      if (!ShouldCreateNewProto(lower_32_bits)) {
        packet_event_map_.erase(it);
        return;
      }
      IncrementStoredProtoCount(lower_32_bits);
      it->second = std::make_unique<AggregatedPacketEvent>();
      it->second->set_relative_rtp_timestamp(lower_32_bits);
      base_packet_event_proto = GetNewBasePacketEvent(
          it->second.get(), packet_event.packet_id, packet_event.size);
    }
  }
  base_packet_event_proto->add_event_type(ToProtoEventType(packet_event.type));
  base_packet_event_proto->add_event_timestamp_ms(
      (packet_event.timestamp - base::TimeTicks()).InMilliseconds());
  // |base_packet_event_proto| could have been created with a receiver event
  // which does not have the packet size and we would need to overwrite it when
  // we see a sender event, which does have the packet size.
  if (packet_event.size > 0) {
    base_packet_event_proto->set_size(packet_event.size);
  }
  // Keep the lookup map small by spilling the oldest entries to storage.
  if (packet_event_map_.size() > kMaxMapSize) {
    TransferPacketEvents(kNumMapEntriesToTransfer);
  }
  DCHECK(packet_event_map_.size() <= kMaxMapSize);
  DCHECK(packet_event_storage_.size() <= max_frames_);
}
// Hands all aggregated events collected so far to the caller (sorted by
// ascending relative RTP timestamp), fills in |metadata|, and resets the
// internal aggregation state.
void EncodingEventSubscriber::GetEventsAndReset(
    LogMetadata* metadata,
    FrameEventList* frame_events,
    PacketEventList* packet_events) {
  DCHECK(thread_checker_.CalledOnValidThread());

  // Flush everything still held in the lookup maps into storage.
  TransferFrameEvents(frame_event_map_.size());
  TransferPacketEvents(packet_event_map_.size());

  // Order the stored protos by ascending relative RTP timestamp.
  const auto by_rtp_timestamp = [](const auto& lhs, const auto& rhs) {
    return lhs->relative_rtp_timestamp() < rhs->relative_rtp_timestamp();
  };
  std::sort(frame_event_storage_.begin(), frame_event_storage_.end(),
            by_rtp_timestamp);
  std::sort(packet_event_storage_.begin(), packet_event_storage_.end(),
            by_rtp_timestamp);

  // Describe the batch being returned.
  metadata->set_is_audio(event_media_type_ == AUDIO_EVENT);
  metadata->set_first_rtp_timestamp(first_rtp_timestamp_.lower_32_bits());
  metadata->set_num_frame_events(frame_event_storage_.size());
  metadata->set_num_packet_events(packet_event_storage_.size());
  metadata->set_reference_timestamp_ms_at_unix_epoch(
      (base::TimeTicks::UnixEpoch() - base::TimeTicks()).InMilliseconds());

  // Hand the storage over to the caller and start a fresh aggregation cycle.
  frame_events->swap(frame_event_storage_);
  packet_events->swap(packet_event_storage_);
  Reset();
}
// Moves up to |max_num_entries| of the smallest-timestamp entries from the
// frame lookup map into the circular storage buffer, then drops them from
// the map.
void EncodingEventSubscriber::TransferFrameEvents(size_t max_num_entries) {
  DCHECK(frame_event_map_.size() >= max_num_entries);
  auto it = frame_event_map_.begin();
  size_t num_moved = 0;
  while (num_moved < max_num_entries && it != frame_event_map_.end()) {
    AddFrameEventToStorage(std::move(it->second));
    ++it;
    ++num_moved;
  }
  frame_event_map_.erase(frame_event_map_.begin(), it);
}
// Moves up to |max_num_entries| of the smallest-timestamp entries from the
// packet lookup map into the circular storage buffer, then drops them from
// the map.
void EncodingEventSubscriber::TransferPacketEvents(size_t max_num_entries) {
  auto it = packet_event_map_.begin();
  size_t num_moved = 0;
  while (num_moved < max_num_entries && it != packet_event_map_.end()) {
    AddPacketEventToStorage(std::move(it->second));
    ++it;
    ++num_moved;
  }
  packet_event_map_.erase(packet_event_map_.begin(), it);
}
// Appends |frame_event_proto| to the frame storage; once the storage holds
// |max_frames_| entries it behaves as a circular buffer, overwriting the
// oldest slot (and releasing that slot's proto-count budget).
void EncodingEventSubscriber::AddFrameEventToStorage(
    std::unique_ptr<AggregatedFrameEvent> frame_event_proto) {
  if (frame_event_storage_.size() < max_frames_) {
    frame_event_storage_.push_back(std::move(frame_event_proto));
  } else {
    auto& oldest_slot = frame_event_storage_[frame_event_storage_index_];
    DecrementStoredProtoCount(oldest_slot->relative_rtp_timestamp());
    oldest_slot = std::move(frame_event_proto);
  }
  // Advance the circular-buffer cursor.
  frame_event_storage_index_ = (frame_event_storage_index_ + 1) % max_frames_;
}
// Appends |packet_event_proto| to the packet storage; once the storage holds
// |max_frames_| entries it behaves as a circular buffer, overwriting the
// oldest slot (and releasing that slot's proto-count budget).
void EncodingEventSubscriber::AddPacketEventToStorage(
    std::unique_ptr<AggregatedPacketEvent> packet_event_proto) {
  if (packet_event_storage_.size() < max_frames_) {
    packet_event_storage_.push_back(std::move(packet_event_proto));
  } else {
    auto& oldest_slot = packet_event_storage_[packet_event_storage_index_];
    DecrementStoredProtoCount(oldest_slot->relative_rtp_timestamp());
    oldest_slot = std::move(packet_event_proto);
  }
  // Advance the circular-buffer cursor.
  packet_event_storage_index_ = (packet_event_storage_index_ + 1) % max_frames_;
}
// Returns true while the frame identified by the given lower-32-bit relative
// RTP timestamp still has budget for another stored proto
// (kMaxProtosPerFrame).
bool EncodingEventSubscriber::ShouldCreateNewProto(
    uint32_t relative_rtp_timestamp_lower_32_bits) const {
  const auto it =
      stored_proto_counts_.find(relative_rtp_timestamp_lower_32_bits);
  const int proto_count =
      (it != stored_proto_counts_.end()) ? it->second : 0;
  DVLOG_IF(2, proto_count >= kMaxProtosPerFrame)
      << relative_rtp_timestamp_lower_32_bits
      << " already reached max number of protos.";
  return proto_count < kMaxProtosPerFrame;
}
// Records that one more proto has been stored for the given frame; callers
// must have consulted ShouldCreateNewProto() first, so the count never
// exceeds kMaxProtosPerFrame.
void EncodingEventSubscriber::IncrementStoredProtoCount(
    uint32_t relative_rtp_timestamp_lower_32_bits) {
  int& count = stored_proto_counts_[relative_rtp_timestamp_lower_32_bits];
  ++count;
  DCHECK_LE(count, kMaxProtosPerFrame)
      << relative_rtp_timestamp_lower_32_bits
      << " exceeded max number of event protos.";
}
// Releases one unit of the frame's stored-proto budget, erasing the map
// entry entirely when the count reaches zero.
void EncodingEventSubscriber::DecrementStoredProtoCount(
    uint32_t relative_rtp_timestamp_lower_32_bits) {
  auto it = stored_proto_counts_.find(relative_rtp_timestamp_lower_32_bits);
  CHECK(it != stored_proto_counts_.end(), base::NotFatalUntil::M130)
      << "no event protos for " << relative_rtp_timestamp_lower_32_bits;
  if (--it->second == 0) {
    stored_proto_counts_.erase(it);
  }
}
// Returns |rtp_timestamp| relative to the first timestamp seen since the
// last Reset(), latching that first timestamp as the origin on first use.
RtpTimeDelta EncodingEventSubscriber::GetRelativeRtpTimestamp(
    RtpTimeTicks rtp_timestamp) {
  if (!seen_first_rtp_timestamp_) {
    first_rtp_timestamp_ = rtp_timestamp;
    seen_first_rtp_timestamp_ = true;
  }
  return rtp_timestamp - first_rtp_timestamp_;
}
// Returns the subscriber to its freshly-constructed state: empty maps and
// storage, zeroed circular-buffer cursors, and no first-RTP-timestamp origin.
void EncodingEventSubscriber::Reset() {
  // Lookup maps and stored protos.
  frame_event_map_.clear();
  packet_event_map_.clear();
  frame_event_storage_.clear();
  packet_event_storage_.clear();

  // Circular-buffer cursors and per-frame proto budgets.
  frame_event_storage_index_ = 0;
  packet_event_storage_index_ = 0;
  stored_proto_counts_.clear();

  // Forget the timestamp origin; the next event re-establishes it.
  first_rtp_timestamp_ = RtpTimeTicks();
  seen_first_rtp_timestamp_ = false;
}
} // namespace cast
} // namespace media

@ -1,136 +0,0 @@
// Copyright 2014 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAST_LOGGING_ENCODING_EVENT_SUBSCRIBER_H_
#define MEDIA_CAST_LOGGING_ENCODING_EVENT_SUBSCRIBER_H_
#include <stddef.h>
#include <map>
#include <memory>
#include <vector>
#include "base/threading/thread_checker.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/logging/proto/raw_events.pb.h"
#include "media/cast/logging/raw_event_subscriber.h"
namespace media {
namespace cast {
// Number of packets per frame recorded by the subscriber.
// Once the max number of packets has been reached, a new aggregated proto
// will be created.
constexpr int kMaxPacketsPerFrame = 256;

// Number of events per frame/packet proto recorded by the subscriber.
// Once the max number of events has been reached, a new aggregated proto
// will be created.
constexpr int kMaxEventsPerProto = 16;

// Max number of AggregatedFrameEvent / AggregatedPacketEvent protos stored for
// a frame. Once the max number of protos has been reached for that frame,
// further events for that frame will be dropped.
constexpr int kMaxProtosPerFrame = 10;

// Owning, RTP-timestamp-sortable collections of aggregated event protos.
using FrameEventList =
    std::vector<std::unique_ptr<proto::AggregatedFrameEvent>>;
using PacketEventList =
    std::vector<std::unique_ptr<proto::AggregatedPacketEvent>>;
// A RawEventSubscriber implementation that subscribes to events,
// encodes them in protocol buffer format, and aggregates them into a more
// compact structure. Aggregation is per-frame, and uses a map with RTP
// timestamp as key. Periodically, old entries in the map will be transferred
// to a storage vector. This helps keep the size of the map small and
// lookup times fast. The storage itself is a circular buffer that will
// overwrite old entries once it has reached the size configured by user.
class EncodingEventSubscriber final : public RawEventSubscriber {
 public:
  // |event_media_type|: The subscriber will only process events that
  // corresponds to this type.
  // |max_frames|: How many events to keep in the frame / packet storage.
  // This helps keep memory usage bounded.
  // Every time one of |OnReceive[Frame,Packet]Event()| is
  // called, it will check if the respective map size has exceeded |max_frames|.
  // If so, it will remove the oldest aggregated entry (ordered by RTP
  // timestamp).
  EncodingEventSubscriber(EventMediaType event_media_type, size_t max_frames);

  EncodingEventSubscriber(const EncodingEventSubscriber&) = delete;
  EncodingEventSubscriber& operator=(const EncodingEventSubscriber&) = delete;

  ~EncodingEventSubscriber() final;

  // RawEventSubscriber implementations.
  void OnReceiveFrameEvent(const FrameEvent& frame_event) final;
  void OnReceivePacketEvent(const PacketEvent& packet_event) final;

  // Assigns frame events and packet events received so far to |frame_events|
  // and |packet_events| and resets the internal state.
  // In addition, assign metadata associated with these events to |metadata|.
  // The protos in |frame_events| and |packets_events| are sorted in
  // ascending RTP timestamp order.
  void GetEventsAndReset(proto::LogMetadata* metadata,
                         FrameEventList* frame_events,
                         PacketEventList* packet_events);

 private:
  using FrameEventMap =
      std::map<RtpTimeDelta, std::unique_ptr<proto::AggregatedFrameEvent>>;
  using PacketEventMap =
      std::map<RtpTimeDelta, std::unique_ptr<proto::AggregatedPacketEvent>>;

  // Transfer up to |max_num_entries| smallest entries from |frame_event_map_|
  // to |frame_event_storage_|. This helps keep size of |frame_event_map_| small
  // and lookup speed fast.
  void TransferFrameEvents(size_t max_num_entries);
  // See above.
  void TransferPacketEvents(size_t max_num_entries);

  // Appends to the circular storage buffers; once they hold |max_frames_|
  // entries, the oldest slot is overwritten instead.
  void AddFrameEventToStorage(
      std::unique_ptr<proto::AggregatedFrameEvent> frame_event_proto);
  void AddPacketEventToStorage(
      std::unique_ptr<proto::AggregatedPacketEvent> packet_event_proto);

  // Returns whether another proto may still be stored for the frame
  // identified by |relative_rtp_timestamp_lower_32_bits| (limited by
  // kMaxProtosPerFrame), and bookkeeping for that per-frame budget.
  bool ShouldCreateNewProto(
      uint32_t relative_rtp_timestamp_lower_32_bits) const;
  void IncrementStoredProtoCount(uint32_t relative_rtp_timestamp_lower_32_bits);
  void DecrementStoredProtoCount(uint32_t relative_rtp_timestamp_lower_32_bits);

  // Returns the difference between |rtp_timestamp| and |first_rtp_timestamp_|.
  // Sets |first_rtp_timestamp_| if it is not already set.
  RtpTimeDelta GetRelativeRtpTimestamp(RtpTimeTicks rtp_timestamp);

  // Clears the maps and first RTP timestamp seen.
  void Reset();

  const EventMediaType event_media_type_;
  const size_t max_frames_;

  FrameEventMap frame_event_map_;
  FrameEventList frame_event_storage_;
  // Index of the oldest slot in |frame_event_storage_| (the next one to be
  // overwritten once the storage is full).
  int frame_event_storage_index_ = 0;

  PacketEventMap packet_event_map_;
  PacketEventList packet_event_storage_;
  int packet_event_storage_index_ = 0;

  // Maps from the lower 32 bits of a RTP timestamp to the number of
  // AggregatedFrameEvent / AggregatedPacketEvent protos that have been stored
  // for that frame.
  std::map<uint32_t, int> stored_proto_counts_;

  // All functions must be called on the main thread.
  base::ThreadChecker thread_checker_;

  // Set to true on first event encountered after a |Reset()|.
  bool seen_first_rtp_timestamp_ = false;

  // Set to RTP timestamp of first event encountered after a |Reset()|.
  RtpTimeTicks first_rtp_timestamp_;
};
} // namespace cast
} // namespace media
#endif // MEDIA_CAST_LOGGING_ENCODING_EVENT_SUBSCRIBER_H_

@ -1,849 +0,0 @@
// Copyright 2014 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/logging/encoding_event_subscriber.h"
#include <stdint.h>
#include <memory>
#include <utility>
#include "base/memory/ref_counted.h"
#include "base/test/simple_test_tick_clock.h"
#include "base/time/tick_clock.h"
#include "media/base/fake_single_thread_task_runner.h"
#include "media/cast/cast_environment.h"
#include "media/cast/logging/logging_defines.h"
#include "testing/gtest/include/gtest/gtest.h"
using media::cast::proto::AggregatedFrameEvent;
using media::cast::proto::AggregatedPacketEvent;
using media::cast::proto::BasePacketEvent;
using media::cast::proto::LogMetadata;
namespace {

// Converts an absolute tick value to milliseconds since the TimeTicks
// origin, matching the encoding used in the event protos.
int64_t InMilliseconds(base::TimeTicks event_time) {
  const base::TimeDelta since_origin = event_time - base::TimeTicks();
  return since_origin.InMilliseconds();
}

}  // namespace
namespace media {
namespace cast {
// Test fixture that wires an EncodingEventSubscriber (created by Init())
// into a CastEnvironment logger backed by a fake task runner and test clock.
class EncodingEventSubscriberTest : public ::testing::Test {
 protected:
  EncodingEventSubscriberTest()
      : task_runner_(new FakeSingleThreadTaskRunner(&testing_clock_)),
        cast_environment_(new CastEnvironment(&testing_clock_,
                                              task_runner_,
                                              task_runner_,
                                              task_runner_)) {}

  // Creates the subscriber under test with a storage limit of 10 frames and
  // subscribes it to the environment's logger. Call exactly once per test.
  void Init(EventMediaType event_media_type) {
    DCHECK(!event_subscriber_);
    event_subscriber_ =
        std::make_unique<EncodingEventSubscriber>(event_media_type, 10);
    cast_environment_->logger()->Subscribe(event_subscriber_.get());
  }

  ~EncodingEventSubscriberTest() override {
    if (event_subscriber_) {
      cast_environment_->logger()->Unsubscribe(event_subscriber_.get());
    }
  }

  // Collects everything aggregated so far into |metadata_|, |frame_events_|
  // and |packet_events_|, and records the first RTP timestamp seen.
  void GetEventsAndReset() {
    event_subscriber_->GetEventsAndReset(&metadata_, &frame_events_,
                                         &packet_events_);
    first_rtp_timestamp_ =
        RtpTimeTicks().Expand(metadata_.first_rtp_timestamp());
  }

  base::SimpleTestTickClock testing_clock_;
  scoped_refptr<FakeSingleThreadTaskRunner> task_runner_;
  scoped_refptr<CastEnvironment> cast_environment_;
  std::unique_ptr<EncodingEventSubscriber> event_subscriber_;
  FrameEventList frame_events_;
  PacketEventList packet_events_;
  LogMetadata metadata_;
  RtpTimeTicks first_rtp_timestamp_;
};
// Dispatches frame events for 11 distinct RTP timestamps; with a storage
// limit of 10, the oldest frame (timestamp 0) must be evicted.
TEST_F(EncodingEventSubscriberTest, FrameEventTruncating) {
  Init(VIDEO_EVENT);

  base::TimeTicks now(testing_clock_.NowTicks());

  // Entry with RTP timestamp 0 should get dropped.
  int width = 320;
  int height = 180;
  for (int i = 0; i < 11; i++) {
    auto capture_begin_event = std::make_unique<FrameEvent>();
    capture_begin_event->timestamp = now;
    capture_begin_event->type = FRAME_CAPTURE_BEGIN;
    capture_begin_event->media_type = VIDEO_EVENT;
    capture_begin_event->rtp_timestamp =
        RtpTimeTicks().Expand<uint32_t>(i * 100);
    cast_environment_->logger()->DispatchFrameEvent(
        std::move(capture_begin_event));

    auto capture_end_event = std::make_unique<FrameEvent>();
    capture_end_event->timestamp = now;
    capture_end_event->type = FRAME_CAPTURE_END;
    capture_end_event->media_type = VIDEO_EVENT;
    capture_end_event->rtp_timestamp = RtpTimeTicks().Expand<uint32_t>(i * 100);
    capture_end_event->width = width;
    capture_end_event->height = height;
    cast_environment_->logger()->DispatchFrameEvent(
        std::move(capture_end_event));

    auto decoded_event = std::make_unique<FrameEvent>();
    decoded_event->timestamp = now;
    decoded_event->type = FRAME_DECODED;
    decoded_event->media_type = VIDEO_EVENT;
    decoded_event->rtp_timestamp = RtpTimeTicks().Expand<uint32_t>(i * 100);
    decoded_event->frame_id = FrameId::first();
    cast_environment_->logger()->DispatchFrameEvent(std::move(decoded_event));

    // Each successive frame reports a larger resolution.
    width += 160;
    height += 90;
  }

  GetEventsAndReset();

  ASSERT_EQ(10u, frame_events_.size());
  EXPECT_EQ(100u, frame_events_.front()->relative_rtp_timestamp());
  EXPECT_EQ(1000u, frame_events_.back()->relative_rtp_timestamp());

  // The surviving frames are i == 1..10, so the first stored resolution is
  // 480x270 (one increment past the initial 320x180).
  width = 320;
  height = 180;
  for (const auto& event : frame_events_) {
    width += 160;
    height += 90;
    EXPECT_EQ(width, event->width());
    EXPECT_EQ(height, event->height());
  }
}
// Dispatches packet events for 11 distinct RTP timestamps; with a storage
// limit of 10 frames, the entry with the smallest RTP timestamp (0) must be
// evicted.
TEST_F(EncodingEventSubscriberTest, PacketEventTruncating) {
  Init(AUDIO_EVENT);

  const base::TimeTicks now = testing_clock_.NowTicks();

  // Entry with RTP timestamp 0 should get dropped.
  for (int frame = 0; frame < 11; frame++) {
    auto event = std::make_unique<PacketEvent>();
    event->timestamp = now;
    event->type = PACKET_RECEIVED;
    event->media_type = AUDIO_EVENT;
    event->rtp_timestamp = RtpTimeTicks().Expand<uint32_t>(frame * 100);
    event->frame_id = FrameId::first();
    event->packet_id = frame;
    event->max_packet_id = 10;
    event->size = 123;
    cast_environment_->logger()->DispatchPacketEvent(std::move(event));
  }

  GetEventsAndReset();

  ASSERT_EQ(10u, packet_events_.size());
  EXPECT_EQ(100u, packet_events_.front()->relative_rtp_timestamp());
  EXPECT_EQ(1000u, packet_events_.back()->relative_rtp_timestamp());
}
// Fills the per-frame proto budget (kMaxProtosPerFrame) for RTP timestamp 0
// with a mix of frame-event and packet-event protos, then verifies that one
// further event for that frame is dropped.
TEST_F(EncodingEventSubscriberTest, TooManyProtos) {
  Init(VIDEO_EVENT);
  size_t num_frame_event_protos = 3;
  size_t num_packet_event_protos = kMaxProtosPerFrame - num_frame_event_protos;
  base::TimeTicks now(testing_clock_.NowTicks());

  // Each outer iteration fills one AggregatedFrameEvent proto to capacity
  // (kMaxEventsPerProto events).
  for (size_t i = 0; i < num_frame_event_protos; i++) {
    for (int j = 0; j < kMaxEventsPerProto; j++) {
      auto capture_begin_event = std::make_unique<FrameEvent>();
      capture_begin_event->timestamp = now;
      capture_begin_event->type = FRAME_CAPTURE_BEGIN;
      capture_begin_event->media_type = VIDEO_EVENT;
      capture_begin_event->rtp_timestamp = RtpTimeTicks();
      cast_environment_->logger()->DispatchFrameEvent(
          std::move(capture_begin_event));
    }
  }

  // Fill the remaining budget with full AggregatedPacketEvent protos.
  for (size_t i = 0; i < num_packet_event_protos; i++) {
    for (int j = 0; j < kMaxEventsPerProto; j++) {
      auto receive_event = std::make_unique<PacketEvent>();
      receive_event->timestamp = now;
      receive_event->type = PACKET_RECEIVED;
      receive_event->media_type = VIDEO_EVENT;
      receive_event->rtp_timestamp = RtpTimeTicks();
      receive_event->frame_id = FrameId::first();
      receive_event->packet_id = 0;
      receive_event->max_packet_id = 10;
      receive_event->size = 123;
      cast_environment_->logger()->DispatchPacketEvent(
          std::move(receive_event));
    }
  }

  // The budget for this frame is exhausted, so this event should be dropped.
  auto capture_begin_event = std::make_unique<FrameEvent>();
  capture_begin_event->timestamp = now;
  capture_begin_event->type = FRAME_CAPTURE_BEGIN;
  capture_begin_event->media_type = VIDEO_EVENT;
  capture_begin_event->rtp_timestamp = RtpTimeTicks();
  cast_environment_->logger()->DispatchFrameEvent(
      std::move(capture_begin_event));

  GetEventsAndReset();
  EXPECT_EQ(num_frame_event_protos, frame_events_.size());
  EXPECT_EQ(num_packet_event_protos, packet_events_.size());
}
// Verifies that a subscriber configured for VIDEO_EVENT ignores audio events.
TEST_F(EncodingEventSubscriberTest, EventFiltering) {
  Init(VIDEO_EVENT);

  base::TimeTicks now(testing_clock_.NowTicks());
  RtpTimeTicks rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));
  auto video_event = std::make_unique<FrameEvent>();
  video_event->timestamp = now;
  video_event->type = FRAME_DECODED;
  video_event->media_type = VIDEO_EVENT;
  video_event->rtp_timestamp = rtp_timestamp;
  video_event->frame_id = FrameId::first();
  cast_environment_->logger()->DispatchFrameEvent(std::move(video_event));

  // This is an AUDIO_EVENT and shouldn't be processed by the subscriber.
  auto audio_event = std::make_unique<FrameEvent>();
  audio_event->timestamp = now;
  audio_event->type = FRAME_DECODED;
  audio_event->media_type = AUDIO_EVENT;
  audio_event->rtp_timestamp = rtp_timestamp;
  audio_event->frame_id = FrameId::first();
  cast_environment_->logger()->DispatchFrameEvent(std::move(audio_event));

  GetEventsAndReset();

  // Only the video event should have been aggregated.
  ASSERT_EQ(1u, frame_events_.size());
  auto it = frame_events_.begin();

  const AggregatedFrameEvent* frame_event = it->get();

  ASSERT_EQ(1, frame_event->event_type_size());
  EXPECT_EQ(media::cast::proto::FRAME_DECODED, frame_event->event_type(0));

  GetEventsAndReset();

  EXPECT_TRUE(packet_events_.empty());
}
// Verifies that a single decode event is aggregated into one proto with the
// expected relative timestamp, type and wall-clock time, and that state is
// cleared after GetEventsAndReset().
TEST_F(EncodingEventSubscriberTest, FrameEvent) {
  Init(VIDEO_EVENT);
  base::TimeTicks now(testing_clock_.NowTicks());
  RtpTimeTicks rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));
  auto decode_event = std::make_unique<FrameEvent>();
  decode_event->timestamp = now;
  decode_event->type = FRAME_DECODED;
  decode_event->media_type = VIDEO_EVENT;
  decode_event->rtp_timestamp = rtp_timestamp;
  decode_event->frame_id = FrameId::first();
  cast_environment_->logger()->DispatchFrameEvent(std::move(decode_event));

  GetEventsAndReset();

  ASSERT_EQ(1u, frame_events_.size());

  auto it = frame_events_.begin();

  const AggregatedFrameEvent* event = it->get();
  EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
            event->relative_rtp_timestamp());

  ASSERT_EQ(1, event->event_type_size());
  EXPECT_EQ(media::cast::proto::FRAME_DECODED, event->event_type(0));

  ASSERT_EQ(1, event->event_timestamp_ms_size());
  EXPECT_EQ(InMilliseconds(now), event->event_timestamp_ms(0));

  // Fields not populated by a decode event stay at their defaults.
  EXPECT_EQ(0, event->encoded_frame_size());
  EXPECT_EQ(0, event->delay_millis());

  // State was reset; a second query returns nothing.
  GetEventsAndReset();
  EXPECT_TRUE(frame_events_.empty());
}
// Verifies that FRAME_PLAYOUT events record the playout delay, and that
// video-only fields such as key_frame stay unset for audio.
TEST_F(EncodingEventSubscriberTest, FrameEventDelay) {
  Init(AUDIO_EVENT);
  base::TimeTicks now(testing_clock_.NowTicks());
  RtpTimeTicks rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));
  int delay_ms = 100;
  auto playout_event = std::make_unique<FrameEvent>();
  playout_event->timestamp = now;
  playout_event->type = FRAME_PLAYOUT;
  playout_event->media_type = AUDIO_EVENT;
  playout_event->rtp_timestamp = rtp_timestamp;
  playout_event->frame_id = FrameId::first();
  playout_event->delay_delta = base::Milliseconds(delay_ms);
  cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));

  GetEventsAndReset();

  ASSERT_EQ(1u, frame_events_.size());

  auto it = frame_events_.begin();

  const AggregatedFrameEvent* event = it->get();
  EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
            event->relative_rtp_timestamp());

  ASSERT_EQ(1, event->event_type_size());
  EXPECT_EQ(media::cast::proto::FRAME_PLAYOUT, event->event_type(0));

  ASSERT_EQ(1, event->event_timestamp_ms_size());
  EXPECT_EQ(InMilliseconds(now), event->event_timestamp_ms(0));

  EXPECT_EQ(0, event->encoded_frame_size());
  EXPECT_EQ(100, event->delay_millis());
  EXPECT_FALSE(event->has_key_frame());
}
// Verifies that a video FRAME_ENCODED event records size, key-frame flag,
// target bitrate, and the utilization values rounded to whole percentages.
TEST_F(EncodingEventSubscriberTest, FrameEventSize) {
  Init(VIDEO_EVENT);
  base::TimeTicks now(testing_clock_.NowTicks());
  RtpTimeTicks rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));
  int size = 123;
  bool key_frame = true;
  int target_bitrate = 1024;
  double encoder_cpu_utilization = 0.90;
  double idealized_bitrate_utilization = 0.42;
  auto encode_event = std::make_unique<FrameEvent>();
  encode_event->timestamp = now;
  encode_event->type = FRAME_ENCODED;
  encode_event->media_type = VIDEO_EVENT;
  encode_event->rtp_timestamp = rtp_timestamp;
  encode_event->frame_id = FrameId::first();
  encode_event->size = size;
  encode_event->key_frame = key_frame;
  encode_event->target_bitrate = target_bitrate;
  encode_event->encoder_cpu_utilization = encoder_cpu_utilization;
  encode_event->idealized_bitrate_utilization = idealized_bitrate_utilization;
  cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));

  GetEventsAndReset();

  ASSERT_EQ(1u, frame_events_.size());

  auto it = frame_events_.begin();

  const AggregatedFrameEvent* event = it->get();
  EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
            event->relative_rtp_timestamp());

  ASSERT_EQ(1, event->event_type_size());
  EXPECT_EQ(media::cast::proto::FRAME_ENCODED, event->event_type(0));

  ASSERT_EQ(1, event->event_timestamp_ms_size());
  EXPECT_EQ(InMilliseconds(now), event->event_timestamp_ms(0));

  EXPECT_EQ(size, event->encoded_frame_size());
  EXPECT_EQ(0, event->delay_millis());
  EXPECT_TRUE(event->has_key_frame());
  EXPECT_EQ(key_frame, event->key_frame());
  EXPECT_EQ(target_bitrate, event->target_bitrate());
  // 0.90 -> 90%, 0.42 -> 42% (rounded to nearest integer percent).
  EXPECT_EQ(90, event->encoder_cpu_percent_utilized());
  EXPECT_EQ(42, event->idealized_bitrate_percent_utilized());
}
// Verifies that events for the same frame merge into one proto (in dispatch
// order) while events for a different frame get a separate proto.
TEST_F(EncodingEventSubscriberTest, MultipleFrameEvents) {
  Init(AUDIO_EVENT);
  RtpTimeTicks rtp_timestamp1 = RtpTimeTicks().Expand(UINT32_C(100));
  RtpTimeTicks rtp_timestamp2 = rtp_timestamp1.Expand(UINT32_C(200));

  // Playout event for frame 1 at t = now1.
  base::TimeTicks now1(testing_clock_.NowTicks());
  auto playout_event = std::make_unique<FrameEvent>();
  playout_event->timestamp = now1;
  playout_event->type = FRAME_PLAYOUT;
  playout_event->media_type = AUDIO_EVENT;
  playout_event->rtp_timestamp = rtp_timestamp1;
  playout_event->frame_id = FrameId::first();
  playout_event->delay_delta = base::Milliseconds(100);
  cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));

  task_runner_->Sleep(base::Milliseconds(20));

  // Encode event for frame 2 at t = now2.
  base::TimeTicks now2(testing_clock_.NowTicks());
  auto encode_event = std::make_unique<FrameEvent>();
  encode_event->timestamp = now2;
  encode_event->type = FRAME_ENCODED;
  encode_event->media_type = AUDIO_EVENT;
  encode_event->rtp_timestamp = rtp_timestamp2;
  encode_event->frame_id = FrameId::first();
  encode_event->size = 123;
  encode_event->encoder_cpu_utilization = 0.44;
  encode_event->idealized_bitrate_utilization = 0.55;
  cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));

  task_runner_->Sleep(base::Milliseconds(20));

  // Decode event for frame 1 at t = now3; merges into frame 1's proto.
  base::TimeTicks now3(testing_clock_.NowTicks());
  auto decode_event = std::make_unique<FrameEvent>();
  decode_event->timestamp = now3;
  decode_event->type = FRAME_DECODED;
  decode_event->media_type = AUDIO_EVENT;
  decode_event->rtp_timestamp = rtp_timestamp1;
  decode_event->frame_id = FrameId::first();
  cast_environment_->logger()->DispatchFrameEvent(std::move(decode_event));

  GetEventsAndReset();

  ASSERT_EQ(2u, frame_events_.size());

  auto it = frame_events_.begin();
  {
    // Frame 1 holds the playout and decode events in dispatch order.
    const AggregatedFrameEvent* event = it->get();
    EXPECT_EQ((rtp_timestamp1 - first_rtp_timestamp_).lower_32_bits(),
              event->relative_rtp_timestamp());

    ASSERT_EQ(2, event->event_type_size());
    EXPECT_EQ(media::cast::proto::FRAME_PLAYOUT, event->event_type(0));
    EXPECT_EQ(media::cast::proto::FRAME_DECODED, event->event_type(1));

    ASSERT_EQ(2, event->event_timestamp_ms_size());
    EXPECT_EQ(InMilliseconds(now1), event->event_timestamp_ms(0));
    EXPECT_EQ(InMilliseconds(now3), event->event_timestamp_ms(1));

    EXPECT_FALSE(event->has_key_frame());
  }

  ++it;
  {
    // Frame 2 holds only the encode event, with utilization percentages.
    const AggregatedFrameEvent* event = it->get();
    EXPECT_EQ((rtp_timestamp2 - first_rtp_timestamp_).lower_32_bits(),
              event->relative_rtp_timestamp());

    ASSERT_EQ(1, event->event_type_size());
    EXPECT_EQ(media::cast::proto::FRAME_ENCODED, event->event_type(0));

    ASSERT_EQ(1, event->event_timestamp_ms_size());
    EXPECT_EQ(InMilliseconds(now2), event->event_timestamp_ms(0));
    EXPECT_FALSE(event->has_key_frame());
    EXPECT_EQ(44, event->encoder_cpu_percent_utilized());
    EXPECT_EQ(55, event->idealized_bitrate_percent_utilized());
  }
}
// Verifies that a single packet receive event is aggregated into one
// AggregatedPacketEvent holding one BasePacketEvent, and that state is
// cleared after GetEventsAndReset().
TEST_F(EncodingEventSubscriberTest, PacketEvent) {
  Init(AUDIO_EVENT);
  base::TimeTicks now(testing_clock_.NowTicks());
  RtpTimeTicks rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));
  int packet_id = 2;
  int size = 100;
  auto receive_event = std::make_unique<PacketEvent>();
  receive_event->timestamp = now;
  receive_event->type = PACKET_RECEIVED;
  receive_event->media_type = AUDIO_EVENT;
  receive_event->rtp_timestamp = rtp_timestamp;
  receive_event->frame_id = FrameId::first();
  receive_event->packet_id = packet_id;
  receive_event->max_packet_id = 10;
  receive_event->size = size;
  cast_environment_->logger()->DispatchPacketEvent(std::move(receive_event));

  GetEventsAndReset();

  ASSERT_EQ(1u, packet_events_.size());

  auto it = packet_events_.begin();

  const AggregatedPacketEvent* event = it->get();
  EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
            event->relative_rtp_timestamp());

  ASSERT_EQ(1, event->base_packet_event_size());
  const BasePacketEvent& base_event = event->base_packet_event(0);
  EXPECT_EQ(packet_id, base_event.packet_id());
  ASSERT_EQ(1, base_event.event_type_size());
  EXPECT_EQ(media::cast::proto::PACKET_RECEIVED, base_event.event_type(0));
  ASSERT_EQ(1, base_event.event_timestamp_ms_size());
  EXPECT_EQ(InMilliseconds(now), base_event.event_timestamp_ms(0));
  EXPECT_EQ(size, base_event.size());

  // State was reset; a second query returns nothing.
  GetEventsAndReset();
  EXPECT_TRUE(packet_events_.empty());
}
// Verifies that two events for the same packet (send, then retransmit) are
// merged into a single BasePacketEvent with both types and timestamps.
TEST_F(EncodingEventSubscriberTest, MultiplePacketEventsForPacket) {
  Init(VIDEO_EVENT);
  base::TimeTicks now1(testing_clock_.NowTicks());
  RtpTimeTicks rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));
  int packet_id = 2;
  int size = 100;
  // First event: the packet is sent to the network at t = now1.
  auto send_event = std::make_unique<PacketEvent>();
  send_event->timestamp = now1;
  send_event->type = PACKET_SENT_TO_NETWORK;
  send_event->media_type = VIDEO_EVENT;
  send_event->rtp_timestamp = rtp_timestamp;
  send_event->frame_id = FrameId::first();
  send_event->packet_id = packet_id;
  send_event->max_packet_id = 10;
  send_event->size = size;
  cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));

  task_runner_->Sleep(base::Milliseconds(20));

  // Second event: the same packet is retransmitted at t = now2.
  base::TimeTicks now2(testing_clock_.NowTicks());
  auto retransmit_event = std::make_unique<PacketEvent>();
  retransmit_event->timestamp = now2;
  retransmit_event->type = PACKET_RETRANSMITTED;
  retransmit_event->media_type = VIDEO_EVENT;
  retransmit_event->rtp_timestamp = rtp_timestamp;
  retransmit_event->frame_id = FrameId::first();
  retransmit_event->packet_id = packet_id;
  retransmit_event->max_packet_id = 10;
  retransmit_event->size = size;
  cast_environment_->logger()->DispatchPacketEvent(std::move(retransmit_event));

  GetEventsAndReset();

  ASSERT_EQ(1u, packet_events_.size());

  auto it = packet_events_.begin();

  const AggregatedPacketEvent* event = it->get();
  EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
            event->relative_rtp_timestamp());

  ASSERT_EQ(1, event->base_packet_event_size());
  const BasePacketEvent& base_event = event->base_packet_event(0);
  EXPECT_EQ(packet_id, base_event.packet_id());
  ASSERT_EQ(2, base_event.event_type_size());
  EXPECT_EQ(media::cast::proto::PACKET_SENT_TO_NETWORK,
            base_event.event_type(0));
  EXPECT_EQ(media::cast::proto::PACKET_RETRANSMITTED, base_event.event_type(1));
  ASSERT_EQ(2, base_event.event_timestamp_ms_size());
  EXPECT_EQ(InMilliseconds(now1), base_event.event_timestamp_ms(0));
  EXPECT_EQ(InMilliseconds(now2), base_event.event_timestamp_ms(1));
}
// Verifies that events for two different packets of the same frame are
// aggregated into one AggregatedPacketEvent with two BasePacketEvents.
TEST_F(EncodingEventSubscriberTest, MultiplePacketEventsForFrame) {
  Init(VIDEO_EVENT);
  const RtpTimeTicks rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));
  const int packet_id_1 = 2;
  const int packet_id_2 = 3;
  const int size = 100;

  // Builds a PacketEvent for the shared frame; only the packet id, event
  // type, and timestamp vary between the two dispatches below.
  auto make_packet_event = [&](auto type, base::TimeTicks when,
                               int packet_id) {
    auto event = std::make_unique<PacketEvent>();
    event->timestamp = when;
    event->type = type;
    event->media_type = VIDEO_EVENT;
    event->rtp_timestamp = rtp_timestamp;
    event->frame_id = FrameId::first();
    event->packet_id = packet_id;
    event->max_packet_id = 10;
    event->size = size;
    return event;
  };

  const base::TimeTicks now1(testing_clock_.NowTicks());
  cast_environment_->logger()->DispatchPacketEvent(
      make_packet_event(PACKET_SENT_TO_NETWORK, now1, packet_id_1));

  task_runner_->Sleep(base::Milliseconds(20));
  const base::TimeTicks now2(testing_clock_.NowTicks());
  cast_environment_->logger()->DispatchPacketEvent(
      make_packet_event(PACKET_RETRANSMITTED, now2, packet_id_2));

  GetEventsAndReset();

  ASSERT_EQ(1u, packet_events_.size());
  auto it = packet_events_.begin();
  const AggregatedPacketEvent* event = it->get();
  EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
            event->relative_rtp_timestamp());
  ASSERT_EQ(2, event->base_packet_event_size());

  // First packet: the send event.
  const BasePacketEvent& base_event = event->base_packet_event(0);
  EXPECT_EQ(packet_id_1, base_event.packet_id());
  ASSERT_EQ(1, base_event.event_type_size());
  EXPECT_EQ(media::cast::proto::PACKET_SENT_TO_NETWORK,
            base_event.event_type(0));
  ASSERT_EQ(1, base_event.event_timestamp_ms_size());
  EXPECT_EQ(InMilliseconds(now1), base_event.event_timestamp_ms(0));

  // Second packet: the retransmit event.
  const BasePacketEvent& base_event_2 = event->base_packet_event(1);
  EXPECT_EQ(packet_id_2, base_event_2.packet_id());
  ASSERT_EQ(1, base_event_2.event_type_size());
  EXPECT_EQ(media::cast::proto::PACKET_RETRANSMITTED,
            base_event_2.event_type(0));
  ASSERT_EQ(1, base_event_2.event_timestamp_ms_size());
  EXPECT_EQ(InMilliseconds(now2), base_event_2.event_timestamp_ms(0));
}
// Verifies that packet events with different RTP timestamps produce separate
// AggregatedPacketEvents, ordered by RTP timestamp.
TEST_F(EncodingEventSubscriberTest, MultiplePacketEvents) {
  Init(VIDEO_EVENT);
  const RtpTimeTicks rtp_timestamp_1 = RtpTimeTicks().Expand(UINT32_C(100));
  const RtpTimeTicks rtp_timestamp_2 = rtp_timestamp_1.Expand(UINT32_C(200));
  const int packet_id_1 = 2;
  const int packet_id_2 = 3;
  const int size = 100;

  // Builds a PacketEvent; the two dispatches below differ in event type,
  // timestamp, RTP timestamp, and packet id.
  auto make_packet_event = [&](auto type, base::TimeTicks when,
                               RtpTimeTicks rtp_timestamp, int packet_id) {
    auto event = std::make_unique<PacketEvent>();
    event->timestamp = when;
    event->type = type;
    event->media_type = VIDEO_EVENT;
    event->rtp_timestamp = rtp_timestamp;
    event->frame_id = FrameId::first();
    event->packet_id = packet_id;
    event->max_packet_id = 10;
    event->size = size;
    return event;
  };

  const base::TimeTicks now1(testing_clock_.NowTicks());
  cast_environment_->logger()->DispatchPacketEvent(make_packet_event(
      PACKET_SENT_TO_NETWORK, now1, rtp_timestamp_1, packet_id_1));

  task_runner_->Sleep(base::Milliseconds(20));
  const base::TimeTicks now2(testing_clock_.NowTicks());
  cast_environment_->logger()->DispatchPacketEvent(make_packet_event(
      PACKET_RETRANSMITTED, now2, rtp_timestamp_2, packet_id_2));

  GetEventsAndReset();

  ASSERT_EQ(2u, packet_events_.size());
  auto it = packet_events_.begin();
  {
    const AggregatedPacketEvent* event = it->get();
    EXPECT_EQ((rtp_timestamp_1 - first_rtp_timestamp_).lower_32_bits(),
              event->relative_rtp_timestamp());
    ASSERT_EQ(1, event->base_packet_event_size());
    const BasePacketEvent& base_event = event->base_packet_event(0);
    EXPECT_EQ(packet_id_1, base_event.packet_id());
    ASSERT_EQ(1, base_event.event_type_size());
    EXPECT_EQ(media::cast::proto::PACKET_SENT_TO_NETWORK,
              base_event.event_type(0));
    ASSERT_EQ(1, base_event.event_timestamp_ms_size());
    EXPECT_EQ(InMilliseconds(now1), base_event.event_timestamp_ms(0));
  }
  ++it;
  ASSERT_TRUE(it != packet_events_.end());
  {
    const AggregatedPacketEvent* event = it->get();
    EXPECT_EQ((rtp_timestamp_2 - first_rtp_timestamp_).lower_32_bits(),
              event->relative_rtp_timestamp());
    ASSERT_EQ(1, event->base_packet_event_size());
    const BasePacketEvent& base_event_2 = event->base_packet_event(0);
    EXPECT_EQ(packet_id_2, base_event_2.packet_id());
    ASSERT_EQ(1, base_event_2.event_type_size());
    EXPECT_EQ(media::cast::proto::PACKET_RETRANSMITTED,
              base_event_2.event_type(0));
    ASSERT_EQ(1, base_event_2.event_timestamp_ms_size());
    EXPECT_EQ(InMilliseconds(now2), base_event_2.event_timestamp_ms(0));
  }
}
// Verifies that the first dispatched event establishes the RTP timestamp
// baseline, and that the baseline is re-established after a reset.
TEST_F(EncodingEventSubscriberTest, FirstRtpTimeTicks) {
  Init(VIDEO_EVENT);
  RtpTimeTicks rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(12345));
  const base::TimeTicks now(testing_clock_.NowTicks());

  // Dispatches a FRAME_CAPTURE_BEGIN carrying the current `rtp_timestamp`
  // (read at call time, so a later reassignment is picked up).
  auto dispatch_capture_begin = [&]() {
    auto event = std::make_unique<FrameEvent>();
    event->timestamp = now;
    event->type = FRAME_CAPTURE_BEGIN;
    event->media_type = VIDEO_EVENT;
    event->rtp_timestamp = rtp_timestamp;
    cast_environment_->logger()->DispatchFrameEvent(std::move(event));
  };

  dispatch_capture_begin();

  auto capture_end_event = std::make_unique<FrameEvent>();
  capture_end_event->timestamp = now;
  capture_end_event->type = FRAME_CAPTURE_END;
  capture_end_event->media_type = VIDEO_EVENT;
  capture_end_event->rtp_timestamp =
      rtp_timestamp + RtpTimeDelta::FromTicks(30);
  capture_end_event->width = 1280;
  capture_end_event->height = 720;
  cast_environment_->logger()->DispatchFrameEvent(std::move(capture_end_event));

  GetEventsAndReset();

  // The first event's RTP timestamp becomes the baseline; relative
  // timestamps are offsets from it.
  EXPECT_EQ(rtp_timestamp, first_rtp_timestamp_);
  auto it = frame_events_.begin();
  ASSERT_NE(frame_events_.end(), it);
  EXPECT_EQ(0u, (*it)->relative_rtp_timestamp());

  ++it;
  ASSERT_NE(frame_events_.end(), it);
  EXPECT_EQ(30u, (*it)->relative_rtp_timestamp());
  EXPECT_EQ(1280, (*it)->width());
  EXPECT_EQ(720, (*it)->height());

  // After the reset, the next first event re-establishes the baseline.
  rtp_timestamp = rtp_timestamp.Expand(UINT32_C(67890));
  dispatch_capture_begin();

  GetEventsAndReset();
  EXPECT_EQ(rtp_timestamp, first_rtp_timestamp_);
}
// Verifies relative RTP timestamp computation when the 32-bit RTP timestamp
// wraps around between the capture-begin and capture-end events.
// (Also removes a stray empty statement that was left after the event
// construction line.)
TEST_F(EncodingEventSubscriberTest, RelativeRtpTimeTicksWrapAround) {
  Init(VIDEO_EVENT);
  // Start 20 ticks before the wrap-around point.
  const RtpTimeTicks rtp_timestamp =
      RtpTimeTicks() - RtpTimeDelta::FromTicks(20);
  const base::TimeTicks now(testing_clock_.NowTicks());

  auto capture_begin_event = std::make_unique<FrameEvent>();
  capture_begin_event->timestamp = now;
  capture_begin_event->type = FRAME_CAPTURE_BEGIN;
  capture_begin_event->media_type = VIDEO_EVENT;
  capture_begin_event->rtp_timestamp = rtp_timestamp;
  cast_environment_->logger()->DispatchFrameEvent(
      std::move(capture_begin_event));

  // RtpTimeTicks has now wrapped around.
  auto capture_end_event = std::make_unique<FrameEvent>();
  capture_end_event->timestamp = now;
  capture_end_event->type = FRAME_CAPTURE_END;
  capture_end_event->media_type = VIDEO_EVENT;
  capture_end_event->rtp_timestamp =
      rtp_timestamp + RtpTimeDelta::FromTicks(30);
  capture_end_event->width = 1280;
  capture_end_event->height = 720;
  cast_environment_->logger()->DispatchFrameEvent(std::move(capture_end_event));

  GetEventsAndReset();

  // Relative timestamps must still be 0 and 30 despite the wrap-around.
  auto it = frame_events_.begin();
  ASSERT_NE(frame_events_.end(), it);
  EXPECT_EQ(0u, (*it)->relative_rtp_timestamp());
  ++it;
  ASSERT_NE(frame_events_.end(), it);
  EXPECT_EQ(30u, (*it)->relative_rtp_timestamp());
  EXPECT_EQ(1280, (*it)->width());
  EXPECT_EQ(720, (*it)->height());
}
// Verifies that event logs overflowing kMaxEventsPerProto, and packet lists
// overflowing kMaxPacketsPerFrame, are split across multiple protos.
TEST_F(EncodingEventSubscriberTest, MaxEventsPerProto) {
  Init(VIDEO_EVENT);
  const RtpTimeTicks rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));

  // Dispatches a PACKET_SENT_TO_NETWORK event for `packet_id` and advances
  // the mock clock so each event carries a distinct timestamp.
  auto dispatch_send_event = [&](int packet_id, int max_packet_id) {
    auto send_event = std::make_unique<PacketEvent>();
    send_event->timestamp = testing_clock_.NowTicks();
    send_event->type = PACKET_SENT_TO_NETWORK;
    send_event->media_type = VIDEO_EVENT;
    send_event->rtp_timestamp = rtp_timestamp;
    send_event->frame_id = FrameId::first();
    send_event->packet_id = packet_id;
    send_event->max_packet_id = max_packet_id;
    send_event->size = 123;
    cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
    task_runner_->Sleep(base::Milliseconds(30));
  };

  // One more frame event than fits in a single proto -> two protos.
  for (int i = 0; i < kMaxEventsPerProto + 1; i++) {
    auto ack_event = std::make_unique<FrameEvent>();
    ack_event->timestamp = testing_clock_.NowTicks();
    ack_event->type = FRAME_ACK_RECEIVED;
    ack_event->media_type = VIDEO_EVENT;
    ack_event->rtp_timestamp = rtp_timestamp;
    ack_event->frame_id = FrameId::first();
    cast_environment_->logger()->DispatchFrameEvent(std::move(ack_event));
    task_runner_->Sleep(base::Milliseconds(30));
  }
  GetEventsAndReset();
  ASSERT_EQ(2u, frame_events_.size());
  auto frame_it = frame_events_.begin();
  ASSERT_TRUE(frame_it != frame_events_.end());
  const AggregatedFrameEvent* frame_event = frame_it->get();
  EXPECT_EQ(kMaxEventsPerProto, frame_event->event_type_size());

  // One more packet than fits in a single proto's packet list -> two protos.
  for (int i = 0; i < kMaxPacketsPerFrame + 1; i++) {
    dispatch_send_event(i, kMaxPacketsPerFrame);
  }
  GetEventsAndReset();
  EXPECT_EQ(2u, packet_events_.size());
  auto packet_it = packet_events_.begin();
  ASSERT_TRUE(packet_it != packet_events_.end());
  {
    const AggregatedPacketEvent* packet_event = packet_it->get();
    EXPECT_EQ(kMaxPacketsPerFrame, packet_event->base_packet_event_size());
  }
  ++packet_it;
  {
    const AggregatedPacketEvent* packet_event = packet_it->get();
    EXPECT_EQ(1, packet_event->base_packet_event_size());
  }

  // One more event than fits in a single packet's event list -> two protos.
  for (int j = 0; j < kMaxEventsPerProto + 1; j++) {
    dispatch_send_event(0, 0);
  }
  GetEventsAndReset();
  EXPECT_EQ(2u, packet_events_.size());
  packet_it = packet_events_.begin();
  ASSERT_TRUE(packet_it != packet_events_.end());
  {
    const AggregatedPacketEvent* packet_event = packet_it->get();
    EXPECT_EQ(kMaxEventsPerProto,
              packet_event->base_packet_event(0).event_type_size());
  }
  ++packet_it;
  {
    const AggregatedPacketEvent* packet_event = packet_it->get();
    EXPECT_EQ(1, packet_event->base_packet_event(0).event_type_size());
  }
}
} // namespace cast
} // namespace media

@ -8,99 +8,104 @@
#include "base/containers/contains.h"
#include "base/functional/bind.h"
#include "base/functional/callback_forward.h"
#include "base/functional/callback_helpers.h"
#include "base/location.h"
#include "base/memory/scoped_refptr.h"
#include "base/not_fatal_until.h"
#include "base/ranges/algorithm.h"
#include "base/synchronization/waitable_event.h"
#include "base/task/single_thread_task_runner.h"
#include "media/cast/cast_environment.h"
namespace media {
namespace cast {
namespace media::cast {
LogEventDispatcher::LogEventDispatcher(CastEnvironment* env)
: env_(env), impl_(new Impl()) {
DCHECK(env_);
namespace {
// Runs `task` synchronously when already on `task_runner`'s sequence;
// otherwise posts it to run there asynchronously.
void RunOnThread(base::SingleThreadTaskRunner& task_runner,
                 base::OnceClosure task) {
  if (!task_runner.RunsTasksInCurrentSequence()) {
    task_runner.PostTask(FROM_HERE, std::move(task));
    return;
  }
  std::move(task).Run();
}
LogEventDispatcher::~LogEventDispatcher() = default;
} // namespace
// `deletion_cb` is forwarded to the Impl; it runs when the Impl is destroyed
// (the destructor schedules that destruction on `task_runner`).
LogEventDispatcher::LogEventDispatcher(
    scoped_refptr<base::SingleThreadTaskRunner> task_runner,
    base::OnceClosure deletion_cb)
    : task_runner_(std::move(task_runner)),
      impl_(
          std::make_unique<LogEventDispatcher::Impl>(std::move(deletion_cb))) {}
LogEventDispatcher::~LogEventDispatcher() {
  // `impl_` is destroyed on the task runner so that the base::Unretained
  // references to it in previously-posted dispatch tasks remain valid until
  // those tasks have run.
  task_runner_->DeleteSoon(FROM_HERE, std::move(impl_));
}
void LogEventDispatcher::DispatchFrameEvent(
std::unique_ptr<FrameEvent> event) const {
if (env_->CurrentlyOn(CastEnvironment::MAIN)) {
impl_->DispatchFrameEvent(std::move(event));
} else {
env_->PostTask(CastEnvironment::MAIN, FROM_HERE,
base::BindOnce(&LogEventDispatcher::Impl::DispatchFrameEvent,
impl_, std::move(event)));
}
RunOnThread(*task_runner_,
base::BindOnce(&LogEventDispatcher::Impl::DispatchFrameEvent,
// Here and below: Unretained is safe because impl_
// is destroyed on the main task runner.
base::Unretained(impl_.get()), std::move(event)));
}
void LogEventDispatcher::DispatchPacketEvent(
std::unique_ptr<PacketEvent> event) const {
if (env_->CurrentlyOn(CastEnvironment::MAIN)) {
impl_->DispatchPacketEvent(std::move(event));
} else {
env_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
base::BindOnce(&LogEventDispatcher::Impl::DispatchPacketEvent, impl_,
std::move(event)));
}
RunOnThread(*task_runner_,
base::BindOnce(&LogEventDispatcher::Impl::DispatchPacketEvent,
base::Unretained(impl_.get()), std::move(event)));
}
void LogEventDispatcher::DispatchBatchOfEvents(
std::unique_ptr<std::vector<FrameEvent>> frame_events,
std::unique_ptr<std::vector<PacketEvent>> packet_events) const {
if (env_->CurrentlyOn(CastEnvironment::MAIN)) {
impl_->DispatchBatchOfEvents(std::move(frame_events),
std::move(packet_events));
} else {
env_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
base::BindOnce(&LogEventDispatcher::Impl::DispatchBatchOfEvents, impl_,
std::move(frame_events), std::move(packet_events)));
}
RunOnThread(
*task_runner_,
base::BindOnce(&LogEventDispatcher::Impl::DispatchBatchOfEvents,
base::Unretained(impl_.get()), std::move(frame_events),
std::move(packet_events)));
}
void LogEventDispatcher::Subscribe(RawEventSubscriber* subscriber) {
if (env_->CurrentlyOn(CastEnvironment::MAIN)) {
impl_->Subscribe(subscriber);
} else {
env_->PostTask(CastEnvironment::MAIN, FROM_HERE,
base::BindOnce(&LogEventDispatcher::Impl::Subscribe, impl_,
subscriber));
}
RunOnThread(*task_runner_,
base::BindOnce(&LogEventDispatcher::Impl::Subscribe,
base::Unretained(impl_.get()), subscriber));
}
void LogEventDispatcher::Unsubscribe(RawEventSubscriber* subscriber) {
if (env_->CurrentlyOn(CastEnvironment::MAIN)) {
if (task_runner_->RunsTasksInCurrentSequence()) {
impl_->Unsubscribe(subscriber);
} else {
// This method, once it returns, guarantees |subscriber| will not receive
// any more events. Therefore, when called on a thread other than the
// CastEnvironment's MAIN thread, block until the unsubscribe task
// completes.
struct Helper {
static void UnsubscribeAndSignal(const scoped_refptr<Impl>& impl,
RawEventSubscriber* subscriber,
base::WaitableEvent* done) {
impl->Unsubscribe(subscriber);
done->Signal();
}
};
base::WaitableEvent done(base::WaitableEvent::ResetPolicy::MANUAL,
base::WaitableEvent::InitialState::NOT_SIGNALED);
CHECK(env_->PostTask(CastEnvironment::MAIN, FROM_HERE,
base::BindOnce(&Helper::UnsubscribeAndSignal, impl_,
subscriber, &done)));
// `task_runner_`'s thread, block until the unsubscribe task completes.
base::WaitableEvent done;
CHECK(task_runner_->PostTask(
FROM_HERE, base::BindOnce(
[](Impl* impl, RawEventSubscriber* subscriber,
base::WaitableEvent* done) {
impl->Unsubscribe(subscriber);
done->Signal();
},
base::Unretained(impl_.get()), subscriber, &done)));
done.Wait();
}
}
LogEventDispatcher::Impl::Impl() = default;
// `deletion_cb`, if non-null, is run from ~Impl() — i.e. on the task runner
// that the owning dispatcher destroys this Impl on.
LogEventDispatcher::Impl::Impl(base::OnceClosure deletion_cb)
    : deletion_cb_(std::move(deletion_cb)) {}
LogEventDispatcher::Impl::~Impl() {
DCHECK(subscribers_.empty());
CHECK(subscribers_.empty());
if (deletion_cb_) {
std::move(deletion_cb_).Run();
}
}
void LogEventDispatcher::Impl::DispatchFrameEvent(
@ -131,7 +136,7 @@ void LogEventDispatcher::Impl::DispatchBatchOfEvents(
}
void LogEventDispatcher::Impl::Subscribe(RawEventSubscriber* subscriber) {
DCHECK(!base::Contains(subscribers_, subscriber));
CHECK(!base::Contains(subscribers_, subscriber));
subscribers_.push_back(subscriber);
}
@ -141,5 +146,4 @@ void LogEventDispatcher::Impl::Unsubscribe(RawEventSubscriber* subscriber) {
subscribers_.erase(it);
}
} // namespace cast
} // namespace media
} // namespace media::cast

@ -8,27 +8,29 @@
#include <memory>
#include <vector>
#include "base/functional/callback_forward.h"
#include "base/memory/raw_ptr.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_refptr.h"
#include "base/task/single_thread_task_runner.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/logging/raw_event_subscriber.h"
namespace media {
namespace cast {
class CastEnvironment;
namespace media::cast {
// A thread-safe receiver of logging events that manages an active list of
// EventSubscribers and dispatches the logging events to them on the MAIN
// thread. All methods, constructor, and destructor can be invoked on any
// thread.
// EventSubscribers and dispatches the logging events to them on `task_runner`.
// All methods, constructor, and destructor can be invoked on any thread.
class LogEventDispatcher {
public:
// |env| outlives this instance (and generally owns this instance).
explicit LogEventDispatcher(CastEnvironment* env);
explicit LogEventDispatcher(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
base::OnceClosure deletion_cb);
LogEventDispatcher(const LogEventDispatcher&) = delete;
LogEventDispatcher(LogEventDispatcher&&) = delete;
LogEventDispatcher& operator=(const LogEventDispatcher&) = delete;
LogEventDispatcher& operator=(LogEventDispatcher&&) = delete;
~LogEventDispatcher();
@ -40,22 +42,25 @@ class LogEventDispatcher {
std::unique_ptr<std::vector<FrameEvent>> frame_events,
std::unique_ptr<std::vector<PacketEvent>> packet_events) const;
// Adds |subscriber| to the active list to begin receiving events on MAIN
// thread. Unsubscribe() must be called before |subscriber| is destroyed.
// Adds `subscriber` to the active list to begin receiving events on MAIN
// thread. Unsubscribe() must be called before `subscriber` is destroyed.
void Subscribe(RawEventSubscriber* subscriber);
// Removes |subscriber| from the active list. Once this method returns, the
// |subscriber| is guaranteed not to receive any more events.
// Removes `subscriber` from the active list. Once this method returns, the
// `subscriber` is guaranteed not to receive any more events.
void Unsubscribe(RawEventSubscriber* subscriber);
private:
// The part of the implementation that runs exclusively on the MAIN thread.
class Impl : public base::RefCountedThreadSafe<Impl> {
class Impl {
public:
Impl();
explicit Impl(base::OnceClosure deletion_cb);
Impl(const Impl&) = delete;
Impl(Impl&&) = delete;
Impl& operator=(const Impl&) = delete;
Impl& operator=(Impl&&) = delete;
~Impl();
void DispatchFrameEvent(std::unique_ptr<FrameEvent> event) const;
void DispatchPacketEvent(std::unique_ptr<PacketEvent> event) const;
@ -66,18 +71,14 @@ class LogEventDispatcher {
void Unsubscribe(RawEventSubscriber* subscriber);
private:
friend class base::RefCountedThreadSafe<Impl>;
~Impl();
base::OnceClosure deletion_cb_;
std::vector<raw_ptr<RawEventSubscriber, VectorExperimental>> subscribers_;
};
const raw_ptr<CastEnvironment> env_; // Owner of this instance.
const scoped_refptr<Impl> impl_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
std::unique_ptr<Impl> impl_;
};
} // namespace cast
} // namespace media
} // namespace media::cast
#endif // MEDIA_CAST_LOGGING_LOG_EVENT_DISPATCHER_H_

@ -0,0 +1,285 @@
// Copyright 2025 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/logging/log_event_dispatcher.h"
#include <memory>
#include <vector>
#include "base/functional/bind.h"
#include "base/functional/callback_forward.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_refptr.h"
#include "base/synchronization/waitable_event.h"
#include "base/task/single_thread_task_runner.h"
#include "base/task/task_traits.h"
#include "base/task/thread_pool.h"
#include "base/test/task_environment.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/logging/raw_event_subscriber.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media::cast {
namespace {
// Mock RawEventSubscriber used (via StrictMock) to observe exactly which
// events the dispatcher delivers.
class MockRawEventSubscriber : public RawEventSubscriber {
 public:
  MOCK_METHOD(void, OnReceiveFrameEvent, (const FrameEvent&), (override));
  MOCK_METHOD(void, OnReceivePacketEvent, (const PacketEvent&), (override));
};
// Test fixture that owns a LogEventDispatcher bound to the main-thread task
// runner and a strict mock subscriber. Destruction blocks until the
// dispatcher's Impl has actually been deleted on the main thread, which the
// deletion callback signals via `quit_closure_`.
class LogEventDispatcherTest : public ::testing::Test {
 public:
  LogEventDispatcherTest()
      : dispatcher_(std::make_unique<LogEventDispatcher>(
            task_environment_.GetMainThreadTaskRunner(),
            base::BindOnce(&LogEventDispatcherTest::OnDispatcherDeletion,
                           // Safe because we wait to delete `this` until
                           // this callback is executed.
                           base::Unretained(this)))) {
    dispatcher_->Subscribe(&subscriber_);
    is_subscribed_ = true;
  }

  // Invoked when the dispatcher's Impl is destroyed; unblocks the
  // RunUntilQuit() call in the destructor below.
  void OnDispatcherDeletion() {
    ASSERT_TRUE(quit_closure_);
    std::move(quit_closure_).Run();
  }

  ~LogEventDispatcherTest() override {
    quit_closure_ = task_environment_.QuitClosure();
    Unsubscribe();
    dispatcher_.reset();
    // Ensure that the Impl gets deleted on the main thread.
    task_environment_.RunUntilQuit();
  }

  // Idempotent: unsubscribes the mock only if it is still subscribed.
  void Unsubscribe() {
    if (is_subscribed_) {
      dispatcher_->Unsubscribe(&subscriber_);
      is_subscribed_ = false;
    }
  }

  // Creates a dedicated background-thread task runner, used by tests that
  // exercise the dispatcher's cross-thread code paths.
  scoped_refptr<base::SingleThreadTaskRunner> CreateTaskRunner() {
    return base::ThreadPool::CreateSingleThreadTaskRunner(
        {base::TaskPriority::USER_BLOCKING,
         base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN,
         base::WithBaseSyncPrimitives(), base::MayBlock()},
        base::SingleThreadTaskRunnerThreadMode::DEDICATED);
  }

  base::test::TaskEnvironment& task_environment() { return task_environment_; }
  LogEventDispatcher& dispatcher() { return *dispatcher_; }
  testing::StrictMock<MockRawEventSubscriber>& subscriber() {
    return subscriber_;
  }

 private:
  base::test::TaskEnvironment task_environment_{
      base::test::TaskEnvironment::TimeSource::MOCK_TIME};
  std::unique_ptr<LogEventDispatcher> dispatcher_;
  testing::StrictMock<MockRawEventSubscriber> subscriber_;
  // Tracks whether `subscriber_` is currently registered with `dispatcher_`.
  bool is_subscribed_ = false;
  // Used to ensure destruction.
  base::OnceClosure quit_closure_;
};
} // namespace
// Simple test: do nothing except construct and destruct the test suite,
// which automatically subscribes and unsubscribes.
TEST_F(LogEventDispatcherTest, SubscribeAndUnsubscribe) {}
// A frame event dispatched from the main thread reaches the subscriber.
TEST_F(LogEventDispatcherTest, DispatchFrameEvent) {
  auto frame_event = std::make_unique<FrameEvent>();
  frame_event->type = FRAME_CAPTURE_BEGIN;
  auto quit = task_environment().QuitClosure();
  EXPECT_CALL(subscriber(), OnReceiveFrameEvent(testing::Ref(*frame_event)))
      .WillOnce(testing::InvokeWithoutArgs([quit] { quit.Run(); }));
  dispatcher().DispatchFrameEvent(std::move(frame_event));
  task_environment().RunUntilQuit();
}
// A frame event dispatched from a background thread still reaches the
// subscriber on the main thread.
TEST_F(LogEventDispatcherTest, DispatchFrameEventOnAnotherThread) {
  auto frame_event = std::make_unique<FrameEvent>();
  frame_event->type = FRAME_CAPTURE_BEGIN;
  auto quit = task_environment().QuitClosure();
  EXPECT_CALL(subscriber(), OnReceiveFrameEvent(testing::Ref(*frame_event)))
      .WillOnce(testing::InvokeWithoutArgs([quit] { quit.Run(); }));

  auto task_runner = CreateTaskRunner();
  task_runner->PostTask(
      FROM_HERE,
      base::BindOnce(
          [](LogEventDispatcherTest* test, std::unique_ptr<FrameEvent> event) {
            test->dispatcher().DispatchFrameEvent(std::move(event));
          },
          // Safe because we own the task runner.
          base::Unretained(this), std::move(frame_event)));
  task_environment().RunUntilQuit();
}
// A packet event dispatched from the main thread reaches the subscriber.
TEST_F(LogEventDispatcherTest, DispatchPacketEvent) {
  auto packet_event = std::make_unique<PacketEvent>();
  packet_event->type = PACKET_SENT_TO_NETWORK;
  auto quit = task_environment().QuitClosure();
  EXPECT_CALL(subscriber(), OnReceivePacketEvent(testing::Ref(*packet_event)))
      .WillOnce(testing::InvokeWithoutArgs([quit] { quit.Run(); }));
  dispatcher().DispatchPacketEvent(std::move(packet_event));
  task_environment().RunUntilQuit();
}
// A packet event dispatched from a background thread still reaches the
// subscriber on the main thread.
TEST_F(LogEventDispatcherTest, DispatchPacketEventOnAnotherThread) {
  auto packet_event = std::make_unique<PacketEvent>();
  packet_event->type = PACKET_SENT_TO_NETWORK;
  auto quit = task_environment().QuitClosure();
  EXPECT_CALL(subscriber(), OnReceivePacketEvent(testing::Ref(*packet_event)))
      .WillOnce(testing::InvokeWithoutArgs([quit] { quit.Run(); }));

  auto task_runner = CreateTaskRunner();
  task_runner->PostTask(
      FROM_HERE,
      base::BindOnce(
          [](LogEventDispatcherTest* test, std::unique_ptr<PacketEvent> event) {
            test->dispatcher().DispatchPacketEvent(std::move(event));
          },
          // Safe because we own the task runner.
          base::Unretained(this), std::move(packet_event)));
  task_environment().RunUntilQuit();
}
// A batch containing both frame and packet events is fanned out to the
// subscriber as individual per-event callbacks.
TEST_F(LogEventDispatcherTest, DispatchBatchOfEvents) {
  auto frame_events = std::make_unique<std::vector<FrameEvent>>();
  frame_events->push_back(FrameEvent());
  frame_events->back().type = FRAME_CAPTURE_BEGIN;
  frame_events->push_back(FrameEvent());
  frame_events->back().type = FRAME_CAPTURE_END;
  auto packet_events = std::make_unique<std::vector<PacketEvent>>();
  packet_events->push_back(PacketEvent());
  packet_events->back().type = PACKET_SENT_TO_NETWORK;

  // Quit only once all three expected callbacks have arrived.
  constexpr int kExpectedEventCount = 3;
  int event_count = 0;
  auto event_closure = [&, quit_closure = task_environment().QuitClosure()] {
    if (++event_count == kExpectedEventCount) {
      std::move(quit_closure).Run();
    }
  };
  EXPECT_CALL(subscriber(),
              OnReceiveFrameEvent(testing::Ref(frame_events->at(0))))
      .WillOnce(testing::InvokeWithoutArgs(event_closure));
  EXPECT_CALL(subscriber(),
              OnReceiveFrameEvent(testing::Ref(frame_events->at(1))))
      .WillOnce(testing::InvokeWithoutArgs(event_closure));
  EXPECT_CALL(subscriber(),
              OnReceivePacketEvent(testing::Ref(packet_events->at(0))))
      .WillOnce(testing::InvokeWithoutArgs(event_closure));
  dispatcher().DispatchBatchOfEvents(std::move(frame_events),
                                     std::move(packet_events));
  task_environment().RunUntilQuit();
}
// Same as DispatchBatchOfEvents, but the batch is dispatched from a
// dedicated background thread.
TEST_F(LogEventDispatcherTest, DispatchBatchOfEventsOnAnotherThread) {
  auto frame_events = std::make_unique<std::vector<FrameEvent>>();
  frame_events->push_back(FrameEvent());
  frame_events->back().type = FRAME_CAPTURE_BEGIN;
  frame_events->push_back(FrameEvent());
  frame_events->back().type = FRAME_CAPTURE_END;
  auto packet_events = std::make_unique<std::vector<PacketEvent>>();
  packet_events->push_back(PacketEvent());
  packet_events->back().type = PACKET_SENT_TO_NETWORK;

  // Quit only once all three expected callbacks have arrived.
  constexpr int kExpectedEventCount = 3;
  int event_count = 0;
  auto event_closure = [&, quit_closure = task_environment().QuitClosure()] {
    if (++event_count == kExpectedEventCount) {
      std::move(quit_closure).Run();
    }
  };
  EXPECT_CALL(subscriber(),
              OnReceiveFrameEvent(testing::Ref(frame_events->at(0))))
      .WillOnce(testing::InvokeWithoutArgs(event_closure));
  EXPECT_CALL(subscriber(),
              OnReceiveFrameEvent(testing::Ref(frame_events->at(1))))
      .WillOnce(testing::InvokeWithoutArgs(event_closure));
  EXPECT_CALL(subscriber(),
              OnReceivePacketEvent(testing::Ref(packet_events->at(0))))
      .WillOnce(testing::InvokeWithoutArgs(event_closure));

  auto task_runner = CreateTaskRunner();
  task_runner->PostTask(
      FROM_HERE,
      base::BindOnce(
          [](LogEventDispatcherTest* test,
             std::unique_ptr<std::vector<FrameEvent>> frame_events,
             std::unique_ptr<std::vector<PacketEvent>> packet_events) {
            test->dispatcher().DispatchBatchOfEvents(std::move(frame_events),
                                                     std::move(packet_events));
          },
          // Safe because we own the task runner.
          base::Unretained(this), std::move(frame_events),
          std::move(packet_events)));
  task_environment().RunUntilQuit();
}
// A subscriber may unsubscribe itself from within its own event callback
// (i.e. on the dispatcher's own task runner) without deadlock.
TEST_F(LogEventDispatcherTest, UnsubscribeDuringDispatch) {
  auto frame_event = std::make_unique<FrameEvent>();
  frame_event->type = FRAME_CAPTURE_BEGIN;
  EXPECT_CALL(subscriber(), OnReceiveFrameEvent(testing::Ref(*frame_event)))
      .WillOnce(
          [&, closure = task_environment().QuitClosure()](const FrameEvent&) {
            Unsubscribe();
            std::move(closure).Run();
          });
  dispatcher().DispatchFrameEvent(std::move(frame_event));
  task_environment().RunUntilQuit();
}
// Unsubscribe() may be called from a different thread while a dispatch is in
// flight; it blocks until the subscriber is removed. (The unused
// `wait_for_unsubscribe` WaitableEvent local has been removed.)
TEST_F(LogEventDispatcherTest, UnsubscribeOnDifferentThreadDuringDispatch) {
  auto frame_event = std::make_unique<FrameEvent>();
  frame_event->type = FRAME_CAPTURE_BEGIN;
  auto task_runner = CreateTaskRunner();
  EXPECT_CALL(subscriber(), OnReceiveFrameEvent(testing::Ref(*frame_event)))
      .WillOnce([&, quit_closure =
                        task_environment().QuitClosure()](const FrameEvent&) {
        // Hop to the background thread to unsubscribe, then quit.
        task_runner->PostTask(FROM_HERE,
                              base::BindOnce(
                                  [](LogEventDispatcherTest* test,
                                     base::OnceClosure quit_closure) {
                                    test->Unsubscribe();
                                    std::move(quit_closure).Run();
                                  },
                                  // Safe because we own the task runner.
                                  base::Unretained(this), quit_closure));
      });
  dispatcher().DispatchFrameEvent(std::move(frame_event));
  task_environment().RunUntilQuit();
}
} // namespace media::cast

@ -1,100 +0,0 @@
// Copyright 2014 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/logging/raw_event_subscriber_bundle.h"
#include <memory>
#include "media/cast/cast_environment.h"
#include "media/cast/logging/receiver_time_offset_estimator_impl.h"
namespace media {
namespace cast {
// Subscribes both the encoding-event and stats subscribers for this stream
// (audio or video) to the environment's logger; they stay subscribed for the
// lifetime of this object (see the destructor).
RawEventSubscriberBundleForStream::RawEventSubscriberBundleForStream(
    const scoped_refptr<CastEnvironment>& cast_environment,
    bool is_audio,
    ReceiverTimeOffsetEstimator* offset_estimator)
    : cast_environment_(cast_environment),
      event_subscriber_(
          is_audio ? AUDIO_EVENT : VIDEO_EVENT,
          is_audio ? kMaxAudioEventEntries : kMaxVideoEventEntries),
      stats_subscriber_(is_audio ? AUDIO_EVENT : VIDEO_EVENT,
                        cast_environment->Clock(),
                        offset_estimator) {
  cast_environment_->logger()->Subscribe(&event_subscriber_);
  cast_environment_->logger()->Subscribe(&stats_subscriber_);
}
// Unsubscribes both subscribers so they receive no events after destruction.
RawEventSubscriberBundleForStream::~RawEventSubscriberBundleForStream() {
  cast_environment_->logger()->Unsubscribe(&event_subscriber_);
  cast_environment_->logger()->Unsubscribe(&stats_subscriber_);
}
// Returns the per-stream encoding event subscriber (never null; owned here).
EncodingEventSubscriber*
RawEventSubscriberBundleForStream::GetEncodingEventSubscriber() {
  return &event_subscriber_;
}
// Returns the per-stream stats subscriber (never null; owned here).
StatsEventSubscriber*
RawEventSubscriberBundleForStream::GetStatsEventSubscriber() {
  return &stats_subscriber_;
}
// The shared offset estimator and per-stream bundles are created lazily in
// AddEventSubscribers().
RawEventSubscriberBundle::RawEventSubscriberBundle(
    const scoped_refptr<CastEnvironment>& cast_environment)
    : cast_environment_(cast_environment) {}
// Unsubscribes the shared offset estimator if it is still registered; the
// per-stream bundles unsubscribe themselves in their own destructors.
RawEventSubscriberBundle::~RawEventSubscriberBundle() {
  // Test the smart pointer directly instead of calling .get().
  if (receiver_offset_estimator_) {
    cast_environment_->logger()->Unsubscribe(receiver_offset_estimator_.get());
  }
}
// Lazily creates (and subscribes) the shared receiver-time offset estimator,
// then creates and registers the per-stream subscriber bundle for `is_audio`
// if one does not already exist. Calling this twice for the same stream type
// is a no-op.
void RawEventSubscriberBundle::AddEventSubscribers(bool is_audio) {
  if (!receiver_offset_estimator_) {
    receiver_offset_estimator_ =
        std::make_unique<ReceiverTimeOffsetEstimatorImpl>();
    cast_environment_->logger()->Subscribe(receiver_offset_estimator_.get());
  }
  if (subscribers_.find(is_audio) != subscribers_.end()) {
    return;
  }
  subscribers_.emplace(
      is_audio,
      std::make_unique<RawEventSubscriberBundleForStream>(
          cast_environment_, is_audio, receiver_offset_estimator_.get()));
}
// Removes the subscribers for the audio or video stream, if present. When the
// last stream's subscribers go away, the shared offset estimator is detached
// from the logger and destroyed as well.
void RawEventSubscriberBundle::RemoveEventSubscribers(bool is_audio) {
  // Erase by key; a zero count means this stream type was never subscribed.
  if (subscribers_.erase(is_audio) == 0u) {
    return;
  }
  if (subscribers_.empty()) {
    cast_environment_->logger()->Unsubscribe(receiver_offset_estimator_.get());
    receiver_offset_estimator_.reset();
  }
}
// Returns the encoding-event subscriber for the given stream type, or nullptr
// if AddEventSubscribers() has not been called for it.
EncodingEventSubscriber* RawEventSubscriberBundle::GetEncodingEventSubscriber(
    bool is_audio) {
  const auto entry = subscribers_.find(is_audio);
  if (entry == subscribers_.end()) {
    return nullptr;
  }
  return entry->second->GetEncodingEventSubscriber();
}
// Returns the stats subscriber for the given stream type, or nullptr if
// AddEventSubscribers() has not been called for it.
StatsEventSubscriber* RawEventSubscriberBundle::GetStatsEventSubscriber(
    bool is_audio) {
  const auto entry = subscribers_.find(is_audio);
  if (entry == subscribers_.end()) {
    return nullptr;
  }
  return entry->second->GetStatsEventSubscriber();
}
} // namespace cast
} // namespace media

@ -1,86 +0,0 @@
// Copyright 2014 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAST_LOGGING_RAW_EVENT_SUBSCRIBER_BUNDLE_H_
#define MEDIA_CAST_LOGGING_RAW_EVENT_SUBSCRIBER_BUNDLE_H_
#include "base/memory/scoped_refptr.h"
#include "media/cast/logging/encoding_event_subscriber.h"
#include "media/cast/logging/stats_event_subscriber.h"
namespace media {
namespace cast {
class CastEnvironment;
class ReceiverTimeOffsetEstimator;
// Allow 9MB for serialized video / audio event logs.
// `constexpr` (rather than `const int`) guarantees compile-time evaluation
// and keeps internal linkage in this header.
constexpr int kMaxSerializedBytes = 9000000;
// Assume serialized log data for each frame will take up to 150 bytes.
constexpr int kMaxVideoEventEntries = kMaxSerializedBytes / 150;
// Assume serialized log data for each frame will take up to 75 bytes.
constexpr int kMaxAudioEventEntries = kMaxSerializedBytes / 75;
// A bundle for raw event subscribers for a single stream.
// It contains an EncodingEventSubscriber and a StatsSubscriber.
class RawEventSubscriberBundleForStream {
 public:
  // Subscribes both members to `cast_environment`'s logger. `is_audio`
  // selects the event media type and capacity; `offset_estimator` is not
  // owned and must outlive this object.
  RawEventSubscriberBundleForStream(
      const scoped_refptr<CastEnvironment>& cast_environment,
      bool is_audio,
      ReceiverTimeOffsetEstimator* offset_estimator);
  RawEventSubscriberBundleForStream(const RawEventSubscriberBundleForStream&) =
      delete;
  RawEventSubscriberBundleForStream& operator=(
      const RawEventSubscriberBundleForStream&) = delete;
  // Unsubscribes both members from the logger.
  ~RawEventSubscriberBundleForStream();
  // Accessors for the owned subscribers; returned pointers remain valid for
  // the lifetime of this object.
  EncodingEventSubscriber* GetEncodingEventSubscriber();
  StatsEventSubscriber* GetStatsEventSubscriber();
 private:
  const scoped_refptr<CastEnvironment> cast_environment_;
  // Records raw frame/packet events for serialization.
  EncodingEventSubscriber event_subscriber_;
  // Aggregates statistics from raw events.
  StatsEventSubscriber stats_subscriber_;
};
// A bundle of subscribers for all streams. An instance of this object
// is associated with a CastEnvironment.
// This class can be used for managing event subscribers
// in a session where they could be multiple streams (i.e. CastSessionDelegate).
// It also contains a ReceiverTimeOffsetEstimator that is shared by subscribers
// of different streams.
class RawEventSubscriberBundle {
 public:
  explicit RawEventSubscriberBundle(
      const scoped_refptr<CastEnvironment>& cast_environment);
  RawEventSubscriberBundle(const RawEventSubscriberBundle&) = delete;
  RawEventSubscriberBundle& operator=(const RawEventSubscriberBundle&) = delete;
  ~RawEventSubscriberBundle();
  // Creates and registers subscribers for the audio (`is_audio` == true) or
  // video stream. No-op if subscribers for that stream already exist.
  void AddEventSubscribers(bool is_audio);
  // Removes and destroys the subscribers for the given stream, if present.
  void RemoveEventSubscribers(bool is_audio);
  // Return the subscriber for the given stream, or nullptr if
  // AddEventSubscribers() has not been called for it.
  EncodingEventSubscriber* GetEncodingEventSubscriber(bool is_audio);
  StatsEventSubscriber* GetStatsEventSubscriber(bool is_audio);
 private:
  const scoped_refptr<CastEnvironment> cast_environment_;
  // Map from (is_audio) -> RawEventSubscriberBundleForStream.
  // TODO(imcheng): This works because we only have 1 audio and 1 video stream.
  // This needs to scale better.
  std::map<bool, std::unique_ptr<RawEventSubscriberBundleForStream>>
      subscribers_;
  // Shared by all per-stream bundles; created lazily on the first
  // AddEventSubscribers() call and destroyed when the last stream is removed.
  std::unique_ptr<ReceiverTimeOffsetEstimator> receiver_offset_estimator_;
};
} // namespace cast
} // namespace media
#endif // MEDIA_CAST_LOGGING_RAW_EVENT_SUBSCRIBER_BUNDLE_H_

@ -9,41 +9,62 @@
#include <memory>
#include <utility>
#include "base/functional/callback_forward.h"
#include "base/memory/ref_counted.h"
#include "base/test/simple_test_tick_clock.h"
#include "base/test/task_environment.h"
#include "base/time/tick_clock.h"
#include "base/time/time.h"
#include "media/base/fake_single_thread_task_runner.h"
#include "media/cast/cast_environment.h"
#include "media/cast/logging/log_event_dispatcher.h"
#include "media/cast/logging/logging_defines.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
namespace cast {
namespace media::cast {
class ReceiverTimeOffsetEstimatorImplTest : public ::testing::Test {
protected:
ReceiverTimeOffsetEstimatorImplTest()
: task_runner_(new FakeSingleThreadTaskRunner(&sender_clock_)),
cast_environment_(new CastEnvironment(&sender_clock_,
task_runner_,
task_runner_,
task_runner_)) {
cast_environment_->logger()->Subscribe(&estimator_);
: log_dispatcher_(std::make_unique<LogEventDispatcher>(
task_environment_.GetMainThreadTaskRunner(),
// NOTE: Unretained is safe because we wait for this task to execute
// before deleting `this`.
base::BindOnce(
&ReceiverTimeOffsetEstimatorImplTest::OnDispatcherDeletion,
base::Unretained(this)))) {
log_dispatcher().Subscribe(&estimator_);
// Synchronize the SimpleTestTickClock with the TaskEnvironment.
receiver_clock_.SetNowTicks(NowTicks());
}
~ReceiverTimeOffsetEstimatorImplTest() override {
cast_environment_->logger()->Unsubscribe(&estimator_);
log_dispatcher().Unsubscribe(&estimator_);
// Ensure any asynchronous MAIN thread deletes (such as the
// LogEventDispatcher, which posts a task to delete its internal Impl) are
// executed.
dispatcher_deletion_cb_ = task_environment_.QuitClosure();
log_dispatcher_.reset();
task_environment_.RunUntilQuit();
}
void OnDispatcherDeletion() {
ASSERT_TRUE(dispatcher_deletion_cb_);
std::move(dispatcher_deletion_cb_).Run();
}
void AdvanceClocks(base::TimeDelta time) {
task_runner_->Sleep(time);
task_environment_.FastForwardBy(time);
receiver_clock_.Advance(time);
}
base::SimpleTestTickClock sender_clock_;
scoped_refptr<FakeSingleThreadTaskRunner> task_runner_;
scoped_refptr<CastEnvironment> cast_environment_;
base::TimeTicks NowTicks() const { return task_environment_.NowTicks(); }
LogEventDispatcher& log_dispatcher() { return *log_dispatcher_; }
base::test::TaskEnvironment task_environment_{
base::test::TaskEnvironment::TimeSource::MOCK_TIME};
std::unique_ptr<LogEventDispatcher> log_dispatcher_;
base::OnceClosure dispatcher_deletion_cb_;
base::SimpleTestTickClock receiver_clock_;
ReceiverTimeOffsetEstimatorImpl estimator_;
};
@ -68,7 +89,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
AdvanceClocks(base::Milliseconds(20));
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->timestamp = NowTicks();
encode_event->type = FRAME_ENCODED;
encode_event->media_type = VIDEO_EVENT;
encode_event->rtp_timestamp = rtp_timestamp;
@ -78,10 +99,10 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
encode_event->target_bitrate = 5678;
encode_event->encoder_cpu_utilization = 9.10;
encode_event->idealized_bitrate_utilization = 11.12;
cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
log_dispatcher().DispatchFrameEvent(std::move(encode_event));
std::unique_ptr<PacketEvent> send_event(new PacketEvent());
send_event->timestamp = sender_clock_.NowTicks();
send_event->timestamp = NowTicks();
send_event->type = PACKET_SENT_TO_NETWORK;
send_event->media_type = VIDEO_EVENT;
send_event->rtp_timestamp = rtp_timestamp;
@ -89,7 +110,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
send_event->packet_id = 56;
send_event->max_packet_id = 78;
send_event->size = 1500;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
log_dispatcher().DispatchPacketEvent(std::move(send_event));
EXPECT_FALSE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
@ -100,7 +121,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
ack_sent_event->media_type = VIDEO_EVENT;
ack_sent_event->rtp_timestamp = rtp_timestamp;
ack_sent_event->frame_id = frame_id;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_sent_event));
log_dispatcher().DispatchFrameEvent(std::move(ack_sent_event));
std::unique_ptr<PacketEvent> receive_event(new PacketEvent());
receive_event->timestamp = receiver_clock_.NowTicks();
@ -111,18 +132,18 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EstimateOffset) {
receive_event->packet_id = 56;
receive_event->max_packet_id = 78;
receive_event->size = 1500;
cast_environment_->logger()->DispatchPacketEvent(std::move(receive_event));
log_dispatcher().DispatchPacketEvent(std::move(receive_event));
EXPECT_FALSE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
AdvanceClocks(base::Milliseconds(30));
std::unique_ptr<FrameEvent> ack_event(new FrameEvent());
ack_event->timestamp = sender_clock_.NowTicks();
ack_event->timestamp = NowTicks();
ack_event->type = FRAME_ACK_RECEIVED;
ack_event->media_type = VIDEO_EVENT;
ack_event->rtp_timestamp = rtp_timestamp;
ack_event->frame_id = frame_id;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_event));
log_dispatcher().DispatchFrameEvent(std::move(ack_event));
EXPECT_TRUE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
@ -151,7 +172,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
AdvanceClocks(base::Milliseconds(20));
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->timestamp = NowTicks();
encode_event->type = FRAME_ENCODED;
encode_event->media_type = VIDEO_EVENT;
encode_event->rtp_timestamp = rtp_timestamp;
@ -161,10 +182,10 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
encode_event->target_bitrate = 5678;
encode_event->encoder_cpu_utilization = 9.10;
encode_event->idealized_bitrate_utilization = 11.12;
cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
log_dispatcher().DispatchFrameEvent(std::move(encode_event));
std::unique_ptr<PacketEvent> send_event(new PacketEvent());
send_event->timestamp = sender_clock_.NowTicks();
send_event->timestamp = NowTicks();
send_event->type = PACKET_SENT_TO_NETWORK;
send_event->media_type = VIDEO_EVENT;
send_event->rtp_timestamp = rtp_timestamp;
@ -172,14 +193,14 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
send_event->packet_id = 56;
send_event->max_packet_id = 78;
send_event->size = 1500;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
log_dispatcher().DispatchPacketEvent(std::move(send_event));
EXPECT_FALSE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
AdvanceClocks(base::Milliseconds(10));
base::TimeTicks event_b_time = receiver_clock_.NowTicks();
AdvanceClocks(base::Milliseconds(30));
base::TimeTicks event_c_time = sender_clock_.NowTicks();
base::TimeTicks event_c_time = NowTicks();
std::unique_ptr<FrameEvent> ack_event(new FrameEvent());
ack_event->timestamp = event_c_time;
@ -187,7 +208,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
ack_event->media_type = VIDEO_EVENT;
ack_event->rtp_timestamp = rtp_timestamp;
ack_event->frame_id = frame_id;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_event));
log_dispatcher().DispatchFrameEvent(std::move(ack_event));
EXPECT_FALSE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
@ -200,7 +221,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
receive_event->packet_id = 56;
receive_event->max_packet_id = 78;
receive_event->size = 1500;
cast_environment_->logger()->DispatchPacketEvent(std::move(receive_event));
log_dispatcher().DispatchPacketEvent(std::move(receive_event));
std::unique_ptr<FrameEvent> ack_sent_event(new FrameEvent());
ack_sent_event->timestamp = event_b_time;
@ -208,7 +229,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, EventCArrivesBeforeEventB) {
ack_sent_event->media_type = VIDEO_EVENT;
ack_sent_event->rtp_timestamp = rtp_timestamp;
ack_sent_event->frame_id = frame_id;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_sent_event));
log_dispatcher().DispatchFrameEvent(std::move(ack_sent_event));
EXPECT_TRUE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
@ -243,7 +264,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
// Events times in chronological order: 20, 30 x2, 50, 55, 60, 77, 80, 110
AdvanceClocks(base::Milliseconds(20));
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->timestamp = NowTicks();
encode_event->type = FRAME_ENCODED;
encode_event->media_type = VIDEO_EVENT;
encode_event->rtp_timestamp = rtp_timestamp_a;
@ -253,10 +274,10 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
encode_event->target_bitrate = 5678;
encode_event->encoder_cpu_utilization = 9.10;
encode_event->idealized_bitrate_utilization = 11.12;
cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
log_dispatcher().DispatchFrameEvent(std::move(encode_event));
std::unique_ptr<PacketEvent> send_event(new PacketEvent());
send_event->timestamp = sender_clock_.NowTicks();
send_event->timestamp = NowTicks();
send_event->type = PACKET_SENT_TO_NETWORK;
send_event->media_type = VIDEO_EVENT;
send_event->rtp_timestamp = rtp_timestamp_a;
@ -264,11 +285,11 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
send_event->packet_id = 56;
send_event->max_packet_id = 78;
send_event->size = 1500;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
log_dispatcher().DispatchPacketEvent(std::move(send_event));
AdvanceClocks(base::Milliseconds(10));
encode_event = std::make_unique<FrameEvent>();
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->timestamp = NowTicks();
encode_event->type = FRAME_ENCODED;
encode_event->media_type = VIDEO_EVENT;
encode_event->rtp_timestamp = rtp_timestamp_b;
@ -278,10 +299,10 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
encode_event->target_bitrate = 5678;
encode_event->encoder_cpu_utilization = 9.10;
encode_event->idealized_bitrate_utilization = 11.12;
cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
log_dispatcher().DispatchFrameEvent(std::move(encode_event));
send_event = std::make_unique<PacketEvent>();
send_event->timestamp = sender_clock_.NowTicks();
send_event->timestamp = NowTicks();
send_event->type = PACKET_SENT_TO_NETWORK;
send_event->media_type = VIDEO_EVENT;
send_event->rtp_timestamp = rtp_timestamp_b;
@ -289,7 +310,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
send_event->packet_id = 56;
send_event->max_packet_id = 78;
send_event->size = 1500;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
log_dispatcher().DispatchPacketEvent(std::move(send_event));
std::unique_ptr<FrameEvent> ack_sent_event(new FrameEvent());
ack_sent_event->timestamp = receiver_clock_.NowTicks();
@ -297,7 +318,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
ack_sent_event->media_type = VIDEO_EVENT;
ack_sent_event->rtp_timestamp = rtp_timestamp_a;
ack_sent_event->frame_id = frame_id_a;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_sent_event));
log_dispatcher().DispatchFrameEvent(std::move(ack_sent_event));
AdvanceClocks(base::Milliseconds(20));
@ -310,7 +331,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
receive_event->packet_id = 56;
receive_event->max_packet_id = 78;
receive_event->size = 1500;
cast_environment_->logger()->DispatchPacketEvent(std::move(receive_event));
log_dispatcher().DispatchPacketEvent(std::move(receive_event));
ack_sent_event = std::make_unique<FrameEvent>();
ack_sent_event->timestamp = receiver_clock_.NowTicks();
@ -318,29 +339,29 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
ack_sent_event->media_type = VIDEO_EVENT;
ack_sent_event->rtp_timestamp = rtp_timestamp_b;
ack_sent_event->frame_id = frame_id_b;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_sent_event));
log_dispatcher().DispatchFrameEvent(std::move(ack_sent_event));
AdvanceClocks(base::Milliseconds(5));
std::unique_ptr<FrameEvent> ack_event(new FrameEvent());
ack_event->timestamp = sender_clock_.NowTicks();
ack_event->timestamp = NowTicks();
ack_event->type = FRAME_ACK_RECEIVED;
ack_event->media_type = VIDEO_EVENT;
ack_event->rtp_timestamp = rtp_timestamp_b;
ack_event->frame_id = frame_id_b;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_event));
log_dispatcher().DispatchFrameEvent(std::move(ack_event));
AdvanceClocks(base::Milliseconds(5));
ack_event = std::make_unique<FrameEvent>();
ack_event->timestamp = sender_clock_.NowTicks();
ack_event->timestamp = NowTicks();
ack_event->type = FRAME_ACK_RECEIVED;
ack_event->media_type = VIDEO_EVENT;
ack_event->rtp_timestamp = rtp_timestamp_a;
ack_event->frame_id = frame_id_a;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_event));
log_dispatcher().DispatchFrameEvent(std::move(ack_event));
AdvanceClocks(base::Milliseconds(17));
encode_event = std::make_unique<FrameEvent>();
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->timestamp = NowTicks();
encode_event->type = FRAME_ENCODED;
encode_event->media_type = VIDEO_EVENT;
encode_event->rtp_timestamp = rtp_timestamp_c;
@ -350,10 +371,10 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
encode_event->target_bitrate = 5678;
encode_event->encoder_cpu_utilization = 9.10;
encode_event->idealized_bitrate_utilization = 11.12;
cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
log_dispatcher().DispatchFrameEvent(std::move(encode_event));
send_event = std::make_unique<PacketEvent>();
send_event->timestamp = sender_clock_.NowTicks();
send_event->timestamp = NowTicks();
send_event->type = PACKET_SENT_TO_NETWORK;
send_event->media_type = VIDEO_EVENT;
send_event->rtp_timestamp = rtp_timestamp_c;
@ -361,7 +382,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
send_event->packet_id = 56;
send_event->max_packet_id = 78;
send_event->size = 1500;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
log_dispatcher().DispatchPacketEvent(std::move(send_event));
AdvanceClocks(base::Milliseconds(3));
receive_event = std::make_unique<PacketEvent>();
@ -373,7 +394,7 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
receive_event->packet_id = 56;
receive_event->max_packet_id = 78;
receive_event->size = 1500;
cast_environment_->logger()->DispatchPacketEvent(std::move(receive_event));
log_dispatcher().DispatchPacketEvent(std::move(receive_event));
ack_sent_event = std::make_unique<FrameEvent>();
ack_sent_event->timestamp = receiver_clock_.NowTicks();
@ -381,16 +402,16 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
ack_sent_event->media_type = VIDEO_EVENT;
ack_sent_event->rtp_timestamp = rtp_timestamp_c;
ack_sent_event->frame_id = frame_id_c;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_sent_event));
log_dispatcher().DispatchFrameEvent(std::move(ack_sent_event));
AdvanceClocks(base::Milliseconds(30));
ack_event = std::make_unique<FrameEvent>();
ack_event->timestamp = sender_clock_.NowTicks();
ack_event->timestamp = NowTicks();
ack_event->type = FRAME_ACK_RECEIVED;
ack_event->media_type = VIDEO_EVENT;
ack_event->rtp_timestamp = rtp_timestamp_c;
ack_event->frame_id = frame_id_c;
cast_environment_->logger()->DispatchFrameEvent(std::move(ack_event));
log_dispatcher().DispatchFrameEvent(std::move(ack_event));
EXPECT_TRUE(estimator_.GetReceiverOffsetBounds(&lower_bound, &upper_bound));
int64_t lower_bound_ms = lower_bound.InMilliseconds();
@ -401,5 +422,4 @@ TEST_F(ReceiverTimeOffsetEstimatorImplTest, MultipleIterations) {
EXPECT_GT(upper_bound_ms, true_offset_ms);
}
} // namespace cast
} // namespace media
} // namespace media::cast

@ -1,44 +0,0 @@
// Copyright 2014 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/logging/simple_event_subscriber.h"
#include "base/check.h"
namespace media {
namespace cast {
// Default construction; event vectors start empty.
SimpleEventSubscriber::SimpleEventSubscriber() = default;
// Destruction must happen on the same thread as all other calls.
SimpleEventSubscriber::~SimpleEventSubscriber() {
  DCHECK(thread_checker_.CalledOnValidThread());
}
// Records a copy of `frame_event` for later retrieval via
// GetFrameEventsAndReset().
void SimpleEventSubscriber::OnReceiveFrameEvent(const FrameEvent& frame_event) {
  DCHECK(thread_checker_.CalledOnValidThread());
  frame_events_.push_back(frame_event);
}
// Records a copy of `packet_event` for later retrieval via
// GetPacketEventsAndReset().
void SimpleEventSubscriber::OnReceivePacketEvent(
    const PacketEvent& packet_event) {
  DCHECK(thread_checker_.CalledOnValidThread());
  packet_events_.push_back(packet_event);
}
// Hands all accumulated frame events to the caller (replacing any prior
// contents of `frame_events`) and leaves the internal list empty.
void SimpleEventSubscriber::GetFrameEventsAndReset(
    std::vector<FrameEvent>* frame_events) {
  DCHECK(thread_checker_.CalledOnValidThread());
  *frame_events = std::move(frame_events_);
  // Guarantee a well-defined (empty) state after the move.
  frame_events_.clear();
}
// Hands all accumulated packet events to the caller (replacing any prior
// contents of `packet_events`) and leaves the internal list empty.
void SimpleEventSubscriber::GetPacketEventsAndReset(
    std::vector<PacketEvent>* packet_events) {
  DCHECK(thread_checker_.CalledOnValidThread());
  *packet_events = std::move(packet_events_);
  // Guarantee a well-defined (empty) state after the move.
  packet_events_.clear();
}
} // namespace cast
} // namespace media

@ -1,53 +0,0 @@
// Copyright 2014 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAST_LOGGING_SIMPLE_EVENT_SUBSCRIBER_H_
#define MEDIA_CAST_LOGGING_SIMPLE_EVENT_SUBSCRIBER_H_
#include <vector>
#include "base/compiler_specific.h"
#include "base/threading/thread_checker.h"
#include "media/cast/logging/raw_event_subscriber.h"
namespace media {
namespace cast {
// RawEventSubscriber implementation that records all incoming raw events
// in std::vector's.
// The user of this class can call the GetXXXEventsAndReset functions to get
// list of events that have accumulated since last invocation.
class SimpleEventSubscriber final : public RawEventSubscriber {
 public:
  SimpleEventSubscriber();
  SimpleEventSubscriber(const SimpleEventSubscriber&) = delete;
  SimpleEventSubscriber& operator=(const SimpleEventSubscriber&) = delete;
  ~SimpleEventSubscriber() final;
  // RawEventSubscriber implementations.
  void OnReceiveFrameEvent(const FrameEvent& frame_event) final;
  void OnReceivePacketEvent(const PacketEvent& packet_event) final;
  // Assigns frame events received so far to |frame_events| and clears them
  // from this object.
  void GetFrameEventsAndReset(std::vector<FrameEvent>* frame_events);
  // Assigns packet events received so far to |packet_events| and clears them
  // from this object.
  void GetPacketEventsAndReset(std::vector<PacketEvent>* packet_events);
 private:
  // Accumulated events, in arrival order.
  std::vector<FrameEvent> frame_events_;
  std::vector<PacketEvent> packet_events_;
  // All functions must be called on the main thread.
  base::ThreadChecker thread_checker_;
};
} // namespace cast
} // namespace media
#endif // MEDIA_CAST_LOGGING_SIMPLE_EVENT_SUBSCRIBER_H_

@ -1,114 +0,0 @@
// Copyright 2014 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/logging/simple_event_subscriber.h"
#include <memory>
#include <utility>
#include "base/memory/ref_counted.h"
#include "base/test/simple_test_tick_clock.h"
#include "base/time/tick_clock.h"
#include "media/base/fake_single_thread_task_runner.h"
#include "media/cast/cast_environment.h"
#include "media/cast/logging/logging_defines.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
namespace cast {
// Fixture that wires a SimpleEventSubscriber into a CastEnvironment whose
// three task runners all share a single fake (manually-driven) runner.
class SimpleEventSubscriberTest : public ::testing::Test {
 protected:
  SimpleEventSubscriberTest()
      : task_runner_(new FakeSingleThreadTaskRunner(&testing_clock_)),
        cast_environment_(new CastEnvironment(&testing_clock_,
                                              task_runner_,
                                              task_runner_,
                                              task_runner_)) {
    cast_environment_->logger()->Subscribe(&event_subscriber_);
  }
  ~SimpleEventSubscriberTest() override {
    // Must unsubscribe before the subscriber member is destroyed.
    cast_environment_->logger()->Unsubscribe(&event_subscriber_);
  }
  // Drives all three "threads" of the CastEnvironment.
  base::SimpleTestTickClock testing_clock_;
  scoped_refptr<FakeSingleThreadTaskRunner> task_runner_;
  scoped_refptr<CastEnvironment> cast_environment_;
  SimpleEventSubscriber event_subscriber_;
};
// Dispatches three frame events and two packet events, then verifies that
// GetXxxEventsAndReset() returns them all and that a second call returns
// nothing (the "reset" half of the contract).
TEST_F(SimpleEventSubscriberTest, GetAndResetEvents) {
  // Log some frame events. Use std::make_unique consistently instead of
  // bare `new` (the original mixed both styles).
  auto encode_event = std::make_unique<FrameEvent>();
  encode_event->timestamp = testing_clock_.NowTicks();
  encode_event->type = FRAME_ENCODED;
  encode_event->media_type = AUDIO_EVENT;
  encode_event->rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));
  encode_event->frame_id = FrameId::first();
  encode_event->size = 1234;
  encode_event->key_frame = true;
  encode_event->target_bitrate = 128u;
  encode_event->encoder_cpu_utilization = 0.01;
  encode_event->idealized_bitrate_utilization = 0.02;
  cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
  auto playout_event = std::make_unique<FrameEvent>();
  playout_event->timestamp = testing_clock_.NowTicks();
  playout_event->type = FRAME_PLAYOUT;
  playout_event->media_type = AUDIO_EVENT;
  playout_event->rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(100));
  playout_event->frame_id = FrameId::first();
  playout_event->delay_delta = base::Milliseconds(100);
  cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));
  auto decode_event = std::make_unique<FrameEvent>();
  decode_event->timestamp = testing_clock_.NowTicks();
  decode_event->type = FRAME_DECODED;
  decode_event->media_type = AUDIO_EVENT;
  decode_event->rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(200));
  decode_event->frame_id = FrameId::first();
  cast_environment_->logger()->DispatchFrameEvent(std::move(decode_event));
  // Log some packet events.
  auto receive_event = std::make_unique<PacketEvent>();
  receive_event->timestamp = testing_clock_.NowTicks();
  receive_event->type = PACKET_RECEIVED;
  receive_event->media_type = AUDIO_EVENT;
  receive_event->rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(200));
  receive_event->frame_id = FrameId::first();
  receive_event->packet_id = 1u;
  receive_event->max_packet_id = 5u;
  receive_event->size = 100u;
  cast_environment_->logger()->DispatchPacketEvent(std::move(receive_event));
  receive_event = std::make_unique<PacketEvent>();
  receive_event->timestamp = testing_clock_.NowTicks();
  receive_event->type = PACKET_RECEIVED;
  receive_event->media_type = VIDEO_EVENT;
  receive_event->rtp_timestamp = RtpTimeTicks().Expand(UINT32_C(200));
  receive_event->frame_id = FrameId::first();
  receive_event->packet_id = 1u;
  receive_event->max_packet_id = 10u;
  receive_event->size = 1024u;
  cast_environment_->logger()->DispatchPacketEvent(std::move(receive_event));
  // All dispatched events should be handed back.
  std::vector<FrameEvent> frame_events;
  event_subscriber_.GetFrameEventsAndReset(&frame_events);
  EXPECT_EQ(3u, frame_events.size());
  std::vector<PacketEvent> packet_events;
  event_subscriber_.GetPacketEventsAndReset(&packet_events);
  EXPECT_EQ(2u, packet_events.size());
  // Calling this function again should result in empty vector because no events
  // were logged since last call.
  event_subscriber_.GetFrameEventsAndReset(&frame_events);
  event_subscriber_.GetPacketEventsAndReset(&packet_events);
  EXPECT_TRUE(frame_events.empty());
  EXPECT_TRUE(packet_events.empty());
}
} // namespace cast
} // namespace media

@ -19,8 +19,7 @@
case enum: \
return #enum
namespace media {
namespace cast {
namespace media::cast {
namespace {
@ -53,7 +52,7 @@ void StatsEventSubscriber::SimpleHistogram::Add(int64_t sample) {
++buckets_.back();
} else {
size_t index = 1 + (sample - min_) / width_;
DCHECK_LT(index, buckets_.size());
CHECK_LT(index, buckets_.size());
++buckets_[index];
}
}
@ -93,8 +92,8 @@ base::Value::List StatsEventSubscriber::SimpleHistogram::GetHistogram() const {
StatsEventSubscriber::StatsEventSubscriber(
EventMediaType event_media_type,
const base::TickClock* clock,
ReceiverTimeOffsetEstimator* offset_estimator)
const base::TickClock& clock,
ReceiverTimeOffsetEstimator& offset_estimator)
: event_media_type_(event_media_type),
clock_(clock),
offset_estimator_(offset_estimator),
@ -108,7 +107,7 @@ StatsEventSubscriber::StatsEventSubscriber(
num_frames_dropped_by_encoder_(0),
num_frames_late_(0),
start_time_(clock_->NowTicks()) {
DCHECK(event_media_type == AUDIO_EVENT || event_media_type == VIDEO_EVENT);
CHECK(event_media_type == AUDIO_EVENT || event_media_type == VIDEO_EVENT);
InitHistograms();
}
@ -445,7 +444,7 @@ void StatsEventSubscriber::GetStatsInternal(StatsMap* stats_map) const {
StatsEventSubscriber::SimpleHistogram*
StatsEventSubscriber::GetHistogramForTesting(CastStat stats) const {
DCHECK(histograms_.find(stats) != histograms_.end());
CHECK(histograms_.find(stats) != histograms_.end());
return histograms_.find(stats)->second.get();
}
@ -729,5 +728,4 @@ StatsEventSubscriber::PacketLogStats::~PacketLogStats() = default;
StatsEventSubscriber::FrameInfo::FrameInfo() : encoded(false) {}
StatsEventSubscriber::FrameInfo::~FrameInfo() = default;
} // namespace cast
} // namespace media
} // namespace media::cast

@ -33,8 +33,8 @@ class StatsEventSubscriberTest;
class StatsEventSubscriber final : public RawEventSubscriber {
public:
StatsEventSubscriber(EventMediaType event_media_type,
const base::TickClock* clock,
ReceiverTimeOffsetEstimator* offset_estimator);
const base::TickClock& clock,
ReceiverTimeOffsetEstimator& offset_estimator);
StatsEventSubscriber(const StatsEventSubscriber&) = delete;
StatsEventSubscriber& operator=(const StatsEventSubscriber&) = delete;
@ -254,10 +254,10 @@ class StatsEventSubscriber final : public RawEventSubscriber {
const EventMediaType event_media_type_;
// Not owned by this class.
const raw_ptr<const base::TickClock> clock_;
const raw_ref<const base::TickClock> clock_;
// Not owned by this class.
const raw_ptr<ReceiverTimeOffsetEstimator> offset_estimator_;
const raw_ref<ReceiverTimeOffsetEstimator> offset_estimator_;
FrameStatsMap frame_stats_;
PacketStatsMap packet_stats_;

@ -13,11 +13,11 @@
#include "base/memory/ref_counted.h"
#include "base/rand_util.h"
#include "base/test/simple_test_tick_clock.h"
#include "base/test/task_environment.h"
#include "base/time/tick_clock.h"
#include "base/time/time.h"
#include "base/values.h"
#include "media/base/fake_single_thread_task_runner.h"
#include "media/cast/cast_environment.h"
#include "media/cast/logging/log_event_dispatcher.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/test/fake_receiver_time_offset_estimator.h"
#include "testing/gtest/include/gtest/gtest.h"
@ -26,45 +26,62 @@ namespace {
const int kReceiverOffsetSecs = 100;
}
namespace media {
namespace cast {
namespace media::cast {
class StatsEventSubscriberTest : public ::testing::Test {
protected:
StatsEventSubscriberTest()
: task_runner_(new FakeSingleThreadTaskRunner(&sender_clock_)),
cast_environment_(new CastEnvironment(&sender_clock_,
task_runner_,
task_runner_,
task_runner_)),
: log_dispatcher_(std::make_unique<LogEventDispatcher>(
task_environment_.GetMainThreadTaskRunner(),
// NOTE: This is safe because we wait for this task before deleting
// `this`.
base::BindOnce(&StatsEventSubscriberTest::OnDispatcherDeletion,
base::Unretained(this)))),
fake_offset_estimator_(base::Seconds(kReceiverOffsetSecs)) {
// Synchronize the SimpleTestTickClock with the TaskEnvironment.
receiver_clock_.SetNowTicks(NowTicks());
receiver_clock_.Advance(base::Seconds(kReceiverOffsetSecs));
cast_environment_->logger()->Subscribe(&fake_offset_estimator_);
log_dispatcher_->Subscribe(&fake_offset_estimator_);
}
~StatsEventSubscriberTest() override {
if (subscriber_.get()) {
cast_environment_->logger()->Unsubscribe(subscriber_.get());
log_dispatcher_->Unsubscribe(subscriber_.get());
}
cast_environment_->logger()->Unsubscribe(&fake_offset_estimator_);
log_dispatcher_->Unsubscribe(&fake_offset_estimator_);
dispatcher_deletion_cb_ = task_environment_.QuitClosure();
log_dispatcher_.reset();
task_environment_.RunUntilQuit();
}
void OnDispatcherDeletion() {
ASSERT_TRUE(dispatcher_deletion_cb_);
std::move(dispatcher_deletion_cb_).Run();
}
LogEventDispatcher& log_dispatcher() { return *log_dispatcher_; }
base::TimeTicks NowTicks() const { return task_environment_.NowTicks(); }
void AdvanceClocks(base::TimeDelta delta) {
task_runner_->Sleep(delta);
task_environment_.FastForwardBy(delta);
receiver_clock_.Advance(delta);
}
void Init(EventMediaType event_media_type) {
DCHECK(!subscriber_.get());
subscriber_ = std::make_unique<StatsEventSubscriber>(
event_media_type, cast_environment_->Clock(), &fake_offset_estimator_);
cast_environment_->logger()->Subscribe(subscriber_.get());
event_media_type, *task_environment_.GetMockTickClock(),
fake_offset_estimator_);
log_dispatcher().Subscribe(subscriber_.get());
}
base::SimpleTestTickClock sender_clock_;
base::test::TaskEnvironment task_environment_{
base::test::TaskEnvironment::TimeSource::MOCK_TIME};
base::SimpleTestTickClock receiver_clock_;
scoped_refptr<FakeSingleThreadTaskRunner> task_runner_;
scoped_refptr<CastEnvironment> cast_environment_;
std::unique_ptr<LogEventDispatcher> log_dispatcher_;
base::OnceClosure dispatcher_deletion_cb_;
test::FakeReceiverTimeOffsetEstimator fake_offset_estimator_;
std::unique_ptr<StatsEventSubscriber> subscriber_;
};
@ -79,30 +96,28 @@ TEST_F(StatsEventSubscriberTest, CaptureEncode) {
// when computing dropped frames.
int num_frames = StatsEventSubscriber::kMaxFrameInfoMapSize + 50;
int dropped_frames = 0;
base::TimeTicks start_time = sender_clock_.NowTicks();
base::TimeTicks start_time = NowTicks();
// Drop half the frames during the encode step.
for (int i = 0; i < num_frames; i++) {
std::unique_ptr<FrameEvent> capture_begin_event(new FrameEvent());
capture_begin_event->timestamp = sender_clock_.NowTicks();
capture_begin_event->timestamp = NowTicks();
capture_begin_event->type = FRAME_CAPTURE_BEGIN;
capture_begin_event->media_type = VIDEO_EVENT;
capture_begin_event->rtp_timestamp = rtp_timestamp;
cast_environment_->logger()->DispatchFrameEvent(
std::move(capture_begin_event));
log_dispatcher().DispatchFrameEvent(std::move(capture_begin_event));
AdvanceClocks(base::Microseconds(10));
std::unique_ptr<FrameEvent> capture_end_event(new FrameEvent());
capture_end_event->timestamp = sender_clock_.NowTicks();
capture_end_event->timestamp = NowTicks();
capture_end_event->type = FRAME_CAPTURE_END;
capture_end_event->media_type = VIDEO_EVENT;
capture_end_event->rtp_timestamp = rtp_timestamp;
cast_environment_->logger()->DispatchFrameEvent(
std::move(capture_end_event));
log_dispatcher().DispatchFrameEvent(std::move(capture_end_event));
if (i % 2 == 0) {
AdvanceClocks(base::Microseconds(10));
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->timestamp = NowTicks();
encode_event->type = FRAME_ENCODED;
encode_event->media_type = VIDEO_EVENT;
encode_event->rtp_timestamp = rtp_timestamp;
@ -112,7 +127,7 @@ TEST_F(StatsEventSubscriberTest, CaptureEncode) {
encode_event->target_bitrate = 5678;
encode_event->encoder_cpu_utilization = 9.10;
encode_event->idealized_bitrate_utilization = 11.12;
cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
log_dispatcher().DispatchFrameEvent(std::move(encode_event));
} else if (i < extra_frames) {
dropped_frames++;
}
@ -121,7 +136,7 @@ TEST_F(StatsEventSubscriberTest, CaptureEncode) {
frame_id++;
}
base::TimeTicks end_time = sender_clock_.NowTicks();
base::TimeTicks end_time = NowTicks();
StatsEventSubscriber::StatsMap stats_map;
subscriber_->GetStatsInternal(&stats_map);
@ -154,16 +169,16 @@ TEST_F(StatsEventSubscriberTest, Encode) {
RtpTimeTicks rtp_timestamp;
FrameId frame_id = FrameId::first();
int num_frames = 10;
base::TimeTicks start_time = sender_clock_.NowTicks();
base::TimeTicks start_time = NowTicks();
AdvanceClocks(base::Microseconds(35678));
base::TimeTicks first_event_time = sender_clock_.NowTicks();
base::TimeTicks first_event_time = NowTicks();
base::TimeTicks last_event_time;
int total_size = 0;
for (int i = 0; i < num_frames; i++) {
int size = 1000 + base::RandInt(-100, 100);
total_size += size;
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->timestamp = NowTicks();
encode_event->type = FRAME_ENCODED;
encode_event->media_type = VIDEO_EVENT;
encode_event->rtp_timestamp = rtp_timestamp;
@ -173,15 +188,15 @@ TEST_F(StatsEventSubscriberTest, Encode) {
encode_event->target_bitrate = 5678;
encode_event->encoder_cpu_utilization = 9.10;
encode_event->idealized_bitrate_utilization = 11.12;
cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
last_event_time = sender_clock_.NowTicks();
log_dispatcher().DispatchFrameEvent(std::move(encode_event));
last_event_time = NowTicks();
AdvanceClocks(base::Microseconds(35678));
rtp_timestamp += RtpTimeDelta::FromTicks(90);
frame_id++;
}
base::TimeTicks end_time = sender_clock_.NowTicks();
base::TimeTicks end_time = NowTicks();
StatsEventSubscriber::StatsMap stats_map;
subscriber_->GetStatsInternal(&stats_map);
@ -218,7 +233,7 @@ TEST_F(StatsEventSubscriberTest, Decode) {
RtpTimeTicks rtp_timestamp;
FrameId frame_id = FrameId::first();
int num_frames = 10;
base::TimeTicks start_time = sender_clock_.NowTicks();
base::TimeTicks start_time = NowTicks();
for (int i = 0; i < num_frames; i++) {
std::unique_ptr<FrameEvent> decode_event(new FrameEvent());
decode_event->timestamp = receiver_clock_.NowTicks();
@ -226,14 +241,14 @@ TEST_F(StatsEventSubscriberTest, Decode) {
decode_event->media_type = VIDEO_EVENT;
decode_event->rtp_timestamp = rtp_timestamp;
decode_event->frame_id = frame_id;
cast_environment_->logger()->DispatchFrameEvent(std::move(decode_event));
log_dispatcher().DispatchFrameEvent(std::move(decode_event));
AdvanceClocks(base::Microseconds(36789));
rtp_timestamp += RtpTimeDelta::FromTicks(90);
frame_id++;
}
base::TimeTicks end_time = sender_clock_.NowTicks();
base::TimeTicks end_time = NowTicks();
StatsEventSubscriber::StatsMap stats_map;
subscriber_->GetStatsInternal(&stats_map);
@ -264,7 +279,7 @@ TEST_F(StatsEventSubscriberTest, PlayoutDelay) {
playout_event->rtp_timestamp = rtp_timestamp;
playout_event->frame_id = frame_id;
playout_event->delay_delta = delay;
cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));
log_dispatcher().DispatchFrameEvent(std::move(playout_event));
AdvanceClocks(base::Microseconds(37890));
rtp_timestamp += RtpTimeDelta::FromTicks(90);
@ -289,12 +304,11 @@ TEST_F(StatsEventSubscriberTest, E2ELatency) {
base::TimeDelta total_latency;
for (int i = 0; i < num_frames; i++) {
std::unique_ptr<FrameEvent> capture_begin_event(new FrameEvent());
capture_begin_event->timestamp = sender_clock_.NowTicks();
capture_begin_event->timestamp = NowTicks();
capture_begin_event->type = FRAME_CAPTURE_BEGIN;
capture_begin_event->media_type = VIDEO_EVENT;
capture_begin_event->rtp_timestamp = rtp_timestamp;
cast_environment_->logger()->DispatchFrameEvent(
std::move(capture_begin_event));
log_dispatcher().DispatchFrameEvent(std::move(capture_begin_event));
int latency_micros = 100000 + base::RandInt(-5000, 50000);
base::TimeDelta latency = base::Microseconds(latency_micros);
@ -311,7 +325,7 @@ TEST_F(StatsEventSubscriberTest, E2ELatency) {
playout_event->rtp_timestamp = rtp_timestamp;
playout_event->frame_id = frame_id;
playout_event->delay_delta = delay;
cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));
log_dispatcher().DispatchFrameEvent(std::move(playout_event));
rtp_timestamp += RtpTimeDelta::FromTicks(90);
frame_id++;
@ -332,7 +346,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
RtpTimeTicks rtp_timestamp;
int num_packets = 10;
int num_latency_recorded_packets = 0;
base::TimeTicks start_time = sender_clock_.NowTicks();
base::TimeTicks start_time = NowTicks();
int total_size = 0;
int retransmit_total_size = 0;
base::TimeDelta total_network_latency;
@ -342,7 +356,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
int num_packets_retransmitted = 0;
int num_packets_rtx_rejected = 0;
base::TimeTicks sender_encoded_time = sender_clock_.NowTicks();
base::TimeTicks sender_encoded_time = NowTicks();
base::TimeTicks receiver_encoded_time = receiver_clock_.NowTicks();
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_encoded_time;
@ -350,7 +364,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
encode_event->media_type = VIDEO_EVENT;
encode_event->rtp_timestamp = rtp_timestamp;
encode_event->frame_id = FrameId::first();
cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
log_dispatcher().DispatchFrameEvent(std::move(encode_event));
// Every 2nd packet will be retransmitted once.
// Every 4th packet will be retransmitted twice.
@ -360,7 +374,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
total_size += size;
std::unique_ptr<PacketEvent> send_event(new PacketEvent());
send_event->timestamp = sender_clock_.NowTicks();
send_event->timestamp = NowTicks();
send_event->type = PACKET_SENT_TO_NETWORK;
send_event->media_type = VIDEO_EVENT;
send_event->rtp_timestamp = rtp_timestamp;
@ -368,9 +382,9 @@ TEST_F(StatsEventSubscriberTest, Packets) {
send_event->packet_id = i;
send_event->max_packet_id = num_packets - 1;
send_event->size = size;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
log_dispatcher().DispatchPacketEvent(std::move(send_event));
total_queueing_latency += sender_clock_.NowTicks() - sender_encoded_time;
total_queueing_latency += NowTicks() - sender_encoded_time;
int latency_micros = 20000 + base::RandInt(-10000, 10000);
base::TimeDelta latency = base::Microseconds(latency_micros);
@ -398,8 +412,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
retransmit_event->packet_id = i;
retransmit_event->max_packet_id = num_packets - 1;
retransmit_event->size = size;
cast_environment_->logger()->DispatchPacketEvent(
std::move(retransmit_event));
log_dispatcher().DispatchPacketEvent(std::move(retransmit_event));
retransmit_total_size += size;
num_packets_retransmitted++;
@ -417,8 +430,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
retransmit_event->packet_id = i;
retransmit_event->max_packet_id = num_packets - 1;
retransmit_event->size = size;
cast_environment_->logger()->DispatchPacketEvent(
std::move(retransmit_event));
log_dispatcher().DispatchPacketEvent(std::move(retransmit_event));
retransmit_total_size += size;
num_packets_retransmitted++;
@ -436,8 +448,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
retransmit_event->packet_id = i;
retransmit_event->max_packet_id = num_packets - 1;
retransmit_event->size = size;
cast_environment_->logger()->DispatchPacketEvent(
std::move(retransmit_event));
log_dispatcher().DispatchPacketEvent(std::move(retransmit_event));
std::unique_ptr<PacketEvent> reject_event(new PacketEvent());
reject_event->timestamp = receiver_clock_.NowTicks();
@ -448,7 +459,7 @@ TEST_F(StatsEventSubscriberTest, Packets) {
reject_event->packet_id = i;
reject_event->max_packet_id = num_packets - 1;
reject_event->size = size;
cast_environment_->logger()->DispatchPacketEvent(std::move(reject_event));
log_dispatcher().DispatchPacketEvent(std::move(reject_event));
retransmit_total_size += size;
num_packets_retransmitted++;
@ -464,12 +475,12 @@ TEST_F(StatsEventSubscriberTest, Packets) {
receive_event->packet_id = i;
receive_event->max_packet_id = num_packets - 1;
receive_event->size = size;
cast_environment_->logger()->DispatchPacketEvent(std::move(receive_event));
log_dispatcher().DispatchPacketEvent(std::move(receive_event));
num_packets_received++;
}
base::TimeTicks end_time = sender_clock_.NowTicks();
base::TimeTicks end_time = NowTicks();
base::TimeDelta duration = end_time - start_time;
StatsEventSubscriber::StatsMap stats_map;
@ -526,9 +537,8 @@ TEST_F(StatsEventSubscriberTest, Packets) {
EXPECT_DOUBLE_EQ(it->second, static_cast<double>(num_packets_rtx_rejected));
}
bool CheckHistogramHasValue(const base::Value::List& values,
const std::string& bucket,
int expected_count) {
std::optional<int> GetBucketCount(const base::Value::List& values,
const std::string& bucket) {
for (const base::Value& value : values) {
if (!value.is_dict()) {
continue;
@ -539,11 +549,11 @@ bool CheckHistogramHasValue(const base::Value::List& values,
}
std::optional<int> bucket_count = dict.FindInt(bucket);
if (!bucket_count.has_value()) {
return false;
return std::nullopt;
}
return bucket_count == expected_count;
return bucket_count;
}
return false;
return std::nullopt;
}
TEST_F(StatsEventSubscriberTest, Histograms) {
@ -560,25 +570,23 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
rtp_timestamp += RtpTimeDelta::FromTicks(1);
std::unique_ptr<FrameEvent> capture_begin_event(new FrameEvent());
capture_begin_event->timestamp = sender_clock_.NowTicks();
capture_begin_event->timestamp = NowTicks();
capture_begin_event->type = FRAME_CAPTURE_BEGIN;
capture_begin_event->media_type = VIDEO_EVENT;
capture_begin_event->rtp_timestamp = rtp_timestamp;
cast_environment_->logger()->DispatchFrameEvent(
std::move(capture_begin_event));
log_dispatcher().DispatchFrameEvent(std::move(capture_begin_event));
AdvanceClocks(base::Milliseconds(10));
std::unique_ptr<FrameEvent> capture_end_event(new FrameEvent());
capture_end_event->timestamp = sender_clock_.NowTicks();
capture_end_event->timestamp = NowTicks();
capture_end_event->type = FRAME_CAPTURE_END;
capture_end_event->media_type = VIDEO_EVENT;
capture_end_event->rtp_timestamp = rtp_timestamp;
cast_environment_->logger()->DispatchFrameEvent(
std::move(capture_end_event));
log_dispatcher().DispatchFrameEvent(std::move(capture_end_event));
AdvanceClocks(base::Milliseconds(15));
std::unique_ptr<FrameEvent> encode_event(new FrameEvent());
encode_event->timestamp = sender_clock_.NowTicks();
encode_event->timestamp = NowTicks();
encode_event->type = FRAME_ENCODED;
encode_event->media_type = VIDEO_EVENT;
encode_event->rtp_timestamp = rtp_timestamp;
@ -588,7 +596,7 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
encode_event->target_bitrate = 5678;
encode_event->encoder_cpu_utilization = 9.10;
encode_event->idealized_bitrate_utilization = 11.12;
cast_environment_->logger()->DispatchFrameEvent(std::move(encode_event));
log_dispatcher().DispatchFrameEvent(std::move(encode_event));
}
// Send 3 packets for the last frame.
@ -596,7 +604,7 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
for (int i = 0; i < 3; ++i) {
AdvanceClocks(base::Milliseconds(100));
std::unique_ptr<PacketEvent> send_event(new PacketEvent());
send_event->timestamp = sender_clock_.NowTicks();
send_event->timestamp = NowTicks();
send_event->type = PACKET_SENT_TO_NETWORK;
send_event->media_type = VIDEO_EVENT;
send_event->rtp_timestamp = rtp_timestamp;
@ -604,7 +612,7 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
send_event->packet_id = i;
send_event->max_packet_id = 2;
send_event->size = 123;
cast_environment_->logger()->DispatchPacketEvent(std::move(send_event));
log_dispatcher().DispatchPacketEvent(std::move(send_event));
}
// Receive 3 packets for the last frame.
@ -621,7 +629,7 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
receive_event->packet_id = i;
receive_event->max_packet_id = 2;
receive_event->size = 123;
cast_environment_->logger()->DispatchPacketEvent(std::move(receive_event));
log_dispatcher().DispatchPacketEvent(std::move(receive_event));
}
std::unique_ptr<FrameEvent> playout_event(new FrameEvent());
@ -631,7 +639,7 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
playout_event->rtp_timestamp = rtp_timestamp;
playout_event->frame_id = frame_id;
playout_event->delay_delta = base::Milliseconds(100);
cast_environment_->logger()->DispatchFrameEvent(std::move(playout_event));
log_dispatcher().DispatchFrameEvent(std::move(playout_event));
StatsEventSubscriber::SimpleHistogram* histogram;
base::Value::List values;
@ -640,42 +648,41 @@ TEST_F(StatsEventSubscriberTest, Histograms) {
StatsEventSubscriber::CAPTURE_LATENCY_MS_HISTO);
ASSERT_TRUE(histogram);
values = histogram->GetHistogram();
EXPECT_TRUE(CheckHistogramHasValue(values, "10-14", 10));
EXPECT_EQ(GetBucketCount(values, "10-14"), 10);
histogram = subscriber_->GetHistogramForTesting(
StatsEventSubscriber::ENCODE_TIME_MS_HISTO);
ASSERT_TRUE(histogram);
values = histogram->GetHistogram();
EXPECT_TRUE(CheckHistogramHasValue(values, "15-19", 10));
EXPECT_EQ(GetBucketCount(values, "15-19"), 10);
histogram = subscriber_->GetHistogramForTesting(
StatsEventSubscriber::QUEUEING_LATENCY_MS_HISTO);
ASSERT_TRUE(histogram);
values = histogram->GetHistogram();
EXPECT_TRUE(CheckHistogramHasValue(values, "100-119", 1));
EXPECT_TRUE(CheckHistogramHasValue(values, "200-219", 1));
EXPECT_TRUE(CheckHistogramHasValue(values, "300-319", 1));
EXPECT_EQ(GetBucketCount(values, "100-119"), 1);
EXPECT_EQ(GetBucketCount(values, "200-219"), 1);
EXPECT_EQ(GetBucketCount(values, "300-319"), 1);
histogram = subscriber_->GetHistogramForTesting(
StatsEventSubscriber::NETWORK_LATENCY_MS_HISTO);
ASSERT_TRUE(histogram);
values = histogram->GetHistogram();
EXPECT_TRUE(CheckHistogramHasValue(values, "100-119", 1));
EXPECT_TRUE(CheckHistogramHasValue(values, "200-219", 1));
EXPECT_TRUE(CheckHistogramHasValue(values, "300-319", 1));
EXPECT_EQ(GetBucketCount(values, "100-119"), 1);
EXPECT_EQ(GetBucketCount(values, "200-219"), 1);
EXPECT_EQ(GetBucketCount(values, "300-319"), 1);
histogram = subscriber_->GetHistogramForTesting(
StatsEventSubscriber::PACKET_LATENCY_MS_HISTO);
ASSERT_TRUE(histogram);
values = histogram->GetHistogram();
EXPECT_TRUE(CheckHistogramHasValue(values, "400-419", 3));
EXPECT_EQ(GetBucketCount(values, "400-419"), 3);
histogram = subscriber_->GetHistogramForTesting(
StatsEventSubscriber::LATE_FRAME_MS_HISTO);
ASSERT_TRUE(histogram);
values = histogram->GetHistogram();
EXPECT_TRUE(CheckHistogramHasValue(values, "100-119", 1));
EXPECT_EQ(GetBucketCount(values, "100-119"), 1);
}
} // namespace cast
} // namespace media
} // namespace media::cast

@ -51,7 +51,7 @@ AudioSender::AudioSender(scoped_refptr<CastEnvironment> cast_environment,
// Post a task now with its initialization result status to allow the client
// to start sending frames.
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
CastEnvironment::ThreadId::kMain, FROM_HERE,
base::BindOnce(std::move(status_change_cb),
audio_encoder_ ? audio_encoder_->InitializationResult()
: STATUS_INVALID_CONFIGURATION));
@ -72,7 +72,7 @@ AudioSender::~AudioSender() {
void AudioSender::InsertAudio(std::unique_ptr<AudioBus> audio_bus,
base::TimeTicks recorded_time) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
CHECK(audio_encoder_);
number_of_frames_inserted_++;
@ -127,7 +127,7 @@ base::TimeDelta AudioSender::GetEncoderBacklogDuration() const {
void AudioSender::OnEncodedAudioFrame(
std::unique_ptr<SenderEncodedFrame> encoded_frame,
int samples_skipped) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
samples_in_encoder_ -= audio_encoder_->GetSamplesPerFrame() + samples_skipped;
DCHECK_GE(samples_in_encoder_, 0);

@ -26,6 +26,7 @@
#include "media/cast/constants.h"
#include "media/cast/test/fake_openscreen_clock.h"
#include "media/cast/test/mock_openscreen_environment.h"
#include "media/cast/test/test_with_cast_environment.h"
#include "media/cast/test/utility/audio_utility.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/openscreen/src/cast/streaming/public/environment.h"
@ -50,19 +51,13 @@ void SaveOperationalStatus(OperationalStatus* out_status,
} // namespace
class AudioSenderTest : public ::testing::Test {
class AudioSenderTest : public TestWithCastEnvironment {
protected:
AudioSenderTest()
: task_runner_(
base::MakeRefCounted<FakeSingleThreadTaskRunner>(&testing_clock_)),
cast_environment_(base::MakeRefCounted<CastEnvironment>(&testing_clock_,
task_runner_,
task_runner_,
task_runner_)),
openscreen_task_runner_(task_runner_) {
FakeOpenscreenClock::SetTickClock(&testing_clock_);
: openscreen_task_runner_(task_environment().GetMainThreadTaskRunner()) {
FakeOpenscreenClock::SetTickClock(GetMockTickClock());
InitializeMediaLibrary();
testing_clock_.Advance(base::TimeTicks::Now() - base::TimeTicks());
AdvanceClock(base::TimeTicks::Now() - base::TimeTicks());
mock_openscreen_environment_ = std::make_unique<MockOpenscreenEnvironment>(
&FakeOpenscreenClock::now, openscreen_task_runner_);
@ -89,21 +84,17 @@ class AudioSenderTest : public ::testing::Test {
OperationalStatus operational_status = STATUS_UNINITIALIZED;
audio_sender_ = std::make_unique<AudioSender>(
cast_environment_, audio_config_,
cast_environment(), audio_config_,
base::BindOnce(&SaveOperationalStatus, &operational_status),
std::move(openscreen_audio_sender));
task_runner_->RunTasks();
RunUntilIdle();
CHECK_EQ(STATUS_INITIALIZED, operational_status);
}
~AudioSenderTest() override {
FakeOpenscreenClock::ClearTickClock();
openscreen_audio_sender_ = nullptr;
}
base::SimpleTestTickClock testing_clock_;
const scoped_refptr<FakeSingleThreadTaskRunner> task_runner_;
const scoped_refptr<CastEnvironment> cast_environment_;
// openscreen::Sender related classes.
openscreen_platform::TaskRunner openscreen_task_runner_;
std::unique_ptr<media::cast::MockOpenscreenEnvironment>
@ -125,8 +116,8 @@ TEST_F(AudioSenderTest, Encode20ms) {
EXPECT_CALL(*mock_openscreen_environment_, SendPacket(_, _)).Times(3);
audio_sender_->InsertAudio(std::move(bus), testing_clock_.NowTicks());
task_runner_->RunTasks();
audio_sender_->InsertAudio(std::move(bus), NowTicks());
RunUntilIdle();
EXPECT_EQ(2, openscreen_audio_sender_->GetInFlightFrameCount());
}

@ -194,7 +194,7 @@ base::TimeDelta OpenscreenFrameSender::GetAllowedInFlightMediaDuration() const {
CastStreamingFrameDropReason OpenscreenFrameSender::EnqueueFrame(
std::unique_ptr<SenderEncodedFrame> encoded_frame) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
CHECK(encoded_frame);
VLOG_WITH_SSRC(2) << "About to send another frame ("
<< encoded_frame->frame_id
@ -203,7 +203,7 @@ CastStreamingFrameDropReason OpenscreenFrameSender::EnqueueFrame(
CHECK_GE(encoded_frame->frame_id, last_enqueued_frame_id_)
<< "enqueued frames out of order.";
last_enqueued_frame_id_ = encoded_frame->frame_id;
last_send_time_ = cast_environment_->Clock()->NowTicks();
last_send_time_ = cast_environment_->NowTicks();
if (!is_audio_ && encoded_frame->is_key_frame) {
VLOG_WITH_SSRC(1) << "Sending encoded key frame, id="

@ -17,6 +17,7 @@
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/common/openscreen_conversion_helpers.h"
#include "media/cast/test/test_with_cast_environment.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/openscreen/src/cast/streaming/public/environment.h"
@ -74,7 +75,7 @@ static const openscreen::cast::SessionConfig kOpenscreenVideoConfig =
} // namespace
class OpenscreenFrameSenderTest : public ::testing::Test,
class OpenscreenFrameSenderTest : public TestWithCastEnvironment,
public FrameSender::Client {
public:
// FrameSender::Client overrides.
@ -84,32 +85,28 @@ class OpenscreenFrameSenderTest : public ::testing::Test,
protected:
OpenscreenFrameSenderTest()
: task_runner_(
base::MakeRefCounted<FakeSingleThreadTaskRunner>(&testing_clock_)),
cast_environment_(base::MakeRefCounted<CastEnvironment>(&testing_clock_,
task_runner_,
task_runner_,
task_runner_)),
openscreen_task_runner_(task_runner_),
: openscreen_task_runner_(GetMainThreadTaskRunner()),
openscreen_environment_(openscreen::Clock::now,
openscreen_task_runner_,
openscreen::IPEndpoint::kAnyV4()),
openscreen_packet_router_(openscreen_environment_,
20,
std::chrono::milliseconds(10)) {
openscreen_packet_router_(
std::make_unique<openscreen::cast::SenderPacketRouter>(
openscreen_environment_,
20,
std::chrono::milliseconds(10))) {
auto openscreen_audio_sender = std::make_unique<openscreen::cast::Sender>(
openscreen_environment_, openscreen_packet_router_,
openscreen_environment_, *openscreen_packet_router_,
kOpenscreenAudioConfig, openscreen::cast::RtpPayloadType::kAudioOpus);
auto openscreen_video_sender = std::make_unique<openscreen::cast::Sender>(
openscreen_environment_, openscreen_packet_router_,
openscreen_environment_, *openscreen_packet_router_,
kOpenscreenVideoConfig, openscreen::cast::RtpPayloadType::kVideoVp8);
audio_sender_ = std::make_unique<OpenscreenFrameSender>(
cast_environment_, kAudioConfig, std::move(openscreen_audio_sender),
cast_environment(), kAudioConfig, std::move(openscreen_audio_sender),
*this);
video_sender_ = std::make_unique<OpenscreenFrameSender>(
cast_environment_, kVideoConfig, std::move(openscreen_video_sender),
cast_environment(), kVideoConfig, std::move(openscreen_video_sender),
*this);
}
@ -120,14 +117,11 @@ class OpenscreenFrameSenderTest : public ::testing::Test,
OpenscreenFrameSender& video_sender() { return *video_sender_; }
private:
base::SimpleTestTickClock testing_clock_;
const scoped_refptr<FakeSingleThreadTaskRunner> task_runner_;
const scoped_refptr<CastEnvironment> cast_environment_;
// openscreen::Sender related classes.
openscreen_platform::TaskRunner openscreen_task_runner_;
openscreen::cast::Environment openscreen_environment_;
openscreen::cast::SenderPacketRouter openscreen_packet_router_;
std::unique_ptr<openscreen::cast::SenderPacketRouter>
openscreen_packet_router_;
std::unique_ptr<openscreen::cast::Sender> openscreen_video_sender_;
std::unique_ptr<openscreen::cast::Sender> openscreen_audio_sender_;

@ -94,12 +94,11 @@ void LogVideoCaptureTimestamps(CastEnvironment* cast_environment,
// The frame capture timestamps were not provided by the video capture
// source. Simply log the events as happening right now.
capture_begin_event->timestamp = capture_end_event->timestamp =
cast_environment->Clock()->NowTicks();
cast_environment->NowTicks();
}
cast_environment->logger()->DispatchFrameEvent(
std::move(capture_begin_event));
cast_environment->logger()->DispatchFrameEvent(std::move(capture_end_event));
cast_environment->logger().DispatchFrameEvent(std::move(capture_begin_event));
cast_environment->logger().DispatchFrameEvent(std::move(capture_end_event));
}
} // namespace
@ -148,7 +147,7 @@ VideoSender::~VideoSender() {
void VideoSender::InsertRawVideoFrame(
scoped_refptr<media::VideoFrame> video_frame,
base::TimeTicks reference_time) {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
CHECK(video_encoder_);
const RtpTimeTicks rtp_timestamp =
@ -321,7 +320,7 @@ base::TimeDelta VideoSender::GetEncoderBacklogDuration() const {
void VideoSender::OnEncodedVideoFrame(
std::unique_ptr<SenderEncodedFrame> encoded_frame) {
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
CHECK(cast_environment_->CurrentlyOn(CastEnvironment::ThreadId::kMain));
frames_in_encoder_--;
CHECK_GE(frames_in_encoder_, 0);

@ -32,10 +32,10 @@
#include "media/cast/cast_environment.h"
#include "media/cast/common/openscreen_conversion_helpers.h"
#include "media/cast/constants.h"
#include "media/cast/logging/simple_event_subscriber.h"
#include "media/cast/test/fake_openscreen_clock.h"
#include "media/cast/test/fake_video_encode_accelerator_factory.h"
#include "media/cast/test/mock_openscreen_environment.h"
#include "media/cast/test/test_with_cast_environment.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/video_utility.h"
#include "media/video/fake_video_encode_accelerator.h"
@ -86,31 +86,26 @@ int GetVideoNetworkBandwidth() {
} // namespace
class VideoSenderTest : public ::testing::TestWithParam<bool> {
class VideoSenderTest : public ::testing::TestWithParam<bool>,
public WithCastEnvironment {
public:
VideoSenderTest(const VideoSenderTest&) = delete;
VideoSenderTest& operator=(const VideoSenderTest&) = delete;
protected:
VideoSenderTest()
: task_environment_(base::test::TaskEnvironment::TimeSource::MOCK_TIME) {
task_runner_ = task_environment_.GetMainThreadTaskRunner();
openscreen_task_runner_ =
std::make_unique<openscreen_platform::TaskRunner>(task_runner_);
VideoSenderTest() {
openscreen_task_runner_ = std::make_unique<openscreen_platform::TaskRunner>(
GetMainThreadTaskRunner());
accelerator_task_runner_ = base::ThreadPool::CreateSingleThreadTaskRunner(
{base::TaskPriority::USER_BLOCKING,
base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN},
base::SingleThreadTaskRunnerThreadMode::DEDICATED);
cast_environment_ = base::MakeRefCounted<CastEnvironment>(
task_environment_.GetMockTickClock(), task_runner_, task_runner_,
task_runner_);
vea_factory_ = std::make_unique<FakeVideoEncodeAcceleratorFactory>(
accelerator_task_runner_);
FakeOpenscreenClock::SetTickClock(task_environment_.GetMockTickClock());
task_environment_.AdvanceClock(base::TimeTicks::Now() - base::TimeTicks());
FakeOpenscreenClock::SetTickClock(GetMockTickClock());
mock_openscreen_environment_ = std::make_unique<MockOpenscreenEnvironment>(
&FakeOpenscreenClock::now, *openscreen_task_runner_);
openscreen_packet_router_ =
@ -132,13 +127,12 @@ class VideoSenderTest : public ::testing::TestWithParam<bool> {
}
void RunTasksAndAdvanceClock(base::TimeDelta clock_delta = {}) {
task_environment_.AdvanceClock(clock_delta);
AdvanceClock(clock_delta);
accelerator_task_runner_->PostTask(FROM_HERE,
task_environment_.QuitClosure());
task_environment_.RunUntilQuit();
task_runner_->PostTask(FROM_HERE, task_environment_.QuitClosure());
task_environment_.RunUntilQuit();
accelerator_task_runner_->PostTask(FROM_HERE, QuitClosure());
RunUntilQuit();
GetMainThreadTaskRunner()->PostTask(FROM_HERE, QuitClosure());
RunUntilQuit();
}
// Can be used to be notified when video capture feedback is created. This is
@ -192,7 +186,7 @@ class VideoSenderTest : public ::testing::TestWithParam<bool> {
}
video_sender_ = std::make_unique<VideoSender>(
cast_environment_, video_config,
cast_environment(), video_config,
base::BindRepeating(&SaveOperationalStatus, &status_changes_),
base::BindRepeating(
&FakeVideoEncodeAcceleratorFactory::CreateVideoEncodeAccelerator,
@ -210,25 +204,18 @@ class VideoSenderTest : public ::testing::TestWithParam<bool> {
scoped_refptr<media::VideoFrame> GetNewVideoFrame() {
if (first_frame_timestamp_.is_null()) {
first_frame_timestamp_ = Now();
first_frame_timestamp_ = NowTicks();
}
constexpr gfx::Size kSize(kWidth, kHeight);
scoped_refptr<media::VideoFrame> video_frame =
media::VideoFrame::CreateFrame(PIXEL_FORMAT_I420, kSize,
gfx::Rect(kSize), kSize,
Now() - first_frame_timestamp_);
NowTicks() - first_frame_timestamp_);
PopulateVideoFrame(video_frame.get(), last_pixel_value_++);
return video_frame;
}
base::TimeTicks Now() {
return task_environment_.GetMockTickClock()->NowTicks();
}
base::test::TaskEnvironment task_environment_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
scoped_refptr<base::SingleThreadTaskRunner> accelerator_task_runner_;
scoped_refptr<CastEnvironment> cast_environment_;
// openscreen::Sender related classes.
std::unique_ptr<openscreen_platform::TaskRunner> openscreen_task_runner_;
@ -255,11 +242,10 @@ TEST_P(VideoSenderTest, BuiltInEncoder) {
ASSERT_EQ(STATUS_INITIALIZED, status_changes_.front());
scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
const base::TimeTicks reference_time = Now();
video_sender_->InsertRawVideoFrame(video_frame, reference_time);
video_sender_->InsertRawVideoFrame(video_frame, NowTicks());
SetVideoCaptureFeedbackClosure(task_environment_.QuitClosure());
task_environment_.RunUntilQuit();
SetVideoCaptureFeedbackClosure(task_environment().QuitClosure());
RunUntilQuit();
}
TEST_P(VideoSenderTest, ExternalEncoder) {
@ -271,7 +257,7 @@ TEST_P(VideoSenderTest, ExternalEncoder) {
// have occurred at this point. Send a frame to spurn creation of the
// underlying ExternalVideoEncoder instance.
if (vea_factory_->vea_response_count() == 0) {
video_sender_->InsertRawVideoFrame(GetNewVideoFrame(), Now());
video_sender_->InsertRawVideoFrame(GetNewVideoFrame(), NowTicks());
RunTasksAndAdvanceClock();
}
ASSERT_EQ(STATUS_INITIALIZED, status_changes_.front());
@ -284,8 +270,7 @@ TEST_P(VideoSenderTest, ExternalEncoder) {
scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
for (int i = 0; i < 3; ++i) {
const base::TimeTicks reference_time = Now();
video_sender_->InsertRawVideoFrame(video_frame, reference_time);
video_sender_->InsertRawVideoFrame(video_frame, NowTicks());
RunTasksAndAdvanceClock(base::Milliseconds(33));
// VideoSender re-created the encoder for the 320x240 frames we're
// providing.
@ -306,7 +291,7 @@ TEST_P(VideoSenderTest, ExternalEncoderInitFails) {
// Send a frame to spurn creation of the underlying ExternalVideoEncoder
// instance, which should result in failure.
video_sender_->InsertRawVideoFrame(GetNewVideoFrame(), Now());
video_sender_->InsertRawVideoFrame(GetNewVideoFrame(), NowTicks());
RunTasksAndAdvanceClock();
EXPECT_NE(std::ranges::find(status_changes_, STATUS_CODEC_INIT_FAILED),

@ -0,0 +1,32 @@
// Copyright 2025 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/cast/test/test_with_cast_environment.h"
namespace media::cast {
// Constructs the CastEnvironment used by the test. Every task-runner argument
// is bound to the TaskEnvironment's main-thread runner, so all environment
// activity stays on the (mock-time) main thread and tests run
// deterministically.
WithCastEnvironment::WithCastEnvironment()
    : cast_environment_(base::MakeRefCounted<CastEnvironment>(
          *task_environment_.GetMockTickClock(),
          task_environment_.GetMainThreadTaskRunner(),
          task_environment_.GetMainThreadTaskRunner(),
          task_environment_.GetMainThreadTaskRunner(),
          // NOTE: Unretained is safe because we wait for this task before
          // deleting `this`.
          base::BindOnce(&WithCastEnvironment::OnCastEnvironmentDeletion,
                         base::Unretained(this)))) {}
// Deletion callback bound into the CastEnvironment by the constructor. Runs
// the quit closure installed by ~WithCastEnvironment(), unblocking its
// RunUntilQuit() wait.
void WithCastEnvironment::OnCastEnvironmentDeletion() {
  // The destructor installs `deletion_cb_` before releasing its reference to
  // the environment, so the closure must be present when this fires.
  CHECK(deletion_cb_);
  std::move(deletion_cb_).Run();
}
// Releases this fixture's reference to the CastEnvironment and pumps the run
// loop until the environment's deletion callback (OnCastEnvironmentDeletion)
// has run, guaranteeing the environment cannot outlive `this`.
// NOTE(review): the quit closure only runs once the CastEnvironment is
// actually destroyed, so a test that still holds a cast_environment()
// reference at teardown would make this wait hang — verify callers drop
// their references first.
WithCastEnvironment::~WithCastEnvironment() {
  // Install the quit closure *before* dropping the reference: deletion (and
  // the callback) may happen synchronously inside reset().
  deletion_cb_ = task_environment_.QuitClosure();
  cast_environment_.reset();
  task_environment_.RunUntilQuit();
}
} // namespace media::cast

@ -0,0 +1,94 @@
// Copyright 2025 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_CAST_TEST_TEST_WITH_CAST_ENVIRONMENT_H_
#define MEDIA_CAST_TEST_TEST_WITH_CAST_ENVIRONMENT_H_
#include "base/test/task_environment.h"
#include "base/time/time.h"
#include "media/cast/cast_environment.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace base {
class TickClock;
} // namespace base
namespace media::cast {
// Inherit from this class if a CastEnvironment is needed in a test.
// Use in class hierarchies where inheritance from ::testing::Test at the same
// time is not desirable or possible (for example, when inheriting from
// testing::TestWithParam).
class WithCastEnvironment {
 public:
  WithCastEnvironment(const WithCastEnvironment&) = delete;
  WithCastEnvironment(WithCastEnvironment&&) = delete;
  WithCastEnvironment& operator=(const WithCastEnvironment&) = delete;
  WithCastEnvironment& operator=(WithCastEnvironment&&) = delete;

 protected:
  WithCastEnvironment();

  // Blocks until the CastEnvironment's deletion callback has run, so no
  // environment task can reference the destroyed fixture.
  ~WithCastEnvironment();

  // Returns a closure that terminates a later RunUntilQuit() call.
  [[nodiscard]] base::RepeatingClosure QuitClosure() {
    return task_environment_.QuitClosure();
  }

  void RunUntilQuit() { task_environment_.RunUntilQuit(); }

  void RunUntilIdle() { task_environment_.RunUntilIdle(); }

  // Only valid for instances using TimeSource::MOCK_TIME — which is always
  // the case here, since `task_environment_` is constructed with MOCK_TIME.
  void AdvanceClock(base::TimeDelta delta) {
    task_environment_.AdvanceClock(delta);
  }

  [[nodiscard]] scoped_refptr<base::SingleThreadTaskRunner>
  GetMainThreadTaskRunner() {
    return task_environment_.GetMainThreadTaskRunner();
  }

  [[nodiscard]] const base::TickClock* GetMockTickClock() {
    return task_environment_.GetMockTickClock();
  }

  // Current mock time. (Top-level `const` on a by-value return is
  // meaningless and trips clang-tidy's readability-const-return-type, so it
  // is not used here.)
  [[nodiscard]] base::TimeTicks NowTicks() {
    return task_environment_.GetMockTickClock()->NowTicks();
  }

  [[nodiscard]] base::test::TaskEnvironment& task_environment() {
    return task_environment_;
  }

  [[nodiscard]] scoped_refptr<CastEnvironment> cast_environment() {
    return cast_environment_;
  }

 private:
  // Deletion callback handed to the CastEnvironment; runs `deletion_cb_`.
  void OnCastEnvironmentDeletion();

  base::test::TaskEnvironment task_environment_{
      base::test::TaskEnvironment::MainThreadType::IO,
      base::test::TaskEnvironment::TimeSource::MOCK_TIME};
  // Quit closure installed by the destructor; run by
  // OnCastEnvironmentDeletion() once the environment is destroyed.
  base::OnceClosure deletion_cb_;
  scoped_refptr<CastEnvironment> cast_environment_;
};
// Inherit from this class instead of ::testing::Test directly if a
// CastEnvironment is needed in a test.
class TestWithCastEnvironment : public ::testing::Test,
                                public WithCastEnvironment {
 public:
  TestWithCastEnvironment(TestWithCastEnvironment&&) = delete;
  TestWithCastEnvironment(const TestWithCastEnvironment&) = delete;
  TestWithCastEnvironment& operator=(const TestWithCastEnvironment&) = delete;
  TestWithCastEnvironment& operator=(TestWithCastEnvironment&&) = delete;

 protected:
  // Expose the base's protected constructors to deriving test fixtures.
  using WithCastEnvironment::WithCastEnvironment;
};
} // namespace media::cast
#endif // MEDIA_CAST_TEST_TEST_WITH_CAST_ENVIRONMENT_H_

@ -12,8 +12,7 @@
#include "base/time/time.h"
#include "media/base/audio_bus.h"
namespace media {
namespace cast {
namespace media::cast {
TestAudioBusFactory::TestAudioBusFactory(int num_channels,
int sample_rate,
@ -31,6 +30,9 @@ TestAudioBusFactory::TestAudioBusFactory(int num_channels,
TestAudioBusFactory::~TestAudioBusFactory() = default;
// static
constexpr int TestAudioBusFactory::kMiddleANoteFreq;
std::unique_ptr<AudioBus> TestAudioBusFactory::NextAudioBus(
const base::TimeDelta& duration) {
const int num_samples = (sample_rate_ * duration).InSeconds();
@ -40,5 +42,4 @@ std::unique_ptr<AudioBus> TestAudioBusFactory::NextAudioBus(
return bus;
}
} // namespace cast
} // namespace media
} // namespace media::cast

@ -20,8 +20,7 @@ namespace media {
class AudioBus;
}
namespace media {
namespace cast {
namespace media::cast {
// Produces AudioBuses of varying duration where each successive output contains
// the continuation of a single sine wave.
@ -42,16 +41,15 @@ class TestAudioBusFactory {
std::unique_ptr<AudioBus> NextAudioBus(const base::TimeDelta& duration);
// A reasonable test tone.
static const int kMiddleANoteFreq = 440;
static constexpr int kMiddleANoteFreq = 440;
private:
const int num_channels_;
const int sample_rate_;
const float volume_;
const int num_channels_ = 0;
const int sample_rate_ = 0;
const float volume_ = 0.0f;
SineWaveAudioSource source_;
};
} // namespace cast
} // namespace media
} // namespace media::cast
#endif // MEDIA_CAST_TEST_UTILITY_AUDIO_UTILITY_H_