
Remove stable video decoding Mojo interfaces

Because Lacros is being sunset, the stable Mojo interfaces are no
longer needed. This CL replaces the main stable video decoding
interfaces, StableVideoDecoder and StableVideoDecoderFactory, with the
existing non-stable interfaces, VideoDecoder and InterfaceFactory,
respectively.

Other stable interfaces that are still needed for Out-of-process Video
Decoding (OOP-VD) are renamed and made non-stable:
- StableVideoDecoderFactoryProcess is now VideoDecoderFactoryProcess
- StableCdmContext is now CdmContextForOOPVD

This CL also removes the code used by the GTFO OOP-VD mode. This mode
was never enabled and its deployment has been canceled.
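
For reference, a minimal sketch of how an out-of-process decoder is
brokered after this CL (hypothetical helper; the real wiring lives in
the content/ changes below, which also keep the factory Remote alive as
a member so the utility process is not torn down early):

  #include <utility>

  #include "content/public/browser/stable_video_decoder_factory.h"
  #include "media/mojo/mojom/interface_factory.mojom.h"
  #include "media/mojo/mojom/video_decoder.mojom.h"
  #include "mojo/public/cpp/bindings/pending_receiver.h"
  #include "mojo/public/cpp/bindings/remote.h"

  // Sketch only: connect |receiver| to a decoder hosted in the video
  // decoder utility process via the generic InterfaceFactory, which
  // replaces StableVideoDecoderFactory.
  void ConnectOOPVideoDecoder(
      mojo::PendingReceiver<media::mojom::VideoDecoder> receiver) {
    mojo::Remote<media::mojom::InterfaceFactory> factory;
    content::LaunchOOPVideoDecoderFactory(
        factory.BindNewPipeAndPassReceiver());
    factory->CreateVideoDecoderWithTracker(std::move(receiver),
                                           /*tracker=*/{});
  }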

Bug: b:347331029
Test: unit tests
Test: clear and protected video playback on volteer
Change-Id: I5a78e4cd03c51b49bb921bf6135618bd9204c0ee
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/5960256
Reviewed-by: Giovanni Ortuno Urquidi <ortuno@chromium.org>
Reviewed-by: Dale Curtis <dalecurtis@chromium.org>
Reviewed-by: Avi Drissman <avi@chromium.org>
Commit-Queue: Pilar Molina Lopez <pmolinalopez@chromium.org>
Reviewed-by: Andres Calderon Jaramillo <andrescj@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1433728}
Author: Pilar Molina Lopez
Date: 2025-03-17 13:07:19 -07:00
Committed by: Chromium LUCI CQ
Parent: 33fd5bb318
Commit: 0f80715e92
86 changed files with 1911 additions and 8930 deletions

Changed top-level paths:
- chrome
- chromeos/components/cdm_factory_daemon
- content
- media
- third_party/blink/renderer/modules

@ -279,7 +279,6 @@ static_library("crosapi") {
"//extensions/browser/api/power",
"//extensions/common",
"//headless:headless_non_renderer",
"//media/mojo/mojom/stable:stable_video_decoder",
"//printing/backend",
"//remoting/host/chromeos:remoting_service",
"//services/data_decoder/public/cpp",

@ -87,12 +87,6 @@
#include "printing/buildflags/buildflags.h"
#include "services/video_capture/public/mojom/video_capture_service.mojom.h"
#if BUILDFLAG(USE_VAAPI) || BUILDFLAG(USE_V4L2_CODEC)
#include "content/public/browser/stable_video_decoder_factory.h"
#include "media/base/media_switches.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#endif // BUILDFLAG(USE_VAAPI) || BUILDFLAG(USE_V4L2_CODEC)
namespace crosapi {
namespace {

@ -202,7 +202,6 @@ browser_exposed_mojom_targets = [
"//media/mojo/mojom:speech_recognition",
"//media/mojo/mojom:web_speech_recognition",
"//media/mojo/mojom/stable:native_pixmap_handle",
"//media/mojo/mojom/stable:stable_video_decoder",
"//mojo/public/interfaces/bindings:bindings",
"//mojo/public/mojom/base:base",
"//mojo/public/mojom/base:protobuf_support",

@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//media/media_options.gni")
assert(is_chromeos, "Non-ChromeOS builds cannot depend on //chromeos")
component("cdm_factory_daemon_browser") {
@ -34,10 +36,6 @@ component("cdm_factory_daemon_gpu") {
"chromeos_cdm_factory.h",
"content_decryption_module_adapter.cc",
"content_decryption_module_adapter.h",
"remote_cdm_context.cc",
"remote_cdm_context.h",
"stable_cdm_context_impl.cc",
"stable_cdm_context_impl.h",
]
public_deps = [
"//chromeos/components/cdm_factory_daemon/mojom",
@ -45,9 +43,17 @@ component("cdm_factory_daemon_gpu") {
]
deps = [
"//base",
"//media/mojo/mojom/stable:stable_video_decoder",
"//mojo/public/cpp/bindings",
]
if (allow_oop_video_decoder) {
sources += [
"cdm_context_for_oopvd_impl.cc",
"cdm_context_for_oopvd_impl.h",
"remote_cdm_context.cc",
"remote_cdm_context.h",
]
deps += [ "//media/mojo/common" ]
}
defines = [ "IS_CDM_FACTORY_DAEMON_IMPL" ]
}

@ -5,6 +5,7 @@ include_rules = [
"+content/public/test",
"+media/base",
"+media/cdm",
"+media/mojo/common",
"+media/mojo/mojom",
"+mojo/core",
"+ui/display",

@ -7,16 +7,18 @@
#pragma allow_unsafe_buffers
#endif
#include "chromeos/components/cdm_factory_daemon/stable_cdm_context_impl.h"
#include "chromeos/components/cdm_factory_daemon/cdm_context_for_oopvd_impl.h"
#include "base/functional/callback.h"
#include "base/task/bind_post_task.h"
#include "chromeos/components/cdm_factory_daemon/chromeos_cdm_context.h"
#include "chromeos/components/cdm_factory_daemon/chromeos_cdm_factory.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/common/validation_utils.h"
namespace chromeos {
StableCdmContextImpl::StableCdmContextImpl(media::CdmContext* cdm_context)
CdmContextForOOPVDImpl::CdmContextForOOPVDImpl(media::CdmContext* cdm_context)
: cdm_context_(cdm_context) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(cdm_context_);
@ -24,23 +26,31 @@ StableCdmContextImpl::StableCdmContextImpl(media::CdmContext* cdm_context)
cdm_context_ref_ = cdm_context_->GetChromeOsCdmContext()->GetCdmContextRef();
}
StableCdmContextImpl::~StableCdmContextImpl() {
CdmContextForOOPVDImpl::~CdmContextForOOPVDImpl() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
void StableCdmContextImpl::GetHwKeyData(
std::unique_ptr<media::DecryptConfig> decrypt_config,
void CdmContextForOOPVDImpl::GetHwKeyData(
media::mojom::DecryptConfigPtr decrypt_config,
const std::vector<uint8_t>& hw_identifier,
GetHwKeyDataCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
std::unique_ptr<media::DecryptConfig> media_decrypt_config =
media::ValidateAndConvertMojoDecryptConfig(std::move(decrypt_config));
if (!media_decrypt_config) {
CHECK(mojo::IsInMessageDispatch());
mojo::ReportBadMessage("Invalid DecryptConfig received");
return;
}
cdm_context_->GetChromeOsCdmContext()->GetHwKeyData(
decrypt_config.get(), hw_identifier,
media_decrypt_config.get(), hw_identifier,
base::BindPostTaskToCurrentDefault(std::move(callback)));
}
void StableCdmContextImpl::RegisterEventCallback(
mojo::PendingRemote<media::stable::mojom::CdmContextEventCallback>
callback) {
void CdmContextForOOPVDImpl::RegisterEventCallback(
mojo::PendingRemote<media::mojom::CdmContextEventCallback> callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// Note: we don't need to use base::BindPostTaskToCurrentDefault() for either
@ -51,25 +61,25 @@ void StableCdmContextImpl::RegisterEventCallback(
remote_event_callbacks_.Add(std::move(callback));
if (!callback_registration_) {
callback_registration_ = cdm_context_->RegisterEventCB(
base::BindRepeating(&StableCdmContextImpl::CdmEventCallback,
base::BindRepeating(&CdmContextForOOPVDImpl::CdmEventCallback,
weak_ptr_factory_.GetWeakPtr()));
}
}
void StableCdmContextImpl::GetHwConfigData(GetHwConfigDataCallback callback) {
void CdmContextForOOPVDImpl::GetHwConfigData(GetHwConfigDataCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
ChromeOsCdmFactory::GetHwConfigData(
base::BindPostTaskToCurrentDefault(std::move(callback)));
}
void StableCdmContextImpl::GetScreenResolutions(
void CdmContextForOOPVDImpl::GetScreenResolutions(
GetScreenResolutionsCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
ChromeOsCdmFactory::GetScreenResolutions(
base::BindPostTaskToCurrentDefault(std::move(callback)));
}
void StableCdmContextImpl::AllocateSecureBuffer(
void CdmContextForOOPVDImpl::AllocateSecureBuffer(
uint32_t size,
AllocateSecureBufferCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
@ -77,7 +87,7 @@ void StableCdmContextImpl::AllocateSecureBuffer(
size, base::BindPostTaskToCurrentDefault(std::move(callback)));
}
void StableCdmContextImpl::ParseEncryptedSliceHeader(
void CdmContextForOOPVDImpl::ParseEncryptedSliceHeader(
uint64_t secure_handle,
uint32_t offset,
const std::vector<uint8_t>& stream_data,
@ -88,28 +98,37 @@ void StableCdmContextImpl::ParseEncryptedSliceHeader(
base::BindPostTaskToCurrentDefault(std::move(callback)));
}
void StableCdmContextImpl::DecryptVideoBuffer(
const scoped_refptr<media::DecoderBuffer>& decoder_buffer,
void CdmContextForOOPVDImpl::DecryptVideoBuffer(
media::mojom::DecoderBufferPtr decoder_buffer,
const std::vector<uint8_t>& bytes,
DecryptVideoBufferCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
CHECK(cdm_context_->GetDecryptor());
CHECK_EQ(decoder_buffer->size(), bytes.size());
memcpy(decoder_buffer->writable_data(), bytes.data(), bytes.size());
scoped_refptr<media::DecoderBuffer> media_decoder_buffer =
media::ValidateAndConvertMojoDecoderBuffer(std::move(decoder_buffer));
if (!media_decoder_buffer) {
CHECK(mojo::IsInMessageDispatch());
mojo::ReportBadMessage("Invalid DecoderBuffer received");
return;
}
CHECK_EQ(media_decoder_buffer->size(), bytes.size());
memcpy(media_decoder_buffer->writable_data(), bytes.data(), bytes.size());
cdm_context_->GetDecryptor()->Decrypt(
media::Decryptor::StreamType::kVideo, decoder_buffer,
media::Decryptor::StreamType::kVideo, media_decoder_buffer,
base::BindPostTaskToCurrentDefault(
base::BindOnce(&StableCdmContextImpl::OnDecryptDone,
base::BindOnce(&CdmContextForOOPVDImpl::OnDecryptDone,
weak_ptr_factory_.GetWeakPtr(), std::move(callback))));
}
void StableCdmContextImpl::CdmEventCallback(media::CdmContext::Event event) {
void CdmContextForOOPVDImpl::CdmEventCallback(media::CdmContext::Event event) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
for (auto& cb : remote_event_callbacks_)
for (auto& cb : remote_event_callbacks_) {
cb->EventCallback(event);
}
}
void StableCdmContextImpl::OnDecryptDone(
void CdmContextForOOPVDImpl::OnDecryptDone(
DecryptVideoBufferCallback decrypt_video_buffer_cb,
media::Decryptor::Status status,
scoped_refptr<media::DecoderBuffer> decoder_buffer) {
@ -119,7 +138,14 @@ void StableCdmContextImpl::OnDecryptDone(
bytes.insert(bytes.begin(), decoder_buffer->data(),
decoder_buffer->data() + decoder_buffer->size());
}
std::move(decrypt_video_buffer_cb).Run(status, decoder_buffer, bytes);
media::mojom::DecoderBufferPtr mojo_decoder_buffer;
if (decoder_buffer) {
mojo_decoder_buffer = media::mojom::DecoderBuffer::From(*decoder_buffer);
CHECK(mojo_decoder_buffer);
}
std::move(decrypt_video_buffer_cb)
.Run(status, std::move(mojo_decoder_buffer), bytes);
}
} // namespace chromeos
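
Aside: the validate-then-convert pattern above is now common to all
CdmContextForOOPVD entry points, since the non-stable interface passes
serialized mojom structs (DecoderBufferPtr, DecryptConfigPtr) rather
than media types. A minimal sketch, with an illustrative handler name
and assuming the helpers from media/mojo/common/validation_utils.h:

  // Sketch only: reject malformed input from the remote process.
  void OnDecoderBuffer(media::mojom::DecoderBufferPtr mojo_buffer) {
    scoped_refptr<media::DecoderBuffer> buffer =
        media::ValidateAndConvertMojoDecoderBuffer(std::move(mojo_buffer));
    if (!buffer) {
      // ReportBadMessage() is only valid while dispatching a message.
      CHECK(mojo::IsInMessageDispatch());
      mojo::ReportBadMessage("Invalid DecoderBuffer received");
      return;
    }
    // ... use |buffer| ...
  }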

@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROMEOS_COMPONENTS_CDM_FACTORY_DAEMON_STABLE_CDM_CONTEXT_IMPL_H_
#define CHROMEOS_COMPONENTS_CDM_FACTORY_DAEMON_STABLE_CDM_CONTEXT_IMPL_H_
#ifndef CHROMEOS_COMPONENTS_CDM_FACTORY_DAEMON_CDM_CONTEXT_FOR_OOPVD_IMPL_H_
#define CHROMEOS_COMPONENTS_CDM_FACTORY_DAEMON_CDM_CONTEXT_FOR_OOPVD_IMPL_H_
#include <memory>
@ -13,7 +13,7 @@
#include "base/sequence_checker.h"
#include "media/base/callback_registry.h"
#include "media/base/cdm_context.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/cdm_context_for_oopvd.mojom.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/remote_set.h"
@ -23,25 +23,25 @@ namespace chromeos {
// used with out of process video decoding. This will run in the GPU process and
// is used by the OOPVideoDecoder. The remote end of it will run in the video
// decoder utility process launched from ash-chrome.
class COMPONENT_EXPORT(CDM_FACTORY_DAEMON) StableCdmContextImpl
: public media::stable::mojom::StableCdmContext {
class COMPONENT_EXPORT(CDM_FACTORY_DAEMON) CdmContextForOOPVDImpl
: public media::mojom::CdmContextForOOPVD {
public:
explicit StableCdmContextImpl(media::CdmContext* cdm_context);
explicit CdmContextForOOPVDImpl(media::CdmContext* cdm_context);
StableCdmContextImpl(const StableCdmContextImpl&) = delete;
StableCdmContextImpl& operator=(const StableCdmContextImpl&) = delete;
CdmContextForOOPVDImpl(const CdmContextForOOPVDImpl&) = delete;
CdmContextForOOPVDImpl& operator=(const CdmContextForOOPVDImpl&) = delete;
~StableCdmContextImpl() override;
~CdmContextForOOPVDImpl() override;
const media::CdmContext* cdm_context() const { return cdm_context_; }
// media::stable::mojom::StableCdmContext:
void GetHwKeyData(std::unique_ptr<media::DecryptConfig> decrypt_config,
// media::mojom::CdmContextForOOPVD
void GetHwKeyData(media::mojom::DecryptConfigPtr decrypt_config,
const std::vector<uint8_t>& hw_identifier,
GetHwKeyDataCallback callback) override;
void RegisterEventCallback(
mojo::PendingRemote<media::stable::mojom::CdmContextEventCallback>
callback) override;
mojo::PendingRemote<media::mojom::CdmContextEventCallback> callback)
override;
void GetHwConfigData(GetHwConfigDataCallback callback) override;
void GetScreenResolutions(GetScreenResolutionsCallback callback) override;
void AllocateSecureBuffer(uint32_t size,
@ -51,10 +51,9 @@ class COMPONENT_EXPORT(CDM_FACTORY_DAEMON) StableCdmContextImpl
uint32_t offset,
const std::vector<uint8_t>& stream_data,
ParseEncryptedSliceHeaderCallback callback) override;
void DecryptVideoBuffer(
const scoped_refptr<media::DecoderBuffer>& decoder_buffer,
const std::vector<uint8_t>& bytes,
DecryptVideoBufferCallback callback) override;
void DecryptVideoBuffer(media::mojom::DecoderBufferPtr decoder_buffer,
const std::vector<uint8_t>& bytes,
DecryptVideoBufferCallback callback) override;
private:
// Receives callbacks from the |cdm_context_| after we register with it.
@ -69,13 +68,13 @@ class COMPONENT_EXPORT(CDM_FACTORY_DAEMON) StableCdmContextImpl
const raw_ptr<media::CdmContext> cdm_context_;
std::unique_ptr<media::CdmContextRef> cdm_context_ref_;
std::unique_ptr<media::CallbackRegistration> callback_registration_;
mojo::RemoteSet<media::stable::mojom::CdmContextEventCallback>
mojo::RemoteSet<media::mojom::CdmContextEventCallback>
remote_event_callbacks_;
// WeakPtrFactory to use for callbacks.
base::WeakPtrFactory<StableCdmContextImpl> weak_ptr_factory_{this};
base::WeakPtrFactory<CdmContextForOOPVDImpl> weak_ptr_factory_{this};
};
} // namespace chromeos
#endif // CHROMEOS_COMPONENTS_CDM_FACTORY_DAEMON_STABLE_CDM_CONTEXT_IMPL_H_
#endif // CHROMEOS_COMPONENTS_CDM_FACTORY_DAEMON_CDM_CONTEXT_FOR_OOPVD_IMPL_H_

@ -14,6 +14,8 @@
#include "base/task/sequenced_task_runner.h"
#include "media/base/callback_registry.h"
#include "media/cdm/cdm_context_ref_impl.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/common/validation_utils.h"
#include "mojo/public/cpp/bindings/receiver.h"
#include "mojo/public/cpp/bindings/remote.h"
@ -39,12 +41,13 @@ class RemoteCdmContextRef final : public media::CdmContextRef {
} // namespace
class RemoteCdmContext::MojoSequenceState
: public media::stable::mojom::CdmContextEventCallback {
: public media::mojom::CdmContextEventCallback {
public:
explicit MojoSequenceState(
mojo::PendingRemote<media::stable::mojom::StableCdmContext>
pending_stable_cdm_context)
: pending_stable_cdm_context_(std::move(pending_stable_cdm_context)) {
mojo::PendingRemote<media::mojom::CdmContextForOOPVD>
pending_cdm_context_for_oopvd)
: pending_cdm_context_for_oopvd_(
std::move(pending_cdm_context_for_oopvd)) {
sequence_checker_.DetachFromSequence();
}
@ -52,20 +55,20 @@ class RemoteCdmContext::MojoSequenceState
CHECK(sequence_checker_.CalledOnValidSequence());
}
mojo::Remote<media::stable::mojom::StableCdmContext>& GetStableCdmContext() {
mojo::Remote<media::mojom::CdmContextForOOPVD>& GetCdmContextForOOPVD() {
CHECK(sequence_checker_.CalledOnValidSequence());
if (!stable_cdm_context_) {
stable_cdm_context_.Bind(std::move(pending_stable_cdm_context_));
if (!cdm_context_for_oopvd_) {
cdm_context_for_oopvd_.Bind(std::move(pending_cdm_context_for_oopvd_));
mojo_task_runner_ = base::SequencedTaskRunner::GetCurrentDefault();
}
return stable_cdm_context_;
return cdm_context_for_oopvd_;
}
std::unique_ptr<media::CallbackRegistration> RegisterEventCB(
EventCB event_cb) {
CHECK(sequence_checker_.CalledOnValidSequence());
if (!event_callback_receiver_.is_bound()) {
GetStableCdmContext()->RegisterEventCallback(
GetCdmContextForOOPVD()->RegisterEventCallback(
event_callback_receiver_.BindNewPipeAndPassRemote());
}
auto registration = event_callbacks_.Register(std::move(event_cb));
@ -82,8 +85,9 @@ class RemoteCdmContext::MojoSequenceState
} else {
// We're either on the right sequence or the |mojo_sequence_state| was
// never bound to a sequence (i.e., it was constructed but never used).
DCHECK(task_runner || mojo_sequence_state->pending_stable_cdm_context_);
DCHECK(task_runner || !mojo_sequence_state->stable_cdm_context_);
DCHECK(task_runner ||
mojo_sequence_state->pending_cdm_context_for_oopvd_);
DCHECK(task_runner || !mojo_sequence_state->cdm_context_for_oopvd_);
DCHECK(task_runner ||
!mojo_sequence_state->event_callback_receiver_.is_bound());
delete mojo_sequence_state;
@ -91,27 +95,27 @@ class RemoteCdmContext::MojoSequenceState
}
private:
// media::stable::mojom::CdmContextEventCallback:
// media::mojom::CdmContextEventCallback:
void EventCallback(media::CdmContext::Event event) override {
CHECK(sequence_checker_.CalledOnValidSequence());
event_callbacks_.Notify(std::move(event));
}
base::SequenceCheckerImpl sequence_checker_;
mojo::PendingRemote<media::stable::mojom::StableCdmContext>
pending_stable_cdm_context_;
mojo::Remote<media::stable::mojom::StableCdmContext> stable_cdm_context_;
mojo::Receiver<media::stable::mojom::CdmContextEventCallback>
mojo::PendingRemote<media::mojom::CdmContextForOOPVD>
pending_cdm_context_for_oopvd_;
mojo::Remote<media::mojom::CdmContextForOOPVD> cdm_context_for_oopvd_;
mojo::Receiver<media::mojom::CdmContextEventCallback>
event_callback_receiver_{this};
media::CallbackRegistry<EventCB::RunType> event_callbacks_;
scoped_refptr<base::SequencedTaskRunner> mojo_task_runner_;
};
RemoteCdmContext::RemoteCdmContext(
mojo::PendingRemote<media::stable::mojom::StableCdmContext>
stable_cdm_context)
: mojo_sequence_state_(new MojoSequenceState(std::move(stable_cdm_context)),
&MojoSequenceState::DeleteOnCorrectSequence) {}
mojo::PendingRemote<media::mojom::CdmContextForOOPVD> cdm_context_for_oopvd)
: mojo_sequence_state_(
new MojoSequenceState(std::move(cdm_context_for_oopvd)),
&MojoSequenceState::DeleteOnCorrectSequence) {}
std::unique_ptr<media::CallbackRegistration> RemoteCdmContext::RegisterEventCB(
EventCB event_cb) {
@ -131,23 +135,27 @@ ChromeOsCdmContext* RemoteCdmContext::GetChromeOsCdmContext() {
void RemoteCdmContext::GetHwKeyData(const media::DecryptConfig* decrypt_config,
const std::vector<uint8_t>& hw_identifier,
GetHwKeyDataCB callback) {
mojo_sequence_state_->GetStableCdmContext()->GetHwKeyData(
decrypt_config->Clone(), hw_identifier, std::move(callback));
CHECK(decrypt_config);
media::mojom::DecryptConfigPtr mojo_decrypt_config =
media::mojom::DecryptConfig::From(*decrypt_config);
CHECK(mojo_decrypt_config);
mojo_sequence_state_->GetCdmContextForOOPVD()->GetHwKeyData(
std::move(mojo_decrypt_config), hw_identifier, std::move(callback));
}
void RemoteCdmContext::GetHwConfigData(GetHwConfigDataCB callback) {
mojo_sequence_state_->GetStableCdmContext()->GetHwConfigData(
mojo_sequence_state_->GetCdmContextForOOPVD()->GetHwConfigData(
std::move(callback));
}
void RemoteCdmContext::GetScreenResolutions(GetScreenResolutionsCB callback) {
mojo_sequence_state_->GetStableCdmContext()->GetScreenResolutions(
mojo_sequence_state_->GetCdmContextForOOPVD()->GetScreenResolutions(
std::move(callback));
}
void RemoteCdmContext::AllocateSecureBuffer(uint32_t size,
AllocateSecureBufferCB callback) {
mojo_sequence_state_->GetStableCdmContext()->AllocateSecureBuffer(
mojo_sequence_state_->GetCdmContextForOOPVD()->AllocateSecureBuffer(
size, std::move(callback));
}
@ -156,7 +164,7 @@ void RemoteCdmContext::ParseEncryptedSliceHeader(
uint32_t offset,
const std::vector<uint8_t>& stream_data,
ParseEncryptedSliceHeaderCB callback) {
mojo_sequence_state_->GetStableCdmContext()->ParseEncryptedSliceHeader(
mojo_sequence_state_->GetCdmContextForOOPVD()->ParseEncryptedSliceHeader(
secure_handle, offset, stream_data, std::move(callback));
}
@ -176,8 +184,12 @@ void RemoteCdmContext::Decrypt(StreamType stream_type,
scoped_refptr<media::DecoderBuffer> encrypted,
DecryptCB decrypt_cb) {
DCHECK_EQ(stream_type, Decryptor::kVideo);
mojo_sequence_state_->GetStableCdmContext()->DecryptVideoBuffer(
encrypted,
CHECK(encrypted);
media::mojom::DecoderBufferPtr encrypted_mojo_buffer =
media::mojom::DecoderBuffer::From(*encrypted);
CHECK(encrypted_mojo_buffer);
mojo_sequence_state_->GetCdmContextForOOPVD()->DecryptVideoBuffer(
std::move(encrypted_mojo_buffer),
std::vector<uint8_t>(encrypted->data(),
encrypted->data() + encrypted->size()),
base::BindOnce(&RemoteCdmContext::OnDecryptVideoBufferDone,
@ -187,13 +199,21 @@ void RemoteCdmContext::Decrypt(StreamType stream_type,
void RemoteCdmContext::OnDecryptVideoBufferDone(
DecryptCB decrypt_cb,
media::Decryptor::Status status,
const scoped_refptr<media::DecoderBuffer>& decoder_buffer,
media::mojom::DecoderBufferPtr decoder_buffer,
const std::vector<uint8_t>& bytes) {
scoped_refptr<media::DecoderBuffer> media_decoder_buffer;
if (decoder_buffer) {
CHECK_EQ(bytes.size(), decoder_buffer->size());
memcpy(decoder_buffer->writable_data(), bytes.data(), bytes.size());
media_decoder_buffer =
media::ValidateAndConvertMojoDecoderBuffer(std::move(decoder_buffer));
if (!media_decoder_buffer) {
CHECK(mojo::IsInMessageDispatch());
mojo::ReportBadMessage("Invalid DecoderBuffer received");
return;
}
CHECK_EQ(bytes.size(), media_decoder_buffer->size());
memcpy(media_decoder_buffer->writable_data(), bytes.data(), bytes.size());
}
std::move(decrypt_cb).Run(status, decoder_buffer);
std::move(decrypt_cb).Run(status, media_decoder_buffer);
}
void RemoteCdmContext::CancelDecrypt(StreamType stream_type) {

@ -12,13 +12,13 @@
#include "chromeos/components/cdm_factory_daemon/chromeos_cdm_context.h"
#include "media/base/cdm_context.h"
#include "media/base/decryptor.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/cdm_context_for_oopvd.mojom.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
namespace chromeos {
// Provides the implementation that runs in out of process video decoding that
// proxies the media::CdmContext calls back through a mojom::StableCdmContext
// proxies the media::CdmContext calls back through a mojom::CdmContextForOOPVD
// IPC connection.
//
// This particular media::CdmContext/chromeos::ChromeOsCdmContext implementation
@ -34,8 +34,8 @@ class COMPONENT_EXPORT(CDM_FACTORY_DAEMON) RemoteCdmContext
public base::RefCountedThreadSafe<RemoteCdmContext> {
public:
explicit RemoteCdmContext(
mojo::PendingRemote<media::stable::mojom::StableCdmContext>
stable_cdm_context);
mojo::PendingRemote<media::mojom::CdmContextForOOPVD>
cdm_context_for_oopvd);
RemoteCdmContext(const RemoteCdmContext&) = delete;
RemoteCdmContext& operator=(const RemoteCdmContext&) = delete;
@ -93,11 +93,10 @@ class COMPONENT_EXPORT(CDM_FACTORY_DAEMON) RemoteCdmContext
~RemoteCdmContext() override;
void OnDecryptVideoBufferDone(
DecryptCB decrypt_cb,
media::Decryptor::Status status,
const scoped_refptr<media::DecoderBuffer>& decoder_buffer,
const std::vector<uint8_t>& bytes);
void OnDecryptVideoBufferDone(DecryptCB decrypt_cb,
media::Decryptor::Status status,
media::mojom::DecoderBufferPtr decoder_buffer,
const std::vector<uint8_t>& bytes);
std::unique_ptr<MojoSequenceState, void (*)(MojoSequenceState*)>
mojo_sequence_state_;

@ -2594,7 +2594,7 @@ source_set("browser") {
if (allow_oop_video_decoder) {
sources += [ "media/stable_video_decoder_factory.cc" ]
deps += [ "//media/mojo/mojom/stable:stable_video_decoder" ]
deps += [ "//media/mojo/mojom" ]
}
if (is_chromeos) {

@ -14,7 +14,6 @@
#include "media/base/cdm_context.h"
#include "media/mojo/mojom/media_service.mojom.h"
#include "media/mojo/mojom/renderer_extensions.mojom.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#include "content/public/browser/stable_video_decoder_factory.h"
@ -54,8 +53,7 @@ void FramelessMediaInterfaceProxy::CreateAudioDecoder(
void FramelessMediaInterfaceProxy::CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) {
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder) {
DVLOG(2) << __func__;
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
// The browser process cannot act as a proxy for video decoding and clients
@ -66,41 +64,24 @@ void FramelessMediaInterfaceProxy::CreateVideoDecoder(
if (!factory)
return;
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
oop_video_decoder;
mojo::PendingRemote<media::mojom::VideoDecoder> oop_video_decoder;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
switch (media::GetOutOfProcessVideoDecodingMode()) {
case media::OOPVDMode::kEnabledWithGpuProcessAsProxy:
if (!render_process_host_) {
if (!stable_vd_factory_remote_.is_bound()) {
LaunchStableVideoDecoderFactory(
stable_vd_factory_remote_.BindNewPipeAndPassReceiver());
stable_vd_factory_remote_.reset_on_disconnect();
}
CHECK(stable_vd_factory_remote_.is_bound());
stable_vd_factory_remote_->CreateStableVideoDecoder(
oop_video_decoder.InitWithNewPipeAndPassReceiver(), /*tracker=*/{});
} else {
render_process_host_->CreateStableVideoDecoder(
oop_video_decoder.InitWithNewPipeAndPassReceiver());
if (media::IsOutOfProcessVideoDecodingEnabled()) {
if (!render_process_host_) {
if (!vd_factory_remote_.is_bound()) {
LaunchOOPVideoDecoderFactory(
vd_factory_remote_.BindNewPipeAndPassReceiver());
vd_factory_remote_.reset_on_disconnect();
}
break;
case media::OOPVDMode::kEnabledWithoutGpuProcessAsProxy:
// Well-behaved clients shouldn't call CreateVideoDecoder() in this OOP-VD
// mode.
//
// Note: FramelessMediaInterfaceProxy::CreateVideoDecoder() might be
// called outside of a message dispatch, e.g., by
// GpuDataManagerImplPrivate::RequestMojoMediaVideoCapabilities().
// However, these calls should only occur inside of the browser process
// which we can trust not to reach this point, hence the CHECK().
CHECK(mojo::IsInMessageDispatch());
mojo::ReportBadMessage("CreateVideoDecoder() called unexpectedly");
return;
case media::OOPVDMode::kDisabled:
break;
CHECK(vd_factory_remote_.is_bound());
vd_factory_remote_->CreateVideoDecoderWithTracker(
oop_video_decoder.InitWithNewPipeAndPassReceiver(), /*tracker=*/{});
} else {
render_process_host_->CreateOOPVideoDecoder(
oop_video_decoder.InitWithNewPipeAndPassReceiver());
}
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
factory->CreateVideoDecoder(std::move(receiver),
@ -108,43 +89,16 @@ void FramelessMediaInterfaceProxy::CreateVideoDecoder(
}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void FramelessMediaInterfaceProxy::CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) {
DVLOG(2) << __func__;
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
switch (media::GetOutOfProcessVideoDecodingMode()) {
case media::OOPVDMode::kEnabledWithGpuProcessAsProxy:
case media::OOPVDMode::kDisabled:
// Well-behaved clients shouldn't call CreateStableVideoDecoder() in this
// OOP-VD mode.
//
// Note: FramelessMediaInterfaceProxy::CreateStableVideoDecoder() might be
// called outside of a message dispatch, e.g., by
// GpuDataManagerImplPrivate::RequestMojoMediaVideoCapabilities().
// However, these calls should only occur inside of the browser process
// which we can trust not to reach this point, hence the CHECK().
CHECK(mojo::IsInMessageDispatch());
mojo::ReportBadMessage("CreateStableVideoDecoder() called unexpectedly");
return;
case media::OOPVDMode::kEnabledWithoutGpuProcessAsProxy:
if (!render_process_host_) {
if (!stable_vd_factory_remote_.is_bound()) {
LaunchStableVideoDecoderFactory(
stable_vd_factory_remote_.BindNewPipeAndPassReceiver());
stable_vd_factory_remote_.reset_on_disconnect();
}
CHECK(stable_vd_factory_remote_.is_bound());
stable_vd_factory_remote_->CreateStableVideoDecoder(
std::move(video_decoder), /*tracker=*/{});
} else {
render_process_host_->CreateStableVideoDecoder(
std::move(video_decoder));
}
break;
}
void FramelessMediaInterfaceProxy::CreateVideoDecoderWithTracker(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker) {
// mojo::ReportBadMessage() should be called directly within the stack frame
// of a message dispatch, hence the CHECK().
// CreateVideoDecoderWithTracker() should be called by the browser process
// only. This implementation is exposed to the renderer. Well-behaved clients
// (renderers) shouldn't call CreateVideoDecoderWithTracker().
CHECK(mojo::IsInMessageDispatch());
mojo::ReportBadMessage("CreateVideoDecoderWithTracker() called unexpectedly");
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)

@ -20,7 +20,7 @@
#include "mojo/public/cpp/bindings/remote.h"
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#endif // BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
namespace content {
@ -52,12 +52,11 @@ class FramelessMediaInterfaceProxy final
mojo::PendingReceiver<media::mojom::AudioDecoder> receiver) final;
void CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) final;
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder) final;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) final;
void CreateVideoDecoderWithTracker(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker) final;
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateAudioEncoder(
mojo::PendingReceiver<media::mojom::AudioEncoder> receiver) final;
@ -106,15 +105,14 @@ class FramelessMediaInterfaceProxy final
mojo::ReceiverSet<media::mojom::InterfaceFactory> receivers_;
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
// Connection to the StableVideoDecoderFactory that lives in a utility
// process. This is only used for out-of-process video decoding and only when
// the FramelessMediaInterfaceProxy is created without a RenderProcessHost
// Connection to the InterfaceFactory that lives in a utility process.
// This is only used for out-of-process video decoding and only when the
// FramelessMediaInterfaceProxy is created without a RenderProcessHost
// (e.g., to get the supported video decoder configurations). Note that we
// make this a member instead of a local variable inside CreateVideoDecoder()
// in order to keep the video decoder process alive for the lifetime of the
// FramelessMediaInterfaceProxy.
mojo::Remote<media::stable::mojom::StableVideoDecoderFactory>
stable_vd_factory_remote_;
mojo::Remote<media::mojom::InterfaceFactory> vd_factory_remote_;
#endif // BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
// FramelessMediaInterfaceProxy is fully owned by the RenderProcessHostImpl,

@ -34,7 +34,6 @@
#include "media/mojo/buildflags.h"
#include "media/mojo/mojom/frame_interface_factory.mojom.h"
#include "media/mojo/mojom/media_service.mojom.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "mojo/public/cpp/bindings/callback_helpers.h"
#include "mojo/public/cpp/bindings/remote_set.h"
@ -74,7 +73,6 @@
#endif
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#include "content/public/browser/stable_video_decoder_factory.h"
#include "media/base/media_switches.h"
#include "mojo/public/cpp/bindings/message.h"
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
@ -271,8 +269,7 @@ void MediaInterfaceProxy::CreateAudioDecoder(
void MediaInterfaceProxy::CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) {
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder) {
DCHECK(thread_checker_.CalledOnValidThread());
// The browser process cannot act as a proxy for video decoding and clients
// should not attempt to use it that way.
@ -282,23 +279,11 @@ void MediaInterfaceProxy::CreateVideoDecoder(
if (!factory)
return;
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
oop_video_decoder;
mojo::PendingRemote<media::mojom::VideoDecoder> oop_video_decoder;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
switch (media::GetOutOfProcessVideoDecodingMode()) {
case media::OOPVDMode::kEnabledWithGpuProcessAsProxy:
render_frame_host().GetProcess()->CreateStableVideoDecoder(
oop_video_decoder.InitWithNewPipeAndPassReceiver());
break;
case media::OOPVDMode::kEnabledWithoutGpuProcessAsProxy:
// Well-behaved clients shouldn't call CreateVideoDecoder() in this OOP-VD
// mode and MediaInterfaceProxy::CreateVideoDecoder() should always be
// called during a message dispatch.
CHECK(mojo::IsInMessageDispatch());
mojo::ReportBadMessage("CreateVideoDecoder() called unexpectedly");
return;
case media::OOPVDMode::kDisabled:
break;
if (media::IsOutOfProcessVideoDecodingEnabled()) {
render_frame_host().GetProcess()->CreateOOPVideoDecoder(
oop_video_decoder.InitWithNewPipeAndPassReceiver());
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
factory->CreateVideoDecoder(std::move(receiver),
@ -306,24 +291,16 @@ void MediaInterfaceProxy::CreateVideoDecoder(
}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void MediaInterfaceProxy::CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) {
DCHECK(thread_checker_.CalledOnValidThread());
switch (media::GetOutOfProcessVideoDecodingMode()) {
case media::OOPVDMode::kEnabledWithGpuProcessAsProxy:
case media::OOPVDMode::kDisabled:
// Well-behaved clients shouldn't call CreateStableVideoDecoder() in this
// OOP-VD mode and MediaInterfaceProxy::CreateStableVideoDecoder() should
// always be called during a message dispatch.
CHECK(mojo::IsInMessageDispatch());
mojo::ReportBadMessage("CreateStableVideoDecoder() called unexpectedly");
return;
case media::OOPVDMode::kEnabledWithoutGpuProcessAsProxy:
render_frame_host().GetProcess()->CreateStableVideoDecoder(
std::move(video_decoder));
break;
}
void MediaInterfaceProxy::CreateVideoDecoderWithTracker(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker) {
// mojo::ReportBadMessage() should be called directly within the stack frame
// of a message dispatch, hence the CHECK().
// CreateVideoDecoderWithTracker() should be called by the browser process
// only. This implementation is exposed to the renderer. Well-behaved clients
// (renderers) shouldn't call CreateVideoDecoderWithTracker().
CHECK(mojo::IsInMessageDispatch());
mojo::ReportBadMessage("CreateVideoDecoderWithTracker() called unexpectedly");
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
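
With the GTFO mode removed, the OOP-VD mode switch in both proxies
collapses to a single boolean check. Condensed sketch (mirrors the
MediaInterfaceProxy change above; assumes
media::IsOutOfProcessVideoDecodingEnabled() from
media/base/media_switches.h):

  mojo::PendingRemote<media::mojom::VideoDecoder> oop_video_decoder;
  #if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
  if (media::IsOutOfProcessVideoDecodingEnabled()) {
    // The GPU process stays in the path as a proxy; the decoder itself
    // runs in a utility process brokered by the browser.
    render_frame_host().GetProcess()->CreateOOPVideoDecoder(
        oop_video_decoder.InitWithNewPipeAndPassReceiver());
  }
  #endif  // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
  factory->CreateVideoDecoder(std::move(receiver),
                              std::move(oop_video_decoder));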

@ -55,12 +55,11 @@ class MediaInterfaceProxy final : public DocumentUserData<MediaInterfaceProxy>,
mojo::PendingReceiver<media::mojom::AudioDecoder> receiver) final;
void CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) final;
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder) final;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) final;
void CreateVideoDecoderWithTracker(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker) final;
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateAudioEncoder(
mojo::PendingReceiver<media::mojom::AudioEncoder> receiver) final;

@ -12,36 +12,37 @@
#include "content/public/browser/gpu_data_manager_observer.h"
#include "content/public/browser/gpu_utils.h"
#include "content/public/browser/service_process_host.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/interface_factory.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/mojom/video_decoder_factory_process.mojom.h"
#include "mojo/public/cpp/bindings/remote_set.h"
// TODO(crbug.com/347331029): rename this file to oop_video_decoder_factory.cc.
namespace content {
#if BUILDFLAG(ALLOW_HOSTING_OOP_VIDEO_DECODER)
namespace {
// StableVideoDecoderFactoryProcessLauncher is a helper singleton class that
// launches utility processes to host a
// media::stable::mojom::StableVideoDecoderFactory once the gpu::GpuFeatureInfo
// is known.
class StableVideoDecoderFactoryProcessLauncher final
// OOPVideoDecoderFactoryProcessLauncher is a helper singleton class that
// launches utility processes to host a media::mojom::InterfaceFactory once
// the gpu::GpuFeatureInfo is known.
class OOPVideoDecoderFactoryProcessLauncher final
: public GpuDataManagerObserver {
public:
static StableVideoDecoderFactoryProcessLauncher& Instance() {
static base::NoDestructor<StableVideoDecoderFactoryProcessLauncher>
instance;
static OOPVideoDecoderFactoryProcessLauncher& Instance() {
static base::NoDestructor<OOPVideoDecoderFactoryProcessLauncher> instance;
return *instance;
}
StableVideoDecoderFactoryProcessLauncher(
const StableVideoDecoderFactoryProcessLauncher&) = delete;
StableVideoDecoderFactoryProcessLauncher& operator=(
const StableVideoDecoderFactoryProcessLauncher&) = delete;
OOPVideoDecoderFactoryProcessLauncher(
const OOPVideoDecoderFactoryProcessLauncher&) = delete;
OOPVideoDecoderFactoryProcessLauncher& operator=(
const OOPVideoDecoderFactoryProcessLauncher&) = delete;
void LaunchWhenGpuFeatureInfoIsKnown(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoderFactory>
receiver) {
mojo::PendingReceiver<media::mojom::InterfaceFactory> receiver) {
if (gpu_preferences_.disable_accelerated_video_decode) {
return;
}
@ -51,18 +52,18 @@ class StableVideoDecoderFactoryProcessLauncher final
}
// base::Unretained(this) is safe because *|this| is never destroyed.
ui_thread_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&StableVideoDecoderFactoryProcessLauncher::
FROM_HERE, base::BindOnce(&OOPVideoDecoderFactoryProcessLauncher::
LaunchWhenGpuFeatureInfoIsKnownOnUIThread,
base::Unretained(this), std::move(receiver)));
}
private:
friend class base::NoDestructor<StableVideoDecoderFactoryProcessLauncher>;
friend class base::NoDestructor<OOPVideoDecoderFactoryProcessLauncher>;
StableVideoDecoderFactoryProcessLauncher()
OOPVideoDecoderFactoryProcessLauncher()
: ui_thread_task_runner_(GetUIThreadTaskRunner({})),
gpu_preferences_(content::GetGpuPreferencesFromCommandLine()) {}
~StableVideoDecoderFactoryProcessLauncher() final = default;
~OOPVideoDecoderFactoryProcessLauncher() final = default;
// GpuDataManagerObserver implementation.
void OnGpuInfoUpdate() final {
@ -72,9 +73,10 @@ class StableVideoDecoderFactoryProcessLauncher final
}
// base::Unretained(this) is safe because *|this| is never destroyed.
ui_thread_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&StableVideoDecoderFactoryProcessLauncher::
OnGpuInfoUpdateOnUIThread,
base::Unretained(this)));
FROM_HERE,
base::BindOnce(
&OOPVideoDecoderFactoryProcessLauncher::OnGpuInfoUpdateOnUIThread,
base::Unretained(this)));
}
void OnGpuInfoUpdateOnUIThread() {
@ -94,8 +96,7 @@ class StableVideoDecoderFactoryProcessLauncher final
}
void LaunchWhenGpuFeatureInfoIsKnownOnUIThread(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoderFactory>
receiver) {
mojo::PendingReceiver<media::mojom::InterfaceFactory> receiver) {
DCHECK_CALLED_ON_VALID_SEQUENCE(ui_sequence_checker_);
if (gpu_feature_info_) {
@ -108,8 +109,7 @@ class StableVideoDecoderFactoryProcessLauncher final
}
void LaunchOnUIThread(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoderFactory>
receiver) {
mojo::PendingReceiver<media::mojom::InterfaceFactory> receiver) {
DCHECK_CALLED_ON_VALID_SEQUENCE(ui_sequence_checker_);
if (gpu_feature_info_
@ -118,13 +118,12 @@ class StableVideoDecoderFactoryProcessLauncher final
return;
}
mojo::Remote<media::stable::mojom::StableVideoDecoderFactoryProcess>
process;
mojo::Remote<media::mojom::VideoDecoderFactoryProcess> process;
ServiceProcessHost::Launch(
process.BindNewPipeAndPassReceiver(),
ServiceProcessHost::Options().WithDisplayName("Video Decoder").Pass());
process->InitializeStableVideoDecoderFactory(*gpu_feature_info_,
std::move(receiver));
process->InitializeVideoDecoderFactory(*gpu_feature_info_,
std::move(receiver));
processes_.Add(std::move(process));
}
@ -132,28 +131,27 @@ class StableVideoDecoderFactoryProcessLauncher final
const gpu::GpuPreferences gpu_preferences_;
SEQUENCE_CHECKER(ui_sequence_checker_);
// Each utility process launched by this class hosts a
// StableVideoDecoderFactoryProcess implementation which is used to broker a
// StableVideoDecoderFactory connection. The process stays alive until either
// a) the StableVideoDecoderFactoryProcess connection is lost, or b) it
// Each utility process launched by this class hosts an
// OOPVideoDecoderFactoryProcess implementation which is used to broker an
// InterfaceFactory connection. The process stays alive until either
// a) the OOPVideoDecoderFactoryProcess connection is lost, or b) it
// crashes. Case (a) will typically happen when the client that uses the
// StableVideoDecoderFactory connection closes its endpoint (e.g., a renderer
// InterfaceFactory connection closes its endpoint (e.g., a renderer
// process dies). In that situation, the utility process should detect that
// the StableVideoDecoderFactory connection got lost and subsequently close
// the StableVideoDecoderFactoryProcess connection which should cause the
// the InterfaceFactory connection got lost and subsequently close
// the OOPVideoDecoderFactoryProcess connection which should cause the
// termination of the process. We need to keep the
// StableVideoDecoderFactoryProcess connection endpoint in a RemoteSet to keep
// the process alive until the StableVideoDecoderFactory connection is lost.
mojo::RemoteSet<media::stable::mojom::StableVideoDecoderFactoryProcess>
processes_ GUARDED_BY_CONTEXT(ui_sequence_checker_);
// OOPVideoDecoderFactoryProcess connection endpoint in a RemoteSet to keep
// the process alive until the InterfaceFactory connection is lost.
mojo::RemoteSet<media::mojom::VideoDecoderFactoryProcess> processes_
GUARDED_BY_CONTEXT(ui_sequence_checker_);
std::optional<gpu::GpuFeatureInfo> gpu_feature_info_
GUARDED_BY_CONTEXT(ui_sequence_checker_);
// This member holds onto any requests for a StableVideoDecoderFactory until
// This member holds onto any requests for an InterfaceFactory until
// the gpu::GpuFeatureInfo is known.
base::queue<
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoderFactory>>
base::queue<mojo::PendingReceiver<media::mojom::InterfaceFactory>>
pending_factory_receivers_ GUARDED_BY_CONTEXT(ui_sequence_checker_);
};
@ -161,11 +159,10 @@ class StableVideoDecoderFactoryProcessLauncher final
#endif // BUILDFLAG(ALLOW_HOSTING_OOP_VIDEO_DECODER)
void LaunchStableVideoDecoderFactory(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoderFactory>
receiver) {
void LaunchOOPVideoDecoderFactory(
mojo::PendingReceiver<media::mojom::InterfaceFactory> receiver) {
#if BUILDFLAG(ALLOW_HOSTING_OOP_VIDEO_DECODER)
StableVideoDecoderFactoryProcessLauncher::Instance()
OOPVideoDecoderFactoryProcessLauncher::Instance()
.LaunchWhenGpuFeatureInfoIsKnown(std::move(receiver));
#endif
}
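
The brokering above follows the standard ServiceProcessHost pattern;
condensed sketch (assumes a resolved gpu::GpuFeatureInfo and a pending
InterfaceFactory |receiver|):

  // Launch a "Video Decoder" utility process hosting
  // media::mojom::VideoDecoderFactoryProcess, then ask it to bind the
  // InterfaceFactory that clients will use.
  mojo::Remote<media::mojom::VideoDecoderFactoryProcess> process;
  content::ServiceProcessHost::Launch(
      process.BindNewPipeAndPassReceiver(),
      content::ServiceProcessHost::Options()
          .WithDisplayName("Video Decoder")
          .Pass());
  process->InitializeVideoDecoderFactory(gpu_feature_info,
                                         std::move(receiver));
  // Keeping |process| alive (the code above stores it in a RemoteSet)
  // keeps the utility process alive until the InterfaceFactory
  // connection is lost.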

@ -2226,76 +2226,91 @@ IN_PROC_BROWSER_TEST_P(RenderProcessHostTest, ReuseSiteURLChanges) {
}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
class FakeStableVideoDecoderFactoryService
: public media::stable::mojom::StableVideoDecoderFactory {
class FakeOOPVideoDecoderFactoryService
: public media::mojom::InterfaceFactory {
public:
FakeStableVideoDecoderFactoryService() = default;
FakeStableVideoDecoderFactoryService(
const FakeStableVideoDecoderFactoryService&) = delete;
FakeStableVideoDecoderFactoryService& operator=(
const FakeStableVideoDecoderFactoryService&) = delete;
~FakeStableVideoDecoderFactoryService() override = default;
FakeOOPVideoDecoderFactoryService() = default;
FakeOOPVideoDecoderFactoryService(const FakeOOPVideoDecoderFactoryService&) =
delete;
FakeOOPVideoDecoderFactoryService& operator=(
const FakeOOPVideoDecoderFactoryService&) = delete;
~FakeOOPVideoDecoderFactoryService() override = default;
// media::stable::mojom::StableVideoDecoderFactory implementation.
void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoderTracker>
tracker) final {
// media::mojom::InterfaceFactory implementation.
void CreateVideoDecoderWithTracker(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker) final {
video_decoders_.Add(
std::make_unique<FakeStableVideoDecoderService>(std::move(tracker)),
std::make_unique<FakeOOPVideoDecoderService>(std::move(tracker)),
std::move(receiver));
}
private:
class FakeStableVideoDecoderService
: public media::stable::mojom::StableVideoDecoder {
public:
explicit FakeStableVideoDecoderService(
mojo::PendingRemote<media::stable::mojom::StableVideoDecoderTracker>
tracker)
: tracker_(std::move(tracker)) {}
FakeStableVideoDecoderService(const FakeStableVideoDecoderService&) =
delete;
FakeStableVideoDecoderService& operator=(
const FakeStableVideoDecoderService&) = delete;
~FakeStableVideoDecoderService() override = default;
void CreateAudioDecoder(
mojo::PendingReceiver<media::mojom::AudioDecoder> receiver) final {}
void CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder) final {
}
void CreateAudioEncoder(
mojo::PendingReceiver<media::mojom::AudioEncoder> receiver) final {}
void CreateDefaultRenderer(
const std::string& audio_device_id,
mojo::PendingReceiver<media::mojom::Renderer> receiver) final {}
void CreateCdm(const media::CdmConfig& cdm_config,
CreateCdmCallback callback) final {}
// media::stable::mojom::StableVideoDecoder implementation.
private:
class FakeOOPVideoDecoderService : public media::mojom::VideoDecoder {
public:
explicit FakeOOPVideoDecoderService(
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker)
: tracker_(std::move(tracker)) {}
FakeOOPVideoDecoderService(const FakeOOPVideoDecoderService&) = delete;
FakeOOPVideoDecoderService& operator=(const FakeOOPVideoDecoderService&) =
delete;
~FakeOOPVideoDecoderService() override = default;
// media::mojom::VideoDecoder implementation.
void GetSupportedConfigs(GetSupportedConfigsCallback callback) final {
std::move(callback).Run({}, media::VideoDecoderType::kTesting);
}
void Construct(
mojo::PendingAssociatedRemote<media::stable::mojom::VideoDecoderClient>
stable_video_decoder_client_remote,
mojo::PendingRemote<media::stable::mojom::MediaLog>
stable_media_log_remote,
mojo::PendingReceiver<media::stable::mojom::VideoFrameHandleReleaser>
stable_video_frame_handle_releaser_receiver,
mojo::PendingAssociatedRemote<media::mojom::VideoDecoderClient>
video_decoder_client_remote,
mojo::PendingRemote<media::mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<media::mojom::VideoFrameHandleReleaser>
video_frame_handle_releaser_receiver,
mojo::ScopedDataPipeConsumerHandle decoder_buffer_pipe,
media::mojom::CommandBufferIdPtr command_buffer_id,
const gfx::ColorSpace& target_color_space) final {}
void Initialize(
void Initialize(const media::VideoDecoderConfig& config,
bool low_delay,
const std::optional<base::UnguessableToken>& cdm_id,
InitializeCallback callback) final {}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void InitializeWithCdmContext(
const media::VideoDecoderConfig& config,
bool low_delay,
mojo::PendingRemote<media::stable::mojom::StableCdmContext> cdm_context,
InitializeCallback callback) final {}
void Decode(const scoped_refptr<media::DecoderBuffer>& buffer,
mojo::PendingRemote<media::mojom::CdmContextForOOPVD> cdm_context,
InitializeWithCdmContextCallback callback) final {}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void Decode(media::mojom::DecoderBufferPtr buffer,
DecodeCallback callback) final {}
void Reset(ResetCallback callback) final {}
void OnOverlayInfoChanged(const media::OverlayInfo& overlay_info) final {}
private:
mojo::Remote<media::stable::mojom::StableVideoDecoderTracker> tracker_;
mojo::Remote<media::mojom::VideoDecoderTracker> tracker_;
};
mojo::UniqueReceiverSet<media::stable::mojom::StableVideoDecoder>
video_decoders_;
mojo::UniqueReceiverSet<media::mojom::VideoDecoder> video_decoders_;
};
class RenderProcessHostTestStableVideoDecoderTest
class RenderProcessHostTestOOPVideoDecoderTest
: public RenderProcessHostTestBase {
public:
RenderProcessHostTestStableVideoDecoderTest()
: stable_video_decoder_factory_receiver_(
&stable_video_decoder_factory_service_) {}
RenderProcessHostTestOOPVideoDecoderTest()
: video_decoder_factory_receiver_(&oop_video_decoder_factory_service_) {}
void SetUp() override {
feature_list_.InitAndEnableFeature(media::kUseOutOfProcessVideoDecoding);
@ -2303,44 +2318,41 @@ class RenderProcessHostTestStableVideoDecoderTest
}
void SetUpOnMainThread() override {
RenderProcessHostImpl::SetStableVideoDecoderFactoryCreationCBForTesting(
stable_video_decoder_factory_creation_cb_.Get());
RenderProcessHostImpl::SetStableVideoDecoderEventCBForTesting(
stable_video_decoder_event_cb_.Get());
RenderProcessHostImpl::SetVideoDecoderFactoryCreationCBForTesting(
video_decoder_factory_creation_cb_.Get());
RenderProcessHostImpl::SetVideoDecoderEventCBForTesting(
video_decoder_event_cb_.Get());
#if BUILDFLAG(PLATFORM_HAS_OPTIONAL_HEVC_DECODE_SUPPORT)
// When Chrome is compiled with
// BUILDFLAG(PLATFORM_HAS_OPTIONAL_HEVC_DECODE_SUPPORT), renderer processes
// need a media::mojom::VideoDecoder during startup in order to query for
// supported configurations (see content::RenderMediaClient::Initialize()).
// With OOP-VD, this should cause the creation of a
// media::stable::mojom::StableVideoDecoderFactory in order to create the
// corresponding media::stable::mojom::StableVideoDecoder. When the
// supported configurations are obtained, the media::mojom::VideoDecoder and
// media::stable::mojom::StableVideoDecoder connections should be torn down
// thus causing the termination of the
// media::stable::mojom::StableVideoDecoderFactory connection. Here, we set
// up expectations for that.
// With OOP-VD, this should cause the creation of a media::InterfaceFactory
// in order to create the corresponding media::mojom::VideoDecoder. When the
// supported configurations are obtained, the
// renderer-process-to-GPU-process media::mojom::VideoDecoder connection and
// the GPU-process-to-utility-process media::mojom::VideoDecoder connection
// should be torn down thus causing the termination of the
// media::mojom::InterfaceFactory connection. Here, we set up expectations
// for that.
base::RunLoop run_loop;
{
InSequence seq;
EXPECT_CALL(stable_video_decoder_factory_creation_cb_, Run(_))
.WillOnce(
[&](mojo::PendingReceiver<
media::stable::mojom::StableVideoDecoderFactory> receiver) {
stable_video_decoder_factory_receiver_.Bind(
std::move(receiver));
stable_video_decoder_factory_receiver_.set_disconnect_handler(
stable_video_decoder_factory_disconnect_cb_.Get());
});
EXPECT_CALL(stable_video_decoder_event_cb_,
Run(RenderProcessHostImpl::StableVideoDecoderEvent::
kAllDecodersDisconnected));
EXPECT_CALL(stable_video_decoder_factory_disconnect_cb_, Run())
.WillOnce([&]() {
stable_video_decoder_factory_receiver_.reset();
run_loop.Quit();
EXPECT_CALL(video_decoder_factory_creation_cb_, Run(_))
.WillOnce([&](mojo::PendingReceiver<media::mojom::InterfaceFactory>
receiver) {
video_decoder_factory_receiver_.Bind(std::move(receiver));
video_decoder_factory_receiver_.set_disconnect_handler(
video_decoder_factory_disconnect_cb_.Get());
});
EXPECT_CALL(video_decoder_event_cb_,
Run(RenderProcessHostImpl::VideoDecoderEvent::
kAllDecodersDisconnected));
EXPECT_CALL(video_decoder_factory_disconnect_cb_, Run()).WillOnce([&]() {
video_decoder_factory_receiver_.reset();
run_loop.Quit();
});
}
#endif // BUILDFLAG(PLATFORM_HAS_OPTIONAL_HEVC_DECODE_SUPPORT)
@ -2356,9 +2368,9 @@ class RenderProcessHostTestStableVideoDecoderTest
}
void TearDownOnMainThread() override {
// Reset the |stable_video_decoder_factory_receiver_| so that the
// Reset the |video_decoder_factory_receiver_| so that the
// disconnection callback is not called on tear down.
stable_video_decoder_factory_receiver_.reset();
video_decoder_factory_receiver_.reset();
if (rph_initialized_) {
rph_->Cleanup();
}
@ -2369,123 +2381,120 @@ class RenderProcessHostTestStableVideoDecoderTest
bool VerifyAndClearExpectations() {
// Note: we verify and clear the expectations for all the mocks. We
// intentionally don't early out if verifying one mock fails.
bool result = Mock::VerifyAndClearExpectations(
&stable_video_decoder_factory_creation_cb_);
bool result =
Mock::VerifyAndClearExpectations(&video_decoder_factory_creation_cb_);
result = Mock::VerifyAndClearExpectations(
&stable_video_decoder_factory_disconnect_cb_) &&
&video_decoder_factory_disconnect_cb_) &&
result;
result =
Mock::VerifyAndClearExpectations(&stable_video_decoder_event_cb_) &&
result;
Mock::VerifyAndClearExpectations(&video_decoder_event_cb_) && result;
return result;
}
base::test::ScopedFeatureList feature_list_;
StrictMock<base::MockRepeatingCallback<
RenderProcessHostImpl::StableVideoDecoderFactoryCreationCB::RunType>>
stable_video_decoder_factory_creation_cb_;
RenderProcessHostImpl::VideoDecoderFactoryCreationCB::RunType>>
video_decoder_factory_creation_cb_;
StrictMock<base::MockOnceCallback<void()>>
stable_video_decoder_factory_disconnect_cb_;
video_decoder_factory_disconnect_cb_;
StrictMock<base::MockRepeatingCallback<
RenderProcessHostImpl::StableVideoDecoderEventCB::RunType>>
stable_video_decoder_event_cb_;
RenderProcessHostImpl::VideoDecoderEventCB::RunType>>
video_decoder_event_cb_;
FakeStableVideoDecoderFactoryService stable_video_decoder_factory_service_;
mojo::Receiver<media::stable::mojom::StableVideoDecoderFactory>
stable_video_decoder_factory_receiver_;
FakeOOPVideoDecoderFactoryService oop_video_decoder_factory_service_;
mojo::Receiver<media::mojom::InterfaceFactory>
video_decoder_factory_receiver_;
raw_ptr<RenderProcessHost> rph_ = nullptr;
bool rph_initialized_ = false;
};
// Ensures that the StableVideoDecoderFactory connection is terminated after a
// delay once all the StableVideoDecoders created with it have disconnected.
IN_PROC_BROWSER_TEST_F(RenderProcessHostTestStableVideoDecoderTest,
// Ensures that the InterfaceFactory connection is terminated after a
// delay once all the VideoDecoders created with it have disconnected.
IN_PROC_BROWSER_TEST_F(RenderProcessHostTestOOPVideoDecoderTest,
FactoryIsResetAfterDelay) {
ASSERT_FALSE(Test::HasFailure());
// First, let's ask the RPH to establish a StableVideoDecoder connection. This
// should cause the RPH's StableVideoDecoderFactory to be bound.
EXPECT_CALL(stable_video_decoder_factory_creation_cb_, Run(_))
.WillOnce([&](mojo::PendingReceiver<
media::stable::mojom::StableVideoDecoderFactory> receiver) {
stable_video_decoder_factory_receiver_.Bind(std::move(receiver));
stable_video_decoder_factory_receiver_.set_disconnect_handler(
stable_video_decoder_factory_disconnect_cb_.Get());
});
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
stable_video_decoder_remote;
rph_->CreateStableVideoDecoder(
stable_video_decoder_remote.InitWithNewPipeAndPassReceiver());
// First, let's ask the RPH to establish a VideoDecoder connection. This
// should cause the RPH's InterfaceFactory to be bound.
EXPECT_CALL(video_decoder_factory_creation_cb_, Run(_))
.WillOnce(
[&](mojo::PendingReceiver<media::mojom::InterfaceFactory> receiver) {
video_decoder_factory_receiver_.Bind(std::move(receiver));
video_decoder_factory_receiver_.set_disconnect_handler(
video_decoder_factory_disconnect_cb_.Get());
});
mojo::PendingRemote<media::mojom::VideoDecoder> video_decoder_remote;
rph_->CreateOOPVideoDecoder(
video_decoder_remote.InitWithNewPipeAndPassReceiver());
ASSERT_TRUE(VerifyAndClearExpectations());
// Now, let's destroy the StableVideoDecoder connection. Since this was the
// only StableVideoDecoder connection, destroying it should cause the RPH's
// StableVideoDecoderFactory connection to die after a delay.
// Now, let's destroy the VideoDecoder connection. Since this was the
// only VideoDecoder connection, destroying it should cause the RPH's
// InterfaceFactory connection to die after a delay.
base::RunLoop run_loop;
base::ElapsedTimer reset_stable_video_decoder_factory_timer;
base::ElapsedTimer reset_video_decoder_factory_timer;
{
InSequence seq;
EXPECT_CALL(stable_video_decoder_event_cb_,
Run(RenderProcessHostImpl::StableVideoDecoderEvent::
EXPECT_CALL(video_decoder_event_cb_,
Run(RenderProcessHostImpl::VideoDecoderEvent::
kAllDecodersDisconnected));
EXPECT_CALL(stable_video_decoder_factory_disconnect_cb_, Run())
.WillOnce([&]() { run_loop.Quit(); });
EXPECT_CALL(video_decoder_factory_disconnect_cb_, Run()).WillOnce([&]() {
run_loop.Quit();
});
}
stable_video_decoder_remote.reset();
video_decoder_remote.reset();
run_loop.Run();
EXPECT_GE(reset_stable_video_decoder_factory_timer.Elapsed(),
base::Seconds(3));
EXPECT_GE(reset_video_decoder_factory_timer.Elapsed(), base::Seconds(3));
}
// Ensures that the timer that destroys the StableVideoDecoderFactory connection
// when all StableVideoDecoder connections die is stopped if a request to
// connect another StableVideoDecoder is received soon enough.
IN_PROC_BROWSER_TEST_F(RenderProcessHostTestStableVideoDecoderTest,
// Ensures that the timer that destroys the InterfaceFactory connection
// when all VideoDecoder connections die is stopped if a request to
// connect another VideoDecoder is received soon enough.
IN_PROC_BROWSER_TEST_F(RenderProcessHostTestOOPVideoDecoderTest,
FactoryResetTimerIsStoppedOnRequestBeforeResetDelay) {
ASSERT_FALSE(Test::HasFailure());
// First, let's ask the RPH to establish a StableVideoDecoder connection. This
// should cause the RPH's StableVideoDecoderFactory to be bound.
EXPECT_CALL(stable_video_decoder_factory_creation_cb_, Run(_))
.WillOnce([&](mojo::PendingReceiver<
media::stable::mojom::StableVideoDecoderFactory> receiver) {
stable_video_decoder_factory_receiver_.Bind(std::move(receiver));
stable_video_decoder_factory_receiver_.set_disconnect_handler(
stable_video_decoder_factory_disconnect_cb_.Get());
});
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
stable_video_decoder_remote;
rph_->CreateStableVideoDecoder(
stable_video_decoder_remote.InitWithNewPipeAndPassReceiver());
// First, let's ask the RPH to establish a VideoDecoder connection. This
// should cause the RPH's InterfaceFactory to be bound.
EXPECT_CALL(video_decoder_factory_creation_cb_, Run(_))
.WillOnce(
[&](mojo::PendingReceiver<media::mojom::InterfaceFactory> receiver) {
video_decoder_factory_receiver_.Bind(std::move(receiver));
video_decoder_factory_receiver_.set_disconnect_handler(
video_decoder_factory_disconnect_cb_.Get());
});
mojo::PendingRemote<media::mojom::VideoDecoder> video_decoder_remote;
rph_->CreateOOPVideoDecoder(
video_decoder_remote.InitWithNewPipeAndPassReceiver());
ASSERT_TRUE(VerifyAndClearExpectations());
// Now, let's destroy the StableVideoDecoder connection. Since this was the
// only StableVideoDecoder connection, destroying it should trigger a
// Now, let's destroy the VideoDecoder connection. Since this was the
// only VideoDecoder connection, destroying it should trigger a
// kAllDecodersDisconnected event.
base::RunLoop run_loop_1;
EXPECT_CALL(stable_video_decoder_event_cb_,
Run(RenderProcessHostImpl::StableVideoDecoderEvent::
kAllDecodersDisconnected))
EXPECT_CALL(
video_decoder_event_cb_,
Run(RenderProcessHostImpl::VideoDecoderEvent::kAllDecodersDisconnected))
.WillOnce([&]() { run_loop_1.Quit(); });
stable_video_decoder_remote.reset();
video_decoder_remote.reset();
run_loop_1.Run();
ASSERT_TRUE(VerifyAndClearExpectations());
// Now, let's request another StableVideoDecoder connection immediately. This
// Now, let's request another VideoDecoder connection immediately. This
// should stop the timer that resets the factory.
EXPECT_CALL(stable_video_decoder_event_cb_,
Run(RenderProcessHostImpl::StableVideoDecoderEvent::
kFactoryResetTimerStopped));
rph_->CreateStableVideoDecoder(
stable_video_decoder_remote.InitWithNewPipeAndPassReceiver());
EXPECT_CALL(
video_decoder_event_cb_,
Run(RenderProcessHostImpl::VideoDecoderEvent::kFactoryResetTimerStopped));
rph_->CreateOOPVideoDecoder(
video_decoder_remote.InitWithNewPipeAndPassReceiver());
ASSERT_TRUE(VerifyAndClearExpectations());
// Finally, let's wait a few seconds (longer than the delay configured for the
// timer that kills the StableVideoDecoderFactory connection). Because the
// |stable_video_decoder_factory_disconnect_cb_| is a StrictMock, this should
// detect that the StableVideoDecoderFactory connection doesn't die.
// timer that kills the InterfaceFactory connection). Because the
// |video_decoder_factory_disconnect_cb_| is a StrictMock, this should
// detect that the InterfaceFactory connection doesn't die.
base::RunLoop run_loop_2;
GetUIThreadTaskRunner()->PostDelayedTask(
FROM_HERE,

@ -1176,28 +1176,26 @@ void LogDelayReasonForCleanup(
}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
RenderProcessHostImpl::StableVideoDecoderFactoryCreationCB&
GetStableVideoDecoderFactoryCreationCB() {
RenderProcessHostImpl::VideoDecoderFactoryCreationCB&
GetVideoDecoderFactoryCreationCB() {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
static base::NoDestructor<
RenderProcessHostImpl::StableVideoDecoderFactoryCreationCB>
RenderProcessHostImpl::VideoDecoderFactoryCreationCB>
s_callback;
return *s_callback;
}
RenderProcessHostImpl::StableVideoDecoderEventCB&
GetStableVideoDecoderEventCB() {
RenderProcessHostImpl::VideoDecoderEventCB& GetVideoDecoderEventCB() {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
static base::NoDestructor<RenderProcessHostImpl::StableVideoDecoderEventCB>
static base::NoDestructor<RenderProcessHostImpl::VideoDecoderEventCB>
s_callback;
return *s_callback;
}
void InvokeStableVideoDecoderEventCB(
RenderProcessHostImpl::StableVideoDecoderEvent event) {
void InvokeVideoDecoderEventCB(RenderProcessHostImpl::VideoDecoderEvent event) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
RenderProcessHostImpl::StableVideoDecoderEventCB& callback =
GetStableVideoDecoderEventCB();
RenderProcessHostImpl::VideoDecoderEventCB& callback =
GetVideoDecoderEventCB();
if (!callback.is_null()) {
callback.Run(event);
}
@ -1537,9 +1535,9 @@ RenderProcessHostImpl::RenderProcessHostImpl(
ChromeTrackEvent::kRenderProcessHost, *this);
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
stable_video_decoder_trackers_.set_disconnect_handler(base::BindRepeating(
&RenderProcessHostImpl::OnStableVideoDecoderDisconnected,
instance_weak_factory_.GetWeakPtr()));
video_decoder_trackers_.set_disconnect_handler(
base::BindRepeating(&RenderProcessHostImpl::OnVideoDecoderDisconnected,
instance_weak_factory_.GetWeakPtr()));
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
widget_helper_ = new RenderWidgetHelper();
@ -2253,103 +2251,94 @@ void RenderProcessHostImpl::SetBatterySaverMode(
}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void RenderProcessHostImpl::CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder> receiver) {
void RenderProcessHostImpl::CreateOOPVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
if (!stable_video_decoder_factory_remote_.is_bound()) {
auto creation_cb = GetStableVideoDecoderFactoryCreationCB();
if (!video_decoder_factory_remote_.is_bound()) {
auto creation_cb = GetVideoDecoderFactoryCreationCB();
if (creation_cb.is_null()) {
LaunchStableVideoDecoderFactory(
stable_video_decoder_factory_remote_.BindNewPipeAndPassReceiver());
LaunchOOPVideoDecoderFactory(
video_decoder_factory_remote_.BindNewPipeAndPassReceiver());
} else {
creation_cb.Run(
stable_video_decoder_factory_remote_.BindNewPipeAndPassReceiver());
video_decoder_factory_remote_.BindNewPipeAndPassReceiver());
}
stable_video_decoder_factory_remote_.set_disconnect_handler(
base::BindOnce(&RenderProcessHostImpl::ResetStableVideoDecoderFactory,
video_decoder_factory_remote_.set_disconnect_handler(
base::BindOnce(&RenderProcessHostImpl::ResetVideoDecoderFactory,
instance_weak_factory_.GetWeakPtr()));
// Version 1 introduced the ability to pass a
// mojo::PendingRemote<StableVideoDecoderTracker> to
// CreateStableVideoDecoder().
stable_video_decoder_factory_remote_.RequireVersion(1u);
}
CHECK(stable_video_decoder_factory_remote_.is_bound());
CHECK(video_decoder_factory_remote_.is_bound());
mojo::PendingRemote<media::stable::mojom::StableVideoDecoderTracker>
tracker_remote;
stable_video_decoder_trackers_.Add(
this, tracker_remote.InitWithNewPipeAndPassReceiver());
stable_video_decoder_factory_remote_->CreateStableVideoDecoder(
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker_remote;
video_decoder_trackers_.Add(this,
tracker_remote.InitWithNewPipeAndPassReceiver());
video_decoder_factory_remote_->CreateVideoDecoderWithTracker(
std::move(receiver), std::move(tracker_remote));
if (stable_video_decoder_factory_reset_timer_.IsRunning()) {
// |stable_video_decoder_factory_reset_timer_| has been started to
// eventually reset() the |stable_video_decoder_factory_remote_|. Now that
// we got a request to create a StableVideoDecoder before the timer
// triggered, we can stop it so that the utility process associated with the
// |stable_video_decoder_factory_remote_| doesn't die.
stable_video_decoder_factory_reset_timer_.Stop();
InvokeStableVideoDecoderEventCB(
StableVideoDecoderEvent::kFactoryResetTimerStopped);
if (video_decoder_factory_reset_timer_.IsRunning()) {
// |video_decoder_factory_reset_timer_| has been started to eventually
// reset() the |video_decoder_factory_remote_|. Now that we got a request
// to create a VideoDecoder before the timer triggered, we can stop it so
// that the utility process associated with the
// |video_decoder_factory_remote_| doesn't die.
video_decoder_factory_reset_timer_.Stop();
InvokeVideoDecoderEventCB(VideoDecoderEvent::kFactoryResetTimerStopped);
}
}
void RenderProcessHostImpl::OnStableVideoDecoderDisconnected() {
void RenderProcessHostImpl::OnVideoDecoderDisconnected() {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
if (stable_video_decoder_trackers_.empty()) {
// All StableVideoDecoders have disconnected. Let's reset() the
// |stable_video_decoder_factory_remote_| so that the corresponding utility
// process gets terminated. Note that we don't reset() immediately. Instead,
// we wait a little bit in case a request to create another
// StableVideoDecoder comes in. That way, we don't unnecessarily tear down
// the video decoder process just to create another one almost immediately.
// We chose 3 seconds because it seemed "reasonable."
constexpr base::TimeDelta kTimeToResetStableVideoDecoderFactory =
if (video_decoder_trackers_.empty()) {
// All VideoDecoders have disconnected. Let's reset() the
// |video_decoder_factory_remote_| so that the corresponding utility process
// gets terminated. Note that we don't reset() immediately. Instead, we wait
// a little bit in case a request to create another VideoDecoder comes in.
// That way, we don't unnecessarily tear down the video decoder process just
// to create another one almost immediately. We chose 3 seconds because it
// seemed "reasonable."
constexpr base::TimeDelta kTimeToResetVideoDecoderFactory =
base::Seconds(3);
stable_video_decoder_factory_reset_timer_.Start(
FROM_HERE, kTimeToResetStableVideoDecoderFactory,
base::BindOnce(&RenderProcessHostImpl::ResetStableVideoDecoderFactory,
video_decoder_factory_reset_timer_.Start(
FROM_HERE, kTimeToResetVideoDecoderFactory,
base::BindOnce(&RenderProcessHostImpl::ResetVideoDecoderFactory,
instance_weak_factory_.GetWeakPtr()));
InvokeStableVideoDecoderEventCB(
StableVideoDecoderEvent::kAllDecodersDisconnected);
InvokeVideoDecoderEventCB(VideoDecoderEvent::kAllDecodersDisconnected);
}
}
void RenderProcessHostImpl::ResetStableVideoDecoderFactory() {
void RenderProcessHostImpl::ResetVideoDecoderFactory() {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
stable_video_decoder_factory_remote_.reset();
video_decoder_factory_remote_.reset();
// Note that |stable_video_decoder_trackers_| should be empty if
// ResetStableVideoDecoderFactory() was called because
// |stable_video_decoder_factory_reset_timer_| fired. Otherwise, there's no
// Note that |video_decoder_trackers_| should be empty if
// ResetVideoDecoderFactory() was called because
// |video_decoder_factory_reset_timer_| fired. Otherwise, there's no
// guarantee about its contents. For example, maybe
// ResetStableVideoDecoderFactory() got called because the video decoder
// ResetVideoDecoderFactory() got called because the video decoder
// process crashed and we got the disconnection notification for
// |stable_video_decoder_factory_remote_| before the disconnection
// notification for any of the elements in |stable_video_decoder_trackers_|.
stable_video_decoder_trackers_.Clear();
// |video_decoder_factory_remote_| before the disconnection
// notification for any of the elements in |video_decoder_trackers_|.
video_decoder_trackers_.Clear();
if (stable_video_decoder_factory_reset_timer_.IsRunning()) {
stable_video_decoder_factory_reset_timer_.Stop();
InvokeStableVideoDecoderEventCB(
StableVideoDecoderEvent::kFactoryResetTimerStopped);
if (video_decoder_factory_reset_timer_.IsRunning()) {
video_decoder_factory_reset_timer_.Stop();
InvokeVideoDecoderEventCB(VideoDecoderEvent::kFactoryResetTimerStopped);
}
}
void RenderProcessHostImpl::SetStableVideoDecoderFactoryCreationCBForTesting(
StableVideoDecoderFactoryCreationCB callback) {
void RenderProcessHostImpl::SetVideoDecoderFactoryCreationCBForTesting(
VideoDecoderFactoryCreationCB callback) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
GetStableVideoDecoderFactoryCreationCB() = callback;
GetVideoDecoderFactoryCreationCB() = callback;
}
void RenderProcessHostImpl::SetStableVideoDecoderEventCBForTesting(
StableVideoDecoderEventCB callback) {
void RenderProcessHostImpl::SetVideoDecoderEventCBForTesting(
VideoDecoderEventCB callback) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
GetStableVideoDecoderEventCB() = callback;
GetVideoDecoderEventCB() = callback;
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
@ -5194,7 +5183,7 @@ void RenderProcessHostImpl::ResetIPC() {
tracing_registration_.reset();
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
ResetStableVideoDecoderFactory();
ResetVideoDecoderFactory();
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
// Destroy all embedded CompositorFrameSinks.

@ -95,7 +95,7 @@
#endif
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/interface_factory.mojom.h"
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#if BUILDFLAG(IS_FUCHSIA)
@ -201,7 +201,7 @@ class CONTENT_EXPORT RenderProcessHostImpl
public metrics::HistogramChildProcess
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
,
public media::stable::mojom::StableVideoDecoderTracker
public media::mojom::VideoDecoderTracker
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
{
public:
@ -838,9 +838,8 @@ class CONTENT_EXPORT RenderProcessHostImpl
override;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder> receiver)
override;
void CreateOOPVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver) override;
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void BindP2PSocketManager(
@ -866,19 +865,17 @@ class CONTENT_EXPORT RenderProcessHostImpl
#endif
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
using StableVideoDecoderFactoryCreationCB = base::RepeatingCallback<void(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoderFactory>)>;
static void SetStableVideoDecoderFactoryCreationCBForTesting(
StableVideoDecoderFactoryCreationCB cb);
using VideoDecoderFactoryCreationCB = base::RepeatingCallback<void(
mojo::PendingReceiver<media::mojom::InterfaceFactory>)>;
static void SetVideoDecoderFactoryCreationCBForTesting(
VideoDecoderFactoryCreationCB cb);
enum class StableVideoDecoderEvent {
enum class VideoDecoderEvent {
kFactoryResetTimerStopped,
kAllDecodersDisconnected,
};
using StableVideoDecoderEventCB =
base::RepeatingCallback<void(StableVideoDecoderEvent)>;
static void SetStableVideoDecoderEventCBForTesting(
StableVideoDecoderEventCB cb);
using VideoDecoderEventCB = base::RepeatingCallback<void(VideoDecoderEvent)>;
static void SetVideoDecoderEventCBForTesting(VideoDecoderEventCB cb);
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void GetBoundInterfacesForTesting(std::vector<std::string>& out);
@ -1255,9 +1252,9 @@ class CONTENT_EXPORT RenderProcessHostImpl
bool AreAllRefCountsZero();
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void OnStableVideoDecoderDisconnected();
void OnVideoDecoderDisconnected();
void ResetStableVideoDecoderFactory();
void ResetVideoDecoderFactory();
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
mojo::OutgoingInvitation mojo_invitation_;
@ -1432,24 +1429,22 @@ class CONTENT_EXPORT RenderProcessHostImpl
std::unique_ptr<PermissionServiceContext> permission_service_context_;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
// Connection to the StableVideoDecoderFactory that lives in a utility
// Connection to the InterfaceFactory that lives in a utility
// process. This is only used for out-of-process video decoding.
mojo::Remote<media::stable::mojom::StableVideoDecoderFactory>
stable_video_decoder_factory_remote_;
mojo::Remote<media::mojom::InterfaceFactory> video_decoder_factory_remote_;
// Using |stable_video_decoder_trackers_|, we track the StableVideoDecoders
// that have been created using |stable_video_decoder_factory_remote_|. That
// way, we know when the remote StableVideoDecoder dies.
mojo::ReceiverSet<media::stable::mojom::StableVideoDecoderTracker>
stable_video_decoder_trackers_;
// Using |video_decoder_trackers_|, we track the VideoDecoders
// that have been created using |video_decoder_factory_remote_|. That way, we
// know when the remote VideoDecoder dies.
mojo::ReceiverSet<media::mojom::VideoDecoderTracker> video_decoder_trackers_;
// |stable_video_decoder_factory_reset_timer_| allows us to delay the reset()
// of |stable_video_decoder_factory_remote_|: after all StableVideoDecoders
// have disconnected, we wait for the timer to trigger, and if no request
// comes in to create a StableVideoDecoder before that, we reset the
// |stable_video_decoder_factory_remote_| which should cause the destruction
// of the remote video decoder utility process.
base::OneShotTimer stable_video_decoder_factory_reset_timer_;
// |video_decoder_factory_reset_timer_| allows us to delay the reset() of
// |video_decoder_factory_remote_|: after all VideoDecoders have disconnected,
// we wait for the timer to trigger, and if no request comes in to create a
// VideoDecoder before that, we reset the |video_decoder_factory_remote_|
// which should cause the destruction of the remote video decoder utility
// process.
base::OneShotTimer video_decoder_factory_reset_timer_;
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#if BUILDFLAG(IS_FUCHSIA)

@ -705,7 +705,7 @@ source_set("browser_sources") {
if (allow_oop_video_decoder) {
sources += [ "stable_video_decoder_factory.h" ]
public_deps += [ "//media/mojo/mojom/stable:stable_video_decoder" ]
public_deps += [ "//media/mojo/mojom" ]
}
if (is_chromeos) {

@ -58,7 +58,7 @@
#endif
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#include "media/mojo/mojom/stable/stable_video_decoder.mojom-forward.h"
#include "media/mojo/mojom/video_decoder.mojom-forward.h"
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#if BUILDFLAG(IS_FUCHSIA)
@ -710,9 +710,8 @@ class CONTENT_EXPORT RenderProcessHost : public IPC::Sender,
mojo::PendingReceiver<blink::mojom::WebSocketConnector> receiver) = 0;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
virtual void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
receiver) = 0;
virtual void CreateOOPVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver) = 0;
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
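As a caller-side illustration (a sketch, not part of this CL), a client holding a live RenderProcessHost* |rph| on the UI thread would request an out-of-process decoder through the renamed method like this:

mojo::PendingRemote<media::mojom::VideoDecoder> decoder;
rph->CreateOOPVideoDecoder(decoder.InitWithNewPipeAndPassReceiver());
// The pending remote is then handed to the media pipeline; the per-process
// InterfaceFactory and its reset timer are managed by RenderProcessHostImpl.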
// Returns the current number of active views in this process. Excludes

@ -6,16 +6,17 @@
#define CONTENT_PUBLIC_BROWSER_STABLE_VIDEO_DECODER_FACTORY_H_
#include "content/common/content_export.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom-forward.h"
#include "media/mojo/mojom/interface_factory.mojom-forward.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
// TODO(crbug.com/347331029): rename this file to oop_video_decoder_factory.h.
namespace content {
// Binds a StableVideoDecoderFactory PendingReceiver by starting a new utility
// process. This function can be called from any thread.
CONTENT_EXPORT void LaunchStableVideoDecoderFactory(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoderFactory>
receiver);
// Binds a media::mojom::InterfaceFactory PendingReceiver by starting a new
// utility process. This function can be called from any thread.
CONTENT_EXPORT void LaunchOOPVideoDecoderFactory(
mojo::PendingReceiver<media::mojom::InterfaceFactory> receiver);
} // namespace content
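A minimal usage sketch for the declaration above, assuming a browser-process caller (illustrative only):

mojo::Remote<media::mojom::InterfaceFactory> oop_video_decoder_factory;
content::LaunchOOPVideoDecoderFactory(
    oop_video_decoder_factory.BindNewPipeAndPassReceiver());
// Resetting the Remote (or a disconnect) tears down the utility process.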

@ -276,9 +276,8 @@ class MockRenderProcessHost : public RenderProcessHost {
mojo::PendingReceiver<blink::mojom::WebSocketConnector> receiver)
override {}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder> receiver)
override {}
void CreateOOPVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver) override {}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
std::string GetInfoForBrowserContextDestructionCrashReporting() override;

@ -52,18 +52,16 @@ void MediaInterfaceFactory::CreateAudioDecoder(
void MediaInterfaceFactory::CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) {
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder) {
// The renderer process cannot act as a proxy for video decoding.
DCHECK(!dst_video_decoder);
if (!task_runner_->BelongsToCurrentThread()) {
task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
&MediaInterfaceFactory::CreateVideoDecoder, weak_this_,
std::move(receiver),
/*dst_video_decoder=*/
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>()));
base::BindOnce(&MediaInterfaceFactory::CreateVideoDecoder, weak_this_,
std::move(receiver),
/*dst_video_decoder=*/
mojo::PendingRemote<media::mojom::VideoDecoder>()));
return;
}
@ -73,20 +71,12 @@ void MediaInterfaceFactory::CreateVideoDecoder(
}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void MediaInterfaceFactory::CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) {
if (!task_runner_->BelongsToCurrentThread()) {
task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&MediaInterfaceFactory::CreateStableVideoDecoder,
weak_this_, std::move(video_decoder)));
return;
}
DVLOG(1) << __func__;
GetMediaInterfaceFactory()->CreateStableVideoDecoder(
std::move(video_decoder));
void MediaInterfaceFactory::CreateVideoDecoderWithTracker(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker) {
// CreateVideoDecoderWithTracker() should not be called by the renderer
// process.
NOTREACHED();
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)

@ -50,12 +50,11 @@ class MediaInterfaceFactory final : public media::mojom::InterfaceFactory {
mojo::PendingReceiver<media::mojom::AudioDecoder> receiver) final;
void CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) final;
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder) final;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) final;
void CreateVideoDecoderWithTracker(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker) final;
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateAudioEncoder(
mojo::PendingReceiver<media::mojom::AudioEncoder> receiver) final;

@ -88,38 +88,6 @@ RenderMediaClient::RenderMediaClient()
// asynchronously. If IsDecoderSupportedVideoType() is called before we get a
// response, that method will block if it's not on the main thread or fall
// back to querying the video decoder configurations synchronously otherwise.
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
switch (media::GetOutOfProcessVideoDecodingMode()) {
case media::OOPVDMode::kEnabledWithoutGpuProcessAsProxy: {
mojo::SharedRemote<media::stable::mojom::StableVideoDecoder>
stable_video_decoder_remote;
interface_factory_for_supported_profiles_->CreateStableVideoDecoder(
stable_video_decoder_remote.BindNewPipeAndPassReceiver());
stable_video_decoder_remote.set_disconnect_handler(
base::BindOnce(&RenderMediaClient::OnGetSupportedVideoDecoderConfigs,
// base::Unretained(this) is safe because the
// RenderMediaClient is never destructed.
base::Unretained(this),
media::SupportedVideoDecoderConfigs(),
media::VideoDecoderType::kUnknown),
main_task_runner_);
stable_video_decoder_remote->GetSupportedConfigs(
base::BindOnce(&RenderMediaClient::OnGetSupportedVideoDecoderConfigs,
// base::Unretained(this) is safe because the
// RenderMediaClient is never destructed.
base::Unretained(this)));
video_decoder_for_supported_profiles_.emplace<
mojo::SharedRemote<media::stable::mojom::StableVideoDecoder>>(
std::move(stable_video_decoder_remote));
return;
}
case media::OOPVDMode::kEnabledWithGpuProcessAsProxy:
case media::OOPVDMode::kDisabled:
break;
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
mojo::SharedRemote<media::mojom::VideoDecoder> video_decoder_remote;
interface_factory_for_supported_profiles_->CreateVideoDecoder(
video_decoder_remote.BindNewPipeAndPassReceiver(),
@ -135,9 +103,7 @@ RenderMediaClient::RenderMediaClient()
// base::Unretained(this) is safe because the
// RenderMediaClient is never destructed.
base::Unretained(this)));
video_decoder_for_supported_profiles_
.emplace<mojo::SharedRemote<media::mojom::VideoDecoder>>(
std::move(video_decoder_remote));
video_decoder_for_supported_profiles_ = std::move(video_decoder_remote);
#endif // BUILDFLAG(PLATFORM_HAS_OPTIONAL_HEVC_DECODE_SUPPORT)
#if BUILDFLAG(ENABLE_MOJO_AUDIO_DECODER)
@ -194,19 +160,8 @@ bool RenderMediaClient::IsDecoderSupportedVideoType(
DCHECK_CALLED_ON_VALID_SEQUENCE(main_thread_sequence_checker_);
media::SupportedVideoDecoderConfigs configs;
media::VideoDecoderType video_decoder_type;
if ((absl::holds_alternative<
mojo::SharedRemote<media::mojom::VideoDecoder>>(
video_decoder_for_supported_profiles_) &&
!absl::get<mojo::SharedRemote<media::mojom::VideoDecoder>>(
video_decoder_for_supported_profiles_)
->GetSupportedConfigs(&configs, &video_decoder_type)) ||
(absl::holds_alternative<
mojo::SharedRemote<media::stable::mojom::StableVideoDecoder>>(
video_decoder_for_supported_profiles_) &&
!absl::get<
mojo::SharedRemote<media::stable::mojom::StableVideoDecoder>>(
video_decoder_for_supported_profiles_)
->GetSupportedConfigs(&configs, &video_decoder_type))) {
if (!video_decoder_for_supported_profiles_->GetSupportedConfigs(
&configs, &video_decoder_type)) {
configs.clear();
}
OnGetSupportedVideoDecoderConfigs(configs, video_decoder_type);
@ -322,8 +277,7 @@ void RenderMediaClient::OnGetSupportedVideoDecoderConfigs(
UpdateDecoderVideoProfilesInternal(configs);
did_video_decoder_update_.Signal();
video_decoder_for_supported_profiles_
.emplace<mojo::SharedRemote<media::mojom::VideoDecoder>>();
video_decoder_for_supported_profiles_.reset();
#if BUILDFLAG(ENABLE_MOJO_AUDIO_DECODER)
if (did_audio_decoder_update_.IsSignaled()) {
interface_factory_for_supported_profiles_.reset();

@ -16,7 +16,6 @@
#include "media/base/supported_video_decoder_config.h"
#include "media/mojo/mojom/audio_decoder.mojom.h"
#include "media/mojo/mojom/interface_factory.mojom.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/mojom/video_encode_accelerator.mojom.h"
#include "mojo/public/cpp/bindings/remote.h"
@ -91,9 +90,7 @@ class RenderMediaClient : public media::MediaClient {
[[maybe_unused]] mojo::Remote<viz::mojom::Gpu> gpu_for_supported_profiles_
GUARDED_BY_CONTEXT(main_thread_sequence_checker_);
[[maybe_unused]] absl::variant<
mojo::SharedRemote<media::mojom::VideoDecoder>,
mojo::SharedRemote<media::stable::mojom::StableVideoDecoder>>
[[maybe_unused]] mojo::SharedRemote<media::mojom::VideoDecoder>
video_decoder_for_supported_profiles_
GUARDED_BY_CONTEXT(main_thread_sequence_checker_);

@ -383,10 +383,9 @@ auto RunOOPArcVideoAcceleratorFactoryService(
#if (BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)) && \
(BUILDFLAG(USE_VAAPI) || BUILDFLAG(USE_V4L2_CODEC))
auto RunStableVideoDecoderFactoryProcessService(
mojo::PendingReceiver<
media::stable::mojom::StableVideoDecoderFactoryProcess> receiver) {
return std::make_unique<media::StableVideoDecoderFactoryProcessService>(
auto RunOOPVideoDecoderFactoryProcessService(
mojo::PendingReceiver<media::mojom::VideoDecoderFactoryProcess> receiver) {
return std::make_unique<media::OOPVideoDecoderFactoryProcessService>(
std::move(receiver));
}
#endif // (BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)) &&
@ -417,7 +416,7 @@ void RegisterIOThreadServices(mojo::ServiceFactory& services) {
#if (BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)) && \
(BUILDFLAG(USE_VAAPI) || BUILDFLAG(USE_V4L2_CODEC))
services.Add(RunStableVideoDecoderFactoryProcessService);
services.Add(RunOOPVideoDecoderFactoryProcessService);
#endif // (BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)) &&
// (BUILDFLAG(USE_VAAPI) || BUILDFLAG(USE_V4L2_CODEC))
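For reference, a hedged sketch of the browser-side launch that pairs with this IO-thread registration; the exact launcher call and display name are assumptions based on how other utility-hosted services are started, not code from this CL:

mojo::Remote<media::mojom::VideoDecoderFactoryProcess> factory_process;
content::ServiceProcessHost::Launch(
    factory_process.BindNewPipeAndPassReceiver(),
    content::ServiceProcessHost::Options()
        .WithDisplayName("Video Decoder Factory")  // Assumed name.
        .Pass());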

@ -1330,8 +1330,7 @@ BASE_FEATURE(kBackgroundListening,
// Spawn utility processes to perform hardware decode acceleration on behalf of
// renderer processes (instead of using the GPU process). The GPU process will
// still be used as a proxy between renderers and utility processes (see
// go/oop-vd-dd). This feature and |kUseGTFOOutOfProcessVideoDecoding| are
// mutually exclusive.
// go/oop-vd-dd).
BASE_FEATURE(kUseOutOfProcessVideoDecoding,
"UseOutOfProcessVideoDecoding",
#if BUILDFLAG(IS_CHROMEOS)
@ -1340,17 +1339,6 @@ BASE_FEATURE(kUseOutOfProcessVideoDecoding,
base::FEATURE_DISABLED_BY_DEFAULT
#endif
);
// Spawn utility processes to perform hardware decode acceleration on behalf of
// renderer processes (instead of using the GPU process). The GPU process will
// NOT be used as a proxy between renderers and utility processes (see
// go/oopvd-gtfo-dd). This feature and |kUseOutOfProcessVideoDecoding| are
// mutually exclusive.
// Owner: andrescj@chromium.org
// Expiry: one milestone after this path is enabled by default
BASE_FEATURE(kUseGTFOOutOfProcessVideoDecoding,
"UseGTFOOutOfProcessVideoDecoding",
base::FEATURE_DISABLED_BY_DEFAULT);
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
@ -1768,28 +1756,16 @@ bool IsMediaFoundationD3D11VideoCaptureEnabled() {
#endif
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
OOPVDMode GetOutOfProcessVideoDecodingMode() {
bool IsOutOfProcessVideoDecodingEnabled() {
#if BUILDFLAG(IS_CASTOS)
// The sandbox for OOP-VD was designed assuming that we're not on CastOS (see
// go/oop-vd-sandbox).
//
// TODO(b/210759684): revisit the sandbox to see if this restriction is
// necessary.
return OOPVDMode::kDisabled;
return false;
#else
const bool use_gtfo_oopvd =
base::FeatureList::IsEnabled(kUseGTFOOutOfProcessVideoDecoding);
const bool use_regular_oopvd =
base::FeatureList::IsEnabled(kUseOutOfProcessVideoDecoding);
LOG_IF(WARNING, use_gtfo_oopvd && use_regular_oopvd)
<< "UseGTFOOutOfProcessVideoDecoding and UseOutOfProcessVideoDecoding "
"are both enabled; ignoring UseOutOfProcessVideoDecoding";
if (use_gtfo_oopvd) {
return OOPVDMode::kEnabledWithoutGpuProcessAsProxy;
} else if (use_regular_oopvd) {
return OOPVDMode::kEnabledWithGpuProcessAsProxy;
}
return OOPVDMode::kDisabled;
return base::FeatureList::IsEnabled(kUseOutOfProcessVideoDecoding);
#endif
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
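A short sketch of what call sites collapse to after this change; the branch below is hypothetical and only illustrates the boolean check that replaces the old three-state mode:

#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
  if (media::IsOutOfProcessVideoDecodingEnabled()) {
    // Route hardware video decoding through a utility process (go/oop-vd-dd)
    // instead of decoding in the GPU process.
  }
#endif  // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)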

@ -486,15 +486,10 @@ MEDIA_EXPORT BASE_DECLARE_FEATURE(kBackgroundListening);
#endif // BUILDFLAG(IS_CHROMEOS)
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
// Note: please use GetOutOfProcessVideoDecodingMode() to determine if OOP-VD is
// enabled instead of directly checking this feature flag. The reason is that
// Note: please use IsOutOfProcessVideoDecodingEnabled() to determine if OOP-VD
// is enabled instead of directly checking this feature flag. The reason is that
// that function may perform checks beyond the feature flag.
MEDIA_EXPORT BASE_DECLARE_FEATURE(kUseOutOfProcessVideoDecoding);
// Note: please use GetOutOfProcessVideoDecodingMode() to determine if GTFO
// OOP-VD is enabled instead of directly checking this feature flag. The reason
// is that that function may perform checks beyond the feature flag.
MEDIA_EXPORT BASE_DECLARE_FEATURE(kUseGTFOOutOfProcessVideoDecoding);
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS)
@ -563,12 +558,7 @@ MEDIA_EXPORT bool IsMediaFoundationD3D11VideoCaptureEnabled();
#endif
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
enum class OOPVDMode {
kDisabled,
kEnabledWithGpuProcessAsProxy, // AKA "regular" OOP-VD; go/oop-vd-dd.
kEnabledWithoutGpuProcessAsProxy, // AKA GTFO OOP-VD; go/oopvd-gtfo-dd.
};
MEDIA_EXPORT OOPVDMode GetOutOfProcessVideoDecodingMode();
MEDIA_EXPORT bool IsOutOfProcessVideoDecodingEnabled();
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
// Return bitmask of audio formats supported by EDID.

@ -104,7 +104,7 @@ source_set("common") {
":video_frame_resource",
":video_frame_utils",
"//gpu/ipc/service",
"//media/mojo/mojom/stable:stable_video_decoder",
"//media/mojo/mojom:mojom",
"//ui/ozone",
]

@ -8,13 +8,14 @@
#include "base/task/bind_post_task.h"
#include "base/task/sequenced_task_runner.h"
#include "build/build_config.h"
#include "chromeos/components/cdm_factory_daemon/stable_cdm_context_impl.h"
#include "chromeos/components/cdm_factory_daemon/cdm_context_for_oopvd_impl.h"
#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "media/base/format_utils.h"
#include "media/base/video_util.h"
#include "media/gpu/buffer_validation.h"
#include "media/gpu/chromeos/native_pixmap_frame_resource.h"
#include "media/gpu/chromeos/platform_video_frame_utils.h"
#include "media/gpu/chromeos/video_frame_resource.h"
#include "media/gpu/macros.h"
#include "media/mojo/common/mojo_decoder_buffer_converter.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
@ -74,8 +75,8 @@
// VideoDecoder::Reset()).
//
// - That OOPVideoDecoder asserts it's not being misused (which might cause us
// to violate the requirements of the StableVideoDecoder interface). For
// example, the StableVideoDecoder interface says for Decode(): "this must not
// to violate the requirements of the VideoDecoder interface). For
// example, the VideoDecoder interface says for Decode(): "this must not
// be called while there are pending Initialize(), Reset(), or Decode(EOS)
// requests."
@ -87,52 +88,46 @@ namespace {
// The maximum size is chosen to be the same as in the VaapiVideoDecoder.
constexpr size_t kTimestampCacheSize = 128;
// Converts |mojo_frame| to a media::FrameResource after performing some
// validation. The reason we do validation/conversion here and not in the mojo
// traits is that we don't want every incoming stable::mojom::VideoFrame to
// result in a media::FrameResource: we'd like to re-use buffers based on the
// incoming |mojo_frame|->gpu_memory_buffer_handle.id; if that incoming
// |mojo_frame| is a frame that we already know about, we can re-use the
// underlying buffer without creating a media::FrameResource.
scoped_refptr<FrameResource> MojoVideoFrameToFrameResource(
stable::mojom::VideoFramePtr mojo_frame) {
if (mojo_frame->metadata.protected_video &&
mojo_frame->metadata.needs_detiling &&
mojo_frame->format == PIXEL_FORMAT_P010LE) {
scoped_refptr<FrameResource> CreateDecodedFrameResource(
gfx::GpuMemoryBufferHandle gmb_handle,
VideoPixelFormat format,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
base::TimeDelta timestamp,
const VideoFrameMetadata& metadata,
const gfx::ColorSpace& color_space,
const std::optional<gfx::HDRMetadata>& hdr_metadata) {
if (metadata.protected_video && metadata.needs_detiling &&
format == PIXEL_FORMAT_P010LE) {
// This is a tiled, protected MTK format that is true 10bpp so it will
// not pass the tests in VerifyGpuMemoryBufferHandle for P010. Instead just
// do the basic tests that would be done in that call here. This is safe to
// do because the buffers for this will only go into the secure video
// decoder which will fail on invalid buffer parameters.
if (mojo_frame->gpu_memory_buffer_handle.type != gfx::NATIVE_PIXMAP) {
VLOGF(1) << "Unexpected GpuMemoryBufferType: "
<< mojo_frame->gpu_memory_buffer_handle.type;
return nullptr;
}
if (!media::VideoFrame::IsValidCodedSize(mojo_frame->coded_size)) {
VLOG(1) << "Coded size is beyond allowed dimensions: "
<< mojo_frame->coded_size.ToString();
return nullptr;
}
// We've validated the type of the GpuMemoryBufferHandle before.
CHECK_EQ(gmb_handle.type, gfx::NATIVE_PIXMAP);
// The media::VideoFrame traits guarantee this.
CHECK(VideoFrame::IsValidCodedSize(coded_size));
constexpr size_t kNumP010Planes = 2;
if (kNumP010Planes != mojo_frame->gpu_memory_buffer_handle
.native_pixmap_handle.planes.size()) {
if (kNumP010Planes != gmb_handle.native_pixmap_handle.planes.size()) {
VLOGF(1) << "Invalid number of dmabuf planes passed: "
<< mojo_frame->gpu_memory_buffer_handle.native_pixmap_handle
.planes.size()
<< gmb_handle.native_pixmap_handle.planes.size()
<< ", expected: 2";
return nullptr;
}
} else {
if (!VerifyGpuMemoryBufferHandle(mojo_frame->format, mojo_frame->coded_size,
mojo_frame->gpu_memory_buffer_handle)) {
if (!VerifyGpuMemoryBufferHandle(format, coded_size, gmb_handle)) {
VLOGF(2) << "Received an invalid GpuMemoryBufferHandle";
return nullptr;
}
}
std::optional<gfx::BufferFormat> buffer_format =
VideoPixelFormatToGfxBufferFormat(mojo_frame->format);
VideoPixelFormatToGfxBufferFormat(format);
if (!buffer_format) {
VLOGF(2) << "Could not convert the incoming frame's format to a "
"gfx::BufferFormat";
@ -141,20 +136,19 @@ scoped_refptr<FrameResource> MojoVideoFrameToFrameResource(
scoped_refptr<media::NativePixmapFrameResource> native_pixmap_frame =
NativePixmapFrameResource::Create(
mojo_frame->visible_rect, mojo_frame->natural_size,
mojo_frame->timestamp, gfx::BufferUsage::SCANOUT_VDA_WRITE,
visible_rect, natural_size, timestamp,
gfx::BufferUsage::SCANOUT_VDA_WRITE,
base::MakeRefCounted<gfx::NativePixmapDmaBuf>(
mojo_frame->coded_size, *buffer_format,
std::move(
mojo_frame->gpu_memory_buffer_handle.native_pixmap_handle)));
coded_size, *buffer_format,
std::move(gmb_handle.native_pixmap_handle)));
if (!native_pixmap_frame) {
VLOGF(2) << "Could not create a NativePixmap-backed FrameResource";
return nullptr;
}
native_pixmap_frame->set_metadata(mojo_frame->metadata);
native_pixmap_frame->set_color_space(mojo_frame->color_space);
native_pixmap_frame->set_hdr_metadata(mojo_frame->hdr_metadata);
native_pixmap_frame->set_metadata(metadata);
native_pixmap_frame->set_color_space(color_space);
native_pixmap_frame->set_hdr_metadata(hdr_metadata);
return native_pixmap_frame;
}
@ -188,8 +182,7 @@ class OOPVideoDecoderSupportedConfigsManager {
// OOPVideoDecoder instances. This should be impossible as higher layers
// should guarantee that we know the supported configurations before
// creating OOPVideoDecoder instances. See the logic in
// InterfaceFactoryImpl::CreateVideoDecoder() (for regular OOP-VD) and in
// MojoStableVideoDecoder::Initialize() (for GTFO OOP-VD).
// InterfaceFactoryImpl::CreateVideoDecoder().
//
// b) We did try to get the supported configurations but an error occurred.
// This case reduces to no supported configurations in which case, a
@ -210,9 +203,8 @@ class OOPVideoDecoderSupportedConfigsManager {
}
void NotifySupportKnown(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<
void(mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb) {
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb) {
base::ReleasableAutoLock lock(&lock_);
if ((configs_ && interface_version_) || disconnected_) {
// Both the supported configurations and the interface version are already
@ -255,9 +247,9 @@ class OOPVideoDecoderSupportedConfigsManager {
// because it's been taken over by the |oop_video_decoder_|. For now, we'll
// store a default-constructed PendingRemote. Later, when we have to call
// |cb|, we can pass |oop_video_decoder_|.Unbind().
waiting_callbacks_.emplace(
mojo::PendingRemote<stable::mojom::StableVideoDecoder>(), std::move(cb),
base::SequencedTaskRunner::GetCurrentDefault());
waiting_callbacks_.emplace(mojo::PendingRemote<mojom::VideoDecoder>(),
std::move(cb),
base::SequencedTaskRunner::GetCurrentDefault());
}
void ResetForTesting() {
@ -356,7 +348,7 @@ class OOPVideoDecoderSupportedConfigsManager {
std::move(waiting_callbacks_.front());
waiting_callbacks_.pop();
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder =
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder =
waiting_callback.oop_video_decoder
? std::move(waiting_callback.oop_video_decoder)
: oop_video_decoder_.Unbind();
@ -381,7 +373,7 @@ class OOPVideoDecoderSupportedConfigsManager {
// to |oop_video_decoder_| and we use it to query the supported configurations
// and the interface version of the out-of-process video decoder.
// |oop_video_decoder_| will get unbound once both of those things are known.
mojo::Remote<stable::mojom::StableVideoDecoder> oop_video_decoder_;
mojo::Remote<mojom::VideoDecoder> oop_video_decoder_;
bool disconnected_ GUARDED_BY(lock_) = false;
@ -397,18 +389,14 @@ class OOPVideoDecoderSupportedConfigsManager {
// progress.
struct WaitingCallbackContext {
WaitingCallbackContext(
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
oop_video_decoder,
base::OnceCallback<
void(mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb,
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb,
scoped_refptr<base::SequencedTaskRunner> cb_task_runner)
: oop_video_decoder(std::move(oop_video_decoder)),
cb(std::move(cb)),
cb_task_runner(std::move(cb_task_runner)) {}
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder;
base::OnceCallback<void(
mojo::PendingRemote<stable::mojom::StableVideoDecoder>)>
cb;
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder;
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb;
scoped_refptr<base::SequencedTaskRunner> cb_task_runner;
};
base::queue<WaitingCallbackContext> waiting_callbacks_ GUARDED_BY(lock_);
@ -418,8 +406,7 @@ class OOPVideoDecoderSupportedConfigsManager {
// static
std::unique_ptr<VideoDecoderMixin> OOPVideoDecoder::Create(
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
pending_remote_decoder,
mojo::PendingRemote<mojom::VideoDecoder> pending_remote_decoder,
std::unique_ptr<media::MediaLog> media_log,
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
base::WeakPtr<VideoDecoderMixin::Client> client) {
@ -433,9 +420,8 @@ std::unique_ptr<VideoDecoderMixin> OOPVideoDecoder::Create(
// static
void OOPVideoDecoder::NotifySupportKnown(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<
void(mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb) {
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb) {
OOPVideoDecoderSupportedConfigsManager::Instance().NotifySupportKnown(
std::move(oop_video_decoder), std::move(cb));
}
@ -456,8 +442,7 @@ OOPVideoDecoder::OOPVideoDecoder(
std::unique_ptr<media::MediaLog> media_log,
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
base::WeakPtr<VideoDecoderMixin::Client> client,
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
pending_remote_decoder)
mojo::PendingRemote<mojom::VideoDecoder> pending_remote_decoder)
: VideoDecoderMixin(std::move(media_log),
std::move(decoder_task_runner),
std::move(client)),
@ -490,25 +475,25 @@ OOPVideoDecoder::OOPVideoDecoder(
&remote_consumer_handle);
CHECK(mojo_decoder_buffer_writer_);
DCHECK(!stable_video_frame_handle_releaser_remote_.is_bound());
mojo::PendingReceiver<stable::mojom::VideoFrameHandleReleaser>
stable_video_frame_handle_releaser_receiver =
stable_video_frame_handle_releaser_remote_
.BindNewPipeAndPassReceiver();
DCHECK(!video_frame_handle_releaser_remote_.is_bound());
mojo::PendingReceiver<mojom::VideoFrameHandleReleaser>
video_frame_handle_releaser_receiver =
video_frame_handle_releaser_remote_.BindNewPipeAndPassReceiver();
// base::Unretained() is safe because `this` owns the `mojo::Remote`.
stable_video_frame_handle_releaser_remote_.set_disconnect_handler(
video_frame_handle_releaser_remote_.set_disconnect_handler(
base::BindOnce(&OOPVideoDecoder::Stop, base::Unretained(this)));
DCHECK(!stable_media_log_receiver_.is_bound());
DCHECK(!media_log_receiver_.is_bound());
CHECK(!has_error_);
// TODO(b/171813538): plumb the remaining parameters.
remote_decoder_->Construct(
client_receiver_.BindNewEndpointAndPassRemote(),
stable_media_log_receiver_.BindNewPipeAndPassRemote(),
std::move(stable_video_frame_handle_releaser_receiver),
std::move(remote_consumer_handle), gfx::ColorSpace());
remote_decoder_->Construct(client_receiver_.BindNewEndpointAndPassRemote(),
media_log_receiver_.BindNewPipeAndPassRemote(),
std::move(video_frame_handle_releaser_receiver),
std::move(remote_consumer_handle),
media::mojom::CommandBufferIdPtr(),
gfx::ColorSpace());
}
OOPVideoDecoder::~OOPVideoDecoder() {
@ -549,33 +534,34 @@ void OOPVideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
mojo::PendingRemote<stable::mojom::StableCdmContext>
pending_remote_stable_cdm_context;
mojo::PendingRemote<mojom::CdmContextForOOPVD>
pending_remote_cdm_context_for_oopvd;
if (config.is_encrypted()) {
#if BUILDFLAG(IS_CHROMEOS)
// There's logic in MojoVideoDecoderService::Initialize() to ensure that the
// CDM doesn't change across Initialize() calls. We rely on this assumption
// to ensure that creating a single StableCdmContextImpl that survives
// to ensure that creating a single CdmContextForOOPVDImpl that survives
// re-initializations is correct: the remote decoder requires a bound
// |pending_remote_stable_cdm_context| only for the first Initialize() call
// that sets up encryption.
DCHECK(!stable_cdm_context_ ||
cdm_context == stable_cdm_context_->cdm_context());
if (!stable_cdm_context_) {
DCHECK(!cdm_context_for_oopvd_ ||
cdm_context == cdm_context_for_oopvd_->cdm_context());
if (!cdm_context_for_oopvd_) {
if (!cdm_context || !cdm_context->GetChromeOsCdmContext()) {
std::move(init_cb).Run(
DecoderStatus::Codes::kUnsupportedEncryptionMode);
return;
}
stable_cdm_context_ =
std::make_unique<chromeos::StableCdmContextImpl>(cdm_context);
stable_cdm_context_receiver_ =
std::make_unique<mojo::Receiver<stable::mojom::StableCdmContext>>(
stable_cdm_context_.get(), pending_remote_stable_cdm_context
.InitWithNewPipeAndPassReceiver());
cdm_context_for_oopvd_ =
std::make_unique<chromeos::CdmContextForOOPVDImpl>(cdm_context);
cdm_context_for_oopvd_receiver_ =
std::make_unique<mojo::Receiver<mojom::CdmContextForOOPVD>>(
cdm_context_for_oopvd_.get(),
pending_remote_cdm_context_for_oopvd
.InitWithNewPipeAndPassReceiver());
// base::Unretained() is safe because |this| owns the mojo::Receiver.
stable_cdm_context_receiver_->set_disconnect_handler(
cdm_context_for_oopvd_receiver_->set_disconnect_handler(
base::BindOnce(&OOPVideoDecoder::Stop, base::Unretained(this)));
}
#else
@ -593,10 +579,10 @@ void OOPVideoDecoder::Initialize(const VideoDecoderConfig& config,
output_cb_ = output_cb;
waiting_cb_ = waiting_cb;
remote_decoder_->Initialize(config, low_delay,
std::move(pending_remote_stable_cdm_context),
base::BindOnce(&OOPVideoDecoder::OnInitializeDone,
weak_this_factory_.GetWeakPtr()));
remote_decoder_->InitializeWithCdmContext(
config, low_delay, std::move(pending_remote_cdm_context_for_oopvd),
base::BindOnce(&OOPVideoDecoder::OnInitializeDone,
weak_this_factory_.GetWeakPtr()));
}
void OOPVideoDecoder::OnInitializeDone(const DecoderStatus& status,
@ -627,26 +613,8 @@ void OOPVideoDecoder::OnInitializeDone(const DecoderStatus& status,
max_decode_requests_ = max_decode_requests;
remote_decoder_type_ = decoder_type;
if (OOPVideoDecoderSupportedConfigsManager::Instance()
.GetInterfaceVersion() >= 1u) {
// Starting on version 1, the remote decoder tells us if we need to do
// transcryption before sending the encoded data.
needs_transcryption_ =
initialized_for_protected_content_ && needs_transcryption;
} else {
// Before version 1, the remote decoder does not tell us this information,
// so we need to find it ourselves.
//
// TODO(b/171813538): remove this once the maximum version skew between
// lacros-chrome and ash-chrome makes it impossible for the former to run on
// ash-chrome < M115 (since M115 is when StableVideoDecoder got upgraded to
// version 1).
#if BUILDFLAG(USE_VAAPI)
needs_transcryption_ = initialized_for_protected_content_ &&
(VaapiWrapper::GetImplementationType() ==
VAImplementation::kMesaGallium);
#endif // BUILDFLAG(USE_VAAPI)
}
needs_transcryption_ =
initialized_for_protected_content_ && needs_transcryption;
std::move(init_cb_).Run(status);
}
@ -712,7 +680,7 @@ void OOPVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
is_flushing_ = buffer->end_of_stream();
remote_decoder_->Decode(
std::move(buffer),
std::move(mojo_buffer),
base::BindOnce(&OOPVideoDecoder::OnDecodeDone,
weak_this_factory_.GetWeakPtr(), decode_id, is_flushing_));
}
@ -828,7 +796,7 @@ void OOPVideoDecoder::OnResetDone() {
// After a reset is completed, we shouldn't receive decoded frames
// corresponding to Decode() calls that came in prior to the reset (similar to
// a flush). That's because according to the media::VideoDecoder and
// media::stable::mojom::StableVideoDecoder interfaces, all ongoing Decode()
// media::mojom::VideoDecoder interfaces, all ongoing Decode()
// requests must be completed or aborted prior to executing the reset
// callback. The clearing of the cache together with the validation in
// OnVideoFrameDecoded() should guarantee this.
@ -861,15 +829,15 @@ void OOPVideoDecoder::Stop() {
base::WeakPtr<OOPVideoDecoder> weak_this = weak_this_factory_.GetWeakPtr();
client_receiver_.reset();
stable_media_log_receiver_.reset();
media_log_receiver_.reset();
remote_decoder_.reset();
mojo_decoder_buffer_writer_.reset();
stable_video_frame_handle_releaser_remote_.reset();
video_frame_handle_releaser_remote_.reset();
fake_timestamp_to_real_timestamp_cache_.Clear();
#if BUILDFLAG(IS_CHROMEOS)
stable_cdm_context_receiver_.reset();
stable_cdm_context_.reset();
cdm_context_for_oopvd_receiver_.reset();
cdm_context_for_oopvd_.reset();
#endif // BUILDFLAG(IS_CHROMEOS)
if (init_cb_)
@ -904,9 +872,10 @@ void OOPVideoDecoder::ReleaseVideoFrame(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
CHECK(!has_error_);
CHECK(stable_video_frame_handle_releaser_remote_.is_bound());
CHECK(video_frame_handle_releaser_remote_.is_bound());
stable_video_frame_handle_releaser_remote_->ReleaseVideoFrame(release_token);
video_frame_handle_releaser_remote_->ReleaseVideoFrame(
release_token, /*release_sync_token=*/{});
}
void OOPVideoDecoder::ApplyResolutionChange() {
@ -963,9 +932,9 @@ bool OOPVideoDecoder::NeedsTranscryption() {
}
void OOPVideoDecoder::OnVideoFrameDecoded(
stable::mojom::VideoFramePtr frame,
const scoped_refptr<VideoFrame>& frame,
bool can_read_without_stalling,
const base::UnguessableToken& release_token) {
const std::optional<base::UnguessableToken>& release_token) {
DVLOGF(4);
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
@ -978,17 +947,98 @@ void OOPVideoDecoder::OnVideoFrameDecoded(
}
// According to the media::VideoDecoder API, |output_cb_| should not be
// supplied with EOS frames. The mojo traits guarantee this DCHECK.
DCHECK(!frame->metadata.end_of_stream);
if (!gfx::Rect(frame->coded_size).Contains(frame->visible_rect)) {
VLOGF(2) << "Received a frame with inconsistent coded size and visible "
"rectangle";
// supplied with EOS frames.
if (frame->metadata().end_of_stream) {
VLOGF(2) << "Unexpectedly received an EOS frame";
Stop();
return;
}
if (!frame->metadata().allow_overlay) {
// All decoded frames should be eligible for overlay promotion at this stage
// in the pipeline.
VLOGF(2) << "Unexpectedly received a frame with allow_overlay = false";
Stop();
return;
}
if (!frame->metadata().read_lock_fences_enabled) {
// The remote decoder should expect that frames are returned only when they
// are no longer needed by the client.
VLOGF(2) << "Unexpectedly received a frame with read_lock_fences_enabled ="
" false";
Stop();
return;
}
if (!frame->metadata().power_efficient) {
// All frames coming from a hardware decoder should have been decoded in a
// power-efficient manner.
VLOGF(2) << "Unexpectedly received a frame with power_efficient = false";
Stop();
return;
}
if (frame->metadata().hw_protected && !frame->metadata().protected_video) {
// According to the VideoFrameMetadata documentation, |hw_protected| is only
// valid if |protected_video| is set to true.
VLOGF(2) << "Unexpectedly received a frame with hw_protected = true but "
"protected_video = false";
Stop();
return;
}
const base::TimeDelta fake_timestamp = frame->timestamp;
// VideoFrameMetadata has many fields and we don't validate all of them.
// Fortunately, we also don't need all the fields. |metadata_to_propagate|
// will be explicitly initialized with the fields that:
//
// 1) We need,
//
// AND
//
// 2) We've validated above or know that not validating won't have security
// implications.
//
// The rest of the fields are left as default.
VideoFrameMetadata metadata_to_propagate;
metadata_to_propagate.allow_overlay = true;
metadata_to_propagate.end_of_stream = false;
metadata_to_propagate.read_lock_fences_enabled = true;
metadata_to_propagate.protected_video = frame->metadata().protected_video;
metadata_to_propagate.hw_protected = frame->metadata().hw_protected;
metadata_to_propagate.needs_detiling = frame->metadata().needs_detiling;
metadata_to_propagate.power_efficient = true;
if (!release_token.has_value()) {
VLOGF(2) << "Did not receive a valid release token";
Stop();
return;
}
if (frame->storage_type() != VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
VLOGF(2) << "Received a frame with an unexpected storage type";
Stop();
return;
}
// The mojo traits guarantee this.
CHECK(gfx::Rect(frame->coded_size()).Contains(frame->visible_rect()));
gfx::GpuMemoryBufferHandle gmb_handle = frame->GetGpuMemoryBufferHandle();
if (gmb_handle.type != gfx::NATIVE_PIXMAP ||
gmb_handle.native_pixmap_handle.planes.empty()) {
VLOGF(2) << "Received an invalid GpuMemoryBufferHandle";
Stop();
return;
}
if (!std::all_of(gmb_handle.native_pixmap_handle.planes.cbegin(),
gmb_handle.native_pixmap_handle.planes.cend(),
[](const gfx::NativePixmapPlane& plane) {
return plane.fd.is_valid();
})) {
VLOGF(2) << "Received at least one invalid FD";
Stop();
return;
}
const base::TimeDelta fake_timestamp = frame->timestamp();
auto it = fake_timestamp_to_real_timestamp_cache_.Get(fake_timestamp);
if (it == fake_timestamp_to_real_timestamp_cache_.end()) {
// The remote decoder is misbehaving.
@ -1000,14 +1050,16 @@ void OOPVideoDecoder::OnVideoFrameDecoded(
// Validate protected content metadata.
if (!initialized_for_protected_content_ &&
(frame->metadata.protected_video || frame->metadata.hw_protected)) {
(metadata_to_propagate.protected_video ||
metadata_to_propagate.hw_protected)) {
VLOGF(2) << "Received a frame with unexpected metadata from a decoder that "
"was not configured for protected content";
Stop();
return;
}
if (initialized_for_protected_content_ &&
(!frame->metadata.protected_video || !frame->metadata.hw_protected)) {
(!metadata_to_propagate.protected_video ||
!metadata_to_propagate.hw_protected)) {
VLOGF(2) << "Received a frame with unexpected metadata from a decoder that "
"was configured for protected content";
Stop();
@ -1034,15 +1086,14 @@ void OOPVideoDecoder::OnVideoFrameDecoded(
// forget about all known frames since we shouldn't see those buffers again.
// In order to detect those cases, we replicate the logic from
// PlatformVideoFramePool::IsSameFormat_Locked().
const VideoPixelFormat format = frame->format;
const gfx::Size coded_size = frame->coded_size;
const gfx::Rect visible_rect = frame->visible_rect;
const gfx::Size natural_size = frame->natural_size;
const gfx::ColorSpace color_space = frame->color_space;
const std::optional<gfx::HDRMetadata> hdr_metadata = frame->hdr_metadata;
const VideoFrameMetadata metadata = frame->metadata;
const gfx::GpuMemoryBufferId received_gmb_id =
frame->gpu_memory_buffer_handle.id;
const VideoPixelFormat format = frame->format();
const gfx::Size coded_size = frame->coded_size();
const gfx::Rect visible_rect = frame->visible_rect();
const gfx::Size natural_size = frame->natural_size();
const gfx::ColorSpace color_space = frame->ColorSpace();
const std::optional<gfx::HDRMetadata> hdr_metadata = frame->hdr_metadata();
const VideoFrameMetadata metadata = metadata_to_propagate;
const gfx::GpuMemoryBufferId received_gmb_id = gmb_handle.id;
if (!received_id_to_decoded_frame_map_.empty()) {
// It doesn't matter which frame we pick to calculate the current state. All
// of them should yield the same result.
@ -1079,7 +1130,9 @@ void OOPVideoDecoder::OnVideoFrameDecoded(
CHECK_EQ(frame_to_wrap->metadata().hw_protected, metadata.hw_protected);
} else {
scoped_refptr<FrameResource> native_pixmap_frame =
MojoVideoFrameToFrameResource(std::move(frame));
CreateDecodedFrameResource(std::move(gmb_handle), format, coded_size,
visible_rect, natural_size, fake_timestamp,
metadata, color_space, hdr_metadata);
if (!native_pixmap_frame) {
Stop();
return;
@ -1114,12 +1167,13 @@ void OOPVideoDecoder::OnVideoFrameDecoded(
// the WeakPtr is dereferenced on the correct sequence.
wrapped_frame->AddDestructionObserver(base::BindPostTaskToCurrentDefault(
base::BindOnce(&OOPVideoDecoder::ReleaseVideoFrame,
weak_this_factory_.GetWeakPtr(), release_token)));
weak_this_factory_.GetWeakPtr(), *release_token)));
can_read_without_stalling_ = can_read_without_stalling;
if (output_cb_)
if (output_cb_) {
output_cb_.Run(std::move(wrapped_frame));
}
}
void OOPVideoDecoder::OnWaiting(WaitingReason reason) {
@ -1128,12 +1182,9 @@ void OOPVideoDecoder::OnWaiting(WaitingReason reason) {
CHECK(!has_error_);
// Note: the remote video decoder may be of a newer version than us (see e.g.,
// go/lacros-version-skew-guide). Therefore, we may get the default
// WaitingReason::kNoCdm if the value received over mojo is unrecognized. It's
// not expected that we'll ever use WaitingReason::kNoCdm for anything
// It's not expected that we'll ever use WaitingReason::kNoCdm for anything
// legitimate in ChromeOS, so if we receive that for any reason, the remote
// decoder is either misbehaving or too new.
// decoder is misbehaving.
if (reason == WaitingReason::kNoCdm) {
VLOGF(2) << "Received an unexpected WaitingReason";
Stop();
@ -1144,6 +1195,11 @@ void OOPVideoDecoder::OnWaiting(WaitingReason reason) {
waiting_cb_.Run(reason);
}
void OOPVideoDecoder::RequestOverlayInfo(bool restart_for_transitions) {
DVLOGF(4);
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
void OOPVideoDecoder::AddLogRecord(const MediaLogRecord& event) {
VLOGF(2);
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);

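The fake-timestamp cache consulted above (fake_timestamp_to_real_timestamp_cache_) is what lets OOPVideoDecoder reject frames that were never requested. Below is a minimal, self-contained sketch of that bookkeeping idea in standard C++; the class name and capacity handling are illustrative only and are not part of the CL.

#include <cstddef>
#include <cstdint>
#include <list>
#include <optional>
#include <unordered_map>

// Sketch of an LRU timestamp map: written when a Decode() request is sent,
// consulted when a decoded frame comes back. A miss means the remote decoder
// produced a frame that was never asked for (i.e., it is misbehaving).
class TimestampLRU {
 public:
  explicit TimestampLRU(size_t capacity) : capacity_(capacity) {}

  void Put(int64_t fake_us, int64_t real_us) {
    const bool inserted = map_.insert_or_assign(fake_us, real_us).second;
    if (!inserted) {
      return;  // Already tracked; the mapped value was refreshed above.
    }
    order_.push_front(fake_us);
    if (order_.size() > capacity_) {
      map_.erase(order_.back());  // Evict the oldest mapping.
      order_.pop_back();
    }
  }

  std::optional<int64_t> Get(int64_t fake_us) const {
    const auto it = map_.find(fake_us);
    if (it == map_.end()) {
      return std::nullopt;
    }
    return it->second;
  }

 private:
  const size_t capacity_;
  std::list<int64_t> order_;                  // Most recently used first.
  std::unordered_map<int64_t, int64_t> map_;  // Fake timestamp -> real one.
};

In the actual decoder a cache miss of this kind triggers Stop(), tearing down the connection to the remote video decoder process.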
@ -14,42 +14,39 @@
#include "media/base/media_log.h"
#include "media/gpu/chromeos/video_decoder_pipeline.h"
#include "media/gpu/media_gpu_export.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/media_log.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "mojo/public/cpp/bindings/associated_receiver.h"
#include "mojo/public/cpp/bindings/remote.h"
#if BUILDFLAG(IS_CHROMEOS)
namespace chromeos {
class StableCdmContextImpl;
class CdmContextForOOPVDImpl;
} // namespace chromeos
#endif // BUILDFLAG(IS_CHROMEOS)
namespace media {
class MediaLog;
class MojoDecoderBufferWriter;
// Proxy video decoder that connects with an out-of-process
// video decoder via Mojo. This class should be operated and
// destroyed on |decoder_task_runner_|.
//
// Note: MEDIA_GPU_EXPORT is necessary to expose the OOPVideoDecoder to the
// MojoStableVideoDecoder.
//
// TODO(b/195769334): this class (or most of it) would be unnecessary if the
// MailboxVideoFrameConverter lived together with the remote decoder in the same
// process. Then, clients can communicate with that process without the GPU
// process acting as a proxy.
class MEDIA_GPU_EXPORT OOPVideoDecoder
: public VideoDecoderMixin,
public stable::mojom::VideoDecoderClient,
public stable::mojom::MediaLog {
class OOPVideoDecoder : public VideoDecoderMixin,
public mojom::VideoDecoderClient,
public mojom::MediaLog {
public:
OOPVideoDecoder(const OOPVideoDecoder&) = delete;
OOPVideoDecoder& operator=(const OOPVideoDecoder&) = delete;
static std::unique_ptr<VideoDecoderMixin> Create(
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
pending_remote_decoder,
mojo::PendingRemote<mojom::VideoDecoder> pending_remote_decoder,
std::unique_ptr<media::MediaLog> media_log,
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
base::WeakPtr<VideoDecoderMixin::Client> client);
@ -69,9 +66,8 @@ class MEDIA_GPU_EXPORT OOPVideoDecoder
// will be called on the same sequence as the one NotifySupportKnown() is
// called on.
static void NotifySupportKnown(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<
void(mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb);
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb);
// Returns the cached supported configurations of the out-of-process video
// decoder if known (std::nullopt otherwise). This method is thread- and
@ -100,23 +96,25 @@ class MEDIA_GPU_EXPORT OOPVideoDecoder
void ApplyResolutionChange() override;
bool NeedsTranscryption() override;
// stable::mojom::VideoDecoderClient implementation.
void OnVideoFrameDecoded(stable::mojom::VideoFramePtr frame,
bool can_read_without_stalling,
const base::UnguessableToken& release_token) final;
// mojom::VideoDecoderClient implementation.
void OnVideoFrameDecoded(
const scoped_refptr<VideoFrame>& frame,
bool can_read_without_stalling,
const std::optional<base::UnguessableToken>& release_token) final;
void OnWaiting(WaitingReason reason) final;
void RequestOverlayInfo(bool restart_for_transitions) final;
// stable::mojom::MediaLog implementation.
// mojom::MediaLog implementation.
void AddLogRecord(const MediaLogRecord& event) final;
FrameResource* GetOriginalFrame(const base::UnguessableToken& tracking_token);
private:
OOPVideoDecoder(std::unique_ptr<media::MediaLog> media_log,
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
base::WeakPtr<VideoDecoderMixin::Client> client,
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
pending_remote_decoder);
OOPVideoDecoder(
std::unique_ptr<media::MediaLog> media_log,
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
base::WeakPtr<VideoDecoderMixin::Client> client,
mojo::PendingRemote<mojom::VideoDecoder> pending_remote_decoder);
~OOPVideoDecoder() override;
void OnInitializeDone(const DecoderStatus& status,
@ -186,17 +184,17 @@ class MEDIA_GPU_EXPORT OOPVideoDecoder
base::OnceClosure reset_cb_ GUARDED_BY_CONTEXT(sequence_checker_);
mojo::AssociatedReceiver<stable::mojom::VideoDecoderClient> client_receiver_
mojo::AssociatedReceiver<mojom::VideoDecoderClient> client_receiver_
GUARDED_BY_CONTEXT(sequence_checker_){this};
mojo::Receiver<stable::mojom::MediaLog> stable_media_log_receiver_
mojo::Receiver<mojom::MediaLog> media_log_receiver_
GUARDED_BY_CONTEXT(sequence_checker_){this};
#if BUILDFLAG(IS_CHROMEOS)
std::unique_ptr<chromeos::StableCdmContextImpl> stable_cdm_context_
std::unique_ptr<chromeos::CdmContextForOOPVDImpl> cdm_context_for_oopvd_
GUARDED_BY_CONTEXT(sequence_checker_);
std::unique_ptr<mojo::Receiver<stable::mojom::StableCdmContext>>
stable_cdm_context_receiver_ GUARDED_BY_CONTEXT(sequence_checker_);
std::unique_ptr<mojo::Receiver<mojom::CdmContextForOOPVD>>
cdm_context_for_oopvd_receiver_ GUARDED_BY_CONTEXT(sequence_checker_);
#endif // BUILDFLAG(IS_CHROMEOS)
bool initialized_for_protected_content_
GUARDED_BY_CONTEXT(sequence_checker_) = false;
@ -209,13 +207,12 @@ class MEDIA_GPU_EXPORT OOPVideoDecoder
VideoDecoderType remote_decoder_type_ GUARDED_BY_CONTEXT(sequence_checker_) =
VideoDecoderType::kUnknown;
mojo::Remote<stable::mojom::StableVideoDecoder> remote_decoder_
mojo::Remote<mojom::VideoDecoder> remote_decoder_
GUARDED_BY_CONTEXT(sequence_checker_);
bool has_error_ GUARDED_BY_CONTEXT(sequence_checker_) = false;
mojo::Remote<stable::mojom::VideoFrameHandleReleaser>
stable_video_frame_handle_releaser_remote_
GUARDED_BY_CONTEXT(sequence_checker_);
mojo::Remote<mojom::VideoFrameHandleReleaser>
video_frame_handle_releaser_remote_ GUARDED_BY_CONTEXT(sequence_checker_);
std::unique_ptr<MojoDecoderBufferWriter> mojo_decoder_buffer_writer_
GUARDED_BY_CONTEXT(sequence_checker_);

@ -25,7 +25,6 @@
#include "media/gpu/chromeos/vda_video_frame_pool.h"
#include "media/gpu/chromeos/video_decoder_pipeline.h"
#include "media/gpu/media_gpu_export.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/video/video_decode_accelerator.h"
#include "ui/gfx/gpu_memory_buffer.h"

@ -217,7 +217,7 @@ std::unique_ptr<VideoDecoder> VideoDecoderPipeline::Create(
std::unique_ptr<FrameResourceConverter> frame_converter,
std::vector<Fourcc> renderable_fourccs,
std::unique_ptr<MediaLog> media_log,
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
bool in_video_decoder_process) {
DCHECK(client_task_runner);
DCHECK(frame_pool);
@ -351,9 +351,8 @@ std::vector<Fourcc> VideoDecoderPipeline::DefaultPreferredRenderableFourccs() {
// static
void VideoDecoderPipeline::NotifySupportKnown(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<
void(mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb) {
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb) {
if (oop_video_decoder) {
OOPVideoDecoder::NotifySupportKnown(std::move(oop_video_decoder),
std::move(cb));
@ -944,6 +943,8 @@ void VideoDecoderPipeline::OnFrameConverted(
// Flag that the video frame was decoded in a power efficient way.
video_frame->metadata().power_efficient = true;
video_frame->metadata().read_lock_fences_enabled = true;
// MojoVideoDecoderService expects the |output_cb_| to be called on the client
// task runner, even though media::VideoDecoder states frames should be output
// without any thread jumping.

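The comment above notes that MojoVideoDecoderService expects |output_cb_| to run on the client task runner. A hedged sketch of the usual Chromium pattern for that hop follows; the free function and parameter names are hypothetical and not taken from the CL.

#include "base/functional/callback.h"
#include "base/memory/scoped_refptr.h"
#include "base/task/bind_post_task.h"
#include "base/task/sequenced_task_runner.h"
#include "media/base/video_decoder.h"

// Wraps a client-provided OutputCB so that, no matter which sequence the
// decoder invokes it on, the decoded frame is posted back to
// |client_task_runner| before the client's callback runs.
media::VideoDecoder::OutputCB MakeClientBoundOutputCB(
    scoped_refptr<base::SequencedTaskRunner> client_task_runner,
    media::VideoDecoder::OutputCB client_output_cb) {
  return base::BindPostTask(std::move(client_task_runner),
                            std::move(client_output_cb));
}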
@ -26,7 +26,7 @@
#include "media/gpu/chromeos/frame_resource_converter.h"
#include "media/gpu/chromeos/image_processor_with_pool.h"
#include "media/gpu/media_gpu_export.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gfx/native_pixmap.h"
#include "ui/gfx/native_pixmap_handle.h"
@ -196,7 +196,7 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
std::unique_ptr<FrameResourceConverter> frame_converter,
std::vector<Fourcc> renderable_fourccs,
std::unique_ptr<MediaLog> media_log,
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
bool in_video_decoder_process);
// Same idea but creates a VideoDecoderPipeline instance intended to be
// adapted or bridged to a VideoDecodeAccelerator interface, for ARC clients.
@ -229,9 +229,8 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
// This method is thread- and sequence-safe. |cb| is always called on the same
// sequence as NotifySupportKnown().
static void NotifySupportKnown(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<
void(mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb);
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb);
static std::optional<SupportedVideoDecoderConfigs> GetSupportedConfigs(
VideoDecoderType decoder_type,

@ -77,10 +77,13 @@ source_set("unit_tests") {
"//media/mojo/clients:unit_tests",
"//media/mojo/common:unit_tests",
"//media/mojo/mojom:unit_tests",
"//media/mojo/mojom/stable:unit_tests",
"//media/mojo/services:unit_tests",
"//media/mojo/test:unit_tests",
]
if (is_linux || is_chromeos) {
deps += [ "//media/mojo/mojom/stable:unit_tests" ]
}
}
test("media_mojo_unittests") {

@ -68,13 +68,6 @@ source_set("clients") {
"mojo_video_encoder_metrics_provider.h",
]
if (allow_oop_video_decoder) {
sources += [
"mojo_stable_video_decoder.cc",
"mojo_stable_video_decoder.h",
]
}
if (is_android) {
sources += [
"mojo_android_overlay.cc",
@ -157,10 +150,6 @@ source_set("unit_tests") {
"mojo_video_encoder_metrics_provider_unittest.cc",
]
if (allow_oop_video_decoder) {
sources += [ "mojo_stable_video_decoder_unittest.cc" ]
}
deps = [
"//base",
"//base/test:test_support",

@ -1,2 +0,0 @@
per-file mojo_stable_video_decoder*=andrescj@chromium.org
per-file mojo_stable_video_decoder*=pmolinalopez@chromium.org

@ -15,11 +15,6 @@
#include "media/video/gpu_video_accelerator_factories.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#include "media/base/media_switches.h"
#include "media/mojo/clients/mojo_stable_video_decoder.h"
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
namespace media {
MojoCodecFactoryMojoDecoder::MojoCodecFactoryMojoDecoder(
@ -52,23 +47,6 @@ MojoCodecFactoryMojoDecoder::CreateVideoDecoder(
DCHECK(video_decode_accelerator_enabled_);
DCHECK(interface_factory_.is_bound());
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
switch (media::GetOutOfProcessVideoDecodingMode()) {
case media::OOPVDMode::kEnabledWithoutGpuProcessAsProxy: {
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
stable_video_decoder_remote;
interface_factory_->CreateStableVideoDecoder(
stable_video_decoder_remote.InitWithNewPipeAndPassReceiver());
return std::make_unique<media::MojoStableVideoDecoder>(
media_task_runner_, gpu_factories, media_log,
std::move(stable_video_decoder_remote));
}
case media::OOPVDMode::kEnabledWithGpuProcessAsProxy:
case media::OOPVDMode::kDisabled:
break;
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
mojo::PendingRemote<media::mojom::VideoDecoder> video_decoder;
interface_factory_->CreateVideoDecoder(
video_decoder.InitWithNewPipeAndPassReceiver(), /*dst_video_decoder=*/{});
@ -101,30 +79,6 @@ void MojoCodecFactoryMojoDecoder::BindOnTaskRunner(
// Unretained is safe since MojoCodecFactory is never destroyed.
// It lives until the process shuts down.
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
switch (media::GetOutOfProcessVideoDecodingMode()) {
case media::OOPVDMode::kEnabledWithoutGpuProcessAsProxy: {
mojo::Remote<media::stable::mojom::StableVideoDecoder>
stable_video_decoder;
interface_factory_->CreateStableVideoDecoder(
stable_video_decoder.BindNewPipeAndPassReceiver());
stable_video_decoder.set_disconnect_handler(
base::BindOnce(&MojoCodecFactoryMojoDecoder::OnDecoderSupportFailed,
base::Unretained(this)));
stable_video_decoder->GetSupportedConfigs(base::BindOnce(
&MojoCodecFactoryMojoDecoder::OnGetSupportedDecoderConfigs,
base::Unretained(this)));
video_decoder_
.emplace<mojo::Remote<media::stable::mojom::StableVideoDecoder>>(
std::move(stable_video_decoder));
return;
}
case media::OOPVDMode::kEnabledWithGpuProcessAsProxy:
case media::OOPVDMode::kDisabled:
break;
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
mojo::Remote<media::mojom::VideoDecoder> video_decoder;
interface_factory_->CreateVideoDecoder(
video_decoder.BindNewPipeAndPassReceiver(), /*dst_video_decoder=*/{});
@ -134,8 +88,7 @@ void MojoCodecFactoryMojoDecoder::BindOnTaskRunner(
video_decoder->GetSupportedConfigs(
base::BindOnce(&MojoCodecFactoryMojoDecoder::OnGetSupportedDecoderConfigs,
base::Unretained(this)));
video_decoder_.emplace<mojo::Remote<media::mojom::VideoDecoder>>(
std::move(video_decoder));
video_decoder_ = std::move(video_decoder);
}
void MojoCodecFactoryMojoDecoder::OnGetSupportedDecoderConfigs(
@ -143,7 +96,7 @@ void MojoCodecFactoryMojoDecoder::OnGetSupportedDecoderConfigs(
media::VideoDecoderType decoder_type) {
{
base::AutoLock lock(supported_profiles_lock_);
video_decoder_.emplace<mojo::Remote<media::mojom::VideoDecoder>>();
video_decoder_.reset();
supported_decoder_configs_.emplace(supported_configs);
video_decoder_type_ = decoder_type;
}

@ -13,7 +13,6 @@
#include "media/base/video_decoder.h"
#include "media/mojo/clients/mojo_codec_factory.h"
#include "media/mojo/mojom/interface_factory.mojom.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/video/gpu_video_accelerator_factories.h"
#include "mojo/public/cpp/bindings/remote.h"
@ -51,9 +50,7 @@ class MojoCodecFactoryMojoDecoder final : public media::MojoCodecFactory {
mojo::Remote<media::mojom::InterfaceFactory> interface_factory_;
absl::variant<mojo::Remote<media::mojom::VideoDecoder>,
mojo::Remote<media::stable::mojom::StableVideoDecoder>>
video_decoder_;
mojo::Remote<media::mojom::VideoDecoder> video_decoder_;
};
} // namespace media

@ -17,10 +17,6 @@
#include "media/mojo/mojom/interface_factory.mojom.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#include "media/mojo/clients/mojo_stable_video_decoder.h"
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
namespace media {
namespace {
@ -79,34 +75,9 @@ void MojoDecoderFactory::CreateVideoDecoders(
const gfx::ColorSpace& target_color_space,
std::vector<std::unique_ptr<VideoDecoder>>* video_decoders) {
#if BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
switch (GetOutOfProcessVideoDecodingMode()) {
case OOPVDMode::kEnabledWithoutGpuProcessAsProxy: {
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
stable_video_decoder_remote;
interface_factory_->CreateStableVideoDecoder(
stable_video_decoder_remote.InitWithNewPipeAndPassReceiver());
video_decoders->push_back(std::make_unique<MojoStableVideoDecoder>(
task_runner, gpu_factories, media_log,
std::move(stable_video_decoder_remote)));
break;
}
case OOPVDMode::kEnabledWithGpuProcessAsProxy:
case OOPVDMode::kDisabled:
CreateMojoVideoDecoder(interface_factory_, std::move(task_runner),
gpu_factories, media_log,
std::move(request_overlay_info_cb),
target_color_space, video_decoders);
break;
}
#else
CreateMojoVideoDecoder(
interface_factory_, std::move(task_runner), gpu_factories, media_log,
std::move(request_overlay_info_cb), target_color_space, video_decoders);
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#endif // BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
}

@ -246,8 +246,8 @@ class FakeMojoMediaClient : public media::MojoMediaClient {
media::mojom::CommandBufferIdPtr command_buffer_id,
media::RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
oop_video_decoder) override {
mojo::PendingRemote<media::mojom::VideoDecoder> oop_video_decoder)
override {
return std::make_unique<media::FakeVideoDecoder>(
0 /* decoder_id */, 0 /* decoding_delay */,
13 /* max_parallel_decoding_requests */, media::BytesDecodedCB());
@ -287,20 +287,19 @@ class FakeInterfaceFactory : public media::mojom::InterfaceFactory {
// FakeMojoMediaClient will create a FakeGpuVideoDecoder.
void CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) override {
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder)
override {
video_decoder_receivers_.Add(
std::make_unique<media::MojoVideoDecoderService>(
&mojo_media_client_, &cdm_service_context_,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>()),
mojo::PendingRemote<media::mojom::VideoDecoder>()),
std::move(receiver));
}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) override {
// TODO(b/327268445): we'll need to complete this for GTFO OOP-VD testing.
void CreateVideoDecoderWithTracker(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker) override {
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)

@ -1,479 +0,0 @@
// Copyright 2024 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/mojo/clients/mojo_stable_video_decoder.h"
#include <optional>
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_refptr.h"
#include "base/metrics/histogram_macros.h"
#include "base/types/pass_key.h"
#include "components/viz/common/resources/shared_image_format.h"
#include "gpu/command_buffer/client/client_shared_image.h"
#include "gpu/command_buffer/client/shared_image_interface.h"
#include "gpu/command_buffer/common/mailbox.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "gpu/command_buffer/common/shared_image_capabilities.h"
#include "gpu/command_buffer/common/shared_image_usage.h"
#include "gpu/command_buffer/common/sync_token.h"
#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_frame.h"
#include "media/base/video_types.h"
#include "media/base/video_util.h"
#include "media/gpu/chromeos/frame_resource.h"
#include "media/gpu/chromeos/oop_video_decoder.h"
#include "media/gpu/macros.h"
#include "media/video/gpu_video_accelerator_factories.h"
#include "third_party/skia/include/core/SkAlphaType.h"
#include "third_party/skia/include/gpu/ganesh/GrTypes.h"
#include "ui/gfx/color_space.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gfx/gpu_memory_buffer.h"
namespace media {
const char kMojoStableVideoDecoderDecodeLatencyHistogram[] =
"Media.MojoStableVideoDecoder.Decode";
namespace {
std::optional<viz::SharedImageFormat> GetSharedImageFormat(
VideoPixelFormat format) {
viz::SharedImageFormat si_format;
switch (format) {
case PIXEL_FORMAT_ARGB:
si_format = viz::SinglePlaneFormat::kBGRA_8888;
break;
case PIXEL_FORMAT_NV12:
si_format = viz::MultiPlaneFormat::kNV12;
break;
case PIXEL_FORMAT_P010LE:
si_format = viz::MultiPlaneFormat::kP010;
break;
case PIXEL_FORMAT_YV12:
si_format = viz::MultiPlaneFormat::kYV12;
break;
default:
return std::nullopt;
}
if (si_format.is_multi_plane()) {
si_format.SetPrefersExternalSampler();
}
return si_format;
}
} // namespace
// A SharedImageHolder allows us to manage the lifetime of a SharedImage with
// reference counting.
//
// The reason we don't use the gpu::ClientSharedImage directly is that we want
// to make sure the gpu::SharedImageInterface that was used to create the
// SharedImage outlives the SharedImage and can be used to destroy it.
//
// Thread safety: the underlying gpu::ClientSharedImage is not documented to be
// thread-safe. Therefore, concurrent access to SharedImageHolder instances must
// be synchronized externally if needed.
class MojoStableVideoDecoder::SharedImageHolder
: public base::RefCountedThreadSafe<SharedImageHolder> {
public:
REQUIRE_ADOPTION_FOR_REFCOUNTED_TYPE();
static scoped_refptr<SharedImageHolder> CreateFromFrameResource(
const scoped_refptr<FrameResource>& frame_resource,
scoped_refptr<gpu::SharedImageInterface> sii) {
if (!sii) {
DVLOGF(1) << "No gpu::SharedImageInterface available";
return nullptr;
}
gpu::SharedImageUsageSet shared_image_usage =
gpu::SHARED_IMAGE_USAGE_DISPLAY_READ | gpu::SHARED_IMAGE_USAGE_SCANOUT;
if (frame_resource->metadata().is_webgpu_compatible &&
!sii->GetCapabilities().disable_webgpu_shared_images) {
shared_image_usage |= gpu::SHARED_IMAGE_USAGE_WEBGPU_READ;
}
gfx::GpuMemoryBufferHandle gmb_handle =
frame_resource->CreateGpuMemoryBufferHandle();
if (gmb_handle.type != gfx::NATIVE_PIXMAP ||
gmb_handle.native_pixmap_handle.planes.empty()) {
DVLOGF(1)
<< "Could not obtain a GpuMemoryBufferHandle for the FrameResource";
return nullptr;
}
std::optional<viz::SharedImageFormat> shared_image_format =
GetSharedImageFormat(frame_resource->format());
if (!shared_image_format.has_value()) {
DVLOGF(1) << "Unsupported VideoPixelFormat " << frame_resource->format();
return nullptr;
}
// The SharedImage size ultimately must correspond to the size used to
// import the decoded frame into a graphics API (e.g., the EGL image size
// when using OpenGL). For most videos, this is simply
// |frame_resource|->visible_rect().size(). However, some H.264 videos
// specify a visible rectangle that doesn't start at (0, 0). Since users of
// the decoded frames are expected to calculate UV coordinates to handle
// these exotic visible rectangles, we must include the area on the left and
// on the top of the frames when computing the SharedImage size. Hence the
// use of GetRectSizeFromOrigin().
const gpu::SharedImageInfo shared_image_info(
*shared_image_format,
GetRectSizeFromOrigin(frame_resource->visible_rect()),
frame_resource->ColorSpace(), shared_image_usage,
/*debug_label=*/"MojoStableVideoDecoder");
scoped_refptr<gpu::ClientSharedImage> new_client_shared_image =
sii->CreateSharedImage(shared_image_info, std::move(gmb_handle));
if (!new_client_shared_image) {
DVLOGF(1) << "Could not create a SharedImage for the FrameResource";
return nullptr;
}
return base::MakeRefCounted<SharedImageHolder>(
base::PassKey<SharedImageHolder>(), std::move(new_client_shared_image),
frame_resource->ColorSpace(), std::move(sii));
}
SharedImageHolder(base::PassKey<SharedImageHolder>,
scoped_refptr<gpu::ClientSharedImage> client_shared_image,
const gfx::ColorSpace& color_space,
scoped_refptr<gpu::SharedImageInterface> sii)
: color_space_(color_space),
sii_(std::move(sii)),
client_shared_image_(std::move(client_shared_image)) {}
SharedImageHolder(const SharedImageHolder&) = delete;
SharedImageHolder& operator=(const SharedImageHolder&) = delete;
const scoped_refptr<gpu::ClientSharedImage> client_shared_image() const {
return client_shared_image_;
}
uint32_t texture_target() const {
return client_shared_image_->GetTextureTarget();
}
bool IsCompatibleWith(
const scoped_refptr<FrameResource>& frame_resource) const {
return client_shared_image_->size() ==
GetRectSizeFromOrigin(frame_resource->visible_rect()) &&
color_space_ == frame_resource->ColorSpace();
}
gpu::SyncToken GenUnverifiedSyncToken() {
return sii_->GenUnverifiedSyncToken();
}
void Update() {
sii_->UpdateSharedImage(gpu::SyncToken(), client_shared_image()->mailbox());
}
private:
friend class base::RefCountedThreadSafe<SharedImageHolder>;
~SharedImageHolder() { CHECK(client_shared_image_->HasOneRef()); }
const gfx::ColorSpace color_space_;
const scoped_refptr<gpu::SharedImageInterface> sii_;
// |client_shared_image_| is declared after |sii_| to ensure the
// gpu::ClientSharedImage can use the gpu::SharedImageInterface for the
// destruction of the SharedImage.
//
// TODO(b/327268445): make gpu::ClientSharedImage::mailbox() and
// GetTextureTarget() const so that we can make |client_shared_image_| a
// const scoped_refptr<const gpu::ClientSharedImage>.
const scoped_refptr<gpu::ClientSharedImage> client_shared_image_;
};
MojoStableVideoDecoder::MojoStableVideoDecoder(
scoped_refptr<base::SequencedTaskRunner> media_task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
pending_remote_decoder)
: timestamps_(128),
media_task_runner_(std::move(media_task_runner)),
gpu_factories_(gpu_factories),
media_log_(media_log),
pending_remote_decoder_(std::move(pending_remote_decoder)),
weak_this_factory_(this) {
DETACH_FROM_SEQUENCE(sequence_checker_);
}
MojoStableVideoDecoder::~MojoStableVideoDecoder() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
bool MojoStableVideoDecoder::IsPlatformDecoder() const {
return true;
}
bool MojoStableVideoDecoder::SupportsDecryption() const {
// TODO(b/327268445): implement decoding of protected content for GTFO OOP-VD.
return false;
}
void MojoStableVideoDecoder::Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb,
const WaitingCB& waiting_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// TODO(b/327268445): consider not constructing a MojoStableVideoDecoder to
// begin with if there isn't a non-null GpuVideoAcceleratorFactories*
// available (and then this if can be turned into a CHECK()).
if (!gpu_factories_) {
std::move(init_cb).Run(DecoderStatus::Codes::kInvalidArgument);
return;
}
OOPVideoDecoder::NotifySupportKnown(
std::move(pending_remote_decoder_),
base::BindOnce(
&MojoStableVideoDecoder::InitializeOnceSupportedConfigsAreKnown,
weak_this_factory_.GetWeakPtr(), config, low_delay, cdm_context,
std::move(init_cb), output_cb, waiting_cb));
}
void MojoStableVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
CHECK(!!oop_video_decoder_);
if (!buffer->end_of_stream()) {
timestamps_.Put(buffer->timestamp().InMilliseconds(),
base::TimeTicks::Now());
}
oop_video_decoder_->Decode(std::move(buffer), std::move(decode_cb));
}
void MojoStableVideoDecoder::Reset(base::OnceClosure closure) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
CHECK(!!oop_video_decoder_);
oop_video_decoder_->Reset(std::move(closure));
}
bool MojoStableVideoDecoder::NeedsBitstreamConversion() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
CHECK(!!oop_video_decoder_);
return oop_video_decoder_->NeedsBitstreamConversion();
}
bool MojoStableVideoDecoder::CanReadWithoutStalling() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
CHECK(!!oop_video_decoder_);
return oop_video_decoder_->CanReadWithoutStalling();
}
int MojoStableVideoDecoder::GetMaxDecodeRequests() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
CHECK(!!oop_video_decoder_);
return oop_video_decoder_->GetMaxDecodeRequests();
}
VideoDecoderType MojoStableVideoDecoder::GetDecoderType() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// TODO(b/327268445): finish implementing GetDecoderType().
NOTIMPLEMENTED();
return VideoDecoderType::kOutOfProcess;
}
void MojoStableVideoDecoder::InitializeOnceSupportedConfigsAreKnown(
const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb,
const WaitingCB& waiting_cb,
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
pending_remote_decoder) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// The OOPVideoDecoder initialization path assumes that a higher layer checks
// that the VideoDecoderConfig is supported. That higher layer is the
// MojoStableVideoDecoder in this case.
std::optional<SupportedVideoDecoderConfigs> supported_configs =
OOPVideoDecoder::GetSupportedConfigs();
// InitializeOnceSupportedConfigsAreKnown() gets called only once the
// supported configurations are known.
CHECK(supported_configs.has_value());
if (!IsVideoDecoderConfigSupported(supported_configs.value(), config)) {
std::move(init_cb).Run(DecoderStatus::Codes::kUnsupportedConfig);
return;
}
if (!oop_video_decoder_) {
// This should correspond to the first MojoStableVideoDecoder::Initialize()
// call with a supported configuration, so |pending_remote_decoder| and
// |media_log_| must be valid.
CHECK(pending_remote_decoder);
CHECK(media_log_);
// |media_task_runner_| is expected to correspond to |sequence_checker_| and
// is the sequence on which |oop_video_decoder_| will be used.
CHECK(media_task_runner_->RunsTasksInCurrentSequence());
oop_video_decoder_ = OOPVideoDecoder::Create(
std::move(pending_remote_decoder), media_log_->Clone(),
/*decoder_task_runner=*/media_task_runner_,
/*client=*/nullptr);
CHECK(oop_video_decoder_);
media_log_ = nullptr;
}
CHECK(output_cb);
output_cb_ = output_cb;
oop_video_decoder()->Initialize(
config, low_delay, cdm_context, std::move(init_cb),
base::BindRepeating(&MojoStableVideoDecoder::OnFrameResourceDecoded,
weak_this_factory_.GetWeakPtr()),
waiting_cb);
}
scoped_refptr<MojoStableVideoDecoder::SharedImageHolder>
MojoStableVideoDecoder::CreateOrUpdateSharedImageForFrame(
const scoped_refptr<FrameResource>& frame_resource) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
const base::UnguessableToken frame_token = frame_resource->tracking_token();
CHECK(!frame_token.is_empty());
// First, let's see if the buffer for this frame already has a SharedImage
// that can be re-used.
const auto iter = shared_images_.find(frame_token);
SharedImageHolder* shared_image_holder =
iter != shared_images_.end() ? iter->second.get() : nullptr;
if (shared_image_holder &&
shared_image_holder->IsCompatibleWith(frame_resource)) {
shared_image_holder->Update();
return base::WrapRefCounted(shared_image_holder);
}
// Either we don't have an existing SharedImage or we can't re-use the
// existing one. Let's create a new one.
auto new_shared_image = SharedImageHolder::CreateFromFrameResource(
frame_resource,
base::WrapRefCounted(gpu_factories_->SharedImageInterface()));
if (!new_shared_image) {
return nullptr;
}
if (shared_image_holder) {
// In this case, the buffer already has a SharedImage associated with it,
// but it couldn't be re-used. We replace that SharedImage with
// |new_shared_image|. Note that there may still be references to the older
// SharedImage if the user of the decoded frames still hasn't released all
// frames that use that SharedImage.
shared_image_holder = nullptr;
shared_images_.insert_or_assign(frame_token, new_shared_image);
} else {
// In this case, the buffer does not have a SharedImage associated with it.
// Therefore, we need to ask the containing FrameResource to notify us when
// it's about to be destroyed so that we can release the reference to
// whatever SharedImage is associated with it.
FrameResource* original_frame_resource =
oop_video_decoder()->GetOriginalFrame(frame_resource->tracking_token());
CHECK(original_frame_resource);
shared_images_.insert_or_assign(frame_token, new_shared_image);
original_frame_resource->AddDestructionObserver(
base::BindPostTaskToCurrentDefault(
base::BindOnce(&MojoStableVideoDecoder::UnregisterSharedImage,
weak_this_factory_.GetWeakPtr(), frame_token)));
}
return new_shared_image;
}
void MojoStableVideoDecoder::UnregisterSharedImage(
base::UnguessableToken frame_token) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
CHECK(!frame_token.is_empty());
CHECK_EQ(1u, shared_images_.erase(frame_token));
}
void MojoStableVideoDecoder::OnFrameResourceDecoded(
scoped_refptr<FrameResource> frame_resource) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// The incoming |frame_resource| is backed by dma-buf FDs, but the
// MojoStableVideoDecoder needs to output SharedImage-backed VideoFrames. We
// do that here, but we have to be careful about the lifetime of the resulting
// SharedImage. In particular, a SharedImage should live for at least as long
// as the later of the following events:
//
// a) Once the OOPVideoDecoder forgets about the corresponding buffer (e.g. on
// detection of a typical resolution change event).
//
// b) Once the SharedImage can no longer be associated with a particular
// buffer (e.g., when only the color space has changed).
//
// c) Once all VideoFrames output by the MojoStableVideoDecoder that share the
// same SharedImage have been released.
//
// To guarantee this, we maintain multiple references to the SharedImage using
// a SharedImageHolder.
scoped_refptr<SharedImageHolder> shared_image =
CreateOrUpdateSharedImageForFrame(frame_resource);
if (!shared_image) {
return;
}
// Note that |mailbox_frame| will maintain a reference to |shared_image| and
// to |frame_resource|. The former is to ensure that the SharedImage lives for
// at least as long as the user of the decoded frame needs it. The latter is
// to ensure the service gets notified that it may re-use the underlying
// buffer once the decoded frame is no longer needed.
auto client_shared_image = shared_image->client_shared_image();
auto sync_token = shared_image->GenUnverifiedSyncToken();
scoped_refptr<VideoFrame> mailbox_frame = VideoFrame::WrapSharedImage(
frame_resource->format(), std::move(client_shared_image), sync_token,
/*mailbox_holders_release_cb=*/
base::DoNothingWithBoundArgs(std::move(shared_image), frame_resource),
/*coded_size=*/GetRectSizeFromOrigin(frame_resource->visible_rect()),
frame_resource->visible_rect(), frame_resource->natural_size(),
frame_resource->timestamp());
if (!mailbox_frame) {
DVLOGF(1) << "Could not create a gpu::Mailbox-backed VideoFrame for the "
"decoded frame";
return;
}
mailbox_frame->set_color_space(frame_resource->ColorSpace());
mailbox_frame->set_hdr_metadata(frame_resource->hdr_metadata());
mailbox_frame->set_metadata(frame_resource->metadata());
mailbox_frame->metadata().read_lock_fences_enabled = true;
mailbox_frame->metadata().is_webgpu_compatible =
frame_resource->metadata().is_webgpu_compatible;
const int64_t timestamp = frame_resource->timestamp().InMilliseconds();
const auto timestamp_it = timestamps_.Peek(timestamp);
// The OOPVideoDecoder has an internal cache that ensures incoming frames have
// a timestamp that corresponds to an earlier Decode() call. The cache in the
// OOPVideoDecoder is of the same size as |timestamps_|. Therefore, we should
// always be able to find the incoming frame in |timestamps_|, hence the
// CHECK().
CHECK(timestamp_it != timestamps_.end());
const auto decode_start_time = timestamp_it->second;
const auto decode_end_time = base::TimeTicks::Now();
UMA_HISTOGRAM_TIMES(kMojoStableVideoDecoderDecodeLatencyHistogram,
decode_end_time - decode_start_time);
output_cb_.Run(std::move(mailbox_frame));
}
OOPVideoDecoder* MojoStableVideoDecoder::oop_video_decoder() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
return static_cast<OOPVideoDecoder*>(oop_video_decoder_.get());
}
const OOPVideoDecoder* MojoStableVideoDecoder::oop_video_decoder() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
return static_cast<const OOPVideoDecoder*>(oop_video_decoder_.get());
}
} // namespace media

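The deleted file above sizes each SharedImage with GetRectSizeFromOrigin(visible_rect) so that visible rectangles not anchored at (0, 0) stay addressable. A self-contained sketch of that computation, using hypothetical numbers, is shown below; the struct names are illustrative, not Chromium types.

#include <cassert>

struct Rect {
  int x = 0, y = 0, width = 0, height = 0;
};

struct Size {
  int width = 0, height = 0;
};

// Size of the rectangle spanning from the frame origin (0, 0) to the
// bottom-right corner of |visible_rect|. This is what the SharedImage must
// cover so that UV coordinates computed against the full frame stay valid.
Size GetRectSizeFromOrigin(const Rect& visible_rect) {
  return {visible_rect.x + visible_rect.width,
          visible_rect.y + visible_rect.height};
}

int main() {
  // Example: an H.264 stream whose visible rectangle is offset by (16, 8).
  const Rect visible{16, 8, 1280, 720};
  const Size shared_image_size = GetRectSizeFromOrigin(visible);
  assert(shared_image_size.width == 1296 && shared_image_size.height == 728);
  return 0;
}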
@ -1,150 +0,0 @@
// Copyright 2024 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_MOJO_CLIENTS_MOJO_STABLE_VIDEO_DECODER_H_
#define MEDIA_MOJO_CLIENTS_MOJO_STABLE_VIDEO_DECODER_H_
#include "base/containers/flat_map.h"
#include "base/containers/lru_cache.h"
#include "base/memory/raw_ptr.h"
#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
#include "base/sequence_checker.h"
#include "base/thread_annotations.h"
#include "base/time/time.h"
#include "media/base/video_decoder.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "ui/gfx/generic_shared_memory_id.h"
namespace media {
class FrameResource;
class GpuVideoAcceleratorFactories;
class MediaLog;
class OOPVideoDecoder;
extern const char kMojoStableVideoDecoderDecodeLatencyHistogram[];
// A MojoStableVideoDecoder is analogous to a MojoVideoDecoder but for the
// stable::mojom::StableVideoDecoder interface, so in essence, it's just an
// adapter from the VideoDecoder API to the stable::mojom::StableVideoDecoder
// API.
//
// Consistent with the VideoDecoder contract, this class may be constructed on
// any sequence A. After that, it may be used on another sequence B but it must
// continue to be used and destroyed on that same sequence B. More specifically,
// B must correspond to the |media_task_runner| passed in the constructor.
class MojoStableVideoDecoder final : public VideoDecoder {
public:
MojoStableVideoDecoder(
scoped_refptr<base::SequencedTaskRunner> media_task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
pending_remote_decoder);
MojoStableVideoDecoder(const MojoStableVideoDecoder&) = delete;
MojoStableVideoDecoder& operator=(const MojoStableVideoDecoder&) = delete;
~MojoStableVideoDecoder() final;
// Decoder implementation.
bool IsPlatformDecoder() const final;
bool SupportsDecryption() const final;
// VideoDecoder implementation.
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb,
const WaitingCB& waiting_cb) final;
void Decode(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb) final;
void Reset(base::OnceClosure closure) final;
bool NeedsBitstreamConversion() const final;
bool CanReadWithoutStalling() const final;
int GetMaxDecodeRequests() const final;
VideoDecoderType GetDecoderType() const final;
private:
class SharedImageHolder;
// OOPVideoDecoder has a couple of important assumptions:
//
// 1) It needs to be constructed, used, and destroyed on the same sequence
// (despite the VideoDecoder contract).
//
// 2) It should only be initialized after a higher layer (in this case the
// MojoStableVideoDecoder) checks that the VideoDecoderConfig is supported.
//
// InitializeOnceSupportedConfigsAreKnown() allows us to make sure these
// assumptions are met because a) we can lazily create the OOPVideoDecoder in
// the initialization path, and b) we can validate the VideoDecoderConfig once
// the supported configurations are known and before calling
// OOPVideoDecoder::Initialize().
void InitializeOnceSupportedConfigsAreKnown(
const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb,
const WaitingCB& waiting_cb,
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
pending_remote_decoder);
scoped_refptr<SharedImageHolder> CreateOrUpdateSharedImageForFrame(
const scoped_refptr<FrameResource>& frame_resource);
void UnregisterSharedImage(base::UnguessableToken frame_token);
void OnFrameResourceDecoded(scoped_refptr<FrameResource> frame_resource);
OOPVideoDecoder* oop_video_decoder();
const OOPVideoDecoder* oop_video_decoder() const;
// DecodeBuffer/VideoFrame timestamps for histogram/tracing purposes. Must be
// large enough to account for any amount of frame reordering.
base::LRUCache<int64_t, base::TimeTicks> timestamps_;
scoped_refptr<base::SequencedTaskRunner> media_task_runner_;
SEQUENCE_CHECKER(sequence_checker_);
const raw_ptr<GpuVideoAcceleratorFactories> gpu_factories_
GUARDED_BY_CONTEXT(sequence_checker_);
// We hold onto the MediaLog* and the mojo::PendingRemote passed to the
// constructor so that we can lazily create the |oop_video_decoder_|. After
// the |oop_video_decoder_| is created, these two members should become
// invalid.
raw_ptr<MediaLog> media_log_ GUARDED_BY_CONTEXT(sequence_checker_);
mojo::PendingRemote<stable::mojom::StableVideoDecoder> pending_remote_decoder_
GUARDED_BY_CONTEXT(sequence_checker_);
// TODO(b/327268445): OOPVideoDecoder knows how to talk to a
// stable::mojom::StableVideoDecoder. The main reason we don't use it directly
// is that that's a component shared with the regular OOP-VD path and we need
// some things on top of it for GTFO OOP-VD, e.g., outputting
// gpu::Mailbox-backed VideoFrames instead of media::FrameResources. Instead
// of changing OOPVideoDecoder to handle both paths, we use it here as a
// delegate. Once we switch fully from regular OOP-VD to GTFO OOP-VD, we can
// merge OOPVideoDecoder into MojoStableVideoDecoder and get rid of it.
std::unique_ptr<VideoDecoder> oop_video_decoder_
GUARDED_BY_CONTEXT(sequence_checker_);
// |shared_images_| caches SharedImages so that we can re-use them if
// possible. The fact that we keep a reference to a SharedImageHolder
// guarantees that the corresponding SharedImage lives for at least as long as
// the OOPVideoDecoder knows about the corresponding buffer.
base::flat_map<base::UnguessableToken, scoped_refptr<SharedImageHolder>>
shared_images_ GUARDED_BY_CONTEXT(sequence_checker_);
OutputCB output_cb_ GUARDED_BY_CONTEXT(sequence_checker_);
base::WeakPtrFactory<MojoStableVideoDecoder> weak_this_factory_
GUARDED_BY_CONTEXT(sequence_checker_);
};
} // namespace media
#endif // MEDIA_MOJO_CLIENTS_MOJO_STABLE_VIDEO_DECODER_H_

File diff suppressed because it is too large.

@ -16,6 +16,8 @@ source_set("common") {
"mojo_decoder_buffer_converter.h",
"mojo_pipe_read_write_util.cc",
"mojo_pipe_read_write_util.h",
"validation_utils.cc",
"validation_utils.h",
]
deps = [

@ -0,0 +1,118 @@
// Copyright 2025 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/mojo/common/validation_utils.h"
namespace media {
std::unique_ptr<media::DecryptConfig> ValidateAndConvertMojoDecryptConfig(
media::mojom::DecryptConfigPtr decrypt_config) {
CHECK(decrypt_config);
if (decrypt_config->encryption_scheme ==
media::EncryptionScheme::kUnencrypted) {
// The DecryptConfig constructor has a DCHECK() that rejects
// EncryptionScheme::kUnencrypted.
return nullptr;
}
if (decrypt_config->key_id.empty()) {
return nullptr;
}
if (decrypt_config->iv.size() !=
static_cast<size_t>(media::DecryptConfig::kDecryptionKeySize)) {
return nullptr;
}
if (decrypt_config->encryption_scheme != media::EncryptionScheme::kCbcs &&
decrypt_config->encryption_pattern.has_value()) {
return nullptr;
}
return std::make_unique<media::DecryptConfig>(
decrypt_config->encryption_scheme, std::move(decrypt_config->key_id),
std::move(decrypt_config->iv), std::move(decrypt_config->subsamples),
decrypt_config->encryption_pattern);
}
std::unique_ptr<media::DecoderBufferSideData>
ValidateAndConvertMojoDecoderBufferSideData(
media::mojom::DecoderBufferSideDataPtr side_data) {
if (!side_data) {
return nullptr;
}
constexpr size_t kMaxSpatialLayers = 3;
if (side_data->spatial_layers.size() > kMaxSpatialLayers) {
return nullptr;
}
auto media_side_data = std::make_unique<media::DecoderBufferSideData>();
media_side_data->spatial_layers = side_data->spatial_layers;
if (!side_data->alpha_data.empty()) {
media_side_data->alpha_data =
base::HeapArray<uint8_t>::CopiedFrom(side_data->alpha_data);
}
media_side_data->secure_handle = side_data->secure_handle;
media_side_data->discard_padding.first = side_data->front_discard;
media_side_data->discard_padding.second = side_data->back_discard;
return media_side_data;
}
scoped_refptr<media::DecoderBuffer> ValidateAndConvertMojoDecoderBuffer(
media::mojom::DecoderBufferPtr decoder_buffer) {
if (!decoder_buffer) {
return nullptr;
}
if (decoder_buffer->is_eos()) {
const auto& eos_buffer = decoder_buffer->get_eos();
if (eos_buffer->next_config) {
if (eos_buffer->next_config->is_next_audio_config()) {
return media::DecoderBuffer::CreateEOSBuffer(
eos_buffer->next_config->get_next_audio_config());
} else if (eos_buffer->next_config->is_next_video_config()) {
return media::DecoderBuffer::CreateEOSBuffer(
eos_buffer->next_config->get_next_video_config());
}
}
return media::DecoderBuffer::CreateEOSBuffer();
}
const auto& mojo_buffer = decoder_buffer->get_data();
CHECK(!!mojo_buffer);
if (mojo_buffer->duration != media::kNoTimestamp &&
(mojo_buffer->duration < base::TimeDelta() ||
mojo_buffer->duration == media::kInfiniteDuration)) {
return nullptr;
}
std::unique_ptr<media::DecryptConfig> decrypt_config;
if (mojo_buffer->decrypt_config) {
decrypt_config = ValidateAndConvertMojoDecryptConfig(
std::move(mojo_buffer->decrypt_config));
if (!decrypt_config) {
return nullptr;
}
}
std::unique_ptr<media::DecoderBufferSideData> side_data;
if (mojo_buffer->side_data) {
side_data = ValidateAndConvertMojoDecoderBufferSideData(
std::move(mojo_buffer->side_data));
if (!side_data) {
return nullptr;
}
}
auto media_buffer = base::MakeRefCounted<media::DecoderBuffer>(
base::strict_cast<size_t>(mojo_buffer->data_size));
if (side_data) {
media_buffer->set_side_data(std::move(side_data));
}
media_buffer->set_timestamp(mojo_buffer->timestamp);
media_buffer->set_duration(mojo_buffer->duration);
media_buffer->set_is_key_frame(mojo_buffer->is_key_frame);
if (decrypt_config) {
media_buffer->set_decrypt_config(std::move(decrypt_config));
}
return media_buffer;
}
} // namespace media

@ -0,0 +1,38 @@
// Copyright 2025 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_MOJO_COMMON_VALIDATION_UTILS_H_
#define MEDIA_MOJO_COMMON_VALIDATION_UTILS_H_
#include "media/mojo/mojom/video_decoder.mojom.h"
// TODO(crbug.com/347331029): add unit tests.
namespace media {
// TODO(crbug.com/390706725): move this validation to media_type_converters.cc
// when we've verified that users of those converters can deal with a null
// return type. Eventually migrate to typemaps.
// TODO(crbug.com/40468949): migrate to typemaps.
std::unique_ptr<media::DecryptConfig> ValidateAndConvertMojoDecryptConfig(
media::mojom::DecryptConfigPtr decrypt_config);
// TODO(crbug.com/390706725): move this validation to media_type_converters.cc
// when we've verified that users of those converters are not affected by the
// additional validation. Eventually migrate to typemaps.
// TODO(crbug.com/40468949): migrate to typemaps.
std::unique_ptr<media::DecoderBufferSideData>
ValidateAndConvertMojoDecoderBufferSideData(
media::mojom::DecoderBufferSideDataPtr side_data);
// TODO(crbug.com/390706725): move this validation to media_type_converters.cc
// when we've verified that users of those converters can deal with a null
// return type. Eventually migrate to typemaps.
// TODO(crbug.com/40468949): migrate to typemaps.
scoped_refptr<media::DecoderBuffer> ValidateAndConvertMojoDecoderBuffer(
media::mojom::DecoderBufferPtr decoder_buffer);
} // namespace media
#endif // MEDIA_MOJO_COMMON_VALIDATION_UTILS_H_

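A hedged usage sketch of the helpers declared above, as a mojo service-side method might call them; SomeDecoderService, OnValidationError(), and ForwardToDecoder() are hypothetical names, not part of the CL.

#include "base/memory/scoped_refptr.h"
#include "media/base/decoder_buffer.h"
#include "media/mojo/common/validation_utils.h"
#include "media/mojo/mojom/video_decoder.mojom.h"

void SomeDecoderService::Decode(media::mojom::DecoderBufferPtr mojo_buffer) {
  scoped_refptr<media::DecoderBuffer> media_buffer =
      media::ValidateAndConvertMojoDecoderBuffer(std::move(mojo_buffer));
  if (!media_buffer) {
    // Validation failed: the remote sent a malformed buffer.
    OnValidationError();  // Hypothetical error handler.
    return;
  }
  // |media_buffer| can now be handed to the underlying media::VideoDecoder.
  ForwardToDecoder(std::move(media_buffer));  // Hypothetical forwarding step.
}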
@ -78,6 +78,13 @@ mojom("mojom") {
]
}
if (allow_oop_video_decoder) {
sources += [
"cdm_context_for_oopvd.mojom",
"video_decoder_factory_process.mojom",
]
}
public_deps = [
":audio_data",
":encryption_pattern",
@ -85,7 +92,6 @@ mojom("mojom") {
":web_speech_recognition",
"//gpu/ipc/common:interfaces",
"//media/learning/mojo/public/mojom",
"//media/mojo/mojom/stable:stable_video_decoder",
"//mojo/public/mojom/base",
"//sandbox/policy/mojom",
"//services/media_session/public/mojom",
@ -708,6 +714,24 @@ mojom("mojom") {
},
]
if (is_chromeos) {
shared_typemaps += [
{
types = [
{
mojom = "media.mojom.DecryptStatus"
cpp = "::media::Decryptor::Status"
},
{
mojom = "media.mojom.CdmContextEvent"
cpp = "::media::CdmContext::Event"
},
]
traits_headers = [ "cdm_context_for_oopvd_types_mojom_traits.h" ]
},
]
}
if (is_win) {
cpp_typemaps += [
{

@ -0,0 +1,63 @@
// Copyright 2021 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
module media.mojom;
import "media/mojo/mojom/media_types.mojom";
import "ui/gfx/geometry/mojom/geometry.mojom";
[EnableIf=allow_oop_video_decoder]
enum DecryptStatus {
kSuccess,
kNoKey,
kFailure,
};
[EnableIf=allow_oop_video_decoder]
enum CdmContextEvent {
kHasAdditionalUsableKey,
kHardwareContextReset,
};
// Interface for handling callbacks from the CdmContextForOOPVD interface below.
[EnableIf=allow_oop_video_decoder]
interface CdmContextEventCallback {
// Sends the event back to the registrar.
EventCallback(CdmContextEvent event);
};
// Maps to the media::CdmContext interface for remoting it to another process.
[EnableIf=allow_oop_video_decoder]
interface CdmContextForOOPVD {
// Proxies to media::CdmContext::GetChromeOsCdmContext()->GetHwKeyData.
GetHwKeyData(DecryptConfig decrypt_config, array<uint8> hw_identifier) =>
(DecryptStatus status, array<uint8> key_data);
// Registers an interface for receiving event callbacks. This maps to
// media::CdmContext::RegisterEventCB.
RegisterEventCallback(pending_remote<CdmContextEventCallback> callback);
// Proxies to media::CdmContext::GetChromeOsCdmContext()->GetHwConfigData.
GetHwConfigData() => (bool success, array<uint8> config_data);
// Proxies to media::CdmContext::GetChromeOsCdmContext()->
// GetScreenResolutions.
GetScreenResolutions() => (array<gfx.mojom.Size> resolutions);
// Proxies to
// media::CdmContext::GetChromeOsCdmContext()->AllocateSecureBuffer.
AllocateSecureBuffer(uint32 size) => (handle<platform>? secure_buffer);
// Proxies to
// media::CdmContext::GetChromeOsCdmContext()->ParseEncryptedSliceHeader.
ParseEncryptedSliceHeader(uint64 secure_handle, uint32 offset,
array<uint8> stream_data)
=> (bool success, array<uint8> slice_header);
// Proxies to
// media::CdmContext::GetDecryptor()->Decrypt for video data.
DecryptVideoBuffer(DecoderBuffer buffer, array<uint8> bytes)
=> (DecryptStatus status, DecoderBuffer? decoder_buffer,
array<uint8> bytes);
};

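A hedged sketch of the video-decoder-process side calling the interface above through a mojo::Remote. The callback parameter types assume the standard mojo C++ mapping of "(bool, array<uint8>)", the generated header path is assumed from the mojom file name, and the free function name is hypothetical.

#include <cstdint>
#include <vector>

#include "base/functional/bind.h"
#include "media/mojo/mojom/cdm_context_for_oopvd.mojom.h"
#include "mojo/public/cpp/bindings/remote.h"

// Queries the hardware CDM configuration through the remoted CdmContext.
void QueryHwConfigData(
    mojo::Remote<media::mojom::CdmContextForOOPVD>& cdm_context) {
  cdm_context->GetHwConfigData(base::BindOnce(
      [](bool success, const std::vector<uint8_t>& config_data) {
        if (!success) {
          return;  // The CDM could not provide hardware config data.
        }
        // Use |config_data| to set up the protected decoding path.
      }));
}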
@ -0,0 +1,78 @@
// Copyright 2025 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MEDIA_MOJO_MOJOM_CDM_CONTEXT_FOR_OOPVD_TYPES_MOJOM_TRAITS_H_
#define MEDIA_MOJO_MOJOM_CDM_CONTEXT_FOR_OOPVD_TYPES_MOJOM_TRAITS_H_
#include "base/notreached.h"
#include "media/base/cdm_context.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
namespace mojo {
template <>
struct EnumTraits<media::mojom::CdmContextEvent, ::media::CdmContext::Event> {
static media::mojom::CdmContextEvent ToMojom(
::media::CdmContext::Event input) {
switch (input) {
case ::media::CdmContext::Event::kHasAdditionalUsableKey:
return media::mojom::CdmContextEvent::kHasAdditionalUsableKey;
case ::media::CdmContext::Event::kHardwareContextReset:
return media::mojom::CdmContextEvent::kHardwareContextReset;
}
NOTREACHED();
}
static bool FromMojom(media::mojom::CdmContextEvent input,
::media::CdmContext::Event* output) {
switch (input) {
case media::mojom::CdmContextEvent::kHasAdditionalUsableKey:
*output = ::media::CdmContext::Event::kHasAdditionalUsableKey;
return true;
case media::mojom::CdmContextEvent::kHardwareContextReset:
*output = ::media::CdmContext::Event::kHardwareContextReset;
return true;
}
NOTREACHED();
}
};
template <>
struct EnumTraits<media::mojom::DecryptStatus, ::media::Decryptor::Status> {
static media::mojom::DecryptStatus ToMojom(::media::Decryptor::Status input) {
switch (input) {
case ::media::Decryptor::Status::kSuccess:
return media::mojom::DecryptStatus::kSuccess;
case ::media::Decryptor::Status::kNoKey:
return media::mojom::DecryptStatus::kNoKey;
case ::media::Decryptor::Status::kNeedMoreData:
return media::mojom::DecryptStatus::kFailure;
case ::media::Decryptor::Status::kError:
return media::mojom::DecryptStatus::kFailure;
}
NOTREACHED();
}
static bool FromMojom(media::mojom::DecryptStatus input,
::media::Decryptor::Status* output) {
switch (input) {
case media::mojom::DecryptStatus::kSuccess:
*output = ::media::Decryptor::Status::kSuccess;
return true;
case media::mojom::DecryptStatus::kNoKey:
*output = ::media::Decryptor::Status::kNoKey;
return true;
case media::mojom::DecryptStatus::kFailure:
*output = ::media::Decryptor::Status::kError;
return true;
}
NOTREACHED();
}
};
} // namespace mojo
#endif // MEDIA_MOJO_MOJOM_CDM_CONTEXT_FOR_OOPVD_TYPES_MOJOM_TRAITS_H_

@ -12,11 +12,20 @@ import "media/mojo/mojom/media_log.mojom";
import "media/mojo/mojom/media_types.mojom";
import "media/mojo/mojom/renderer.mojom";
import "media/mojo/mojom/renderer_extensions.mojom";
import "media/mojo/mojom/stable/stable_video_decoder.mojom";
import "media/mojo/mojom/video_decoder.mojom";
import "mojo/public/mojom/base/token.mojom";
import "mojo/public/mojom/base/unguessable_token.mojom";
// A disconnection of this interface can be interpreted as the
// VideoDecoder implementation created by
// InterfaceFactory.CreateVideoDecoderWithTracker() having been destroyed
// (or never bound in the first place).
//
// This interface is intended to be implemented by the browser process and used
// by the video decoder processes.
[EnableIf=allow_oop_video_decoder]
interface VideoDecoderTracker {};
// A factory for creating media mojo interfaces. Renderers can only access
// ContentDecryptionModules created with the same factory.
interface InterfaceFactory {
@ -25,28 +34,25 @@ interface InterfaceFactory {
// Creates a VideoDecoder (|video_decoder|). If |dst_video_decoder| is
// provided and supported by the implementation, VideoDecoder calls on
// |video_decoder| should be forwarded to |dst_video_decoder|. This is
// intended to be used for out-of-process video decoding and LaCrOS in which
// intended to be used for out-of-process video decoding in which
// the GPU process acts a proxy between a renderer and a video decoder process
// so that it can create a gpu::Mailbox for each decoded frame. As such, only
// the GPU process should be provided with a valid |dst_video_decoder|.
//
// TODO(b/327268445): we can remove the `dst_video_decoder` parameter once
// GTFO OOP-VD is used instead of regular OOP-VD.
CreateVideoDecoder(
pending_receiver<VideoDecoder> video_decoder,
pending_remote<media.stable.mojom.StableVideoDecoder>? dst_video_decoder);
pending_remote<media.mojom.VideoDecoder>? dst_video_decoder);
// Creates a StableVideoDecoder (`video_decoder`). This method is intended to
// be used only for the "GTFO OOP-VD" mode, i.e., when the StableVideoDecoder
// client talks directly to the StableVideoDecoder service without using the
// GPU process as a proxy (in contrast to the "regular OOP-VD" mode).
//
// The StableVideoDecoder implementation lives in a utility process. Clients
// are expected to live in either the browser process (for supported profile
// querying) or the renderer process (to drive video decoding).
// Creates a VideoDecoder and should be called by the browser process.
// If |tracker| is provided, the caller can handle its disconnection in order
// to know when the VideoDecoder implementation is destroyed. Note that
// |tracker| may get disconnected without ever creating a VideoDecoder
// implementation instance, e.g., if an error occurs that prevents |receiver|
// from being bound to an implementation. This method is intended to be used
// only for OOP-VD.
[EnableIf=allow_oop_video_decoder]
CreateStableVideoDecoder(
pending_receiver<media.stable.mojom.StableVideoDecoder> video_decoder);
CreateVideoDecoderWithTracker(
pending_receiver<VideoDecoder> receiver,
pending_remote<VideoDecoderTracker>? tracker);
// Creates an AudioEncoder bound to a given receiver.
CreateAudioEncoder(pending_receiver<AudioEncoder> audio_encoder);

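To make the |tracker| contract above concrete, here is a minimal browser-side C++ sketch (the class and function names are hypothetical, not part of this CL) that binds a VideoDecoderTracker receiver and treats its disconnection as "the decoder implementation is gone":

#include "base/functional/callback.h"
#include "media/mojo/mojom/interface_factory.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "mojo/public/cpp/bindings/receiver.h"
#include "mojo/public/cpp/bindings/remote.h"

// VideoDecoderTracker has no methods; its only purpose is the disconnection
// signal, so the implementation is empty.
class DecoderLifetimeWatcher : public media::mojom::VideoDecoderTracker {
 public:
  mojo::PendingRemote<media::mojom::VideoDecoderTracker> BindAndWatch(
      base::OnceClosure on_decoder_gone) {
    auto tracker = receiver_.BindNewPipeAndPassRemote();
    receiver_.set_disconnect_handler(std::move(on_decoder_gone));
    return tracker;
  }

 private:
  mojo::Receiver<media::mojom::VideoDecoderTracker> receiver_{this};
};

// Hypothetical browser-process call site (only meaningful in builds with
// allow_oop_video_decoder).
mojo::Remote<media::mojom::VideoDecoder> CreateTrackedDecoder(
    media::mojom::InterfaceFactory* factory,
    DecoderLifetimeWatcher* watcher,
    base::OnceClosure on_decoder_gone) {
  mojo::Remote<media::mojom::VideoDecoder> decoder;
  factory->CreateVideoDecoderWithTracker(
      decoder.BindNewPipeAndPassReceiver(),
      watcher->BindAndWatch(std::move(on_decoder_gone)));
  // |on_decoder_gone| runs when the implementation in the video decoder
  // process is destroyed, or if it is never bound in the first place.
  return decoder;
}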
@ -5,149 +5,10 @@
import("//media/gpu/args.gni")
import("//mojo/public/tools/bindings/mojom.gni")
mojom("stable_video_decoder") {
generate_java = true
sources = [
"stable_video_decoder.mojom",
"stable_video_decoder_types.mojom",
]
public_deps = [
":native_pixmap_handle",
"//gpu/ipc/common:interfaces",
"//media/mojo/mojom:encryption_pattern",
"//mojo/public/mojom/base",
"//sandbox/policy/mojom",
"//ui/gfx/geometry/mojom",
"//ui/gfx/mojom",
]
cpp_typemaps = [
{
types = [
{
mojom = "media.stable.mojom.CdmContextEvent"
cpp = "::media::CdmContext::Event"
},
{
mojom = "media.stable.mojom.ColorSpacePrimaryID"
cpp = "::gfx::ColorSpace::PrimaryID"
},
{
mojom = "media.stable.mojom.ColorSpaceTransferID"
cpp = "::gfx::ColorSpace::TransferID"
},
{
mojom = "media.stable.mojom.ColorSpaceMatrixID"
cpp = "::gfx::ColorSpace::MatrixID"
},
{
mojom = "media.stable.mojom.ColorSpaceRangeID"
cpp = "::gfx::ColorSpace::RangeID"
},
{
mojom = "media.stable.mojom.ColorSpace"
cpp = "::gfx::ColorSpace"
},
{
mojom = "media.stable.mojom.ColorVolumeMetadata"
cpp = "::gfx::HdrMetadataSmpteSt2086"
},
{
mojom = "media.stable.mojom.DecoderBuffer"
cpp = "::scoped_refptr<::media::DecoderBuffer>"
nullable_is_same_type = true
},
{
mojom = "media.stable.mojom.DecryptConfig"
cpp = "::std::unique_ptr<::media::DecryptConfig>"
move_only = true
nullable_is_same_type = true
},
{
mojom = "media.stable.mojom.DecryptStatus"
cpp = "::media::Decryptor::Status"
},
{
mojom = "media.stable.mojom.EncryptionScheme"
cpp = "::media::EncryptionScheme"
},
{
mojom = "media.stable.mojom.HDRMetadata"
cpp = "::gfx::HDRMetadata"
},
{
mojom = "media.stable.mojom.MediaLogRecord.Type"
cpp = "::media::MediaLogRecord::Type"
},
{
mojom = "media.stable.mojom.MediaLogRecord"
cpp = "::media::MediaLogRecord"
},
{
mojom = "media.stable.mojom.NativeGpuMemoryBufferHandle"
cpp = "::gfx::GpuMemoryBufferHandle"
move_only = true
},
{
mojom = "media.stable.mojom.StatusData"
cpp = "::media::internal::StatusData"
},
{
mojom = "media.stable.mojom.Status"
cpp = "::media::DecoderStatus"
},
{
mojom = "media.stable.mojom.SubsampleEntry"
cpp = "::media::SubsampleEntry"
},
{
mojom = "media.stable.mojom.SupportedVideoDecoderConfig"
cpp = "::media::SupportedVideoDecoderConfig"
},
{
mojom = "media.stable.mojom.VideoCodec"
cpp = "::media::VideoCodec"
},
{
mojom = "media.stable.mojom.VideoCodecProfile"
cpp = "::media::VideoCodecProfile"
},
{
mojom = "media.stable.mojom.VideoDecoderConfig"
cpp = "::media::VideoDecoderConfig"
},
{
mojom = "media.stable.mojom.VideoDecoderType"
cpp = "::media::VideoDecoderType"
},
{
mojom = "media.stable.mojom.VideoFrameMetadata"
cpp = "::media::VideoFrameMetadata"
},
{
mojom = "media.stable.mojom.VideoPixelFormat"
cpp = "::media::VideoPixelFormat"
},
{
mojom = "media.stable.mojom.WaitingReason"
cpp = "::media::WaitingReason"
},
]
traits_headers = [ "stable_video_decoder_types_mojom_traits.h" ]
traits_sources = [ "stable_video_decoder_types_mojom_traits.cc" ]
traits_public_deps = [ "//media" ]
if (use_vaapi) {
traits_deps = [ "//media/gpu/vaapi:vaapi_status" ]
} else if (use_v4l2_codec) {
traits_deps = [ "//media/gpu/v4l2:v4l2_status" ]
}
},
]
}
# TODO(crbug.com/347331029): native_pixmap_handle.mojom is used by
# ash/components/arc/mojom/protected_buffer_manager.mojom. Consider moving
# the native_pixmap_handle.mojom file and its traits closer to the code
# that uses them.
mojom("native_pixmap_handle") {
generate_java = true
@ -179,24 +40,23 @@ mojom("native_pixmap_handle") {
}
}
source_set("unit_tests") {
testonly = true
sources = [ "stable_video_decoder_types_mojom_traits_unittest.cc" ]
deps = [
":stable_video_decoder",
"//testing/gtest",
]
if (use_vaapi) {
deps += [ "//media/gpu/vaapi:vaapi_status" ]
} else if (use_v4l2_codec) {
deps += [ "//media/gpu/v4l2:v4l2_status" ]
}
if (is_linux || is_chromeos) {
sources += [
if (is_linux || is_chromeos) {
source_set("unit_tests") {
testonly = true
sources = [
"mojom_traits_test_util.cc",
"mojom_traits_test_util.h",
"native_pixmap_handle_mojom_traits_unittest.cc",
]
deps += [ "//base/test:test_support" ]
deps = [
":native_pixmap_handle",
"//base/test:test_support",
"//testing/gtest",
]
if (use_vaapi) {
deps += [ "//media/gpu/vaapi:vaapi_status" ]
} else if (use_v4l2_codec) {
deps += [ "//media/gpu/v4l2:v4l2_status" ]
}
}
}

@ -1,237 +0,0 @@
// Copyright 2021 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
module media.stable.mojom;
import "gpu/ipc/common/gpu_feature_info.mojom";
import "media/mojo/mojom/stable/stable_video_decoder_types.mojom";
import "mojo/public/mojom/base/unguessable_token.mojom";
import "sandbox/policy/mojom/sandbox.mojom";
import "ui/gfx/geometry/mojom/geometry.mojom";
// This API is a stable version of VideoDecoder. This is used
// by out-of-process video decoding to allow the GPU process to forward video
// decoding requests to a video decoder process.
// In order to avoid depending on unstable definitions or on components which
// will cause cyclic dependencies, some similar but occasionally simplified
// versions of the structures were used rather than directly depending on the
// structures in other components.
// Based on |media.mojom.MediaLog| but does not depend on
// |media.mojom.MediaLogRecord|.
// Next min method ID: 1
[Uuid="2e4c1aed-fd62-40e6-8601-e5c4288246c0"]
interface MediaLog {
// Adds a log record to a MediaLog service.
AddLogRecord@0(MediaLogRecord event);
};
// Based on |media.mojom.VideoFrameHandleReleaser| but does not depend on
// |gpu.mojom.SyncToken|.
// Next min method ID: 1
[Uuid="8afdcf21-99d7-4864-a957-75d2a7e17da6"]
interface VideoFrameHandleReleaser {
// Signals that the VideoFrame identified by |release_token| should be
// released.
ReleaseVideoFrame@0(mojo_base.mojom.UnguessableToken release_token);
};
// Based on |media.mojom.VideoDecoderClient| but does not depend on
// |media.mojom.VideoFrame| or |media.mojom.WaitingReason|.
// Next min method ID: 2
[Uuid="8a6fce77-7fcc-42e1-ac74-443859039696"]
interface VideoDecoderClient {
// Output a decoded frame. Frames are output in presentation order.
//
// When |can_read_without_stalling| is false, preroll should be disabled. This
// is necessary if the decoder cannot guarantee that it can output another
// frame, for example if output buffers are limited or configuration changes
// require the return of all outstanding frames.
//
// The client shall call VideoFrameHandleReleaser::ReleaseVideoFrame() with
// |release_token| when it is finished using |frame|.
OnVideoFrameDecoded@0(VideoFrame frame,
bool can_read_without_stalling,
mojo_base.mojom.UnguessableToken release_token);
// Called when the remote decoder is waiting because of |reason|, e.g. waiting
// for decryption key.
OnWaiting@1(WaitingReason reason);
};
// Interface for handling callbacks from the StableCdmContext interface below.
// Next min method ID: 1
[Uuid="a1a73e1f-5297-49a2-a4e5-df875a44b61e"]
interface CdmContextEventCallback {
// Sends the event back to the registrar.
EventCallback@0(CdmContextEvent event);
};
// Maps to the media::CdmContext interface for remoting it to another process.
// Next MinVersion: 5
// Next min method ID: 7
[Uuid="33c7a00e-2970-41b3-8c7b-f1074a539740"]
interface StableCdmContext {
// Proxies to media::CdmContext::GetChromeOsCdmContext()->GetHwKeyData.
[MinVersion=1]
GetHwKeyData@0(DecryptConfig decrypt_config, array<uint8> hw_identifier)
=> (DecryptStatus status, array<uint8> key_data);
// Registers an interface for receiving event callbacks. This maps to
// media::CdmContext::RegisterEventCB.
[MinVersion=1]
RegisterEventCallback@1(pending_remote<CdmContextEventCallback> callback);
// Proxies to media::CdmContext::GetChromeOsCdmContext()->GetHwConfigData.
[MinVersion=1]
GetHwConfigData@2() => (bool success, array<uint8> config_data);
// Proxies to media::CdmContext::GetChromeOsCdmContext()->
// GetScreenResolutions.
[MinVersion=1]
GetScreenResolutions@3() => (array<gfx.mojom.Size> resolutions);
// Proxies to
// media::CdmContext::GetChromeOsCdmContext()->AllocateSecureBuffer.
[MinVersion=2]
AllocateSecureBuffer@4(uint32 size) => (handle<platform>? secure_buffer);
// Proxies to
// media::CdmContext::GetChromeOsCdmContext()->ParseEncryptedSliceHeader.
[MinVersion=3]
ParseEncryptedSliceHeader@5(
uint64 secure_handle, uint32 offset, array<uint8> stream_data)
=> (bool success, array<uint8> slice_header);
// Proxies to
// media::CdmContext::GetDecryptor()->Decrypt for video data.
[MinVersion=4]
DecryptVideoBuffer@6(DecoderBuffer buffer, array<uint8> bytes)
=> (DecryptStatus status,
DecoderBuffer? decoder_buffer,
array<uint8> bytes);
};
// Based on |media.mojom.VideoDecoder|.
// Next min method ID: 5
// Next min version: 2
[Uuid="85611470-3e87-43a9-ac75-a11a63e76415"]
interface StableVideoDecoder {
// Returns a list of supported configs as well as the decoder ID for the
// decoder which supports them. It is expected that Initialize() will fail
// for any config that does not match an entry in this list.
//
// May be called before Construct().
[Sync]
GetSupportedConfigs@0()
=> (array<SupportedVideoDecoderConfig> supported_configs,
VideoDecoderType decoder_type);
// Initialize the decoder. This must be called before any method other than
// GetSupportedConfigs().
// StableVideoDecoder may hold onto references to VideoFrames sent to the
// client. However, it shall not re-use those frames until the client calls
// ReleaseVideoFrame() on |video_frame_handle_releaser|. The
// StableVideoDecoder may, however, release those references at any time.
// Therefore, VideoFrames sent to the client shall contain resources whose
// lifetime is independent of the StableVideoDecoder's lifetime, e.g., file
// descriptors.
Construct@1(
pending_associated_remote<VideoDecoderClient> client,
pending_remote<MediaLog> media_log,
pending_receiver<VideoFrameHandleReleaser> video_frame_handle_releaser,
handle<data_pipe_consumer> decoder_buffer_pipe,
ColorSpace target_color_space);
// Configure (or reconfigure) the decoder. This must be called before decoding
// any frames, and must not be called while there are pending Initialize(),
// Decode(), or Reset() requests.
//
// |cdm_context| is required for the first Initialize() call that sets up
// encryption and is ignored on subsequent calls.
//
// |needs_transcryption| tells the client whether it needs to do transcryption
// for encrypted content before sending it to the decoder.
//
// TODO(b/195769334): consider passing |cdm_context| in Construct() instead of
// Initialize().
Initialize@2(VideoDecoderConfig config,
bool low_delay,
pending_remote<StableCdmContext>? cdm_context)
=> (Status status,
bool needs_bitstream_conversion,
int32 max_decode_requests,
VideoDecoderType decoder_type,
[MinVersion=1] bool needs_transcryption);
// Request decoding of exactly one frame or an EOS buffer. This must not be
// called while there are pending Initialize(), Reset(), or Decode(EOS)
// requests.
Decode@3(DecoderBuffer buffer) => (Status status);
// Reset the decoder. All ongoing Decode() requests must be completed or
// aborted before executing the callback. This must not be called while there
// is a pending Initialize() request.
Reset@4() => ();
};
// Only Chrome-for-Linux and ChromeOS should host the implementation of a
// StableVideoDecoderFactory.
[EnableIf=is_chromeos|is_linux]
const sandbox.mojom.Sandbox kStableVideoDecoderFactoryServiceSandbox =
sandbox.mojom.Sandbox.kHardwareVideoDecoding;
[EnableIfNot=is_chromeos|is_linux]
const sandbox.mojom.Sandbox kStableVideoDecoderFactoryServiceSandbox =
sandbox.mojom.Sandbox.kNoSandbox;
// A disconnection of this interface can be interpreted as the
// StableVideoDecoder implementation created by
// StableVideoDecoderFactory.CreateStableVideoDecoder() having been destroyed
// (or never bound in the first place).
//
// This interface is intended to be implemented by the browser process and used
// by the video decoder processes.
[Uuid="b2211aaa-78e9-4326-8c85-a7ab15b32032"]
interface StableVideoDecoderTracker {};
// A StableVideoDecoderFactory allows the browser process to bind a
// StableVideoDecoder on behalf of some client which can be, e.g., the
// renderer process in ChromeOS.
// Next min method ID: 1
// Next min version: 2
[Uuid="d6047fd9-fffb-4e37-ad9b-383a1c9e1d2d"]
interface StableVideoDecoderFactory {
// Creates a StableVideoDecoder and should be called by the browser process.
// If |tracker| is provided, the caller can handle its disconnection in order
// to know when the StableVideoDecoder implementation is destroyed. Note that
// |tracker| may get disconnected without ever creating a StableVideoDecoder
// implementation instance, e.g., if an error occurs that prevents |receiver|
// from being bound to an implementation.
CreateStableVideoDecoder@0(
pending_receiver<StableVideoDecoder> receiver,
[MinVersion=1] pending_remote<StableVideoDecoderTracker>? tracker);
};
// A StableVideoDecoderFactoryProcess is intended to be hosted in a utility
// process in either ChromeOS or Chrome-for-linux. The client is expected to
// be the browser process of ChromeOS or Chrome-for-linux. The intended usage
// is as follows:
//
// 1) The browser process of ChromeOS or Chrome-for-linux receives a request
// to bind a pending_receiver<StableVideoDecoderFactory>.
//
// 2) That browser process starts a utility process to bind a
// pending_receiver<StableVideoDecoderFactoryProcess>. It then uses this
// connection to call InitializeStableVideoDecoderFactory() with the
// pending_receiver<StableVideoDecoderFactory> from (1).
[ServiceSandbox=kStableVideoDecoderFactoryServiceSandbox,
EnableIf=is_chromeos|is_linux]
interface StableVideoDecoderFactoryProcess {
// Initializes a StableVideoDecoderFactory using |gpu_feature_info| to
// restrict the supported video decode configurations.
InitializeStableVideoDecoderFactory(
gpu.mojom.GpuFeatureInfo gpu_feature_info,
pending_receiver<StableVideoDecoderFactory> receiver);
};

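The frame-release contract documented in the removed VideoDecoderClient and VideoFrameHandleReleaser comments above carries over to the non-stable interfaces that replace them. For reference, a minimal C++ sketch of a client honoring that contract, written against the stable types exactly as declared above (the class name is hypothetical):

#include "base/unguessable_token.h"
#include "media/base/waiting.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/remote.h"

class FrameReleasingClient : public media::stable::mojom::VideoDecoderClient {
 public:
  // The returned receiver is what gets passed to
  // StableVideoDecoder::Construct().
  mojo::PendingReceiver<media::stable::mojom::VideoFrameHandleReleaser>
  BindReleaser() {
    return releaser_.BindNewPipeAndPassReceiver();
  }

  void OnVideoFrameDecoded(media::stable::mojom::VideoFramePtr frame,
                           bool can_read_without_stalling,
                           const base::UnguessableToken& release_token) override {
    // ... consume |frame| (e.g., import its GpuMemoryBuffer handle) ...
    // Per the contract above, the decoder may only reuse the frame's
    // resources after this call.
    releaser_->ReleaseVideoFrame(release_token);
  }

  void OnWaiting(media::WaitingReason reason) override {}

 private:
  mojo::Remote<media::stable::mojom::VideoFrameHandleReleaser> releaser_;
};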
@ -1,561 +0,0 @@
// Copyright 2021 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
module media.stable.mojom;
import "media/mojo/mojom/encryption_pattern.mojom";
import "media/mojo/mojom/stable/native_pixmap_handle.mojom";
import "mojo/public/mojom/base/time.mojom";
import "mojo/public/mojom/base/values.mojom";
import "ui/gfx/geometry/mojom/geometry.mojom";
import "ui/gfx/mojom/buffer_types.mojom";
// Maps to |media.mojom.VideoCodec|.
[Extensible]
enum VideoCodec {
[Default] kUnknown = 0,
kH264,
kVC1,
kMPEG2,
kMPEG4,
kTheora,
kVP8,
kVP9,
kHEVC,
kDolbyVision,
kAV1,
// DO NOT ADD RANDOM VIDEO CODECS!
//
// The only acceptable time to add a new codec is if there is production code
// that uses said codec in the same CL.
};
// Maps to |media.mojom.VideoCodecProfile|.
[Extensible]
enum VideoCodecProfile {
// Keep the values in this enum unique, as they imply format (h.264 vs. VP8,
// for example), and keep the values for a particular format grouped
// together for clarity.
// Next version: 3
// Next value: 52
[Default] kVideoCodecProfileUnknown = -1,
kH264ProfileMin = 0,
kH264ProfileBaseline = kH264ProfileMin,
kH264ProfileMain = 1,
kH264ProfileExtended = 2,
kH264ProfileHigh = 3,
kH264ProfileHigh10 = 4,
kH264ProfileHigh422 = 5,
kH264ProfileHigh444Predictive = 6,
kH264ProfileScalableBaseline = 7,
kH264ProfileScalableHigh = 8,
kH264ProfileStereoHigh = 9,
kH264ProfileMultiviewHigh = 10,
kH264ProfileMax = kH264ProfileMultiviewHigh,
kVP8ProfileMin = 11,
kVP8ProfileAny = kVP8ProfileMin,
kVP8ProfileMax = kVP8ProfileAny,
kVP9ProfileMin = 12,
kVP9Profile0 = kVP9ProfileMin,
kVP9Profile1 = 13,
kVP9Profile2 = 14,
kVP9Profile3 = 15,
kVP9ProfileMax = kVP9Profile3,
kHEVCProfileMin = 16,
kHEVCProfileMain = kHEVCProfileMin,
kHEVCProfileMain10 = 17,
kHEVCProfileMainStillPicture = 18,
kHEVCProfileMax = kHEVCProfileMainStillPicture,
kDolbyVisionProfile0 = 19,
// DO NOT USE: Deprecated since DV Profile 4 not used anymore (b/292554162).
kDeprecatedDolbyVisionProfile4 = 20,
kDolbyVisionProfile5 = 21,
kDolbyVisionProfile7 = 22,
kTheoraProfileMin = 23,
kTheoraProfileAny = kTheoraProfileMin,
kTheoraProfileMax = kTheoraProfileAny,
kAV1ProfileMin = 24,
kAV1ProfileMain = kAV1ProfileMin,
kAV1ProfileHigh = 25,
kAV1ProfilePro = 26,
kAV1ProfileMax = kAV1ProfilePro,
kDolbyVisionProfile8 = 27,
kDolbyVisionProfile9 = 28,
[MinVersion=1] kHEVCProfileExtMin = 29,
[MinVersion=1] kHEVCProfileRext = kHEVCProfileExtMin,
[MinVersion=1] kHEVCProfileHighThroughput = 30,
[MinVersion=1] kHEVCProfileMultiviewMain = 31,
[MinVersion=1] kHEVCProfileScalableMain = 32,
[MinVersion=1] kHEVCProfile3dMain = 33,
[MinVersion=1] kHEVCProfileScreenExtended = 34,
[MinVersion=1] kHEVCProfileScalableRext = 35,
[MinVersion=1] kHEVCProfileHighThroughputScreenExtended = 36,
[MinVersion=1] kHEVCProfileExtMax = kHEVCProfileHighThroughputScreenExtended,
[MinVersion=2] kVVCProfileMin = 37,
[MinVersion=2] kVVCProfileMain10 = kVVCProfileMin,
[MinVersion=2] kVVCProfileMain12 = 38,
[MinVersion=2] kVVCProfileMain12Intra = 39,
[MinVersion=2] kVVCProfileMultilayerMain10 = 40,
[MinVersion=2] kVVCProfileMain10444 = 41,
[MinVersion=2] kVVCProfileMain12444 = 42,
[MinVersion=2] kVVCProfileMain16444 = 43,
[MinVersion=2] kVVCProfileMain12444Intra = 44,
[MinVersion=2] kVVCProfileMain16444Intra = 45,
[MinVersion=2] kVVCProfileMultilayerMain10444 = 46,
[MinVersion=2] kVVCProfileMain10Still = 47,
[MinVersion=2] kVVCProfileMain12Still = 48,
[MinVersion=2] kVVCProfileMain10444Still = 49,
[MinVersion=2] kVVCProfileMain12444Still = 50,
[MinVersion=2] kVVCProfileMain16444Still = 51,
[MinVersion=2] kVVCProileMax = kVVCProfileMain16444Still,
};
// Based on |media.mojom.SubsampleEntry|.
// Next min field ID: 2
struct SubsampleEntry {
uint32 clear_bytes@0;
uint32 cypher_bytes@1;
};
// Maps to |media.mojom.EncryptionScheme|.
[Extensible]
enum EncryptionScheme {
[Default] kUnencrypted = 0,
kCenc, // 'cenc' subsample encryption using AES-CTR mode.
kCbcs, // 'cbcs' pattern encryption using AES-CBC mode.
};
// Based on |media.mojom.DecryptConfig| but does not depend on
// |media.mojom.EncryptionScheme| and |media.mojom.SubsampleEntry|.
// Next min field ID: 5
struct DecryptConfig {
EncryptionScheme encryption_scheme@0;
string key_id@1;
string iv@2;
array<SubsampleEntry> subsamples@3;
media.mojom.EncryptionPattern? encryption_pattern@4;
};
// Based on |gfx.mojom.ColorVolumeMetadata|.
// Next min field ID: 6
struct ColorVolumeMetadata {
gfx.mojom.PointF primary_r@0;
gfx.mojom.PointF primary_g@1;
gfx.mojom.PointF primary_b@2;
gfx.mojom.PointF white_point@3;
float luminance_max@4;
float luminance_min@5;
};
// Based on |gfx.mojom.HDRMetadata| but does not depend on
// |gfx.mojom.ColorVolumeMetadata|.
// Next min field ID: 3
struct HDRMetadata {
ColorVolumeMetadata color_volume_metadata@0;
uint32 max_content_light_level@1;
uint32 max_frame_average_light_level@2;
};
// Based on |media.mojom.VideoDecoderConfig| but does not depend on
// |media.mojom.VideoCodec|, |media.mojom.VideoCodecProfile|,
// |media.mojom.VideoTransformation|, |media.mojom.EncryptionScheme|,
// |media.mojom.VideoColorSpace| and |gfx.mojom.HDRMetadata|.
// Next min field ID: 11
struct VideoDecoderConfig {
VideoCodec codec@0;
VideoCodecProfile profile@1;
uint32 level@2;
bool has_alpha@3;
gfx.mojom.Size coded_size@4;
gfx.mojom.Rect visible_rect@5;
gfx.mojom.Size natural_size@6;
array<uint8> extra_data@7;
EncryptionScheme encryption_scheme@8;
ColorSpace color_space_info@9;
HDRMetadata? hdr_metadata@10;
};
// Based on |media.mojom.SupportedVideoDecoderConfig| but does not
// depend on |media.mojom.VideoCodecProfile|
// Next min field ID: 6
struct SupportedVideoDecoderConfig {
// Range of VideoCodecProfiles to match, inclusive.
VideoCodecProfile profile_min@0;
VideoCodecProfile profile_max@1;
// Range of coded sizes to match, inclusive in each dimension.
gfx.mojom.Size coded_size_min@2;
gfx.mojom.Size coded_size_max@3;
// Match configs that have encryption configured.
bool allow_encrypted@4;
// Do not match configs that do not have encryption configured.
bool require_encrypted@5;
};
[Extensible]
enum StatusCode {
kOk_DEPRECATED,
kAborted,
[Default] kError,
};
// Based on |media.mojom.StatusData|.
// Next min field ID: 6
struct StatusData {
string group@0;
StatusCode code@1;
string message@2;
// This should ideally be a `mojo_base.mojom.ListValue`, but changing the wire
// format here is too twisted.
array<mojo_base.mojom.Value> frames@3;
StatusData? cause@4;
mojo_base.mojom.Value data@5;
};
// Based on |media.mojom.Status|.
// Next min field ID: 1
struct Status {
StatusData? internal@0;
};
// Based on |media.mojom.DecoderBufferSideData|.
// Next min field ID: 3
struct DecoderBufferSideData {
array<uint32> spatial_layers@0;
array<uint8> alpha_data@1;
uint64 secure_handle@2;
};
// Based on |media.mojom.DecoderBuffer| but does not depend on
// |media.mojom.DecryptConfig|.
// Next min field ID: 10
// Next version: 2
struct DecoderBuffer {
mojo_base.mojom.TimeDelta timestamp@0;
mojo_base.mojom.TimeDelta duration@1;
bool is_end_of_stream@2;
uint32 data_size@3;
bool is_key_frame@4;
// TODO(b/269383891): Deprecate |raw_side_data| in M120.
array<uint8> raw_side_data@5;
DecryptConfig? decrypt_config@6;
mojo_base.mojom.TimeDelta front_discard@7;
mojo_base.mojom.TimeDelta back_discard@8;
[MinVersion=1] DecoderBufferSideData? side_data@9;
};
// Maps to |media.mojom.VideoDecoderType|.
[Extensible]
enum VideoDecoderType {
[Default] kUnknown,
kVaapi,
kVda,
kV4L2,
kTesting,
};
// Maps to |gfx.mojom.ColorSpacePrimaryID|.
[Extensible]
enum ColorSpacePrimaryID {
[Default] kInvalid,
kBT709,
kBT470M,
kBT470BG,
kSMPTE170M,
kSMPTE240M,
kFilm,
kBT2020,
kSMPTEST428_1,
kSMPTEST431_2,
kSMPTEST432_1,
kXYZ_D50,
kAdobeRGB,
kAppleGenericRGB,
kWideGamutColorSpin,
kCustom,
[MinVersion=1] kEBU_3213_E,
};
// Maps to |gfx.mojom.ColorSpaceTransferID|.
[Extensible]
enum ColorSpaceTransferID {
[Default] kInvalid,
kBT709,
kBT709Apple,
kGamma18,
kGamma22,
kGamma24,
kGamma28,
kSMPTE170M,
kSMPTE240M,
kLinear,
kLog,
kLogSqrt,
kIEC61966_2_4,
kBT1361_ECG,
kIEC61966_2_1,
kBT2020_10,
kBT2020_12,
kSMPTEST2084,
kSMPTEST428_1,
kARIB_STD_B67,
kIEC61966_2_1_HDR,
kLinearHDR,
kCustom,
kCustomHDR,
kPiecewiseHDR,
[MinVersion=1] kScrgbLinear80Nits,
};
// Maps to |gfx.mojom.ColorSpaceMatrixID|.
[Extensible]
enum ColorSpaceMatrixID {
[Default] kInvalid,
kRGB,
kBT709,
kFCC,
kBT470BG,
kSMPTE170M,
kSMPTE240M,
kYCOCG,
// DO NOT USE: Deprecated since BT2020_CL not used anymore (b/333906350).
kBT2020_NCL,
kBT2020_CL,
kYDZDX,
kGBR,
};
// Maps to |gfx.mojom.ColorSpaceRangeID|.
[Extensible]
enum ColorSpaceRangeID {
[Default] kInvalid,
kLimited,
kFull,
kDerived,
};
// Based on |gfx.mojom.ColorSpace| but does not depend on
// |gfx.mojom.ColorSpacePrimaryID|, |gfx.mojom.ColorSpaceTransferID|,
// |gfx.mojom.ColorSpaceMatrixID| and |gfx.mojom.ColorSpaceRangeID|.
// Next min field ID: 6
struct ColorSpace {
ColorSpacePrimaryID primaries@0;
ColorSpaceTransferID transfer@1;
ColorSpaceMatrixID matrix@2;
ColorSpaceRangeID range@3;
array<float, 9> custom_primary_matrix@4;
array<float, 7> transfer_params@5;
};
// Maps to |media.mojom.VideoPixelFormat|.
[Extensible]
enum VideoPixelFormat {
[Default] kPixelFormatUnknown = 0, // Unknown or unspecified format value.
kPixelFormatI420 =
1, // 12bpp YUV planar 1x1 Y, 2x2 UV samples, a.k.a. YU12.
// Note: Chrome does not actually support YVU compositing, so you probably
// don't actually want to use this. See http://crbug.com/784627.
kPixelFormatYV12 = 2, // 12bpp YVU planar 1x1 Y, 2x2 VU samples.
kPixelFormatI422 = 3, // 16bpp YUV planar 1x1 Y, 2x1 UV samples.
kPixelFormatI420A = 4, // 20bpp YUVA planar 1x1 Y, 2x2 UV, 1x1 A samples.
kPixelFormatI444 = 5, // 24bpp YUV planar, no subsampling.
kPixelFormatNV12 =
6, // 12bpp with Y plane followed by a 2x2 interleaved UV plane.
kPixelFormatNV21 =
7, // 12bpp with Y plane followed by a 2x2 interleaved VU plane.
kPixelFormatUYVY =
8, // 16bpp interleaved 2x1 U, 1x1 Y, 2x1 V, 1x1 Y samples.
kPixelFormatYUY2 =
9, // 16bpp interleaved 1x1 Y, 2x1 U, 1x1 Y, 2x1 V samples.
kPixelFormatARGB = 10, // 32bpp BGRA (byte-order), 1 plane.
kPixelFormatXRGB = 11, // 24bpp BGRX (byte-order), 1 plane.
kPixelFormatRGB24 = 12, // 24bpp BGR (byte-order), 1 plane.
kPixelFormatMJPEG = 14, // MJPEG compressed.
// The P* in the formats below designates the number of bits per pixel
// component. I.e. P9 is 9-bits per pixel component, P10 is 10-bits per pixel
// component, etc.
kPixelFormatYUV420P9 = 16,
kPixelFormatYUV420P10 = 17,
kPixelFormatYUV422P9 = 18,
kPixelFormatYUV422P10 = 19,
kPixelFormatYUV444P9 = 20,
kPixelFormatYUV444P10 = 21,
kPixelFormatYUV420P12 = 22,
kPixelFormatYUV422P12 = 23,
kPixelFormatYUV444P12 = 24,
kPixelFormatY16 = 26, // single 16bpp plane.
kPixelFormatABGR = 27, // 32bpp RGBA (byte-order), 1 plane.
kPixelFormatXBGR = 28, // 24bpp RGBX (byte-order), 1 plane.
// 15bpp YUV planar 1x1 Y, 2x2 interleaved UV, 10 bits per channel.
// data in the high bits, zeros in the low bits, little-endian.
kPixelFormatP010LE = 29,
kPixelFormatXR30 =
30, // 32bpp BGRX, 10 bits per channel, 2 bits ignored, 1 plane
kPixelFormatXB30 =
31, // 32bpp RGBX, 10 bits per channel, 2 bits ignored, 1 plane
kPixelFormatBGRA = 32, // 32bpp ARGB (byte-order), 1 plane.
kPixelFormatRGBAF16 = 33, // Half float RGBA, 1 plane.
[MinVersion=1]
kPixelFormatI422A = 34, // 24bpp YUVA planar 1x1 Y, 2x1 UV, 1x1 A samples.
[MinVersion=1]
kPixelFormatI444A = 35, // 32bpp YUVA planar, no subsampling.
// YUVA planar, 10 bits per pixel component.
[MinVersion=1] kPixelFormatYUV420AP10 = 36,
[MinVersion=1] kPixelFormatYUV422AP10 = 37,
[MinVersion=1] kPixelFormatYUV444AP10 = 38,
// 20bpp YUVA planar 1x1 Y, 2x2 interleaved UV, 1x1 A samples.
[MinVersion=2] kPixelFormatNV12A = 39,
// 16bpp YUV planar 1x1 Y, 2x1 interleaved UV, 8 bits per channel.
[MinVersion=3] kPixelFormatNV16 = 40,
// 24bpp YUV planar 1x1 Y, 1x1 interleaved UV, 8 bits per channel.
[MinVersion=3] kPixelFormatNV24 = 41,
// 20bpp YUV planar 1x1 Y, 2x1 interleaved UV, 16 bits per channel.
// data in the high bits, zeros in the low bits, little-endian.
[MinVersion=3] kPixelFormatP210LE = 42,
// 30bpp YUV planar 1x1 Y, 1x1 interleaved UV, 16 bits per channel.
// data in the high bits, zeros in the low bits, little-endian.
[MinVersion=3] kPixelFormatP410LE = 43,
};
// Based on |gfx.mojom.GpuMemoryBufferHandle| but does not depend on
// |gfx.mojom.GpuMemoryBufferPlatformHandle|.
// Next min field ID: 2
struct NativeGpuMemoryBufferHandle {
gfx.mojom.GpuMemoryBufferId id@0;
NativePixmapHandle platform_handle@1;
};
// Based on |media.mojom.VideoFrameMetadata| but does not depend on
// |media.mojom.VideoTransformation|.
// Next min field ID: 3
// Next version: 2
struct VideoFrameMetadata {
bool protected_video@0;
bool hw_protected@1;
[MinVersion=1] bool needs_detiling@2;
};
// Based on |media.mojom.VideoFrame| but does not depend on
// |media.mojom.VideoPixelFormat|, |media.mojom.VideoFrameData|,
// |media.mojom.VideoFrameMetadata|, |gfx.mojom.ColorSpace| or
// |gfx.mojom.HDRMetadata|.
// Next min field ID: 9
struct VideoFrame {
// Format of the frame.
VideoPixelFormat format@0;
// Width and height of the video frame, in pixels.
gfx.mojom.Size coded_size@1;
// Visible size of the frame.
gfx.mojom.Rect visible_rect@2;
// Natural size of the frame.
gfx.mojom.Size natural_size@3;
// Timestamp in microseconds of the associated frame.
mojo_base.mojom.TimeDelta timestamp@4;
// Contents of the video frame.
NativeGpuMemoryBufferHandle gpu_memory_buffer_handle@5;
// Extra properties associated with the VideoFrame.
VideoFrameMetadata metadata@6;
ColorSpace color_space@7;
HDRMetadata? hdr_metadata@8;
};
// Maps to |media.mojom.WaitingReason|.
[Extensible]
enum WaitingReason {
// The playback cannot start because "Media Data May Contain Encrypted Blocks"
// and no CDM is available. The playback will start after a CDM is set. See
// https://www.w3.org/TR/encrypted-media/#media-may-contain-encrypted-blocks
[Default] kNoCdm,
// The playback cannot proceed because some decryption key is not available.
// This could happen when the license exchange is delayed or failed. The
// playback will resume after the decryption key becomes available.
// See https://www.w3.org/TR/encrypted-media/#encrypted-block-encountered
kNoDecryptionKey,
// The playback cannot proceed because the decoder has lost its state, e.g.
// information about reference frames. Usually this only happens to hardware
// decoders. To recover from this state, reset the decoder and start decoding
// from a key frame, which can typically be accomplished by a pipeline seek.
kDecoderStateLost,
// The playback cannot proceed because the secure output surface is gone. This
// can happen when user backgrounds the page when it's playing secure content.
[MinVersion=1] kSecureSurfaceLost,
};
// Based on |media.mojom.MediaLogRecord|.
// Next min field ID: 4
struct MediaLogRecord {
[Extensible]
enum Type {
// See media/base/media_log_message_levels.h for info.
[Default] kMessage,
// See media/base/media_log_properties.h for info.
kMediaPropertyChange,
// See media/base/media_log_events.h for info.
kMediaEventTriggered,
kMediaStatus,
};
int32 id@0;
Type type@1;
mojo_base.mojom.DictionaryValue params@2;
mojo_base.mojom.TimeTicks time@3;
};
[Extensible]
enum DecryptStatus {
kSuccess,
kNoKey,
[Default] kFailure,
};
[Extensible]
enum CdmContextEvent {
// We use |kHasAdditionalUsableKey| as the default since this maps to an
// existing enum in Chrome that only has the two values below. Receiving an
// event for |kHasAdditionalUsableKey| is always safe because it is a benign
// indicator that if something was waiting for a key, it should check again.
// It is not an indicator that the key it actually wanted is ready.
[Default] kHasAdditionalUsableKey,
kHardwareContextReset,
};

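For clarity on the SupportedVideoDecoderConfig matching rules documented above (inclusive profile and coded-size ranges, plus the two encryption flags), here is an illustrative C++ predicate; Chromium has its own helper for this, so this sketch only spells out the documented semantics:

#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_decoder_config.h"
#include "ui/gfx/geometry/size.h"

bool ConfigIsSupported(const media::SupportedVideoDecoderConfig& supported,
                       const media::VideoDecoderConfig& config) {
  // Profile range is inclusive.
  if (config.profile() < supported.profile_min ||
      config.profile() > supported.profile_max) {
    return false;
  }
  // Coded-size range is inclusive in each dimension.
  const gfx::Size& size = config.coded_size();
  if (size.width() < supported.coded_size_min.width() ||
      size.height() < supported.coded_size_min.height() ||
      size.width() > supported.coded_size_max.width() ||
      size.height() > supported.coded_size_max.height()) {
    return false;
  }
  // |allow_encrypted| permits encrypted configs; |require_encrypted| rejects
  // clear ones.
  const bool encrypted = config.is_encrypted();
  if (encrypted && !supported.allow_encrypted)
    return false;
  if (!encrypted && supported.require_encrypted)
    return false;
  return true;
}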
File diff suppressed because it is too large

File diff suppressed because it is too large

@ -11,6 +11,9 @@ import "mojo/public/mojom/base/unguessable_token.mojom";
import "ui/gfx/geometry/mojom/geometry.mojom";
import "ui/gfx/mojom/color_space.mojom";
[EnableIf=allow_oop_video_decoder]
import "media/mojo/mojom/cdm_context_for_oopvd.mojom";
// Serializable rule for matching VideoDecoderConfigs.
struct SupportedVideoDecoderConfig {
// Range of VideoCodecProfiles to match, inclusive.
@ -59,11 +62,14 @@ interface VideoFrameHandleReleaser {
// A Mojo equivalent of media::VideoDecoder. In practice, this is used for
// hardware decode offloading; in this case the client is a <video> tag running
// in a renderer, and the implementation is running in the GPU process.
// in a renderer, and the implementation is running in the GPU process. With
// the ChromeOS/Linux out-of-process video decoding feature, the client may also
// be the GPU process with the implementations running in a utility process.
interface VideoDecoder {
// Returns a list of supported configs as well as the decoder ID for the decoder
// which supports them. It is expected that Initialize() will fail for any config
// that does not match an entry in this list.
// Returns a list of supported configs as well as the decoder ID for the
// decoder which supports them. It is expected that
// Initialize()/InitializeWithCdmContext() will fail for any config that does
// not match an entry in this list.
//
// May be called before Construct().
[Sync]
@ -90,11 +96,8 @@ interface VideoDecoder {
//
// |command_buffer_id|, when present, identifies a CommandBufferStub that
// the VideoDecoder can use for GL operations. Implementations that require GL
// will fail Initialize() if |command_buffer_id| is not provided.
//
// |implementation| selects the underlying VideoDecoder implementation. Not
// all implementations are supported. Initialize() will fail if
// |implementation| is not supported.
// will fail Initialize()/InitializeWithCdmContext() if |command_buffer_id| is
// not provided.
//
// TODO(sandersd): Rename to Initialize() if/when
// media::VideoDecoder::Initialize() is renamed to Configure().
@ -106,9 +109,7 @@ interface VideoDecoder {
CommandBufferId? command_buffer_id,
gfx.mojom.ColorSpace target_color_space);
// Configure (or reconfigure) the decoder. This must be called before decoding
// any frames, and must not be called while there are pending Initialize(),
// Decode(), or Reset() requests.
// Configure (or reconfigure) the decoder.
//
// If |low_delay| is true, the decoder must output frames as soon as possible;
// in particular, it must not wait for another Decode() request, except as
@ -122,6 +123,16 @@ interface VideoDecoder {
//
// |cdm_id| must refer to a valid CDM if |config.is_encrypted()|. It is not
// used for unencrypted streams.
//
// Note about configuration:
//
// A VideoDecoder implementation should provide exactly one way to be
// configured, either Initialize() or InitializeWithCdmContext() but not
// both. The one that's not provided should be either rejected or a no-op.
// Regardless of which way is provided, the VideoDecoder must be configured
// before decoding any frames, and it should not be re-configured while there
// are pending Initialize()/InitializeWithCdmContext(), Decode(), or Reset()
// requests.
Initialize(VideoDecoderConfig config, bool low_delay,
mojo_base.mojom.UnguessableToken? cdm_id)
=> (DecoderStatus status,
@ -129,10 +140,45 @@ interface VideoDecoder {
int32 max_decode_requests,
VideoDecoderType decoder_type);
// Request decoding of exactly one frame or an EOS buffer. This must not be
// called while there are pending Initialize(), Reset(), or Decode(EOS)
// Like Initialize() with two differences to address the requirements of the
// intended use case (out-of-process video decoding):
//
// - Accepts a |cdm_context| instead of a |cdm_id|. This allows the
// VideoDecoder implementation to talk to a CdmContext that lives in a
// different process. Note that |cdm_context| is required for the first
// InitializeWithCdmContext() call that sets up encryption and is ignored on
// subsequent calls.
//
// - As part of the reply callback, returns |needs_transcryption| which tells
// the client whether it needs to do transcryption for encrypted content
// before sending it to the decoder.
//
// Note about configuration:
//
// A VideoDecoder implementation should provide exactly one way to be
// configured, either Initialize() or InitializeWithCdmContext() but not
// both. The one that's not provided should be either rejected or a no-op.
// Regardless of which way is provided, the VideoDecoder must be configured
// before decoding any frames, and it should not be re-configured while there
// are pending Initialize()/InitializeWithCdmContext(), Decode(), or Reset()
// requests.
//
// TODO(crbug.com/195769334): consider passing |cdm_context| in Construct()
// instead of InitializeWithCdmContext().
// TODO(crbug.com/347331029): merge with the Initialize() method.
[EnableIf=allow_oop_video_decoder]
InitializeWithCdmContext(VideoDecoderConfig config, bool low_delay,
pending_remote<CdmContextForOOPVD>? cdm_context)
=> (DecoderStatus status,
bool needs_bitstream_conversion,
int32 max_decode_requests,
VideoDecoderType decoder_type,
bool needs_transcryption);
// Request decoding of exactly one frame or an EOS buffer. This must not be
// called while there are pending Initialize()/InitializeWithCdmContext(),
// Reset(), or Decode(EOS) requests.
//
// Implementations must eventually execute the callback, even if Decode() is
// not called again. It is not required that the decode status match the
// actual result of decoding the buffer, only that decode errors are
@ -145,7 +191,7 @@ interface VideoDecoder {
// Reset the decoder. All ongoing Decode() requests must be completed or
// aborted before executing the callback. This must not be called while there
// is a pending Initialize() request.
// is a pending Initialize()/InitializeWithCdmContext() request.
Reset() => ();
// Inform the decoder that new OverlayInfo is available.

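A minimal C++ sketch (helper name hypothetical) of the GPU-process proxy driving the OOP-VD initialization path described above; the InitializeWithCdmContextCallback alias is generated from the mojom, and its reply carries |needs_transcryption| in addition to the usual Initialize() results:

#include "media/base/video_decoder_config.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/remote.h"

void InitializeOopDecoder(
    mojo::Remote<media::mojom::VideoDecoder>& oop_decoder,
    const media::VideoDecoderConfig& config,
    mojo::PendingRemote<media::mojom::CdmContextForOOPVD> cdm_context,
    media::mojom::VideoDecoder::InitializeWithCdmContextCallback on_done) {
  // |cdm_context| is only needed on the first call that sets up encryption;
  // it may be null for clear content.
  oop_decoder->InitializeWithCdmContext(config, /*low_delay=*/false,
                                        std::move(cdm_context),
                                        std::move(on_done));
}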
@ -0,0 +1,30 @@
// Copyright 2021 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
module media.mojom;
import "gpu/ipc/common/gpu_feature_info.mojom";
import "media/mojo/mojom/interface_factory.mojom";
import "sandbox/policy/mojom/sandbox.mojom";
// A VideoDecoderFactoryProcess is intended to be hosted in a utility process.
// The client is expected to be the browser process. The intended usage is as
// follows:
//
// 1) The browser process receives a request to bind a
// pending_receiver<InterfaceFactory>.
//
// 2) The browser process starts a utility process to bind a
// pending_receiver<VideoDecoderFactoryProcess>. It then uses this
// connection to call InitializeVideoDecoderFactory() with the
// pending_receiver<InterfaceFactory> from (1).
[ServiceSandbox=sandbox.mojom.Sandbox.kHardwareVideoDecoding,
EnableIf=allow_oop_video_decoder]
interface VideoDecoderFactoryProcess {
// Initializes an InterfaceFactory using |gpu_feature_info| to
// restrict the supported video decode configurations.
InitializeVideoDecoderFactory(
gpu.mojom.GpuFeatureInfo gpu_feature_info,
pending_receiver<InterfaceFactory> receiver);
};

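A minimal browser-process C++ sketch of steps (1)-(2) above (the display name, local names, and the generated header path are illustrative assumptions, not taken from this CL):

#include "content/public/browser/service_process_host.h"
#include "gpu/config/gpu_feature_info.h"
#include "media/mojo/mojom/interface_factory.mojom.h"
#include "media/mojo/mojom/video_decoder_factory_process.mojom.h"  // Header path assumed.
#include "mojo/public/cpp/bindings/remote.h"

void LaunchVideoDecoderFactory(
    const gpu::GpuFeatureInfo& gpu_feature_info,
    mojo::PendingReceiver<media::mojom::InterfaceFactory> factory_receiver) {
  // (2) Start a utility process hosting VideoDecoderFactoryProcess; the
  // kHardwareVideoDecoding sandbox comes from the [ServiceSandbox] attribute.
  mojo::Remote<media::mojom::VideoDecoderFactoryProcess> process;
  content::ServiceProcessHost::Launch(
      process.BindNewPipeAndPassReceiver(),
      content::ServiceProcessHost::Options()
          .WithDisplayName("Video Decoder")
          .Pass());

  // Hand over the InterfaceFactory receiver from (1), restricted by
  // |gpu_feature_info|.
  process->InitializeVideoDecoderFactory(gpu_feature_info,
                                         std::move(factory_receiver));

  // |process| must be kept alive for as long as the utility process should
  // stay up; this sketch drops it at scope exit for brevity.
}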
@ -213,11 +213,12 @@ component("services") {
"stable_video_decoder_service.h",
]
public_deps += [ "//media/mojo/mojom/stable:stable_video_decoder" ]
public_deps += [ "//media/mojo/mojom" ]
deps += [
"//components/viz/common",
"//media/gpu/chromeos",
"//media/mojo/common",
]
}
@ -326,7 +327,7 @@ source_set("unit_tests") {
}
if ((is_chromeos || is_linux) && (use_vaapi || use_v4l2_codec)) {
sources += [ "stable_video_decoder_service_unittest.cc" ]
sources += [ "oop_video_decoder_service_unittest.cc" ]
deps += [ "//gpu:test_support" ]
}

@ -77,7 +77,7 @@ VideoDecoderTraits::VideoDecoderTraits(
const gfx::ColorSpace* target_color_space,
GetConfigCacheCB get_cached_configs_cb,
GetCommandBufferStubCB get_command_buffer_stub_cb,
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder)
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder)
: task_runner(std::move(task_runner)),
media_log(std::move(media_log)),
request_overlay_info_cb(request_overlay_info_cb),
@ -204,9 +204,8 @@ GpuMojoMediaClient::GetSupportedVideoDecoderConfigs() {
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void GpuMojoMediaClient::NotifyDecoderSupportKnown(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<
void(mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb) {
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb) {
#if BUILDFLAG(USE_VAAPI) || BUILDFLAG(USE_V4L2_CODEC)
// TODO(b/195769334): this call should ideally be guarded only by
// BUILDFLAG(ALLOW_OOP_VIDEO_DECODER) because eventually, the GPU process
@ -227,7 +226,7 @@ std::unique_ptr<VideoDecoder> GpuMojoMediaClient::CreateVideoDecoder(
mojom::CommandBufferIdPtr command_buffer_id,
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space,
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder) {
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder) {
// Always respect GPU features.
if (gpu_preferences_.disable_accelerated_video_decode ||
(gpu_feature_info_

@ -46,7 +46,7 @@ struct VideoDecoderTraits {
// Android uses this twice.
GetCommandBufferStubCB get_command_buffer_stub_cb;
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder;
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder;
VideoDecoderTraits(
scoped_refptr<base::SequencedTaskRunner> task_runner,
@ -55,7 +55,7 @@ struct VideoDecoderTraits {
const gfx::ColorSpace* target_color_space,
GetConfigCacheCB get_cached_configs_cb,
GetCommandBufferStubCB get_command_buffer_stub_cb,
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder);
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder);
~VideoDecoderTraits();
};
@ -109,9 +109,9 @@ class MEDIA_MOJO_EXPORT GpuMojoMediaClient : public MojoMediaClient {
scoped_refptr<base::SequencedTaskRunner> task_runner) final;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void NotifyDecoderSupportKnown(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<void(
mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb) final;
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb)
final;
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
std::unique_ptr<VideoDecoder> CreateVideoDecoder(
scoped_refptr<base::SequencedTaskRunner> task_runner,
@ -119,8 +119,7 @@ class MEDIA_MOJO_EXPORT GpuMojoMediaClient : public MojoMediaClient {
mojom::CommandBufferIdPtr command_buffer_id,
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space,
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder)
final;
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder) final;
std::unique_ptr<CdmFactory> CreateCdmFactory(
mojom::FrameInterfaceFactory* interface_provider) final;
@ -158,9 +157,9 @@ class MEDIA_MOJO_EXPORT GpuMojoMediaClient : public MojoMediaClient {
// This function is thread- and sequence-safe. |cb| is always called on the
// same sequence as NotifyPlatformDecoderSupport().
virtual void NotifyPlatformDecoderSupport(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<
void(mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb) = 0;
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)>
cb) = 0;
#endif
// ------------------- [ Optional implementations below ] -------------------

@ -34,15 +34,8 @@ VideoDecoderType GetActualPlatformDecoderImplementation(
return VideoDecoderType::kUnknown;
}
switch (media::GetOutOfProcessVideoDecodingMode()) {
case media::OOPVDMode::kEnabledWithGpuProcessAsProxy:
return VideoDecoderType::kOutOfProcess;
case media::OOPVDMode::kEnabledWithoutGpuProcessAsProxy:
// The browser process ensures that this path is never reached for this
// OOP-VD mode.
NOTREACHED();
case media::OOPVDMode::kDisabled:
break;
if (IsOutOfProcessVideoDecodingEnabled()) {
return VideoDecoderType::kOutOfProcess;
}
#if BUILDFLAG(USE_VAAPI)
@ -108,9 +101,9 @@ class GpuMojoMediaClientCrOS final : public GpuMojoMediaClient {
}
void NotifyPlatformDecoderSupport(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<void(
mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb) final {
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb)
final {
switch (GetActualPlatformDecoderImplementation(gpu_preferences_)) {
case VideoDecoderType::kOutOfProcess:
case VideoDecoderType::kVaapi:

@ -30,15 +30,8 @@ VideoDecoderType GetPreferredLinuxDecoderImplementation() {
return VideoDecoderType::kUnknown;
}
switch (media::GetOutOfProcessVideoDecodingMode()) {
case media::OOPVDMode::kEnabledWithGpuProcessAsProxy:
return VideoDecoderType::kOutOfProcess;
case media::OOPVDMode::kEnabledWithoutGpuProcessAsProxy:
// The browser process ensures that this path is never reached for this
// OOP-VD mode.
NOTREACHED();
case media::OOPVDMode::kDisabled:
break;
if (IsOutOfProcessVideoDecodingEnabled()) {
return VideoDecoderType::kOutOfProcess;
}
#if BUILDFLAG(USE_VAAPI)
@ -218,9 +211,9 @@ class GpuMojoMediaClientLinux final : public GpuMojoMediaClient {
}
void NotifyPlatformDecoderSupport(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<void(
mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb) final {
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb)
final {
switch (
GetActualPlatformDecoderImplementation(gpu_preferences_, gpu_info_)) {
case VideoDecoderType::kOutOfProcess:

@ -168,8 +168,7 @@ void InterfaceFactoryImpl::CreateAudioDecoder(
void InterfaceFactoryImpl::CreateVideoDecoder(
mojo::PendingReceiver<mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) {
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder) {
DVLOG(2) << __func__;
#if BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
@ -194,9 +193,9 @@ void InterfaceFactoryImpl::CreateVideoDecoder(
}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void InterfaceFactoryImpl::CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) {
void InterfaceFactoryImpl::CreateVideoDecoderWithTracker(
mojo::PendingReceiver<mojom::VideoDecoder> receiver,
mojo::PendingRemote<mojom::VideoDecoderTracker> tracker) {
// The browser process ensures that this is not called in the GPU process.
NOTREACHED();
}
@ -452,8 +451,7 @@ void InterfaceFactoryImpl::OnCdmServiceInitialized(
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void InterfaceFactoryImpl::FinishCreatingVideoDecoder(
mojo::PendingReceiver<mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) {
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder) {
#if BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
video_decoder_receivers_.Add(std::make_unique<MojoVideoDecoderService>(
mojo_media_client_, &cdm_service_context_,

@ -54,12 +54,11 @@ class InterfaceFactoryImpl final
mojo::PendingReceiver<mojom::AudioDecoder> receiver) final;
void CreateVideoDecoder(
mojo::PendingReceiver<mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) final;
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder) final;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) final;
void CreateVideoDecoderWithTracker(
mojo::PendingReceiver<mojom::VideoDecoder> receiver,
mojo::PendingRemote<mojom::VideoDecoderTracker> tracker) final;
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateAudioEncoder(
@ -128,8 +127,7 @@ class InterfaceFactoryImpl final
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void FinishCreatingVideoDecoder(
mojo::PendingReceiver<mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder);
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder);
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
// Must be declared before the receivers below because the bound objects might

@ -46,7 +46,7 @@ void MojoCdmServiceContext::UnregisterCdm(
cdm_services_.erase(cdm_id);
}
#if BUILDFLAG(IS_CHROMEOS)
#if BUILDFLAG(IS_CHROMEOS) && BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
base::UnguessableToken MojoCdmServiceContext::RegisterRemoteCdmContext(
chromeos::RemoteCdmContext* remote_context) {
DCHECK(remote_context);
@ -63,7 +63,7 @@ void MojoCdmServiceContext::UnregisterRemoteCdmContext(
DCHECK(remote_cdm_contexts_.count(cdm_id));
remote_cdm_contexts_.erase(cdm_id);
}
#endif // BUILDFLAG(IS_CHROMEOS)
#endif // BUILDFLAG(IS_CHROMEOS) && BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
std::unique_ptr<CdmContextRef> MojoCdmServiceContext::GetCdmContextRef(
const base::UnguessableToken& cdm_id) {
@ -81,12 +81,12 @@ std::unique_ptr<CdmContextRef> MojoCdmServiceContext::GetCdmContextRef(
}
}
#if BUILDFLAG(IS_CHROMEOS)
#if BUILDFLAG(IS_CHROMEOS) && BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
// Try the remote contexts now.
auto remote_context = remote_cdm_contexts_.find(cdm_id);
if (remote_context != remote_cdm_contexts_.end())
return remote_context->second->GetCdmContextRef();
#endif // BUILDFLAG(IS_CHROMEOS)
#endif // BUILDFLAG(IS_CHROMEOS) && BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
LOG(ERROR) << "CdmContextRef cannot be obtained for CDM ID: " << cdm_id;
return nullptr;

@ -18,9 +18,9 @@
#include "media/media_buildflags.h"
#include "media/mojo/services/media_mojo_export.h"
#if BUILDFLAG(IS_CHROMEOS)
#if BUILDFLAG(IS_CHROMEOS) && BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
#include "chromeos/components/cdm_factory_daemon/remote_cdm_context.h"
#endif // BUILDFLAG(IS_CHROMEOS)
#endif // BUILDFLAG(IS_CHROMEOS) && BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
namespace media {
@ -43,7 +43,7 @@ class MEDIA_MOJO_EXPORT MojoCdmServiceContext {
// Unregisters the CDM. Must be called before the CDM is destroyed.
void UnregisterCdm(const base::UnguessableToken& cdm_id);
#if BUILDFLAG(IS_CHROMEOS)
#if BUILDFLAG(IS_CHROMEOS) && BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
// Registers the |remote_context| and returns a unique (per-process) CDM ID.
// This is used with out-of-process video decoding with HWDRM. We run
// MojoCdmServiceContext in the GPU process which works with MojoCdmService.
@ -56,7 +56,7 @@ class MEDIA_MOJO_EXPORT MojoCdmServiceContext {
// Unregisters the RemoteCdmContext. Must be called before the
// RemoteCdmContext is destroyed.
void UnregisterRemoteCdmContext(const base::UnguessableToken& cdm_id);
#endif // BUILDFLAG(IS_CHROMEOS)
#endif // BUILDFLAG(IS_CHROMEOS) && BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
// Returns the CdmContextRef associated with |cdm_id|.
std::unique_ptr<CdmContextRef> GetCdmContextRef(
@ -70,12 +70,12 @@ class MEDIA_MOJO_EXPORT MojoCdmServiceContext {
std::map<base::UnguessableToken, raw_ptr<MojoCdmService, CtnExperimental>>
cdm_services_ GUARDED_BY(cdm_services_lock_);
#if BUILDFLAG(IS_CHROMEOS)
#if BUILDFLAG(IS_CHROMEOS) && BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
// A map between CDM ID and RemoteCdmContext.
std::map<base::UnguessableToken,
raw_ptr<chromeos::RemoteCdmContext, CtnExperimental>>
remote_cdm_contexts_;
#endif // BUILDFLAG(IS_CHROMEOS)
#endif // BUILDFLAG(IS_CHROMEOS) && BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
};
} // namespace media

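A minimal GPU-process C++ sketch (variable and function names hypothetical) of the RemoteCdmContext flow described above: register the remote context to obtain a per-process CDM ID, resolve it back to a CdmContext when the decoder needs it, and unregister before the context goes away:

#include <memory>

#include "base/unguessable_token.h"
#include "chromeos/components/cdm_factory_daemon/remote_cdm_context.h"
#include "media/base/cdm_context.h"
#include "media/mojo/services/mojo_cdm_service_context.h"

void UseRemoteCdmContext(media::MojoCdmServiceContext* service_context,
                         chromeos::RemoteCdmContext* remote_context) {
  const base::UnguessableToken cdm_id =
      service_context->RegisterRemoteCdmContext(remote_context);

  // The CDM ID can be handed to mojom::VideoDecoder::Initialize(); the decoder
  // then resolves it back to a CdmContext through GetCdmContextRef().
  std::unique_ptr<media::CdmContextRef> cdm_context_ref =
      service_context->GetCdmContextRef(cdm_id);
  if (cdm_context_ref) {
    media::CdmContext* cdm_context = cdm_context_ref->GetCdmContext();
    // ... decrypt/decode protected buffers with |cdm_context| ...
  }

  // Must happen before |remote_context| is destroyed.
  service_context->UnregisterRemoteCdmContext(cdm_id);
}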
@ -49,9 +49,8 @@ VideoDecoderType MojoMediaClient::GetDecoderImplementationType() {
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void MojoMediaClient::NotifyDecoderSupportKnown(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<
void(mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb) {
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb) {
std::move(cb).Run(std::move(oop_video_decoder));
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
@ -62,7 +61,7 @@ std::unique_ptr<VideoDecoder> MojoMediaClient::CreateVideoDecoder(
mojom::CommandBufferIdPtr command_buffer_id,
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space,
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder) {
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder) {
return nullptr;
}

@ -21,7 +21,6 @@
#include "media/mojo/mojom/audio_decoder.mojom.h"
#include "media/mojo/mojom/frame_interface_factory.mojom.h"
#include "media/mojo/mojom/renderer_extensions.mojom.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
@ -82,9 +81,8 @@ class MEDIA_MOJO_EXPORT MojoMediaClient {
//
// |cb| is always called on the same sequence as NotifyDecoderSupportKnown().
virtual void NotifyDecoderSupportKnown(
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder,
base::OnceCallback<
void(mojo::PendingRemote<stable::mojom::StableVideoDecoder>)> cb);
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder,
base::OnceCallback<void(mojo::PendingRemote<mojom::VideoDecoder>)> cb);
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
virtual std::unique_ptr<VideoDecoder> CreateVideoDecoder(
@ -93,7 +91,7 @@ class MEDIA_MOJO_EXPORT MojoMediaClient {
mojom::CommandBufferIdPtr command_buffer_id,
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space,
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder);
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder);
// Returns the Renderer to be used by MojoRendererService.
// TODO(hubbe): Find out whether we should pass in |target_color_space| here.

@ -124,8 +124,7 @@ class VideoFrameHandleReleaserImpl final
MojoVideoDecoderService::MojoVideoDecoderService(
MojoMediaClient* mojo_media_client,
MojoCdmServiceContext* mojo_cdm_service_context,
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
oop_video_decoder_pending_remote)
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder_pending_remote)
: mojo_media_client_(mojo_media_client),
mojo_cdm_service_context_(mojo_cdm_service_context),
oop_video_decoder_pending_remote_(
@ -481,6 +480,31 @@ void MojoVideoDecoderService::OnOverlayInfoChanged(
provide_overlay_info_cb_.Run(overlay_info);
}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void MojoVideoDecoderService::InitializeWithCdmContext(
const VideoDecoderConfig& config,
bool low_delay,
mojo::PendingRemote<mojom::CdmContextForOOPVD> cdm_context,
InitializeWithCdmContextCallback callback) {
// There are two cases:
//
// a) This MojoVideoDecoderService lives in the GPU process, in which case, it
// receives messages from renderer processes. Such processes are not
// supposed to use InitializeWithCdmContext(). They should use
// Initialize().
//
// b) This MojoVideoDecoderService lives in the utility process, in which
// case, it receives messages from the in-process OOPVideoDecoderService.
// The latter handles the InitializeWithCdmContext() calls and transforms
// them into Initialize() calls.
//
// In either case, MojoVideoDecoderService is not supposed to handle
// InitializeWithCdmContext() calls.
CHECK(mojo::IsInMessageDispatch());
mojo::ReportBadMessage("Unexpected call to InitializeWithCdmContext()");
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void MojoVideoDecoderService::OnDecoderRequestedOverlayInfo(
bool restart_for_transitions,
ProvideOverlayInfoCB provide_overlay_info_cb) {

@ -17,7 +17,6 @@
#include "media/base/decoder_status.h"
#include "media/base/overlay_info.h"
#include "media/base/video_decoder.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/mojo/services/mojo_media_client.h"
@ -43,8 +42,7 @@ class MEDIA_MOJO_EXPORT MojoVideoDecoderService final
explicit MojoVideoDecoderService(
MojoMediaClient* mojo_media_client,
MojoCdmServiceContext* mojo_cdm_service_context,
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
oop_video_decoder_remote);
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder_remote);
MojoVideoDecoderService(const MojoVideoDecoderService&) = delete;
MojoVideoDecoderService& operator=(const MojoVideoDecoderService&) = delete;
@ -68,6 +66,13 @@ class MEDIA_MOJO_EXPORT MojoVideoDecoderService final
void Decode(mojom::DecoderBufferPtr buffer, DecodeCallback callback) final;
void Reset(ResetCallback callback) final;
void OnOverlayInfoChanged(const OverlayInfo& overlay_info) final;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void InitializeWithCdmContext(
const VideoDecoderConfig& config,
bool low_delay,
mojo::PendingRemote<mojom::CdmContextForOOPVD> cdm_context,
InitializeWithCdmContextCallback callback) final;
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
private:
// Helper methods so that we can bind them with a weak pointer to avoid
@ -132,8 +137,7 @@ class MEDIA_MOJO_EXPORT MojoVideoDecoderService final
// just holds the PendingRemote in between the construction of the
// MojoVideoDecoderService and the call to
// |mojo_media_client_|->CreateVideoDecoder().
mojo::PendingRemote<stable::mojom::StableVideoDecoder>
oop_video_decoder_pending_remote_;
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder_pending_remote_;
InitializeCallback init_cb_;
ResetCallback reset_cb_;

@ -2,8 +2,6 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/mojo/services/stable_video_decoder_service.h"
#include <sys/mman.h>
#include "base/posix/eintr_wrapper.h"
@ -12,10 +10,12 @@
#include "components/viz/common/resources/shared_image_format.h"
#include "gpu/command_buffer/client/test_shared_image_interface.h"
#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/common/mojo_decoder_buffer_converter.h"
#include "media/mojo/mojom/media_log.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/services/stable_video_decoder_factory_service.h"
#include "media/mojo/services/stable_video_decoder_service.h"
#include "mojo/public/cpp/bindings/associated_receiver.h"
#include "mojo/public/cpp/bindings/remote.h"
#include "mojo/public/cpp/system/data_pipe.h"
@ -179,6 +179,13 @@ class MockVideoDecoder : public mojom::VideoDecoder {
bool low_delay,
const std::optional<base::UnguessableToken>& cdm_id,
InitializeCallback callback));
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
MOCK_METHOD4(InitializeWithCdmContext,
void(const VideoDecoderConfig& config,
bool low_delay,
mojo::PendingRemote<mojom::CdmContextForOOPVD> cdm_context,
InitializeWithCdmContextCallback callback));
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
MOCK_METHOD2(Decode,
void(mojom::DecoderBufferPtr buffer, DecodeCallback callback));
MOCK_METHOD1(Reset, void(ResetCallback callback));
@ -192,78 +199,76 @@ class MockVideoDecoder : public mojom::VideoDecoder {
std::unique_ptr<MojoDecoderBufferReader> mojo_decoder_buffer_reader_;
};
class MockStableVideoDecoderTracker
: public stable::mojom::StableVideoDecoderTracker {};
class MockVideoDecoderTracker : public mojom::VideoDecoderTracker {};
class MockStableVideoDecoderClient : public stable::mojom::VideoDecoderClient {
class MockVideoDecoderClient : public mojom::VideoDecoderClient {
public:
explicit MockStableVideoDecoderClient(
mojo::PendingAssociatedReceiver<stable::mojom::VideoDecoderClient>
explicit MockVideoDecoderClient(
mojo::PendingAssociatedReceiver<mojom::VideoDecoderClient>
pending_receiver)
: receiver_(this, std::move(pending_receiver)) {}
MockStableVideoDecoderClient(const MockStableVideoDecoderClient&) = delete;
MockStableVideoDecoderClient& operator=(const MockStableVideoDecoderClient&) =
delete;
~MockStableVideoDecoderClient() override = default;
MockVideoDecoderClient(const MockVideoDecoderClient&) = delete;
MockVideoDecoderClient& operator=(const MockVideoDecoderClient&) = delete;
~MockVideoDecoderClient() override = default;
// stable::mojom::VideoDecoderClient implementation.
MOCK_METHOD3(OnVideoFrameDecoded,
void(stable::mojom::VideoFramePtr frame,
bool can_read_without_stalling,
const base::UnguessableToken& release_token));
// mojom::VideoDecoderClient implementation.
MOCK_METHOD3(
OnVideoFrameDecoded,
void(const scoped_refptr<VideoFrame>& frame,
bool can_read_without_stalling,
const std::optional<base::UnguessableToken>& release_token));
MOCK_METHOD1(OnWaiting, void(WaitingReason reason));
MOCK_METHOD1(RequestOverlayInfo, void(bool restart_for_transitions));
private:
mojo::AssociatedReceiver<stable::mojom::VideoDecoderClient> receiver_;
mojo::AssociatedReceiver<mojom::VideoDecoderClient> receiver_;
};
class MockStableMediaLog : public stable::mojom::MediaLog {
class MockMediaLog : public mojom::MediaLog {
public:
explicit MockStableMediaLog(
mojo::PendingReceiver<stable::mojom::MediaLog> pending_receiver)
explicit MockMediaLog(mojo::PendingReceiver<mojom::MediaLog> pending_receiver)
: receiver_(this, std::move(pending_receiver)) {}
MockStableMediaLog(const MockStableMediaLog&) = delete;
MockStableMediaLog& operator=(const MockStableMediaLog&) = delete;
~MockStableMediaLog() override = default;
MockMediaLog(const MockMediaLog&) = delete;
MockMediaLog& operator=(const MockMediaLog&) = delete;
~MockMediaLog() override = default;
// stable::mojom::MediaLog implementation.
// mojom::MediaLog implementation.
MOCK_METHOD1(AddLogRecord, void(const MediaLogRecord& event));
private:
mojo::Receiver<stable::mojom::MediaLog> receiver_;
mojo::Receiver<mojom::MediaLog> receiver_;
};
// AuxiliaryEndpoints groups the endpoints that support the operation of a
// StableVideoDecoderService and that come from the Construct() call. That way,
// OOPVideoDecoderService and that come from the Construct() call. That way,
// tests can easily poke at one endpoint and set expectations on the other. For
// example, a test might want to simulate the scenario in which a frame has been
// decoded by the underlying mojom::VideoDecoder. In this case, the test can
// call |video_decoder_client_remote|->OnVideoFrameDecoded() and then set an
// expectation on |mock_stable_video_decoder_client|->OnVideoFrameDecoded().
// expectation on |mock_video_decoder_client|->OnVideoFrameDecoded().
struct AuxiliaryEndpoints {
// |video_decoder_client_remote| is the client that the underlying
// mojom::VideoDecoder receives through the Construct() call. Tests can make
// calls on it and those calls should ultimately be received by the
// |mock_stable_video_decoder_client|.
// |mock_video_decoder_client|.
mojo::AssociatedRemote<mojom::VideoDecoderClient> video_decoder_client_remote;
std::unique_ptr<StrictMock<MockStableVideoDecoderClient>>
mock_stable_video_decoder_client;
std::unique_ptr<StrictMock<MockVideoDecoderClient>> mock_video_decoder_client;
// |media_log_remote| is the MediaLog that the underlying mojom::VideoDecoder
// receives through the Construct() call. Tests can make calls on it and those
// calls should ultimately be received by the |mock_stable_media_log|.
// calls should ultimately be received by the |mock_media_log|.
mojo::Remote<mojom::MediaLog> media_log_remote;
std::unique_ptr<StrictMock<MockStableMediaLog>> mock_stable_media_log;
std::unique_ptr<StrictMock<MockMediaLog>> mock_media_log;
// Tests can use |stable_video_frame_handle_releaser_remote| to simulate
// Tests can use |video_frame_handle_releaser_remote| to simulate
// releasing a VideoFrame.
// |mock_video_frame_handle_releaser| is the VideoFrameHandleReleaser that's
// set up when the underlying mojom::VideoDecoder receives a Construct() call.
// Tests can make calls on |stable_video_frame_handle_releaser_remote| and
// Tests can make calls on |video_frame_handle_releaser_remote| and
// they should ultimately be received by the
// |mock_video_frame_handle_releaser|.
mojo::Remote<stable::mojom::VideoFrameHandleReleaser>
stable_video_frame_handle_releaser_remote;
mojo::Remote<mojom::VideoFrameHandleReleaser>
video_frame_handle_releaser_remote;
std::unique_ptr<StrictMock<MockVideoFrameHandleReleaser>>
mock_video_frame_handle_releaser;
@ -275,17 +280,16 @@ struct AuxiliaryEndpoints {
std::unique_ptr<MojoDecoderBufferReader> mojo_decoder_buffer_reader;
};
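To make the example in the comment above concrete: a test simulates a decoded frame by driving |video_decoder_client_remote| and setting a GMock expectation on |mock_video_decoder_client|. A rough sketch, assuming |endpoints| is the AuxiliaryEndpoints instance returned by the helper below and |frame| is a VideoFrame created by the test:

// Sketch only; mirrors the pattern used by the tests further down.
EXPECT_CALL(*endpoints->mock_video_decoder_client,
            OnVideoFrameDecoded(_, /*can_read_without_stalling=*/true, _));
endpoints->video_decoder_client_remote->OnVideoFrameDecoded(
    frame, /*can_read_without_stalling=*/true,
    base::UnguessableToken::Create());
endpoints->video_decoder_client_remote.FlushForTesting();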
// Calls Construct() on |stable_video_decoder_remote| and, if
// Calls Construct() on |video_decoder_remote| and, if
// |expect_construct_call| is true, expects a corresponding Construct() call on
// |mock_video_decoder| which is assumed to be the backing decoder of
// |stable_video_decoder_remote|. Returns nullptr if the expectations on
// |video_decoder_remote|. Returns nullptr if the expectations on
// |mock_video_decoder| are violated. Otherwise, returns an AuxiliaryEndpoints
// instance that contains the supporting endpoints that tests can use to
// interact with the auxiliary interfaces used by the
// |stable_video_decoder_remote|.
std::unique_ptr<AuxiliaryEndpoints> ConstructStableVideoDecoder(
mojo::Remote<stable::mojom::StableVideoDecoder>&
stable_video_decoder_remote,
// |video_decoder_remote|.
std::unique_ptr<AuxiliaryEndpoints> ConstructVideoDecoder(
mojo::Remote<mojom::VideoDecoder>& video_decoder_remote,
StrictMock<MockVideoDecoder>& mock_video_decoder,
bool expect_construct_call) {
constexpr gfx::ColorSpace kTargetColorSpace = gfx::ColorSpace::CreateSRGB();
@ -294,18 +298,17 @@ std::unique_ptr<AuxiliaryEndpoints> ConstructStableVideoDecoder(
DoConstruct(/*command_buffer_id=*/_,
/*target_color_space=*/kTargetColorSpace));
}
mojo::PendingAssociatedRemote<stable::mojom::VideoDecoderClient>
stable_video_decoder_client_remote;
auto mock_stable_video_decoder_client =
std::make_unique<StrictMock<MockStableVideoDecoderClient>>(
stable_video_decoder_client_remote
.InitWithNewEndpointAndPassReceiver());
mojo::PendingAssociatedRemote<mojom::VideoDecoderClient>
video_decoder_client_remote;
auto mock_video_decoder_client =
std::make_unique<StrictMock<MockVideoDecoderClient>>(
video_decoder_client_remote.InitWithNewEndpointAndPassReceiver());
mojo::PendingRemote<stable::mojom::MediaLog> stable_media_log_remote;
auto mock_stable_media_log = std::make_unique<StrictMock<MockStableMediaLog>>(
stable_media_log_remote.InitWithNewPipeAndPassReceiver());
mojo::PendingRemote<mojom::MediaLog> media_log_remote;
auto mock_media_log = std::make_unique<StrictMock<MockMediaLog>>(
media_log_remote.InitWithNewPipeAndPassReceiver());
mojo::Remote<stable::mojom::VideoFrameHandleReleaser>
mojo::Remote<mojom::VideoFrameHandleReleaser>
video_frame_handle_releaser_remote;
mojo::ScopedDataPipeConsumerHandle remote_consumer_handle;
@ -314,28 +317,29 @@ std::unique_ptr<AuxiliaryEndpoints> ConstructStableVideoDecoder(
GetDefaultDecoderBufferConverterCapacity(DemuxerStream::VIDEO),
&remote_consumer_handle);
stable_video_decoder_remote->Construct(
std::move(stable_video_decoder_client_remote),
std::move(stable_media_log_remote),
video_decoder_remote->Construct(
std::move(video_decoder_client_remote), std::move(media_log_remote),
video_frame_handle_releaser_remote.BindNewPipeAndPassReceiver(),
std::move(remote_consumer_handle), kTargetColorSpace);
stable_video_decoder_remote.FlushForTesting();
std::move(remote_consumer_handle), media::mojom::CommandBufferIdPtr(),
kTargetColorSpace);
video_decoder_remote.FlushForTesting();
if (!Mock::VerifyAndClearExpectations(&mock_video_decoder))
if (!Mock::VerifyAndClearExpectations(&mock_video_decoder)) {
return nullptr;
}
auto auxiliary_endpoints = std::make_unique<AuxiliaryEndpoints>();
auxiliary_endpoints->video_decoder_client_remote =
mock_video_decoder.TakeClientRemote();
auxiliary_endpoints->mock_stable_video_decoder_client =
std::move(mock_stable_video_decoder_client);
auxiliary_endpoints->mock_video_decoder_client =
std::move(mock_video_decoder_client);
auxiliary_endpoints->media_log_remote =
mock_video_decoder.TakeMediaLogRemote();
auxiliary_endpoints->mock_stable_media_log = std::move(mock_stable_media_log);
auxiliary_endpoints->mock_media_log = std::move(mock_media_log);
auxiliary_endpoints->stable_video_frame_handle_releaser_remote =
auxiliary_endpoints->video_frame_handle_releaser_remote =
std::move(video_frame_handle_releaser_remote);
auxiliary_endpoints->mock_video_frame_handle_releaser =
mock_video_decoder.TakeVideoFrameHandleReleaser();
@ -348,53 +352,51 @@ std::unique_ptr<AuxiliaryEndpoints> ConstructStableVideoDecoder(
return auxiliary_endpoints;
}
class StableVideoDecoderServiceTest : public testing::Test {
class OOPVideoDecoderServiceTest : public testing::Test {
public:
StableVideoDecoderServiceTest()
: stable_video_decoder_factory_service_(gpu::GpuFeatureInfo()) {
stable_video_decoder_factory_service_
OOPVideoDecoderServiceTest()
: oop_video_decoder_factory_service_(gpu::GpuFeatureInfo()) {
oop_video_decoder_factory_service_
.SetVideoDecoderCreationCallbackForTesting(
video_decoder_creation_cb_.Get());
}
StableVideoDecoderServiceTest(const StableVideoDecoderServiceTest&) = delete;
StableVideoDecoderServiceTest& operator=(
const StableVideoDecoderServiceTest&) = delete;
~StableVideoDecoderServiceTest() override = default;
OOPVideoDecoderServiceTest(const OOPVideoDecoderServiceTest&) = delete;
OOPVideoDecoderServiceTest& operator=(const OOPVideoDecoderServiceTest&) =
delete;
~OOPVideoDecoderServiceTest() override = default;
void SetUp() override {
mojo::PendingReceiver<stable::mojom::StableVideoDecoderFactory>
stable_video_decoder_factory_receiver;
stable_video_decoder_factory_remote_ =
mojo::Remote<stable::mojom::StableVideoDecoderFactory>(
stable_video_decoder_factory_receiver
.InitWithNewPipeAndPassRemote());
stable_video_decoder_factory_service_.BindReceiver(
std::move(stable_video_decoder_factory_receiver),
mojo::PendingReceiver<mojom::InterfaceFactory>
video_decoder_factory_receiver;
video_decoder_factory_remote_ = mojo::Remote<mojom::InterfaceFactory>(
video_decoder_factory_receiver.InitWithNewPipeAndPassRemote());
oop_video_decoder_factory_service_.BindReceiver(
std::move(video_decoder_factory_receiver),
/*disconnect_cb=*/base::DoNothing());
ASSERT_TRUE(stable_video_decoder_factory_remote_.is_connected());
ASSERT_TRUE(video_decoder_factory_remote_.is_connected());
test_sii_ = base::MakeRefCounted<gpu::TestSharedImageInterface>();
}
protected:
mojo::Remote<stable::mojom::StableVideoDecoder> CreateStableVideoDecoder(
mojo::Remote<mojom::VideoDecoder> CreateVideoDecoder(
std::unique_ptr<StrictMock<MockVideoDecoder>> dst_video_decoder,
mojo::PendingRemote<stable::mojom::StableVideoDecoderTracker> tracker) {
// Each CreateStableVideoDecoder() should result in exactly one call to the
mojo::PendingRemote<mojom::VideoDecoderTracker> tracker) {
// Each CreateVideoDecoder() should result in exactly one call to the
// video decoder creation callback, i.e., the
// StableVideoDecoderFactoryService should not re-use mojom::VideoDecoder
// OOPVideoDecoderFactoryService should not re-use mojom::VideoDecoder
// implementation instances.
EXPECT_CALL(video_decoder_creation_cb_, Run(_, _))
.WillOnce(Return(ByMove(std::move(dst_video_decoder))));
mojo::PendingReceiver<stable::mojom::StableVideoDecoder>
stable_video_decoder_receiver;
mojo::Remote<stable::mojom::StableVideoDecoder> video_decoder_remote(
stable_video_decoder_receiver.InitWithNewPipeAndPassRemote());
stable_video_decoder_factory_remote_->CreateStableVideoDecoder(
std::move(stable_video_decoder_receiver), std::move(tracker));
stable_video_decoder_factory_remote_.FlushForTesting();
if (!Mock::VerifyAndClearExpectations(&video_decoder_creation_cb_))
mojo::PendingReceiver<mojom::VideoDecoder> video_decoder_receiver;
mojo::Remote<mojom::VideoDecoder> video_decoder_remote(
video_decoder_receiver.InitWithNewPipeAndPassRemote());
video_decoder_factory_remote_->CreateVideoDecoderWithTracker(
std::move(video_decoder_receiver), std::move(tracker));
video_decoder_factory_remote_.FlushForTesting();
if (!Mock::VerifyAndClearExpectations(&video_decoder_creation_cb_)) {
return {};
}
return video_decoder_remote;
}
@ -402,76 +404,71 @@ class StableVideoDecoderServiceTest : public testing::Test {
StrictMock<base::MockRepeatingCallback<std::unique_ptr<
mojom::VideoDecoder>(MojoMediaClient*, MojoCdmServiceContext*)>>
video_decoder_creation_cb_;
StableVideoDecoderFactoryService stable_video_decoder_factory_service_;
mojo::Remote<stable::mojom::StableVideoDecoderFactory>
stable_video_decoder_factory_remote_;
mojo::Remote<stable::mojom::StableVideoDecoder> stable_video_decoder_remote_;
OOPVideoDecoderFactoryService oop_video_decoder_factory_service_;
mojo::Remote<mojom::InterfaceFactory> video_decoder_factory_remote_;
mojo::Remote<mojom::VideoDecoder> video_decoder_remote_;
scoped_refptr<gpu::TestSharedImageInterface> test_sii_;
};
// Tests that we can create multiple StableVideoDecoder implementation instances
// through the StableVideoDecoderFactory and that they can exist concurrently.
TEST_F(StableVideoDecoderServiceTest, FactoryCanCreateStableVideoDecoders) {
std::vector<mojo::Remote<stable::mojom::StableVideoDecoder>>
stable_video_decoder_remotes;
// Tests that we can create multiple VideoDecoder implementation instances
// through the InterfaceFactory and that they can exist concurrently.
TEST_F(OOPVideoDecoderServiceTest, FactoryCanCreateVideoDecoders) {
std::vector<mojo::Remote<mojom::VideoDecoder>> video_decoder_remotes;
constexpr size_t kNumConcurrentDecoders = 5u;
for (size_t i = 0u; i < kNumConcurrentDecoders; i++) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
stable_video_decoder_remotes.push_back(
std::move(stable_video_decoder_remote));
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
video_decoder_remotes.push_back(std::move(video_decoder_remote));
}
for (const auto& remote : stable_video_decoder_remotes) {
for (const auto& remote : video_decoder_remotes) {
ASSERT_TRUE(remote.is_bound());
ASSERT_TRUE(remote.is_connected());
}
}
// Tests that a call to stable::mojom::VideoDecoder::Construct() gets routed
// Tests that a call to mojom::VideoDecoder::Construct() gets routed
// correctly to the underlying mojom::VideoDecoder.
TEST_F(StableVideoDecoderServiceTest, StableVideoDecoderCanBeConstructed) {
TEST_F(OOPVideoDecoderServiceTest, VideoDecoderCanBeConstructed) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto* mock_video_decoder_raw = mock_video_decoder.get();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
ASSERT_TRUE(ConstructStableVideoDecoder(stable_video_decoder_remote,
*mock_video_decoder_raw,
/*expect_construct_call=*/true));
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
ASSERT_TRUE(ConstructVideoDecoder(video_decoder_remote,
*mock_video_decoder_raw,
/*expect_construct_call=*/true));
}
// Tests that if two calls to stable::mojom::VideoDecoder::Construct() are made,
// Tests that if two calls to mojom::VideoDecoder::Construct() are made,
// only one is routed to the underlying mojom::VideoDecoder.
TEST_F(StableVideoDecoderServiceTest,
StableVideoDecoderCannotBeConstructedTwice) {
TEST_F(OOPVideoDecoderServiceTest, VideoDecoderCannotBeConstructedTwice) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto* mock_video_decoder_raw = mock_video_decoder.get();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
EXPECT_TRUE(ConstructStableVideoDecoder(stable_video_decoder_remote,
*mock_video_decoder_raw,
/*expect_construct_call=*/true));
EXPECT_TRUE(ConstructStableVideoDecoder(stable_video_decoder_remote,
*mock_video_decoder_raw,
/*expect_construct_call=*/false));
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
EXPECT_TRUE(ConstructVideoDecoder(video_decoder_remote,
*mock_video_decoder_raw,
/*expect_construct_call=*/true));
EXPECT_TRUE(ConstructVideoDecoder(video_decoder_remote,
*mock_video_decoder_raw,
/*expect_construct_call=*/false));
}
// Tests that a call to stable::mojom::VideoDecoder::GetSupportedConfigs() gets
// Tests that a call to mojom::VideoDecoder::GetSupportedConfigs() gets
// routed correctly to the underlying mojom::VideoDecoder. Also tests that the
// underlying mojom::VideoDecoder's reply gets routed correctly back to the
// client.
TEST_F(StableVideoDecoderServiceTest,
StableVideoDecoderCanGetSupportedConfigs) {
TEST_F(OOPVideoDecoderServiceTest, VideoDecoderCanGetSupportedConfigs) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto* mock_video_decoder_raw = mock_video_decoder.get();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
StrictMock<base::MockOnceCallback<void(
const std::vector<SupportedVideoDecoderConfig>& supported_configs,
@ -500,9 +497,9 @@ TEST_F(StableVideoDecoderServiceTest,
EXPECT_CALL(get_supported_configs_cb_to_send, Run(_, kDecoderTypeToReplyWith))
.WillOnce(SaveArg<0>(&received_supported_configs));
stable_video_decoder_remote->GetSupportedConfigs(
video_decoder_remote->GetSupportedConfigs(
get_supported_configs_cb_to_send.Get());
stable_video_decoder_remote.FlushForTesting();
video_decoder_remote.FlushForTesting();
ASSERT_TRUE(Mock::VerifyAndClearExpectations(mock_video_decoder_raw));
std::move(received_get_supported_configs_cb)
@ -512,20 +509,20 @@ TEST_F(StableVideoDecoderServiceTest,
EXPECT_EQ(received_supported_configs, supported_configs_to_reply_with);
}
// Tests that a call to stable::mojom::VideoDecoder::Initialize() gets routed
// correctly to the underlying mojom::VideoDecoder. Also tests that when the
// underlying mojom::VideoDecoder calls the initialization callback, the call
// gets routed to the client.
TEST_F(StableVideoDecoderServiceTest, StableVideoDecoderCanBeInitialized) {
// Tests that a call to mojom::VideoDecoder::InitializeWithCdmContext() gets
// routed correctly to the underlying mojom::VideoDecoder as an Initialize()
// call. Also tests that when the underlying mojom::VideoDecoder calls the
// initialization callback, the call gets routed to the client.
TEST_F(OOPVideoDecoderServiceTest, VideoDecoderCanBeInitialized) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto* mock_video_decoder_raw = mock_video_decoder.get();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto auxiliary_endpoints = ConstructStableVideoDecoder(
stable_video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
auto auxiliary_endpoints =
ConstructVideoDecoder(video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
ASSERT_TRUE(auxiliary_endpoints);
const VideoDecoderConfig config_to_send = CreateValidVideoDecoderConfig();
@ -555,11 +552,11 @@ TEST_F(StableVideoDecoderServiceTest, StableVideoDecoderCanBeInitialized) {
EXPECT_CALL(initialize_cb_to_send,
Run(kDecoderStatus, kNeedsBitstreamConversion, kMaxDecodeRequests,
kDecoderType, /*needs_transcryption=*/false));
stable_video_decoder_remote->Initialize(
video_decoder_remote->InitializeWithCdmContext(
config_to_send, kLowDelay,
mojo::PendingRemote<stable::mojom::StableCdmContext>(),
mojo::PendingRemote<mojom::CdmContextForOOPVD>(),
initialize_cb_to_send.Get());
stable_video_decoder_remote.FlushForTesting();
video_decoder_remote.FlushForTesting();
ASSERT_TRUE(Mock::VerifyAndClearExpectations(mock_video_decoder_raw));
std::move(received_initialize_cb)
@ -568,16 +565,16 @@ TEST_F(StableVideoDecoderServiceTest, StableVideoDecoderCanBeInitialized) {
task_environment_.RunUntilIdle();
}
// Tests that the StableVideoDecoderService rejects a call to
// stable::mojom::VideoDecoder::Initialize() before
// stable::mojom::VideoDecoder::Construct() gets called.
TEST_F(StableVideoDecoderServiceTest,
StableVideoDecoderCannotBeInitializedBeforeConstruction) {
// Tests that the OOPVideoDecoderService rejects a call to
// mojom::VideoDecoder::InitializeWithCdmContext() before
// mojom::VideoDecoder::Construct() gets called.
TEST_F(OOPVideoDecoderServiceTest,
VideoDecoderCannotBeInitializedBeforeConstruction) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
const VideoDecoderConfig config_to_send = CreateValidVideoDecoderConfig();
constexpr bool kLowDelay = true;
@ -588,31 +585,31 @@ TEST_F(StableVideoDecoderServiceTest,
initialize_cb_to_send;
EXPECT_CALL(initialize_cb_to_send,
Run(DecoderStatus(DecoderStatus::Codes::kFailed),
Run(DecoderStatus(DecoderStatus::Codes::kFailedToCreateDecoder),
/*needs_bitstream_conversion=*/false,
/*max_decode_requests=*/1, VideoDecoderType::kUnknown,
/*needs_transcryption=*/false));
stable_video_decoder_remote->Initialize(
video_decoder_remote->InitializeWithCdmContext(
config_to_send, kLowDelay,
mojo::PendingRemote<stable::mojom::StableCdmContext>(),
mojo::PendingRemote<mojom::CdmContextForOOPVD>(),
initialize_cb_to_send.Get());
stable_video_decoder_remote.FlushForTesting();
video_decoder_remote.FlushForTesting();
}
// Tests that a call to stable::mojom::VideoDecoder::Decode() gets routed
// Tests that a call to mojom::VideoDecoder::Decode() gets routed
// correctly to the underlying mojom::VideoDecoder and that the data pipe is
// plumbed correctly. Also tests that when the underlying mojom::VideoDecoder
// calls the decode callback, the call gets routed to the client.
TEST_F(StableVideoDecoderServiceTest, StableVideoDecoderCanDecode) {
TEST_F(OOPVideoDecoderServiceTest, VideoDecoderCanDecode) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto* mock_video_decoder_raw = mock_video_decoder.get();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto auxiliary_endpoints = ConstructStableVideoDecoder(
stable_video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
auto auxiliary_endpoints =
ConstructVideoDecoder(video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
ASSERT_TRUE(auxiliary_endpoints);
ASSERT_TRUE(auxiliary_endpoints->mojo_decoder_buffer_writer);
ASSERT_TRUE(auxiliary_endpoints->mojo_decoder_buffer_reader);
@ -636,12 +633,13 @@ TEST_F(StableVideoDecoderServiceTest, StableVideoDecoderCanDecode) {
received_decode_cb = std::move(callback);
});
EXPECT_CALL(decode_cb_to_send, Run(kDecoderStatus));
ASSERT_TRUE(
mojom::DecoderBufferPtr mojo_decoder_buffer =
auxiliary_endpoints->mojo_decoder_buffer_writer->WriteDecoderBuffer(
decoder_buffer_to_send));
stable_video_decoder_remote->Decode(decoder_buffer_to_send,
decode_cb_to_send.Get());
stable_video_decoder_remote.FlushForTesting();
decoder_buffer_to_send);
ASSERT_TRUE(mojo_decoder_buffer);
video_decoder_remote->Decode(std::move(mojo_decoder_buffer),
decode_cb_to_send.Get());
video_decoder_remote.FlushForTesting();
ASSERT_TRUE(Mock::VerifyAndClearExpectations(mock_video_decoder_raw));
ASSERT_TRUE(received_decoder_buffer_ptr);
@ -662,16 +660,15 @@ TEST_F(StableVideoDecoderServiceTest, StableVideoDecoderCanDecode) {
task_environment_.RunUntilIdle();
}
// Tests that the StableVideoDecoderService rejects a call to
// stable::mojom::VideoDecoder::Decode() before
// stable::mojom::VideoDecoder::Construct() gets called.
TEST_F(StableVideoDecoderServiceTest,
StableVideoDecoderCannotDecodeBeforeConstruction) {
// Tests that the OOPVideoDecoderService rejects a call to
// mojom::VideoDecoder::Decode() before
// mojom::VideoDecoder::Construct() gets called.
TEST_F(OOPVideoDecoderServiceTest, VideoDecoderCannotDecodeBeforeConstruction) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
constexpr uint8_t kEncodedData[] = {1, 2, 3};
scoped_refptr<DecoderBuffer> decoder_buffer_to_send =
@ -681,26 +678,29 @@ TEST_F(StableVideoDecoderServiceTest,
decode_cb_to_send;
EXPECT_CALL(decode_cb_to_send,
Run(DecoderStatus(DecoderStatus::Codes::kFailed)));
stable_video_decoder_remote->Decode(decoder_buffer_to_send,
decode_cb_to_send.Get());
stable_video_decoder_remote.FlushForTesting();
Run(DecoderStatus(DecoderStatus::Codes::kFailedToCreateDecoder)));
mojom::DecoderBufferPtr mojo_decoder_buffer =
mojom::DecoderBuffer::From(*decoder_buffer_to_send);
ASSERT_TRUE(mojo_decoder_buffer);
video_decoder_remote->Decode(std::move(mojo_decoder_buffer),
decode_cb_to_send.Get());
video_decoder_remote.FlushForTesting();
}
// Tests that a call to stable::mojom::VideoDecoder::Reset() gets routed
// Tests that a call to mojom::VideoDecoder::Reset() gets routed
// correctly to the underlying mojom::VideoDecoder. Also tests that when the
// underlying mojom::VideoDecoder calls the reset callback, the call gets routed
// to the client.
TEST_F(StableVideoDecoderServiceTest, StableVideoDecoderCanBeReset) {
TEST_F(OOPVideoDecoderServiceTest, VideoDecoderCanBeReset) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto* mock_video_decoder_raw = mock_video_decoder.get();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto auxiliary_endpoints = ConstructStableVideoDecoder(
stable_video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
auto auxiliary_endpoints =
ConstructVideoDecoder(video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
ASSERT_TRUE(auxiliary_endpoints);
StrictMock<base::MockOnceCallback<void()>> reset_cb_to_send;
@ -711,48 +711,48 @@ TEST_F(StableVideoDecoderServiceTest, StableVideoDecoderCanBeReset) {
received_reset_cb = std::move(callback);
});
EXPECT_CALL(reset_cb_to_send, Run());
stable_video_decoder_remote->Reset(reset_cb_to_send.Get());
stable_video_decoder_remote.FlushForTesting();
video_decoder_remote->Reset(reset_cb_to_send.Get());
video_decoder_remote.FlushForTesting();
ASSERT_TRUE(Mock::VerifyAndClearExpectations(mock_video_decoder_raw));
std::move(received_reset_cb).Run();
task_environment_.RunUntilIdle();
}
// Tests that the StableVideoDecoderService doesn't route a
// stable::mojom::VideoDecoder::Reset() call to the underlying
// mojom::VideoDecoder before stable::mojom::VideoDecoder::Construct() gets
// Tests that the OOPVideoDecoderService doesn't route a
// mojom::VideoDecoder::Reset() call to the underlying
// mojom::VideoDecoder before mojom::VideoDecoder::Construct() gets
// called and that it just calls the reset callback.
TEST_F(StableVideoDecoderServiceTest,
StableVideoDecoderCannotBeResetBeforeConstruction) {
TEST_F(OOPVideoDecoderServiceTest,
VideoDecoderCannotBeResetBeforeConstruction) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
StrictMock<base::MockOnceCallback<void()>> reset_cb_to_send;
EXPECT_CALL(reset_cb_to_send, Run());
stable_video_decoder_remote->Reset(reset_cb_to_send.Get());
stable_video_decoder_remote.FlushForTesting();
video_decoder_remote->Reset(reset_cb_to_send.Get());
video_decoder_remote.FlushForTesting();
}
// Tests that a call to
// stable::mojom::VideoFrameHandleReleaser::ReleaseVideoFrame() gets routed
// mojom::VideoFrameHandleReleaser::ReleaseVideoFrame() gets routed
// correctly to the underlying mojom::VideoFrameHandleReleaser.
TEST_F(StableVideoDecoderServiceTest, VideoFramesCanBeReleased) {
TEST_F(OOPVideoDecoderServiceTest, VideoFramesCanBeReleased) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto* mock_video_decoder_raw = mock_video_decoder.get();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto auxiliary_endpoints = ConstructStableVideoDecoder(
stable_video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
auto auxiliary_endpoints =
ConstructVideoDecoder(video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
ASSERT_TRUE(auxiliary_endpoints);
ASSERT_TRUE(auxiliary_endpoints->stable_video_frame_handle_releaser_remote);
ASSERT_TRUE(auxiliary_endpoints->video_frame_handle_releaser_remote);
ASSERT_TRUE(auxiliary_endpoints->mock_video_frame_handle_releaser);
const base::UnguessableToken release_token_to_send =
@ -763,92 +763,91 @@ TEST_F(StableVideoDecoderServiceTest, VideoFramesCanBeReleased) {
EXPECT_CALL(
*auxiliary_endpoints->mock_video_frame_handle_releaser,
ReleaseVideoFrame(release_token_to_send, expected_release_sync_token));
auxiliary_endpoints->stable_video_frame_handle_releaser_remote
->ReleaseVideoFrame(release_token_to_send);
auxiliary_endpoints->stable_video_frame_handle_releaser_remote
.FlushForTesting();
auxiliary_endpoints->video_frame_handle_releaser_remote->ReleaseVideoFrame(
release_token_to_send, /*release_sync_token=*/{});
auxiliary_endpoints->video_frame_handle_releaser_remote.FlushForTesting();
}
TEST_F(StableVideoDecoderServiceTest,
StableVideoDecoderClientReceivesOnVideoFrameDecodedEvent) {
TEST_F(OOPVideoDecoderServiceTest,
VideoDecoderClientReceivesOnVideoFrameDecodedEvent) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto* mock_video_decoder_raw = mock_video_decoder.get();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto auxiliary_endpoints = ConstructStableVideoDecoder(
stable_video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
auto auxiliary_endpoints =
ConstructVideoDecoder(video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
ASSERT_TRUE(auxiliary_endpoints);
ASSERT_TRUE(auxiliary_endpoints->video_decoder_client_remote);
ASSERT_TRUE(auxiliary_endpoints->mock_stable_video_decoder_client);
ASSERT_TRUE(auxiliary_endpoints->mock_video_decoder_client);
const auto token_for_release = base::UnguessableToken::Create();
const std::optional<base::UnguessableToken> token_for_release =
base::UnguessableToken::Create();
scoped_refptr<VideoFrame> video_frame_to_send =
CreateTestNV12MappableVideoFrame(test_sii_);
ASSERT_TRUE(video_frame_to_send);
stable::mojom::VideoFramePtr video_frame_received;
scoped_refptr<VideoFrame> video_frame_received;
constexpr bool kCanReadWithoutStalling = true;
EXPECT_CALL(
*auxiliary_endpoints->mock_stable_video_decoder_client,
*auxiliary_endpoints->mock_video_decoder_client,
OnVideoFrameDecoded(_, kCanReadWithoutStalling, token_for_release))
.WillOnce(WithArgs<0>(
[&video_frame_received](stable::mojom::VideoFramePtr frame) {
[&video_frame_received](const scoped_refptr<VideoFrame>& frame) {
video_frame_received = std::move(frame);
}));
auxiliary_endpoints->video_decoder_client_remote->OnVideoFrameDecoded(
video_frame_to_send, kCanReadWithoutStalling, token_for_release);
auxiliary_endpoints->video_decoder_client_remote.FlushForTesting();
ASSERT_TRUE(video_frame_received);
EXPECT_FALSE(video_frame_received->metadata.end_of_stream);
EXPECT_TRUE(video_frame_received->metadata.read_lock_fences_enabled);
EXPECT_TRUE(video_frame_received->metadata.power_efficient);
EXPECT_TRUE(video_frame_received->metadata.allow_overlay);
EXPECT_FALSE(video_frame_received->metadata().end_of_stream);
EXPECT_TRUE(video_frame_received->metadata().read_lock_fences_enabled);
EXPECT_TRUE(video_frame_received->metadata().power_efficient);
EXPECT_TRUE(video_frame_received->metadata().allow_overlay);
}
// Tests that a mojom::VideoDecoderClient::OnWaiting() call originating from the
// underlying mojom::VideoDecoder gets forwarded to the
// stable::mojom::VideoDecoderClient correctly.
TEST_F(StableVideoDecoderServiceTest,
StableVideoDecoderClientReceivesOnWaitingEvent) {
// mojom::VideoDecoderClient correctly.
TEST_F(OOPVideoDecoderServiceTest, VideoDecoderClientReceivesOnWaitingEvent) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto* mock_video_decoder_raw = mock_video_decoder.get();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto auxiliary_endpoints = ConstructStableVideoDecoder(
stable_video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
auto auxiliary_endpoints =
ConstructVideoDecoder(video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
ASSERT_TRUE(auxiliary_endpoints);
ASSERT_TRUE(auxiliary_endpoints->video_decoder_client_remote);
ASSERT_TRUE(auxiliary_endpoints->mock_stable_video_decoder_client);
ASSERT_TRUE(auxiliary_endpoints->mock_video_decoder_client);
constexpr WaitingReason kWaitingReason = WaitingReason::kNoDecryptionKey;
EXPECT_CALL(*auxiliary_endpoints->mock_stable_video_decoder_client,
EXPECT_CALL(*auxiliary_endpoints->mock_video_decoder_client,
OnWaiting(kWaitingReason));
auxiliary_endpoints->video_decoder_client_remote->OnWaiting(kWaitingReason);
auxiliary_endpoints->video_decoder_client_remote.FlushForTesting();
}
// Tests that a mojom::MediaLog::AddLogRecord() call originating from the
// underlying mojom::VideoDecoder gets forwarded to the stable::mojom::MediaLog
// underlying mojom::VideoDecoder gets forwarded to the mojom::MediaLog
// correctly.
TEST_F(StableVideoDecoderServiceTest,
StableVideoDecoderClientReceivesAddLogRecordEvent) {
TEST_F(OOPVideoDecoderServiceTest,
VideoDecoderClientReceivesAddLogRecordEvent) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
auto* mock_video_decoder_raw = mock_video_decoder.get();
auto stable_video_decoder_remote =
CreateStableVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto auxiliary_endpoints = ConstructStableVideoDecoder(
stable_video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
auto video_decoder_remote =
CreateVideoDecoder(std::move(mock_video_decoder), /*tracker=*/{});
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
auto auxiliary_endpoints =
ConstructVideoDecoder(video_decoder_remote, *mock_video_decoder_raw,
/*expect_construct_call=*/true);
ASSERT_TRUE(auxiliary_endpoints);
ASSERT_TRUE(auxiliary_endpoints->media_log_remote);
ASSERT_TRUE(auxiliary_endpoints->mock_stable_media_log);
ASSERT_TRUE(auxiliary_endpoints->mock_media_log);
MediaLogRecord media_log_record_to_send;
media_log_record_to_send.id = 2;
@ -856,39 +855,38 @@ TEST_F(StableVideoDecoderServiceTest,
media_log_record_to_send.params.Set("Test", "Value");
media_log_record_to_send.time = base::TimeTicks::Now();
EXPECT_CALL(*auxiliary_endpoints->mock_stable_media_log,
EXPECT_CALL(*auxiliary_endpoints->mock_media_log,
AddLogRecord(media_log_record_to_send));
auxiliary_endpoints->media_log_remote->AddLogRecord(media_log_record_to_send);
auxiliary_endpoints->media_log_remote.FlushForTesting();
}
// Tests that a StableVideoDecoderTracker can be used to know when the remote
// StableVideoDecoder implementation dies.
TEST_F(StableVideoDecoderServiceTest,
StableVideoDecoderTrackerDisconnectsWhenStableVideoDecoderDies) {
// Tests that a VideoDecoderTracker can be used to know when the remote
// VideoDecoder implementation dies.
TEST_F(OOPVideoDecoderServiceTest,
VideoDecoderTrackerDisconnectsWhenVideoDecoderDies) {
auto mock_video_decoder = std::make_unique<StrictMock<MockVideoDecoder>>();
MockStableVideoDecoderTracker tracker;
mojo::Receiver<stable::mojom::StableVideoDecoderTracker> tracker_receiver(
&tracker);
mojo::PendingRemote<stable::mojom::StableVideoDecoderTracker> tracker_remote =
MockVideoDecoderTracker tracker;
mojo::Receiver<mojom::VideoDecoderTracker> tracker_receiver(&tracker);
mojo::PendingRemote<mojom::VideoDecoderTracker> tracker_remote =
tracker_receiver.BindNewPipeAndPassRemote();
StrictMock<base::MockOnceCallback<void()>> tracker_disconnect_cb;
tracker_receiver.set_disconnect_handler(tracker_disconnect_cb.Get());
auto stable_video_decoder_remote = CreateStableVideoDecoder(
std::move(mock_video_decoder), std::move(tracker_remote));
ASSERT_TRUE(stable_video_decoder_remote.is_bound());
ASSERT_TRUE(stable_video_decoder_remote.is_connected());
auto video_decoder_remote = CreateVideoDecoder(std::move(mock_video_decoder),
std::move(tracker_remote));
ASSERT_TRUE(video_decoder_remote.is_bound());
ASSERT_TRUE(video_decoder_remote.is_connected());
// Up to this point, nothing in particular should happen.
task_environment_.RunUntilIdle();
ASSERT_TRUE(Mock::VerifyAndClearExpectations(&tracker_disconnect_cb));
// Once we reset the |stable_video_decoder_remote|, the StableVideoDecoder
// Once we reset the |video_decoder_remote|, the VideoDecoder
// implementation should die and the |tracker| should get disconnected.
EXPECT_CALL(tracker_disconnect_cb, Run());
stable_video_decoder_remote.reset();
video_decoder_remote.reset();
task_environment_.RunUntilIdle();
}

@ -6,30 +6,24 @@
namespace media {
StableVideoDecoderFactoryProcessService::
StableVideoDecoderFactoryProcessService(
mojo::PendingReceiver<stable::mojom::StableVideoDecoderFactoryProcess>
receiver)
OOPVideoDecoderFactoryProcessService::OOPVideoDecoderFactoryProcessService(
mojo::PendingReceiver<mojom::VideoDecoderFactoryProcess> receiver)
: receiver_(this, std::move(receiver)) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
StableVideoDecoderFactoryProcessService::
~StableVideoDecoderFactoryProcessService() {
OOPVideoDecoderFactoryProcessService::~OOPVideoDecoderFactoryProcessService() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
void StableVideoDecoderFactoryProcessService::
InitializeStableVideoDecoderFactory(
const gpu::GpuFeatureInfo& gpu_feature_info,
mojo::PendingReceiver<stable::mojom::StableVideoDecoderFactory>
receiver) {
void OOPVideoDecoderFactoryProcessService::InitializeVideoDecoderFactory(
const gpu::GpuFeatureInfo& gpu_feature_info,
mojo::PendingReceiver<mojom::InterfaceFactory> receiver) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// The browser process ensures this is called only once.
DCHECK(!factory_);
factory_ =
std::make_unique<StableVideoDecoderFactoryService>(gpu_feature_info);
factory_ = std::make_unique<OOPVideoDecoderFactoryService>(gpu_feature_info);
// base::Unretained(this) is safe here because the disconnection callback
// won't run beyond the lifetime of |factory_| which is fully owned by
@ -37,11 +31,11 @@ void StableVideoDecoderFactoryProcessService::
factory_->BindReceiver(
std::move(receiver),
base::BindOnce(
&StableVideoDecoderFactoryProcessService::OnFactoryDisconnected,
&OOPVideoDecoderFactoryProcessService::OnFactoryDisconnected,
base::Unretained(this)));
}
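The base::Unretained(this) argument above is the standard Chromium lifetime reasoning: the bound callback is owned (transitively) by a member of |this|, so it is destroyed no later than |this| and can never run on a dangling pointer. A generic sketch of that pattern, with hypothetical class names that are not part of this CL:

#include <memory>
#include <utility>

#include "base/functional/bind.h"
#include "base/functional/callback.h"

// Hypothetical classes for illustration only.
class Worker {
 public:
  void SetDisconnectHandler(base::OnceClosure cb) {
    disconnect_cb_ = std::move(cb);
  }

 private:
  base::OnceClosure disconnect_cb_;
};

class Owner {
 public:
  void Start() {
    worker_ = std::make_unique<Worker>();
    // Safe: |worker_| (and the callback it owns) is destroyed before or with
    // |this|, so the callback cannot outlive |this|.
    worker_->SetDisconnectHandler(
        base::BindOnce(&Owner::OnDisconnected, base::Unretained(this)));
  }

 private:
  void OnDisconnected() {}

  std::unique_ptr<Worker> worker_;
};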
void StableVideoDecoderFactoryProcessService::OnFactoryDisconnected() {
void OOPVideoDecoderFactoryProcessService::OnFactoryDisconnected() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// This should cause the termination of the utility process that *|this| lives

@ -5,41 +5,41 @@
#ifndef MEDIA_MOJO_SERVICES_STABLE_VIDEO_DECODER_FACTORY_PROCESS_SERVICE_H_
#define MEDIA_MOJO_SERVICES_STABLE_VIDEO_DECODER_FACTORY_PROCESS_SERVICE_H_
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "base/sequence_checker.h"
#include "media/mojo/mojom/video_decoder_factory_process.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/mojo/services/stable_video_decoder_factory_service.h"
#include "mojo/public/cpp/bindings/receiver.h"
// TODO(crbug.com/347331029): rename this file to
// oop_video_decoder_factory_process_service.h.
namespace media {
// A StableVideoDecoderFactoryProcessService allows the browser process to
// initialize a StableVideoDecoderFactory with a gpu::GpuFeatureInfo.
class MEDIA_MOJO_EXPORT StableVideoDecoderFactoryProcessService final
: public stable::mojom::StableVideoDecoderFactoryProcess {
// An OOPVideoDecoderFactoryProcessService allows the browser process to
// initialize an InterfaceFactory with a gpu::GpuFeatureInfo.
class MEDIA_MOJO_EXPORT OOPVideoDecoderFactoryProcessService final
: public mojom::VideoDecoderFactoryProcess {
public:
explicit StableVideoDecoderFactoryProcessService(
mojo::PendingReceiver<stable::mojom::StableVideoDecoderFactoryProcess>
receiver);
StableVideoDecoderFactoryProcessService(
const StableVideoDecoderFactoryProcessService&) = delete;
StableVideoDecoderFactoryProcessService& operator=(
const StableVideoDecoderFactoryProcessService&) = delete;
~StableVideoDecoderFactoryProcessService() final;
explicit OOPVideoDecoderFactoryProcessService(
mojo::PendingReceiver<mojom::VideoDecoderFactoryProcess> receiver);
OOPVideoDecoderFactoryProcessService(
const OOPVideoDecoderFactoryProcessService&) = delete;
OOPVideoDecoderFactoryProcessService& operator=(
const OOPVideoDecoderFactoryProcessService&) = delete;
~OOPVideoDecoderFactoryProcessService() final;
// stable::mojom::StableVideoDecoderFactoryProcess implementation.
void InitializeStableVideoDecoderFactory(
// mojom::VideoDecoderFactoryProcess implementation.
void InitializeVideoDecoderFactory(
const gpu::GpuFeatureInfo& gpu_feature_info,
mojo::PendingReceiver<stable::mojom::StableVideoDecoderFactory> receiver)
final;
mojo::PendingReceiver<mojom::InterfaceFactory> receiver) final;
void OnFactoryDisconnected();
private:
mojo::Receiver<stable::mojom::StableVideoDecoderFactoryProcess> receiver_
mojo::Receiver<mojom::VideoDecoderFactoryProcess> receiver_
GUARDED_BY_CONTEXT(sequence_checker_);
std::unique_ptr<StableVideoDecoderFactoryService> factory_
std::unique_ptr<OOPVideoDecoderFactoryService> factory_
GUARDED_BY_CONTEXT(sequence_checker_);
SEQUENCE_CHECKER(sequence_checker_);

@ -12,9 +12,9 @@
#include "media/base/media_log.h"
#include "media/base/media_util.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/chromeos/default_video_frame_converter.h"
#include "media/gpu/chromeos/frame_registry.h"
#include "media/gpu/chromeos/platform_video_frame_pool.h"
#include "media/gpu/chromeos/registered_frame_converter.h"
#include "media/gpu/chromeos/video_decoder_pipeline.h"
#include "media/gpu/gpu_video_accelerator_util.h"
#include "media/gpu/gpu_video_decode_accelerator_helpers.h"
@ -34,11 +34,9 @@ namespace {
// like its |gpu_task_runner_| and |media_gpu_channel_manager_| members.
class MojoMediaClientImpl : public MojoMediaClient {
public:
MojoMediaClientImpl(const gpu::GpuFeatureInfo& gpu_feature_info,
scoped_refptr<FrameRegistry> frame_registry)
explicit MojoMediaClientImpl(const gpu::GpuFeatureInfo& gpu_feature_info)
: gpu_driver_bug_workarounds_(
gpu_feature_info.enabled_gpu_driver_bug_workarounds),
frame_registry_(std::move(frame_registry)) {}
gpu_feature_info.enabled_gpu_driver_bug_workarounds) {}
MojoMediaClientImpl(const MojoMediaClientImpl&) = delete;
MojoMediaClientImpl& operator=(const MojoMediaClientImpl&) = delete;
~MojoMediaClientImpl() override = default;
@ -66,7 +64,7 @@ class MojoMediaClientImpl : public MojoMediaClient {
#elif BUILDFLAG(USE_V4L2_CODEC)
return VideoDecoderType::kV4L2;
#else
#error StableVideoDecoderFactoryService should only be built on platforms that
#error OOPVideoDecoderFactoryService should only be built on platforms that
#error support video decode acceleration through either VA-API or V4L2.
#endif
}
@ -76,8 +74,7 @@ class MojoMediaClientImpl : public MojoMediaClient {
mojom::CommandBufferIdPtr command_buffer_id,
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space,
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder)
final {
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder) final {
// For out-of-process video decoding, |command_buffer_id| is not used and
// should not be supplied.
DCHECK(!command_buffer_id);
@ -93,7 +90,7 @@ class MojoMediaClientImpl : public MojoMediaClient {
gpu_driver_bug_workarounds_,
/*client_task_runner=*/std::move(task_runner),
std::make_unique<PlatformVideoFramePool>(),
RegisteredFrameConverter::Create(frame_registry_),
DefaultFrameConverter::Create(),
VideoDecoderPipeline::DefaultPreferredRenderableFourccs(),
std::move(log),
/*oop_video_decoder=*/{},
@ -107,23 +104,21 @@ class MojoMediaClientImpl : public MojoMediaClient {
} // namespace
StableVideoDecoderFactoryService::StableVideoDecoderFactoryService(
OOPVideoDecoderFactoryService::OOPVideoDecoderFactoryService(
const gpu::GpuFeatureInfo& gpu_feature_info)
: receiver_(this),
frame_registry_(base::MakeRefCounted<FrameRegistry>()),
mojo_media_client_(
std::make_unique<MojoMediaClientImpl>(gpu_feature_info,
frame_registry_)) {
std::make_unique<MojoMediaClientImpl>(gpu_feature_info)) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
mojo_media_client_->Initialize();
}
StableVideoDecoderFactoryService::~StableVideoDecoderFactoryService() {
OOPVideoDecoderFactoryService::~OOPVideoDecoderFactoryService() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
void StableVideoDecoderFactoryService::BindReceiver(
mojo::PendingReceiver<stable::mojom::StableVideoDecoderFactory> receiver,
void OOPVideoDecoderFactoryService::BindReceiver(
mojo::PendingReceiver<mojom::InterfaceFactory> receiver,
base::OnceClosure disconnect_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// The browser process should guarantee that BindReceiver() is only called
@ -133,9 +128,10 @@ void StableVideoDecoderFactoryService::BindReceiver(
receiver_.set_disconnect_handler(std::move(disconnect_cb));
}
void StableVideoDecoderFactoryService::CreateStableVideoDecoder(
mojo::PendingReceiver<stable::mojom::StableVideoDecoder> receiver,
mojo::PendingRemote<stable::mojom::StableVideoDecoderTracker> tracker) {
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void OOPVideoDecoderFactoryService::CreateVideoDecoderWithTracker(
mojo::PendingReceiver<mojom::VideoDecoder> receiver,
mojo::PendingRemote<mojom::VideoDecoderTracker> tracker) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
std::unique_ptr<mojom::VideoDecoder> dst_video_decoder;
@ -145,12 +141,44 @@ void StableVideoDecoderFactoryService::CreateStableVideoDecoder(
} else {
dst_video_decoder = std::make_unique<MojoVideoDecoderService>(
mojo_media_client_.get(), &cdm_service_context_,
mojo::PendingRemote<stable::mojom::StableVideoDecoder>());
mojo::PendingRemote<mojom::VideoDecoder>());
}
video_decoders_.Add(std::make_unique<StableVideoDecoderService>(
video_decoders_.Add(std::make_unique<OOPVideoDecoderService>(
std::move(tracker), std::move(dst_video_decoder),
&cdm_service_context_, frame_registry_),
&cdm_service_context_),
std::move(receiver));
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
// The client of the OOPVideoDecoderFactoryService is the browser process,
// which sits higher up the trust gradient. The browser process should never use this service
// for anything other than creating video decoders. Therefore, it's appropriate
// to crash in the following methods via NOTREACHED().
void OOPVideoDecoderFactoryService::CreateAudioDecoder(
mojo::PendingReceiver<mojom::AudioDecoder> receiver) {
NOTREACHED();
}
void OOPVideoDecoderFactoryService::CreateVideoDecoder(
mojo::PendingReceiver<mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder) {
NOTREACHED();
}
void OOPVideoDecoderFactoryService::CreateAudioEncoder(
mojo::PendingReceiver<mojom::AudioEncoder> receiver) {
NOTREACHED();
}
void OOPVideoDecoderFactoryService::CreateDefaultRenderer(
const std::string& audio_device_id,
mojo::PendingReceiver<mojom::Renderer> receiver) {
NOTREACHED();
}
void OOPVideoDecoderFactoryService::CreateCdm(const CdmConfig& cdm_config,
CreateCdmCallback callback) {
NOTREACHED();
}
} // namespace media

@ -7,40 +7,43 @@
#include "base/functional/callback.h"
#include "base/sequence_checker.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "gpu/config/gpu_feature_info.h"
#include "media/mojo/mojom/interface_factory.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/receiver.h"
#include "mojo/public/cpp/bindings/unique_receiver_set.h"
// TODO(crbug.com/347331029): rename this file to
// oop_video_decoder_factory_service.h.
namespace media {
namespace mojom {
class VideoDecoder;
} // namespace mojom
class MojoMediaClient;
class FrameRegistry;
// A StableVideoDecoderFactoryService allows a browser process to create
// StableVideoDecoders. It's intended to live inside a video decoder process (a
// An OOPVideoDecoderFactoryService allows a browser process to create
// VideoDecoders. It's intended to live inside a video decoder process (a
// utility process) and there should only be one such instance per process
// because one video decoder process corresponds to a client that handles one
// origin. For example, all the StableVideoDecoders for a video conference call
// origin. For example, all the VideoDecoders for a video conference call
// can live in the same process (and thus be created by the same
// StableVideoDecoderFactoryService). However, the StableVideoDecoder for a
// YouTube video should live in a process separate than a StableVideoDecoder for
// OOPVideoDecoderFactoryService). However, the VideoDecoder for a
// YouTube video should live in a process separate from a VideoDecoder for
// a Vimeo video.
class MEDIA_MOJO_EXPORT StableVideoDecoderFactoryService
: public stable::mojom::StableVideoDecoderFactory {
class MEDIA_MOJO_EXPORT OOPVideoDecoderFactoryService
: public mojom::InterfaceFactory {
public:
explicit StableVideoDecoderFactoryService(
explicit OOPVideoDecoderFactoryService(
const gpu::GpuFeatureInfo& gpu_feature_info);
StableVideoDecoderFactoryService(const StableVideoDecoderFactoryService&) =
delete;
StableVideoDecoderFactoryService& operator=(
const StableVideoDecoderFactoryService&) = delete;
~StableVideoDecoderFactoryService() override;
OOPVideoDecoderFactoryService(const OOPVideoDecoderFactoryService&) = delete;
OOPVideoDecoderFactoryService& operator=(
const OOPVideoDecoderFactoryService&) = delete;
~OOPVideoDecoderFactoryService() override;
using VideoDecoderCreationCBForTesting =
base::RepeatingCallback<std::unique_ptr<mojom::VideoDecoder>(
@ -53,24 +56,33 @@ class MEDIA_MOJO_EXPORT StableVideoDecoderFactoryService
video_decoder_creation_cb_for_testing;
}
void BindReceiver(
mojo::PendingReceiver<stable::mojom::StableVideoDecoderFactory> receiver,
base::OnceClosure disconnect_cb);
void BindReceiver(mojo::PendingReceiver<mojom::InterfaceFactory> receiver,
base::OnceClosure disconnect_cb);
// stable::mojom::StableVideoDecoderFactory implementation.
void CreateStableVideoDecoder(
mojo::PendingReceiver<stable::mojom::StableVideoDecoder> receiver,
mojo::PendingRemote<stable::mojom::StableVideoDecoderTracker> tracker)
override;
// mojom::InterfaceFactory implementation.
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateVideoDecoderWithTracker(
mojo::PendingReceiver<mojom::VideoDecoder> receiver,
mojo::PendingRemote<mojom::VideoDecoderTracker> tracker) override;
#endif
void CreateAudioDecoder(
mojo::PendingReceiver<mojom::AudioDecoder> receiver) override;
void CreateVideoDecoder(
mojo::PendingReceiver<mojom::VideoDecoder> receiver,
mojo::PendingRemote<mojom::VideoDecoder> dst_video_decoder) override;
void CreateAudioEncoder(
mojo::PendingReceiver<mojom::AudioEncoder> receiver) override;
void CreateDefaultRenderer(
const std::string& audio_device_id,
mojo::PendingReceiver<mojom::Renderer> receiver) override;
void CreateCdm(const CdmConfig& cdm_config,
CreateCdmCallback callback) override;
private:
VideoDecoderCreationCBForTesting video_decoder_creation_cb_for_testing_
GUARDED_BY_CONTEXT(sequence_checker_);
mojo::Receiver<stable::mojom::StableVideoDecoderFactory> receiver_;
// Shared between the MojoMediaClientImpl and the StableVideoDecoderService.
scoped_refptr<FrameRegistry> frame_registry_;
mojo::Receiver<mojom::InterfaceFactory> receiver_;
// |mojo_media_client_| and |cdm_service_context_| must be declared before
// |video_decoders_| because the interface implementation instances managed by
@ -79,7 +91,7 @@ class MEDIA_MOJO_EXPORT StableVideoDecoderFactoryService
GUARDED_BY_CONTEXT(sequence_checker_);
MojoCdmServiceContext cdm_service_context_
GUARDED_BY_CONTEXT(sequence_checker_);
mojo::UniqueReceiverSet<stable::mojom::StableVideoDecoder> video_decoders_
mojo::UniqueReceiverSet<mojom::VideoDecoder> video_decoders_
GUARDED_BY_CONTEXT(sequence_checker_);
SEQUENCE_CHECKER(sequence_checker_);

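Editor's note: the following sketch is illustrative only and is not part of this CL. It shows how a video decoder utility process might host a single OOPVideoDecoderFactoryService and bind an incoming mojom::InterfaceFactory receiver to it. The helper name RunOOPVideoDecoderFactory() and the include path are hypothetical (the header still carries its pre-rename name per the TODO above); BindReceiver() matches the declaration shown in the diff.

#include <utility>

#include "base/functional/callback_helpers.h"
#include "media/mojo/mojom/interface_factory.mojom.h"
#include "media/mojo/services/stable_video_decoder_factory_service.h"  // Assumed path; see rename TODO.
#include "mojo/public/cpp/bindings/pending_receiver.h"

namespace media {

// Hypothetical helper: one factory instance per utility process, so every
// VideoDecoder it vends serves the same client/origin, as the class comment
// explains.
void RunOOPVideoDecoderFactory(
    OOPVideoDecoderFactoryService& factory,
    mojo::PendingReceiver<mojom::InterfaceFactory> receiver) {
  factory.BindReceiver(std::move(receiver),
                       /*disconnect_cb=*/base::DoNothing());
}

}  // namespace media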
@ -7,6 +7,7 @@
#include "base/notreached.h"
#include "media/gpu/chromeos/frame_registry.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/common/validation_utils.h"
#if BUILDFLAG(IS_CHROMEOS) && BUILDFLAG(USE_VAAPI)
#include "media/gpu/vaapi/vaapi_wrapper.h"
@ -14,174 +15,51 @@
namespace media {
namespace {
// GetGpuMemoryBufferHandle() is a helper function that gets or creates a
// GpuMemoryBufferHandle from |media_frame|. For decoders that use VDA, the
// storage type is STORAGE_GPU_MEMORY_BUFFER. For decoders that use VD directly,
// the storage type is STORAGE_OPAQUE.
gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle(
scoped_refptr<VideoFrame> media_frame,
scoped_refptr<const FrameRegistry> frame_registry) {
switch (media_frame->storage_type()) {
case VideoFrame::STORAGE_GPU_MEMORY_BUFFER:
CHECK(media_frame->HasMappableGpuBuffer());
return media_frame->GetGpuMemoryBufferHandle();
case VideoFrame::STORAGE_OPAQUE: {
CHECK(frame_registry);
CHECK(media_frame->metadata().tracking_token.has_value());
auto frame_resource =
frame_registry->AccessFrame(*media_frame->metadata().tracking_token);
CHECK(frame_resource);
return frame_resource->CreateGpuMemoryBufferHandle();
}
default:
NOTREACHED();
}
}
stable::mojom::VideoFramePtr MediaVideoFrameToMojoVideoFrame(
scoped_refptr<VideoFrame> media_frame,
scoped_refptr<const FrameRegistry> frame_registry) {
CHECK(!media_frame->metadata().end_of_stream);
stable::mojom::VideoFramePtr mojo_frame = stable::mojom::VideoFrame::New();
CHECK(mojo_frame);
static_assert(
std::is_same<decltype(media_frame->format()),
decltype(stable::mojom::VideoFrame::format)>::value,
"Unexpected type for media::VideoFrame::format(). If you "
"need to change this assertion, please contact "
"chromeos-gfx-video@google.com.");
mojo_frame->format = media_frame->format();
static_assert(
std::is_same<decltype(media_frame->coded_size()),
std::add_lvalue_reference<std::add_const<
decltype(stable::mojom::VideoFrame::coded_size)>::type>::
type>::value,
"Unexpected type for media::VideoFrame::coded_size(). If you "
"need to change this assertion, please contact "
"chromeos-gfx-video@google.com.");
mojo_frame->coded_size = media_frame->coded_size();
static_assert(
std::is_same<
decltype(media_frame->visible_rect()),
std::add_lvalue_reference<std::add_const<
decltype(stable::mojom::VideoFrame::visible_rect)>::type>::type>::
value,
"Unexpected type for media::VideoFrame::visible_rect(). If you "
"need to change this assertion, please contact "
"chromeos-gfx-video@google.com.");
mojo_frame->visible_rect = media_frame->visible_rect();
static_assert(
std::is_same<
decltype(media_frame->natural_size()),
std::add_lvalue_reference<std::add_const<
decltype(stable::mojom::VideoFrame::natural_size)>::type>::type>::
value,
"Unexpected type for media::VideoFrame::natural_size(). If you "
"need to change this assertion, please contact "
"chromeos-gfx-video@google.com.");
mojo_frame->natural_size = media_frame->natural_size();
static_assert(
std::is_same<decltype(media_frame->timestamp()),
decltype(stable::mojom::VideoFrame::timestamp)>::value,
"Unexpected type for media::VideoFrame::timestamp(). If you "
"need to change this assertion, please contact "
"chromeos-gfx-video@google.com.");
mojo_frame->timestamp = media_frame->timestamp();
gfx::GpuMemoryBufferHandle gpu_memory_buffer_handle =
GetGpuMemoryBufferHandle(media_frame, frame_registry);
CHECK_EQ(gpu_memory_buffer_handle.type, gfx::NATIVE_PIXMAP);
CHECK(!gpu_memory_buffer_handle.native_pixmap_handle.planes.empty());
mojo_frame->gpu_memory_buffer_handle = std::move(gpu_memory_buffer_handle);
static_assert(
std::is_same<
decltype(media_frame->metadata()),
std::add_lvalue_reference<
decltype(stable::mojom::VideoFrame::metadata)>::type>::value,
"Unexpected type for media::VideoFrame::metadata(). If you "
"need to change this assertion, please contact "
"chromeos-gfx-video@google.com.");
mojo_frame->metadata = media_frame->metadata();
static_assert(
std::is_same<decltype(media_frame->ColorSpace()),
decltype(stable::mojom::VideoFrame::color_space)>::value,
"Unexpected type for media::VideoFrame::ColorSpace(). If you "
"need to change this assertion, please contact "
"chromeos-gfx-video@google.com.");
mojo_frame->color_space = media_frame->ColorSpace();
static_assert(
std::is_same<
decltype(media_frame->hdr_metadata()),
std::add_lvalue_reference<std::add_const<
decltype(stable::mojom::VideoFrame::hdr_metadata)>::type>::type>::
value,
"Unexpected type for media::VideoFrame::hdr_metadata(). If you "
"need to change this assertion, please contact "
"chromeos-gfx-video@google.com.");
mojo_frame->hdr_metadata = media_frame->hdr_metadata();
return mojo_frame;
}
} // namespace
StableVideoDecoderService::StableVideoDecoderService(
mojo::PendingRemote<stable::mojom::StableVideoDecoderTracker>
tracker_remote,
OOPVideoDecoderService::OOPVideoDecoderService(
mojo::PendingRemote<mojom::VideoDecoderTracker> tracker_remote,
std::unique_ptr<mojom::VideoDecoder> dst_video_decoder,
MojoCdmServiceContext* cdm_service_context,
scoped_refptr<const FrameRegistry> frame_registry)
MojoCdmServiceContext* cdm_service_context)
: tracker_remote_(std::move(tracker_remote)),
video_decoder_client_receiver_(this),
media_log_receiver_(this),
stable_video_frame_handle_releaser_receiver_(this),
video_frame_handle_releaser_receiver_(this),
dst_video_decoder_(std::move(dst_video_decoder)),
dst_video_decoder_receiver_(dst_video_decoder_.get())
#if BUILDFLAG(IS_CHROMEOS)
,
cdm_service_context_(cdm_service_context)
#endif // BUILDFLAG(IS_CHROMEOS)
,
frame_registry_(frame_registry) {
{
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
CHECK(!!dst_video_decoder_);
dst_video_decoder_remote_.Bind(
dst_video_decoder_receiver_.BindNewPipeAndPassRemote());
}
StableVideoDecoderService::~StableVideoDecoderService() {
OOPVideoDecoderService::~OOPVideoDecoderService() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
#if BUILDFLAG(IS_CHROMEOS)
if (cdm_id_)
#if BUILDFLAG(IS_CHROMEOS_ASH)
if (cdm_id_) {
cdm_service_context_->UnregisterRemoteCdmContext(cdm_id_.value());
#endif // BUILDFLAG(IS_CHROMEOS)
}
#endif // BUILDFLAG(IS_CHROMEOS_ASH)
}
void StableVideoDecoderService::GetSupportedConfigs(
void OOPVideoDecoderService::GetSupportedConfigs(
GetSupportedConfigsCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
dst_video_decoder_remote_->GetSupportedConfigs(std::move(callback));
}
void StableVideoDecoderService::Construct(
mojo::PendingAssociatedRemote<stable::mojom::VideoDecoderClient>
stable_video_decoder_client_remote,
mojo::PendingRemote<stable::mojom::MediaLog> stable_media_log_remote,
mojo::PendingReceiver<stable::mojom::VideoFrameHandleReleaser>
stable_video_frame_handle_releaser_receiver,
void OOPVideoDecoderService::Construct(
mojo::PendingAssociatedRemote<mojom::VideoDecoderClient>
video_decoder_client_remote,
mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<mojom::VideoFrameHandleReleaser>
video_frame_handle_releaser_receiver,
mojo::ScopedDataPipeConsumerHandle decoder_buffer_pipe,
mojom::CommandBufferIdPtr command_buffer_id,
const gfx::ColorSpace& target_color_space) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (video_decoder_client_receiver_.is_bound()) {
@ -190,18 +68,17 @@ void StableVideoDecoderService::Construct(
}
DCHECK(!video_decoder_client_receiver_.is_bound());
DCHECK(!stable_video_decoder_client_remote_.is_bound());
stable_video_decoder_client_remote_.Bind(
std::move(stable_video_decoder_client_remote));
DCHECK(!video_decoder_client_remote_.is_bound());
video_decoder_client_remote_.Bind(std::move(video_decoder_client_remote));
DCHECK(!media_log_receiver_.is_bound());
DCHECK(!stable_media_log_remote_.is_bound());
stable_media_log_remote_.Bind(std::move(stable_media_log_remote));
DCHECK(!media_log_remote_.is_bound());
media_log_remote_.Bind(std::move(media_log_remote));
DCHECK(!video_frame_handle_releaser_remote_.is_bound());
DCHECK(!stable_video_frame_handle_releaser_receiver_.is_bound());
stable_video_frame_handle_releaser_receiver_.Bind(
std::move(stable_video_frame_handle_releaser_receiver));
DCHECK(!video_frame_handle_releaser_receiver_.is_bound());
video_frame_handle_releaser_receiver_.Bind(
std::move(video_frame_handle_releaser_receiver));
dst_video_decoder_remote_->Construct(
video_decoder_client_receiver_.BindNewEndpointAndPassRemote(),
@ -211,11 +88,25 @@ void StableVideoDecoderService::Construct(
target_color_space);
}
void StableVideoDecoderService::Initialize(
void OOPVideoDecoderService::Initialize(
const VideoDecoderConfig& config,
bool low_delay,
mojo::PendingRemote<stable::mojom::StableCdmContext> cdm_context,
const std::optional<base::UnguessableToken>& cdm_id,
InitializeCallback callback) {
// The client of the OOPVideoDecoderService is the OOPVideoDecoder which lives
// in the GPU process and is therefore up the trust gradient. The
// OOPVideoDecoder doesn't call Initialize() (it calls
// InitializeWithCdmContext() instead). Thus, it's appropriate to crash here
// via a NOTREACHED().
NOTREACHED();
}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void OOPVideoDecoderService::InitializeWithCdmContext(
const VideoDecoderConfig& config,
bool low_delay,
mojo::PendingRemote<mojom::CdmContextForOOPVD> cdm_context,
InitializeWithCdmContextCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!video_decoder_client_receiver_.is_bound()) {
DVLOG(2) << __func__ << " Construct() must be called first";
@ -270,13 +161,14 @@ void StableVideoDecoderService::Initialize(
// the lifetime of *|this|.
dst_video_decoder_remote_->Initialize(
config, low_delay, cdm_id_,
base::BindOnce(&StableVideoDecoderService::OnInitializeDone,
base::BindOnce(&OOPVideoDecoderService::OnInitializeDone,
base::Unretained(this), std::move(callback),
needs_transcryption));
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void StableVideoDecoderService::OnInitializeDone(
InitializeCallback init_cb,
void OOPVideoDecoderService::OnInitializeDone(
InitializeWithCdmContextCallback init_cb,
bool needs_transcryption,
const DecoderStatus& status,
bool needs_bitstream_conversion,
@ -288,9 +180,8 @@ void StableVideoDecoderService::OnInitializeDone(
needs_transcryption);
}
void StableVideoDecoderService::Decode(
const scoped_refptr<DecoderBuffer>& buffer,
DecodeCallback callback) {
void OOPVideoDecoderService::Decode(mojom::DecoderBufferPtr buffer,
DecodeCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!video_decoder_client_receiver_.is_bound()) {
DVLOG(2) << __func__ << " Construct() must be called first";
@ -298,14 +189,20 @@ void StableVideoDecoderService::Decode(
return;
}
CHECK(buffer);
mojom::DecoderBufferPtr mojo_buffer = mojom::DecoderBuffer::From(*buffer);
// TODO(crbug.com/390706725): remove this check once the extra validation in
// ValidateAndConvertMojoDecoderBuffer() is merged into
// media_type_converters.cc.
scoped_refptr<media::DecoderBuffer> media_decoder_buffer =
ValidateAndConvertMojoDecoderBuffer(std::move(buffer));
CHECK(media_decoder_buffer);
mojom::DecoderBufferPtr mojo_buffer =
mojom::DecoderBuffer::From(*media_decoder_buffer);
CHECK(mojo_buffer);
dst_video_decoder_remote_->Decode(std::move(mojo_buffer),
std::move(callback));
}
void StableVideoDecoderService::Reset(ResetCallback callback) {
void OOPVideoDecoderService::Reset(ResetCallback callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!video_decoder_client_receiver_.is_bound()) {
DVLOG(2) << __func__ << " Construct() must be called first";
@ -315,8 +212,18 @@ void StableVideoDecoderService::Reset(ResetCallback callback) {
dst_video_decoder_remote_->Reset(std::move(callback));
}
void StableVideoDecoderService::ReleaseVideoFrame(
const base::UnguessableToken& release_token) {
void OOPVideoDecoderService::OnOverlayInfoChanged(
const OverlayInfo& overlay_info) {
// The client of the OOPVideoDecoderService is the OOPVideoDecoder which lives
// in the GPU process and is therefore up the trust gradient. The
// OOPVideoDecoder doesn't call OnOverlayInfoChanged(). Thus, it's appropriate
// to crash here via a NOTREACHED().
NOTREACHED();
}
void OOPVideoDecoderService::ReleaseVideoFrame(
const base::UnguessableToken& release_token,
const std::optional<gpu::SyncToken>& release_sync_token) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(video_frame_handle_releaser_remote_.is_bound());
// Note: we don't pass a gpu::SyncToken because it's assumed that the client
@ -327,40 +234,39 @@ void StableVideoDecoderService::ReleaseVideoFrame(
release_token, /*release_sync_token=*/std::nullopt);
}
void StableVideoDecoderService::OnVideoFrameDecoded(
void OOPVideoDecoderService::OnVideoFrameDecoded(
const scoped_refptr<VideoFrame>& frame,
bool can_read_without_stalling,
const std::optional<base::UnguessableToken>& release_token) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(stable_video_decoder_client_remote_.is_bound());
DCHECK(video_decoder_client_remote_.is_bound());
DCHECK(release_token.has_value());
// The mojo traits have been coded assuming these conditions.
CHECK(frame->metadata().allow_overlay);
CHECK(!frame->metadata().end_of_stream);
CHECK(frame->metadata().power_efficient);
CHECK(frame->HasMappableGpuBuffer());
stable_video_decoder_client_remote_->OnVideoFrameDecoded(
MediaVideoFrameToMojoVideoFrame(std::move(frame), frame_registry_),
can_read_without_stalling, *release_token);
video_decoder_client_remote_->OnVideoFrameDecoded(
std::move(frame), can_read_without_stalling, *release_token);
}
void StableVideoDecoderService::OnWaiting(WaitingReason reason) {
void OOPVideoDecoderService::OnWaiting(WaitingReason reason) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(stable_video_decoder_client_remote_.is_bound());
stable_video_decoder_client_remote_->OnWaiting(reason);
DCHECK(video_decoder_client_remote_.is_bound());
video_decoder_client_remote_->OnWaiting(reason);
}
void StableVideoDecoderService::RequestOverlayInfo(
bool restart_for_transitions) {
void OOPVideoDecoderService::RequestOverlayInfo(bool restart_for_transitions) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
NOTREACHED();
}
void StableVideoDecoderService::AddLogRecord(const MediaLogRecord& event) {
void OOPVideoDecoderService::AddLogRecord(const MediaLogRecord& event) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(stable_media_log_remote_.is_bound());
stable_media_log_remote_->AddLogRecord(event);
DCHECK(media_log_remote_.is_bound());
media_log_remote_->AddLogRecord(event);
}
} // namespace media

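Editor's note: a minimal, standalone sketch (not from this CL) of the in-process Mojo hop that the constructor above sets up between |dst_video_decoder_receiver_| and |dst_video_decoder_remote_|. In the real service those are long-lived members; here they are locals purely to show the binding pattern.

#include <memory>
#include <utility>

#include "base/functional/callback_helpers.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "mojo/public/cpp/bindings/receiver.h"
#include "mojo/public/cpp/bindings/remote.h"

namespace {

void DemonstrateInProcessHop(
    std::unique_ptr<media::mojom::VideoDecoder> impl) {
  // The Receiver wraps the concrete implementation...
  mojo::Receiver<media::mojom::VideoDecoder> receiver(impl.get());
  // ...and the Remote is bound to the other end of a brand-new message pipe.
  mojo::Remote<media::mojom::VideoDecoder> remote(
      receiver.BindNewPipeAndPassRemote());
  // Calls on |remote| cross the pipe and are dispatched back into |impl| on
  // the same sequence: one extra hop, same process. This is what lets
  // OOPVideoDecoderService hand associated endpoints (e.g. the
  // VideoDecoderClient) to the destination decoder. Note that in real code
  // both endpoints must outlive the call for the reply to ever arrive.
  remote->GetSupportedConfigs(base::DoNothing());
}

}  // namespace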
@ -10,8 +10,8 @@
#include "base/thread_annotations.h"
#include "base/unguessable_token.h"
#include "build/build_config.h"
#include "media/mojo/mojom/interface_factory.mojom.h"
#include "media/mojo/mojom/media_log.mojom.h"
#include "media/mojo/mojom/stable/stable_video_decoder.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
@ -20,71 +20,78 @@
#include "mojo/public/cpp/bindings/receiver.h"
#include "mojo/public/cpp/bindings/remote.h"
// TODO(crbug.com/347331029): rename this file to oop_video_decoder_service.h.
#if BUILDFLAG(IS_CHROMEOS)
#include "chromeos/components/cdm_factory_daemon/remote_cdm_context.h"
#endif // BUILDFLAG(IS_CHROMEOS)
namespace media {
class FrameRegistry;
// A StableVideoDecoderService serves as an adapter between the
// stable::mojom::StableVideoDecoder interface and the mojom::VideoDecoder
// interface. This allows us to provide hardware video decoding capabilities to
// clients that may be using a different version of the
// stable::mojom::StableVideoDecoder interface, e.g., LaCrOS. A
// StableVideoDecoderService is intended to live in a video decoder process.
// This process can host multiple StableVideoDecoderServices, but the assumption
// is that they don't distrust each other. For example, they should all be
// serving the same renderer process.
// An OOPVideoDecoderService is a "frontend" for a media::mojom::VideoDecoder
// that lives in a utility process. This utility process can host multiple
// OOPVideoDecoderServices, but the assumption is that they don't distrust each
// other. For example, they should all be serving the same renderer process.
//
// TODO(b/195769334): a StableVideoDecoderService should probably be responsible
// for checking incoming data to address issues that may arise due to the stable
// nature of the stable::mojom::StableVideoDecoder interface. For example,
// suppose the StableVideoDecoderService implements an older version of the
// interface relative to the one used by the client. If the client Initialize()s
// the StableVideoDecoderService with a VideoCodecProfile that's unsupported by
// the older version of the interface, the StableVideoDecoderService should
// reject that initialization. Conversely, the client of the
// StableVideoDecoderService should also check incoming data due to similar
// concerns.
class MEDIA_MOJO_EXPORT StableVideoDecoderService
: public stable::mojom::StableVideoDecoder,
public stable::mojom::VideoFrameHandleReleaser,
// A previous version of this class used to serve as an adapter between the
// stable version of media::mojom::VideoDecoder (the now removed
// media::stable::mojom::StableVideoDecoder) and media::mojom::VideoDecoder.
// Since that stable version is no longer needed, the role of
// OOPVideoDecoderService is much more narrow: it needs to transform
// InitializeWithCdmContext() calls into Initialize() calls -- the client of
// OOPVideoDecoderService in the GPU process can't use Initialize() directly
// because the |cdm_id| in that call can't be used to find the corresponding
// CdmContext outside of the GPU process.
//
// TODO(crbug.com/347331029): consider handling the InitializeWithCdmContext()
// call directly in the MojoVideoDecoderService. If we can do that, we can
// probably remove the OOPVideoDecoderService class (thus also removing the
// in-process Mojo hop that we currently have just to abide by the requirements
// of associated interfaces, see the documentation of |dst_video_decoder_| in
// the class declaration).
class MEDIA_MOJO_EXPORT OOPVideoDecoderService
: public mojom::VideoDecoder,
public mojom::VideoFrameHandleReleaser,
public mojom::VideoDecoderClient,
public mojom::MediaLog {
public:
StableVideoDecoderService(
mojo::PendingRemote<stable::mojom::StableVideoDecoderTracker>
tracker_remote,
OOPVideoDecoderService(
mojo::PendingRemote<mojom::VideoDecoderTracker> tracker_remote,
std::unique_ptr<mojom::VideoDecoder> dst_video_decoder,
MojoCdmServiceContext* cdm_service_context,
scoped_refptr<const FrameRegistry> frame_registry);
StableVideoDecoderService(const StableVideoDecoderService&) = delete;
StableVideoDecoderService& operator=(const StableVideoDecoderService&) =
delete;
~StableVideoDecoderService() override;
MojoCdmServiceContext* cdm_service_context);
OOPVideoDecoderService(const OOPVideoDecoderService&) = delete;
OOPVideoDecoderService& operator=(const OOPVideoDecoderService&) = delete;
~OOPVideoDecoderService() override;
// stable::mojom::StableVideoDecoder implementation.
// mojom::VideoDecoder implementation.
void GetSupportedConfigs(GetSupportedConfigsCallback callback) final;
void Construct(
mojo::PendingAssociatedRemote<stable::mojom::VideoDecoderClient>
stable_video_decoder_client_remote,
mojo::PendingRemote<stable::mojom::MediaLog> stable_media_log_remote,
mojo::PendingReceiver<stable::mojom::VideoFrameHandleReleaser>
stable_video_frame_handle_releaser_receiver,
mojo::ScopedDataPipeConsumerHandle decoder_buffer_pipe,
const gfx::ColorSpace& target_color_space) final;
void Initialize(
void Construct(mojo::PendingAssociatedRemote<mojom::VideoDecoderClient>
video_decoder_client_remote,
mojo::PendingRemote<mojom::MediaLog> media_log_remote,
mojo::PendingReceiver<mojom::VideoFrameHandleReleaser>
video_frame_handle_releaser_receiver,
mojo::ScopedDataPipeConsumerHandle decoder_buffer_pipe,
mojom::CommandBufferIdPtr command_buffer_id,
const gfx::ColorSpace& target_color_space) final;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
const std::optional<base::UnguessableToken>& cdm_id,
InitializeCallback callback) final;
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void InitializeWithCdmContext(
const VideoDecoderConfig& config,
bool low_delay,
mojo::PendingRemote<stable::mojom::StableCdmContext> cdm_context,
InitializeCallback callback) final;
void Decode(const scoped_refptr<DecoderBuffer>& buffer,
DecodeCallback callback) final;
mojo::PendingRemote<mojom::CdmContextForOOPVD> cdm_context,
InitializeWithCdmContextCallback callback) final;
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void Decode(mojom::DecoderBufferPtr buffer, DecodeCallback callback) final;
void Reset(ResetCallback callback) final;
void OnOverlayInfoChanged(const OverlayInfo& overlay_info) final;
// mojom::stable::VideoFrameHandleReleaser implementation.
void ReleaseVideoFrame(const base::UnguessableToken& release_token) final;
// mojom::VideoFrameHandleReleaser implementation.
void ReleaseVideoFrame(
const base::UnguessableToken& release_token,
const std::optional<gpu::SyncToken>& release_sync_token) final;
// mojom::VideoDecoderClient implementation.
void OnVideoFrameDecoded(
@ -98,41 +105,41 @@ class MEDIA_MOJO_EXPORT StableVideoDecoderService
void AddLogRecord(const MediaLogRecord& event) final;
private:
void OnInitializeDone(InitializeCallback init_cb,
void OnInitializeDone(InitializeWithCdmContextCallback init_cb,
bool needs_transcryption,
const DecoderStatus& status,
bool needs_bitstream_conversion,
int32_t max_decode_requests,
VideoDecoderType decoder_type);
mojo::Remote<stable::mojom::StableVideoDecoderTracker> tracker_remote_
mojo::Remote<mojom::VideoDecoderTracker> tracker_remote_
GUARDED_BY_CONTEXT(sequence_checker_);
// Incoming calls from the |dst_video_decoder_| to
// |video_decoder_client_receiver_| are forwarded to
// |stable_video_decoder_client_remote_|.
// |video_decoder_client_remote_|.
mojo::AssociatedReceiver<mojom::VideoDecoderClient>
video_decoder_client_receiver_ GUARDED_BY_CONTEXT(sequence_checker_);
mojo::AssociatedRemote<stable::mojom::VideoDecoderClient>
stable_video_decoder_client_remote_ GUARDED_BY_CONTEXT(sequence_checker_);
mojo::AssociatedRemote<mojom::VideoDecoderClient> video_decoder_client_remote_
GUARDED_BY_CONTEXT(sequence_checker_);
// Incoming calls from the |dst_video_decoder_| to |media_log_receiver_| are
// forwarded to |stable_media_log_remote_|.
// forwarded to |media_log_remote_|.
mojo::Receiver<mojom::MediaLog> media_log_receiver_
GUARDED_BY_CONTEXT(sequence_checker_);
mojo::Remote<stable::mojom::MediaLog> stable_media_log_remote_
mojo::Remote<mojom::MediaLog> media_log_remote_
GUARDED_BY_CONTEXT(sequence_checker_);
// Incoming requests from the client to
// |stable_video_frame_handle_releaser_receiver_| are forwarded to
// |video_frame_handle_releaser_receiver_| are forwarded to
// |video_frame_handle_releaser_remote_|.
mojo::Receiver<stable::mojom::VideoFrameHandleReleaser>
stable_video_frame_handle_releaser_receiver_
mojo::Receiver<mojom::VideoFrameHandleReleaser>
video_frame_handle_releaser_receiver_
GUARDED_BY_CONTEXT(sequence_checker_);
mojo::Remote<mojom::VideoFrameHandleReleaser>
video_frame_handle_releaser_remote_ GUARDED_BY_CONTEXT(sequence_checker_);
// The incoming stable::mojom::StableVideoDecoder requests are forwarded to
// The incoming mojom::VideoDecoder requests are forwarded to
// |dst_video_decoder_receiver_| through |dst_video_decoder_remote_|.
//
// Note: the implementation behind |dst_video_decoder_receiver_| (i.e.,
@ -156,10 +163,6 @@ class MEDIA_MOJO_EXPORT StableVideoDecoderService
GUARDED_BY_CONTEXT(sequence_checker_);
#endif // BUILDFLAG(IS_CHROMEOS)
// Used by OnVideoFrameDecoded() to convert media VideoFrames to a
// stable::mojo::VideoFrame.
const scoped_refptr<const FrameRegistry> frame_registry_;
std::optional<base::UnguessableToken> cdm_id_
GUARDED_BY_CONTEXT(sequence_checker_);

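Editor's note: an illustrative sketch (not part of this CL) of how a caller might pair CreateVideoDecoderWithTracker() with a tracker object. Since |tracker_remote_| above is held for the lifetime of the OOPVideoDecoderService, disconnection of the tracker pipe is a usable teardown signal. This assumes mojom::VideoDecoderTracker declares no methods; if it does, they would need to be overridden here as well.

#include "base/functional/bind.h"
#include "base/logging.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "mojo/public/cpp/bindings/receiver.h"
#include "mojo/public/cpp/bindings/remote.h"

class DecoderLifetimeObserver : public media::mojom::VideoDecoderTracker {
 public:
  // Binds a new pipe and returns the remote end to pass to
  // CreateVideoDecoderWithTracker().
  mojo::PendingRemote<media::mojom::VideoDecoderTracker> BindNewPipe() {
    auto pending_remote = receiver_.BindNewPipeAndPassRemote();
    receiver_.set_disconnect_handler(
        base::BindOnce(&DecoderLifetimeObserver::OnDecoderGone,
                       base::Unretained(this)));
    return pending_remote;
  }

 private:
  void OnDecoderGone() { DVLOG(1) << "Out-of-process video decoder died"; }

  mojo::Receiver<media::mojom::VideoDecoderTracker> receiver_{this};
};

// Hypothetical usage against the InterfaceFactory stubs shown below:
//   DecoderLifetimeObserver observer;
//   mojo::Remote<media::mojom::VideoDecoder> decoder;
//   interface_factory->CreateVideoDecoderWithTracker(
//       decoder.BindNewPipeAndPassReceiver(), observer.BindNewPipe());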
@ -192,8 +192,7 @@ class FakeMojoMediaClient : public MojoMediaClient {
mojom::CommandBufferIdPtr command_buffer_id,
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space,
mojo::PendingRemote<stable::mojom::StableVideoDecoder> oop_video_decoder)
override {
mojo::PendingRemote<mojom::VideoDecoder> oop_video_decoder) override {
return create_video_decoder_cb_.Run(media_log);
}
@ -232,7 +231,7 @@ class MojoVideoDecoderIntegrationTest : public ::testing::Test {
video_decoder_receivers_.Add(
std::make_unique<MojoVideoDecoderService>(
&mojo_media_client_, &mojo_cdm_service_context_,
mojo::PendingRemote<stable::mojom::StableVideoDecoder>()),
mojo::PendingRemote<mojom::VideoDecoder>()),
remote_video_decoder.InitWithNewPipeAndPassReceiver());
return remote_video_decoder;
}

@ -106,14 +106,14 @@ class TestInterfaceFactory final : public media::mojom::InterfaceFactory {
}
void CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) override {
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder)
override {
NOTREACHED();
}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) override {
void CreateVideoDecoderWithTracker(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker) override {
NOTREACHED();
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)

@ -152,8 +152,8 @@ class TestInterfaceFactory : public media::mojom::InterfaceFactory {
// Stub out other `mojom::InterfaceFactory` interfaces.
void CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) override {}
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder)
override {}
void CreateAudioDecoder(
mojo::PendingReceiver<media::mojom::AudioDecoder> receiver) override {}
void CreateDefaultRenderer(

@ -135,12 +135,13 @@ class FakeInterfaceFactory : public media::mojom::InterfaceFactory {
// Stub out other mojom::InterfaceFactory interfaces.
void CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) override {}
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder)
override {}
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) override {}
void CreateVideoDecoderWithTracker(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker) override {
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateDefaultRenderer(
const std::string& audio_device_id,

@ -94,8 +94,8 @@ class TestInterfaceFactory : public media::mojom::InterfaceFactory {
// Stub out other mojom::InterfaceFactory interfaces.
void CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) override {}
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder)
override {}
void CreateAudioDecoder(
mojo::PendingReceiver<media::mojom::AudioDecoder> receiver) override {}
void CreateDefaultRenderer(

@ -89,8 +89,8 @@ class FakeMojoMediaClient : public media::MojoMediaClient {
media::mojom::CommandBufferIdPtr command_buffer_id,
media::RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
oop_video_decoder) override {
mojo::PendingRemote<media::mojom::VideoDecoder> oop_video_decoder)
override {
return std::make_unique<FakeGpuVideoDecoder>();
}
};
@ -116,24 +116,22 @@ class FakeInterfaceFactory : public media::mojom::InterfaceFactory {
// FakeMojoMediaClient will create a FakeGpuVideoDecoder.
void CreateVideoDecoder(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>
dst_video_decoder) override {
mojo::PendingRemote<media::mojom::VideoDecoder> dst_video_decoder)
override {
video_decoder_receivers_.Add(
std::make_unique<media::MojoVideoDecoderService>(
&mojo_media_client_, &cdm_service_context_,
mojo::PendingRemote<media::stable::mojom::StableVideoDecoder>()),
mojo::PendingRemote<media::mojom::VideoDecoder>()),
std::move(receiver));
}
// Stub out other mojom::InterfaceFactory interfaces.
#if BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
void CreateStableVideoDecoder(
mojo::PendingReceiver<media::stable::mojom::StableVideoDecoder>
video_decoder) override {
// TODO(b/327268445): we'll need to complete this for GTFO OOP-VD testing.
void CreateVideoDecoderWithTracker(
mojo::PendingReceiver<media::mojom::VideoDecoder> receiver,
mojo::PendingRemote<media::mojom::VideoDecoderTracker> tracker) override {
}
#endif // BUILDFLAG(ALLOW_OOP_VIDEO_DECODER)
// Stub out other mojom::InterfaceFactory interfaces.
void CreateAudioDecoder(
mojo::PendingReceiver<media::mojom::AudioDecoder> receiver) override {}
void CreateAudioEncoder(