
Implement MediaCapabilities API encodingInfo for WebRTC

"webrtc" was recently added as an MediaEncodingType to the
MediaCapabilities API, see
https://w3c.github.io/media-capabilities/

"webrtc" replaces the deprecated encoding type "transmission".

This CL implements support for reporting supported and powerEfficient.
smooth is currently hardcoded to true as long as the configuration
is supported.

The feature is implemented behind the Blink runtime-enabled flag
MediaCapabilitiesWebRtc.

Bug: chromium:1187565
Change-Id: I98d8d971304194d93aa436b8821ea665dfdab9f7
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2932438
Reviewed-by: Justin Novosad <junov@chromium.org>
Reviewed-by: Chrome Cunningham <chcunningham@chromium.org>
Reviewed-by: Henrik Boström <hbos@chromium.org>
Commit-Queue: Johannes Kron <kron@chromium.org>
Cr-Commit-Position: refs/heads/master@{#890184}
Authored by Johannes Kron on 2021-06-08 11:11:58 +00:00; committed by Chromium LUCI CQ
parent 2962d27f6b
commit 7c12ca15e4
18 changed files with 950 additions and 725 deletions
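
A minimal usage sketch (illustrative, not part of this CL): with the
MediaCapabilitiesWebRtc flag enabled, a page could query the new "webrtc"
encoding type roughly as below. The contentType strings match the spec and
the test constants in this CL; the resolution, bitrate, framerate, and
scalability mode values are arbitrary examples.

  // Assumes the MediaCapabilitiesWebRtc runtime flag is enabled.
  const config = {
    type: "webrtc",  // replaces the deprecated "transmission" type
    video: {
      contentType: 'video/VP9; profile-id="0"',
      width: 1280,
      height: 720,
      bitrate: 1500000,
      framerate: 30,
      scalabilityMode: "L1T3",  // optional; forwarded to QueryCodecSupport()
    },
    audio: { contentType: "audio/opus" },
  };

  navigator.mediaCapabilities.encodingInfo(config).then((info) => {
    // supported and powerEfficient come from WebrtcEncodingInfoHandler;
    // smooth is currently reported as true whenever supported is true.
    console.log(info.supported, info.smooth, info.powerEfficient);
  });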

@@ -70,8 +70,8 @@
#include "third_party/blink/renderer/platform/media_capabilities/web_media_capabilities_info.h"
#include "third_party/blink/renderer/platform/media_capabilities/web_media_configuration.h"
#include "third_party/blink/renderer/platform/network/parsed_content_type.h"
#include "third_party/blink/renderer/platform/peerconnection/transmission_encoding_info_handler.h"
#include "third_party/blink/renderer/platform/peerconnection/webrtc_decoding_info_handler.h"
#include "third_party/blink/renderer/platform/peerconnection/webrtc_encoding_info_handler.h"
#include "third_party/blink/renderer/platform/runtime_enabled_features.h"
#include "third_party/blink/renderer/platform/wtf/functional.h"
#include "third_party/blink/renderer/platform/wtf/vector.h"
@@ -147,6 +147,16 @@ MediaCapabilitiesDecodingInfo* CreateDecodingInfoWith(bool value) {
return info;
}
// Utility function that will create a MediaCapabilitiesInfo object with
// all the values set to either true or false.
MediaCapabilitiesInfo* CreateEncodingInfoWith(bool value) {
MediaCapabilitiesInfo* info = MediaCapabilitiesInfo::Create();
info->setSupported(value);
info->setSmooth(value);
info->setPowerEfficient(value);
return info;
}
ScriptPromise CreateResolvedPromiseToDecodingInfoWith(
bool value,
ScriptState* script_state,
@@ -258,6 +268,31 @@ bool IsValidMediaConfiguration(const MediaConfiguration* configuration) {
return configuration->hasAudio() || configuration->hasVideo();
}
bool IsValidVideoConfiguration(const VideoConfiguration* configuration,
bool is_webrtc) {
DCHECK(configuration->hasContentType());
if (!IsValidMimeType(configuration->contentType(), kVideoMimeTypePrefix,
is_webrtc))
return false;
DCHECK(configuration->hasFramerate());
if (!IsValidFrameRate(configuration->framerate()))
return false;
return true;
}
bool IsValidAudioConfiguration(const AudioConfiguration* configuration,
bool is_webrtc) {
DCHECK(configuration->hasContentType());
if (!IsValidMimeType(configuration->contentType(), kAudioMimeTypePrefix,
is_webrtc))
return false;
return true;
}
bool IsValidMediaDecodingConfiguration(
const MediaDecodingConfiguration* configuration,
bool is_webrtc,
@@ -294,30 +329,43 @@ bool IsValidMediaDecodingConfiguration(
}
}
return true;
}
bool IsValidVideoConfiguration(const VideoConfiguration* configuration,
bool is_webrtc) {
DCHECK(configuration->hasContentType());
if (!IsValidMimeType(configuration->contentType(), kVideoMimeTypePrefix,
is_webrtc))
if (configuration->hasVideo() &&
!IsValidVideoConfiguration(configuration->video(), is_webrtc)) {
*message = "The video configuration dictionary is not valid.";
return false;
}
DCHECK(configuration->hasFramerate());
if (!IsValidFrameRate(configuration->framerate()))
if (configuration->hasAudio() &&
!IsValidAudioConfiguration(configuration->audio(), is_webrtc)) {
*message = "The audio configuration dictionary is not valid.";
return false;
}
return true;
}
bool IsValidAudioConfiguration(const AudioConfiguration* configuration,
bool is_webrtc) {
DCHECK(configuration->hasContentType());
if (!IsValidMimeType(configuration->contentType(), kAudioMimeTypePrefix,
is_webrtc))
bool IsValidMediaEncodingConfiguration(
const MediaEncodingConfiguration* configuration,
bool is_webrtc,
String* message) {
if (!IsValidMediaConfiguration(configuration)) {
*message =
"The configuration dictionary has neither |video| nor |audio| "
"specified and needs at least one of them.";
return false;
}
if (configuration->hasVideo() &&
!IsValidVideoConfiguration(configuration->video(), is_webrtc)) {
*message = "The video configuration dictionary is not valid.";
return false;
}
if (configuration->hasAudio() &&
!IsValidAudioConfiguration(configuration->audio(), is_webrtc)) {
*message = "The audio configuration dictionary is not valid.";
return false;
}
return true;
}
@@ -705,21 +753,6 @@ ScriptPromise MediaCapabilities::decodingInfo(
exception_state.ThrowTypeError(message);
return ScriptPromise();
}
if (config->hasVideo() &&
!IsValidVideoConfiguration(config->video(), is_webrtc)) {
exception_state.ThrowTypeError(
"The video configuration dictionary is not valid.");
return ScriptPromise();
}
if (config->hasAudio() &&
!IsValidAudioConfiguration(config->audio(), is_webrtc)) {
exception_state.ThrowTypeError(
"The audio configuration dictionary is not valid.");
return ScriptPromise();
}
// Validation errors should return above.
DCHECK(message.IsEmpty());
@@ -878,7 +911,19 @@ ScriptPromise MediaCapabilities::decodingInfo(
ScriptPromise MediaCapabilities::encodingInfo(
ScriptState* script_state,
const MediaEncodingConfiguration* configuration) {
const MediaEncodingConfiguration* config,
ExceptionState& exception_state) {
const base::TimeTicks request_time = base::TimeTicks::Now();
const bool is_webrtc = config->type() == "webrtc";
String message;
if (!IsValidMediaEncodingConfiguration(config, is_webrtc, &message)) {
exception_state.ThrowTypeError(message);
return ScriptPromise();
}
// Validation errors should return above.
DCHECK(message.IsEmpty());
auto* resolver = MakeGarbageCollected<ScriptPromiseResolver>(script_state);
// IMPORTANT: Acquire the promise before potentially synchronously resolving
@@ -886,48 +931,55 @@ ScriptPromise MediaCapabilities::encodingInfo(
// undefined. See comment above Promise() in script_promise_resolver.h
ScriptPromise promise = resolver->Promise();
if (!IsValidMediaConfiguration(configuration)) {
resolver->Reject(V8ThrowException::CreateTypeError(
script_state->GetIsolate(),
"The configuration dictionary has neither |video| nor |audio| "
"specified and needs at least one of them."));
return promise;
}
const bool is_webrtc = configuration->type() == "webrtc";
if (configuration->hasVideo() &&
!IsValidVideoConfiguration(configuration->video(), is_webrtc)) {
resolver->Reject(V8ThrowException::CreateTypeError(
script_state->GetIsolate(),
"The video configuration dictionary is not valid."));
return promise;
}
if (is_webrtc) {
if (auto* handler = WebrtcEncodingInfoHandler::Instance()) {
const int callback_id = CreateCallbackId();
pending_cb_map_.insert(
callback_id,
MakeGarbageCollected<MediaCapabilities::PendingCallbackState>(
resolver, nullptr, request_time, absl::nullopt));
if (configuration->hasAudio() &&
!IsValidAudioConfiguration(configuration->audio(), is_webrtc)) {
resolver->Reject(V8ThrowException::CreateTypeError(
script_state->GetIsolate(),
"The audio configuration dictionary is not valid."));
return promise;
}
absl::optional<String> audio_mime_type =
config->hasAudio()
? absl::make_optional(config->audio()->contentType())
: absl::nullopt;
absl::optional<String> video_mime_type =
config->hasVideo()
? absl::make_optional(config->video()->contentType())
: absl::nullopt;
absl::optional<String> scalability_mode =
config->hasVideo() && config->video()->hasScalabilityMode()
? absl::make_optional(config->video()->scalabilityMode())
: absl::nullopt;
handler->EncodingInfo(
audio_mime_type, video_mime_type, scalability_mode,
WTF::Bind(&MediaCapabilities::OnWebrtcEncodingInfoSupport,
WrapPersistent(this), callback_id));
if (configuration->type() == "transmission") {
if (auto* handler = TransmissionEncodingInfoHandler::Instance()) {
handler->EncodingInfo(ToWebMediaConfiguration(configuration),
WTF::Bind(&OnMediaCapabilitiesEncodingInfo,
WrapPersistent(resolver)));
return promise;
}
resolver->Reject(MakeGarbageCollected<DOMException>(
DOMExceptionCode::kInvalidStateError,
"Platform error: could not get EncodingInfoHandler."));
// TODO(crbug.com/1187565): This should not happen unless we're out of
// memory or something similar. Add UMA metric to count how often it
// happens.
DCHECK(false);
DVLOG(2) << __func__ << " Could not get EncodingInfoHandler.";
MediaCapabilitiesInfo* info = CreateEncodingInfoWith(false);
resolver->Resolve(info);
return promise;
}
if (configuration->type() == "record") {
if (config->type() == "record") {
if (!RuntimeEnabledFeatures::MediaCapabilitiesEncodingInfoEnabled()) {
exception_state.ThrowTypeError(
"The provided value 'record' is not a valid enum value of type "
"MediaEncodingType.");
return promise;
}
if (auto* handler = MakeGarbageCollected<MediaRecorderHandler>(
ExecutionContext::From(script_state)
->GetTaskRunner(TaskType::kInternalMediaRealTime))) {
handler->EncodingInfo(ToWebMediaConfiguration(configuration),
handler->EncodingInfo(ToWebMediaConfiguration(config),
WTF::Bind(&OnMediaCapabilitiesEncodingInfo,
WrapPersistent(resolver)));
return promise;
@@ -938,10 +990,9 @@ ScriptPromise MediaCapabilities::encodingInfo(
return promise;
}
resolver->Reject(V8ThrowException::CreateTypeError(
script_state->GetIsolate(),
"Valid configuration |type| should be either 'transmission' or "
"'record'."));
exception_state.ThrowTypeError(
"The provided value is not a valid enum value of type "
"MediaEncodingType.");
return promise;
}
@@ -1401,6 +1452,10 @@ void MediaCapabilities::ResolveWebrtcDecodingCallbackIfReady(int callback_id) {
DCHECK(pending_cb_map_.Contains(callback_id));
PendingCallbackState* pending_cb = pending_cb_map_.at(callback_id);
// Resolve the promise if we have gathered both supported and power efficient
// as well as smooth. Smooth is temporarily set to the same as supported but
// will eventually be queried from a local database with historical
// performance data.
if (!pending_cb->is_supported.has_value())
return;
@@ -1433,6 +1488,45 @@ void MediaCapabilities::ResolveWebrtcDecodingCallbackIfReady(int callback_id) {
pending_cb_map_.erase(callback_id);
}
void MediaCapabilities::ResolveWebrtcEncodingCallbackIfReady(int callback_id) {
DCHECK(pending_cb_map_.Contains(callback_id));
PendingCallbackState* pending_cb = pending_cb_map_.at(callback_id);
// Resolve the promise if we have gathered both supported and power efficient
// as well as smooth. Smooth is temporarily set to the same as supported but
// will eventually be queried from a local database with historical
// performance data.
if (!pending_cb->is_supported.has_value())
return;
// supported and gpu factories supported are set simultaneously.
DCHECK(pending_cb->is_gpu_factories_supported.has_value());
if (!pending_cb->db_is_smooth.has_value())
return;
if (!pending_cb->resolver->GetExecutionContext() ||
pending_cb->resolver->GetExecutionContext()->IsContextDestroyed()) {
// We're too late! Now that all the callbacks have provided state, it's safe
// to erase the entry in the map.
pending_cb_map_.erase(callback_id);
return;
}
Persistent<MediaCapabilitiesInfo> info(MediaCapabilitiesInfo::Create());
info->setSupported(*pending_cb->is_supported);
info->setPowerEfficient(*pending_cb->is_gpu_factories_supported);
info->setSmooth(*pending_cb->db_is_smooth);
const base::TimeDelta process_time =
base::TimeTicks::Now() - pending_cb->request_time;
UMA_HISTOGRAM_TIMES("Media.Capabilities.EncodingInfo.Time.Webrtc",
process_time);
pending_cb->resolver->Resolve(std::move(info));
pending_cb_map_.erase(callback_id);
}
void MediaCapabilities::OnBadWindowPrediction(
int callback_id,
const absl::optional<::media::learning::TargetHistogram>& histogram) {
@@ -1520,6 +1614,22 @@ void MediaCapabilities::OnWebrtcDecodingInfoSupport(int callback_id,
ResolveWebrtcDecodingCallbackIfReady(callback_id);
}
void MediaCapabilities::OnWebrtcEncodingInfoSupport(int callback_id,
bool is_supported,
bool is_power_efficient) {
DCHECK(pending_cb_map_.Contains(callback_id));
PendingCallbackState* pending_cb = pending_cb_map_.at(callback_id);
pending_cb->is_supported = is_supported;
pending_cb->is_gpu_factories_supported = is_power_efficient;
// TODO(crbug.com/1187565): Add call in encodingInfo() to get smoothness score
// from database and remove this default assignment.
pending_cb->db_is_smooth = is_supported;
ResolveWebrtcEncodingCallbackIfReady(callback_id);
}
int MediaCapabilities::CreateCallbackId() {
// Search for the next available callback ID. 0 and -1 are reserved by
// wtf::HashMap (meaning "empty" and "deleted").

@@ -52,7 +52,9 @@ class MODULES_EXPORT MediaCapabilities final
ScriptPromise decodingInfo(ScriptState*,
const MediaDecodingConfiguration*,
ExceptionState&);
ScriptPromise encodingInfo(ScriptState*, const MediaEncodingConfiguration*);
ScriptPromise encodingInfo(ScriptState*,
const MediaEncodingConfiguration*,
ExceptionState&);
private:
// Stores pending callback state from and intermediate prediction values while
@@ -145,7 +147,12 @@ class MODULES_EXPORT MediaCapabilities final
bool is_supported,
bool is_power_efficient);
void OnWebrtcEncodingInfoSupport(int callback_id,
bool is_supported,
bool is_power_efficient);
void ResolveWebrtcDecodingCallbackIfReady(int callback_id);
void ResolveWebrtcEncodingCallbackIfReady(int callback_id);
// Creates a new (incremented) callback ID from |last_callback_id_| for
// mapping in |pending_cb_map_|.

@@ -7,6 +7,6 @@
[Exposed=(Window,Worker)]
interface MediaCapabilities {
[CallWith=ScriptState, RaisesException, Measure] Promise<MediaCapabilitiesDecodingInfo> decodingInfo(MediaDecodingConfiguration configuration);
[CallWith=ScriptState, Measure, RuntimeEnabled=MediaCapabilitiesEncodingInfo] Promise<MediaCapabilitiesInfo> encodingInfo(
[CallWith=ScriptState, RaisesException, Measure, RuntimeEnabled=MediaCapabilitiesWebRtc] Promise<MediaCapabilitiesInfo> encodingInfo(
MediaEncodingConfiguration configuration);
};

@@ -33,6 +33,7 @@
#include "third_party/blink/renderer/bindings/modules/v8/v8_media_capabilities_info.h"
#include "third_party/blink/renderer/bindings/modules/v8/v8_media_configuration.h"
#include "third_party/blink/renderer/bindings/modules/v8/v8_media_decoding_configuration.h"
#include "third_party/blink/renderer/bindings/modules/v8/v8_media_encoding_configuration.h"
#include "third_party/blink/renderer/bindings/modules/v8/v8_video_configuration.h"
#include "third_party/blink/renderer/core/frame/local_dom_window.h"
#include "third_party/blink/renderer/core/frame/navigator.h"
@@ -350,8 +351,8 @@ class MediaCapabilitiesTestContext {
std::unique_ptr<MockLearningTaskControllerService> nnr_service_;
};
// |kContentType|, |kCodec|, and |kCodecProfile| must match.
const char kContentType[] = "video/webm; codecs=\"vp09.00.10.08\"";
// |kVideoContentType|, |kCodec|, and |kCodecProfile| must match.
const char kVideoContentType[] = "video/webm; codecs=\"vp09.00.10.08\"";
const char kAudioContentType[] = "audio/webm; codecs=\"opus\"";
const media::VideoCodecProfile kCodecProfile = media::VP9PROFILE_PROFILE0;
const media::VideoCodec kCodec = media::kCodecVP9;
@@ -359,15 +360,15 @@ const double kFramerate = 20.5;
const int kWidth = 3840;
const int kHeight = 2160;
const int kBitrate = 2391000;
const char kWebrtcContentType[] = "video/VP9; profile-id=\"0\"";
const char kWebrtcVideoContentType[] = "video/VP9; profile-id=\"0\"";
const char kWebrtcAudioContentType[] = "audio/opus";
// Construct AudioConfig using the constants above.
MediaDecodingConfiguration* CreateAudioDecodingConfig(const char content_type[],
const char type[]) {
template <class T>
T* CreateAudioConfig(const char content_type[], const char type[]) {
auto* audio_config = MakeGarbageCollected<AudioConfiguration>();
audio_config->setContentType(content_type);
auto* decoding_config = MakeGarbageCollected<MediaDecodingConfiguration>();
auto* decoding_config = MakeGarbageCollected<T>();
decoding_config->setType(type);
decoding_config->setAudio(audio_config);
return decoding_config;
@@ -375,24 +376,32 @@ MediaDecodingConfiguration* CreateAudioDecodingConfig(const char content_type[],
// Construct media-source AudioConfig using the constants above.
MediaDecodingConfiguration* CreateAudioDecodingConfig() {
return CreateAudioDecodingConfig(kAudioContentType, "media-source");
return CreateAudioConfig<MediaDecodingConfiguration>(kAudioContentType,
"media-source");
}
// Construct webrtc AudioConfig using the constants above.
// Construct webrtc decoding AudioConfig using the constants above.
MediaDecodingConfiguration* CreateWebrtcAudioDecodingConfig() {
return CreateAudioDecodingConfig(kWebrtcAudioContentType, "webrtc");
return CreateAudioConfig<MediaDecodingConfiguration>(kWebrtcAudioContentType,
"webrtc");
}
// Construct webrtc encoding AudioConfig using the constants above.
MediaEncodingConfiguration* CreateWebrtcAudioEncodingConfig() {
return CreateAudioConfig<MediaEncodingConfiguration>(kWebrtcAudioContentType,
"webrtc");
}
// Construct VideoConfig using the constants above.
MediaDecodingConfiguration* CreateDecodingConfig(const char content_type[],
const char type[]) {
template <class T>
T* CreateVideoConfig(const char content_type[], const char type[]) {
auto* video_config = MakeGarbageCollected<VideoConfiguration>();
video_config->setFramerate(kFramerate);
video_config->setContentType(content_type);
video_config->setWidth(kWidth);
video_config->setHeight(kHeight);
video_config->setBitrate(kBitrate);
auto* decoding_config = MakeGarbageCollected<MediaDecodingConfiguration>();
auto* decoding_config = MakeGarbageCollected<T>();
decoding_config->setType(type);
decoding_config->setVideo(video_config);
return decoding_config;
@@ -400,12 +409,20 @@ MediaDecodingConfiguration* CreateDecodingConfig(const char content_type[],
// Construct media-source VideoConfig using the constants above.
MediaDecodingConfiguration* CreateDecodingConfig() {
return CreateDecodingConfig(kContentType, "media-source");
return CreateVideoConfig<MediaDecodingConfiguration>(kVideoContentType,
"media-source");
}
// Construct webrtc VideoConfig using the constants above.
// Construct webrtc decoding VideoConfig using the constants above.
MediaDecodingConfiguration* CreateWebrtcDecodingConfig() {
return CreateDecodingConfig(kWebrtcContentType, "webrtc");
return CreateVideoConfig<MediaDecodingConfiguration>(kWebrtcVideoContentType,
"webrtc");
}
// Construct webrtc encoding VideoConfig using the constants above.
MediaEncodingConfiguration* CreateWebrtcEncodingConfig() {
return CreateVideoConfig<MediaEncodingConfiguration>(kWebrtcVideoContentType,
"webrtc");
}
// Construct PredictionFeatures matching the CreateDecodingConfig, using the
@@ -501,7 +518,7 @@ base::FieldTrialParams MakeMlParams(double bad_window_threshold,
return params;
}
// Wrapping deocdingInfo() call for readability. Await resolution of the promise
// Wrapping decodingInfo() call for readability. Await resolution of the promise
// and return its info.
MediaCapabilitiesInfo* DecodingInfo(
const MediaDecodingConfiguration* decoding_config,
@@ -519,6 +536,23 @@ MediaCapabilitiesInfo* DecodingInfo(
context->GetExceptionState());
}
// Wrapping encodingInfo() call for readability. Await resolution of the promise
// and return its info.
MediaCapabilitiesInfo* EncodingInfo(
const MediaEncodingConfiguration* encoding_config,
MediaCapabilitiesTestContext* context) {
ScriptPromise promise = context->GetMediaCapabilities()->encodingInfo(
context->GetScriptState(), encoding_config, context->GetExceptionState());
ScriptPromiseTester tester(context->GetScriptState(), promise);
tester.WaitUntilSettled();
CHECK(!tester.IsRejected()) << " Can't get info from rejected promise.";
return NativeValueTraits<MediaCapabilitiesInfo>::NativeValue(
context->GetIsolate(), tester.Value().V8Value(),
context->GetExceptionState());
}
} // namespace
TEST(MediaCapabilitiesTests, BasicAudio) {
@@ -1033,8 +1067,8 @@ TEST(MediaCapabilitiesTests, PredictionCallbackPermutations) {
} while (std::next_permutation(callback_order.begin(), callback_order.end()));
}
// WebRTC tests.
TEST(MediaCapabilitiesTests, WebrtcBasicAudio) {
// WebRTC decodingInfo tests.
TEST(MediaCapabilitiesTests, WebrtcDecodingBasicAudio) {
MediaCapabilitiesTestContext context;
ON_CALL(context.GetMockPlatform(), GetGpuFactories())
.WillByDefault(Return(nullptr));
@@ -1049,7 +1083,7 @@ TEST(MediaCapabilitiesTests, WebrtcBasicAudio) {
EXPECT_TRUE(info->powerEfficient());
}
TEST(MediaCapabilitiesTests, WebrtcUnsupportedAudio) {
TEST(MediaCapabilitiesTests, WebrtcDecodingUnsupportedAudio) {
MediaCapabilitiesTestContext context;
ON_CALL(context.GetMockPlatform(), GetGpuFactories())
.WillByDefault(Return(nullptr));
@@ -1057,14 +1091,14 @@ TEST(MediaCapabilitiesTests, WebrtcUnsupportedAudio) {
.Times(testing::AtMost(1));
const MediaDecodingConfiguration* kDecodingConfig =
CreateAudioDecodingConfig("audio/FooCodec", "webrtc");
CreateAudioConfig<MediaDecodingConfiguration>("audio/FooCodec", "webrtc");
MediaCapabilitiesInfo* info = DecodingInfo(kDecodingConfig, &context);
EXPECT_FALSE(info->supported());
EXPECT_FALSE(info->smooth());
EXPECT_FALSE(info->powerEfficient());
}
TEST(MediaCapabilitiesTests, WebrtcBasicVideo) {
TEST(MediaCapabilitiesTests, WebrtcDecodingBasicVideo) {
MediaCapabilitiesTestContext context;
ON_CALL(context.GetMockPlatform(), GetGpuFactories())
.WillByDefault(Return(nullptr));
@@ -1079,7 +1113,7 @@ TEST(MediaCapabilitiesTests, WebrtcBasicVideo) {
EXPECT_FALSE(info->powerEfficient());
}
TEST(MediaCapabilitiesTests, WebrtcUnsupportedVideo) {
TEST(MediaCapabilitiesTests, WebrtcDecodingUnsupportedVideo) {
MediaCapabilitiesTestContext context;
ON_CALL(context.GetMockPlatform(), GetGpuFactories())
.WillByDefault(Return(nullptr));
@@ -1087,7 +1121,7 @@ TEST(MediaCapabilitiesTests, WebrtcUnsupportedVideo) {
.Times(testing::AtMost(1));
const MediaDecodingConfiguration* kDecodingConfig =
CreateDecodingConfig("video/FooCodec", "webrtc");
CreateVideoConfig<MediaDecodingConfiguration>("video/FooCodec", "webrtc");
MediaCapabilitiesInfo* info = DecodingInfo(kDecodingConfig, &context);
EXPECT_FALSE(info->supported());
@@ -1095,4 +1129,66 @@ TEST(MediaCapabilitiesTests, WebrtcUnsupportedVideo) {
EXPECT_FALSE(info->powerEfficient());
}
// WebRTC encodingInfo tests.
TEST(MediaCapabilitiesTests, WebrtcEncodingBasicAudio) {
MediaCapabilitiesTestContext context;
ON_CALL(context.GetMockPlatform(), GetGpuFactories())
.WillByDefault(Return(nullptr));
EXPECT_CALL(context.GetMockPlatform(), GetGpuFactories())
.Times(testing::AtMost(1));
const MediaEncodingConfiguration* kEncodingConfig =
CreateWebrtcAudioEncodingConfig();
MediaCapabilitiesInfo* info = EncodingInfo(kEncodingConfig, &context);
EXPECT_TRUE(info->supported());
EXPECT_TRUE(info->smooth());
EXPECT_TRUE(info->powerEfficient());
}
TEST(MediaCapabilitiesTests, WebrtcEncodingUnsupportedAudio) {
MediaCapabilitiesTestContext context;
ON_CALL(context.GetMockPlatform(), GetGpuFactories())
.WillByDefault(Return(nullptr));
EXPECT_CALL(context.GetMockPlatform(), GetGpuFactories())
.Times(testing::AtMost(1));
const MediaEncodingConfiguration* kEncodingConfig =
CreateAudioConfig<MediaEncodingConfiguration>("audio/FooCodec", "webrtc");
MediaCapabilitiesInfo* info = EncodingInfo(kEncodingConfig, &context);
EXPECT_FALSE(info->supported());
EXPECT_FALSE(info->smooth());
EXPECT_FALSE(info->powerEfficient());
}
TEST(MediaCapabilitiesTests, WebrtcEncodingBasicVideo) {
MediaCapabilitiesTestContext context;
ON_CALL(context.GetMockPlatform(), GetGpuFactories())
.WillByDefault(Return(nullptr));
EXPECT_CALL(context.GetMockPlatform(), GetGpuFactories())
.Times(testing::AtMost(1));
const MediaEncodingConfiguration* kEncodingConfig =
CreateWebrtcEncodingConfig();
MediaCapabilitiesInfo* info = EncodingInfo(kEncodingConfig, &context);
EXPECT_TRUE(info->supported());
EXPECT_TRUE(info->smooth());
EXPECT_FALSE(info->powerEfficient());
}
TEST(MediaCapabilitiesTests, WebrtcEncodingUnsupportedVideo) {
MediaCapabilitiesTestContext context;
ON_CALL(context.GetMockPlatform(), GetGpuFactories())
.WillByDefault(Return(nullptr));
EXPECT_CALL(context.GetMockPlatform(), GetGpuFactories())
.Times(testing::AtMost(1));
const MediaEncodingConfiguration* kEncodingConfig =
CreateVideoConfig<MediaEncodingConfiguration>("video/FooCodec", "webrtc");
MediaCapabilitiesInfo* info = EncodingInfo(kEncodingConfig, &context);
EXPECT_FALSE(info->supported());
EXPECT_FALSE(info->smooth());
EXPECT_FALSE(info->powerEfficient());
}
} // namespace blink

@@ -1347,8 +1347,6 @@ component("platform") {
"peerconnection/rtc_video_encoder_factory.cc",
"peerconnection/rtc_video_encoder_factory.h",
"peerconnection/rtc_void_request.h",
"peerconnection/transmission_encoding_info_handler.cc",
"peerconnection/transmission_encoding_info_handler.h",
"peerconnection/two_keys_adapter_map.h",
"peerconnection/video_codec_factory.cc",
"peerconnection/video_codec_factory.h",
@@ -1356,6 +1354,9 @@ component("platform") {
"peerconnection/webrtc_audio_sink.h",
"peerconnection/webrtc_decoding_info_handler.cc",
"peerconnection/webrtc_decoding_info_handler.h",
"peerconnection/webrtc_encoding_info_handler.cc",
"peerconnection/webrtc_encoding_info_handler.h",
"peerconnection/webrtc_util.cc",
"peerconnection/webrtc_util.h",
"peerconnection/webrtc_video_track_source.cc",
"peerconnection/webrtc_video_track_source.h",
@@ -2141,10 +2142,10 @@ source_set("blink_platform_unittests_sources") {
"peerconnection/rtc_video_decoder_stream_adapter_test.cc",
"peerconnection/rtc_video_encoder_test.cc",
"peerconnection/task_queue_factory_test.cc",
"peerconnection/transmission_encoding_info_handler_test.cc",
"peerconnection/two_keys_adapter_map_unittest.cc",
"peerconnection/webrtc_audio_sink_test.cc",
"peerconnection/webrtc_decoding_info_handler_test.cc",
"peerconnection/webrtc_encoding_info_handler_test.cc",
"peerconnection/webrtc_video_track_source_test.cc",
"runtime_enabled_features_test.cc",
"text/bidi_resolver_test.cc",

@@ -1,189 +0,0 @@
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/peerconnection/transmission_encoding_info_handler.h"
#include <utility>
#include <vector>
#include "base/containers/contains.h"
#include "base/cpu.h"
#include "base/logging.h"
#include "base/system/sys_info.h"
#include "third_party/blink/public/platform/platform.h"
#include "third_party/blink/renderer/platform/media_capabilities/web_media_configuration.h"
#include "third_party/blink/renderer/platform/media_capabilities/web_video_configuration.h"
#include "third_party/blink/renderer/platform/peerconnection/audio_codec_factory.h"
#include "third_party/blink/renderer/platform/peerconnection/video_codec_factory.h"
#include "third_party/blink/renderer/platform/wtf/text/string_hash.h"
#include "third_party/webrtc/api/audio_codecs/audio_encoder_factory.h"
#include "third_party/webrtc/api/audio_codecs/audio_format.h"
#include "third_party/webrtc/api/scoped_refptr.h"
#include "third_party/webrtc/api/video_codecs/sdp_video_format.h"
#include "third_party/webrtc/api/video_codecs/video_encoder_factory.h"
namespace blink {
namespace {
// Composes elements of set<string> to a string with ", " delimiter.
String StringHashSetToString(const HashSet<String>& string_set) {
String result;
String delim;
for (auto& s : string_set) {
result = result + delim + s;
if (delim.IsEmpty())
delim = ", ";
}
return result;
}
// Composes human readable string for |info|.
String ToString(const blink::WebMediaCapabilitiesInfo& info) {
return String::Format("(supported:%s, smooth:%s, power_efficient:%s)",
info.supported ? "true" : "false",
info.smooth ? "true" : "false",
info.power_efficient ? "true" : "false");
}
// Gets GpuVideoAcceleratorFactories instance pointer.
// Returns nullptr if RenderThreadImpl instance is not available.
media::GpuVideoAcceleratorFactories* GetGpuFactories() {
return Platform::Current()->GetGpuFactories();
}
// Returns true if CPU can encode HD video smoothly.
// The logic is borrowed from Google Meet (crbug.com/941352).
bool CanCpuEncodeHdSmoothly() {
const int num_processors = base::SysInfo::NumberOfProcessors();
if (num_processors >= 4)
return true;
if (num_processors < 2)
return false;
return base::CPU().has_sse41();
}
const unsigned int kHdVideoAreaSize = 1280 * 720;
} // namespace
TransmissionEncodingInfoHandler* TransmissionEncodingInfoHandler::Instance() {
DEFINE_STATIC_LOCAL(TransmissionEncodingInfoHandler, instance, ());
return &instance;
}
// If GetGpuFactories() returns null, CreateWebrtcVideoEncoderFactory()
// returns software encoder factory only.
TransmissionEncodingInfoHandler::TransmissionEncodingInfoHandler()
: TransmissionEncodingInfoHandler(
blink::CreateHWVideoEncoderFactory(GetGpuFactories()),
CanCpuEncodeHdSmoothly()) {}
TransmissionEncodingInfoHandler::TransmissionEncodingInfoHandler(
std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
bool cpu_hd_smooth)
: cpu_hd_smooth_(cpu_hd_smooth) {
if (video_encoder_factory) {
std::vector<webrtc::SdpVideoFormat> supported_video_formats =
video_encoder_factory->GetSupportedFormats();
for (const auto& video_format : supported_video_formats) {
const String codec_name =
String::FromUTF8(video_format.name).LowerASCII();
hardware_accelerated_video_codecs_.insert(codec_name);
}
}
rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory =
blink::CreateWebrtcAudioEncoderFactory();
std::vector<webrtc::AudioCodecSpec> supported_audio_specs =
audio_encoder_factory->GetSupportedEncoders();
for (const auto& audio_spec : supported_audio_specs) {
supported_audio_codecs_.insert(
String::FromUTF8(audio_spec.format.name).LowerASCII());
}
DVLOG(2) << String::Format(
"hardware_accelerated_video_codecs_:[%s] "
"supported_audio_codecs_:[%s]",
StringHashSetToString(hardware_accelerated_video_codecs_).Utf8().c_str(),
StringHashSetToString(supported_audio_codecs_).Utf8().c_str());
}
TransmissionEncodingInfoHandler::~TransmissionEncodingInfoHandler() = default;
String TransmissionEncodingInfoHandler::ExtractSupportedCodecFromMimeType(
const String& mime_type) const {
const char* video_prefix = "video/";
const char* audio_prefix = "audio/";
if (mime_type.StartsWith(video_prefix)) {
// Currently support "video/vp8" only.
// TODO(crbug.com/941320): support "video/vp9" and "video/h264" once their
// MIME type parsers are implemented.
const String codec_name = mime_type.Substring(strlen(video_prefix));
if (codec_name == "vp8")
return codec_name;
} else if (mime_type.StartsWith(audio_prefix)) {
const String codec_name = mime_type.Substring(strlen(audio_prefix));
if (base::Contains(supported_audio_codecs_, codec_name))
return codec_name;
}
return "";
}
bool TransmissionEncodingInfoHandler::CanCpuEncodeSmoothly(
const blink::WebVideoConfiguration& configuration) const {
if (configuration.width * configuration.height < kHdVideoAreaSize)
return true;
return cpu_hd_smooth_;
}
void TransmissionEncodingInfoHandler::EncodingInfo(
const blink::WebMediaConfiguration& configuration,
OnMediaCapabilitiesEncodingInfoCallback callback) const {
DCHECK(configuration.video_configuration ||
configuration.audio_configuration);
auto info = std::make_unique<blink::WebMediaCapabilitiesInfo>();
if (!configuration.video_configuration &&
!configuration.audio_configuration) {
DVLOG(2) << "Neither video nor audio configuration specified.";
std::move(callback).Run(std::move(info));
return;
}
// Either video or audio capabilities will be AND-ed so set |info|'s default
// value to true.
info->supported = info->smooth = info->power_efficient = true;
if (configuration.video_configuration) {
const auto& video_config = configuration.video_configuration.value();
const String mime_type = String(video_config.mime_type).LowerASCII();
const String codec_name = ExtractSupportedCodecFromMimeType(mime_type);
info->supported = !codec_name.IsEmpty();
if (info->supported) {
const bool is_hardware_accelerated =
base::Contains(hardware_accelerated_video_codecs_, codec_name);
info->smooth =
is_hardware_accelerated || CanCpuEncodeSmoothly(video_config);
info->power_efficient = is_hardware_accelerated;
} else {
info->smooth = false;
info->power_efficient = false;
}
DVLOG(2) << "Video MIME type:" << mime_type
<< " capabilities:" << ToString(*info);
}
if (configuration.audio_configuration) {
const String mime_type =
String(configuration.audio_configuration->mime_type).LowerASCII();
const String codec_name = ExtractSupportedCodecFromMimeType(mime_type);
info->supported &= !codec_name.IsEmpty();
// Audio is always assumed to be smooth and efficient whenever it is
// supported.
info->smooth &= info->supported;
info->power_efficient &= info->supported;
DVLOG(2) << "Audio MIME type:" << mime_type
<< " capabilities:" << ToString(*info);
}
std::move(callback).Run(std::move(info));
}
} // namespace blink

@@ -1,73 +0,0 @@
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_PEERCONNECTION_TRANSMISSION_ENCODING_INFO_HANDLER_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_PEERCONNECTION_TRANSMISSION_ENCODING_INFO_HANDLER_H_
#include <memory>
#include "base/callback_forward.h"
#include "base/macros.h"
#include "third_party/blink/renderer/platform/media_capabilities/web_media_capabilities_info.h"
#include "third_party/blink/renderer/platform/platform_export.h"
#include "third_party/blink/renderer/platform/wtf/hash_set.h"
#include "third_party/blink/renderer/platform/wtf/text/wtf_string.h"
namespace blink {
struct WebMediaConfiguration;
struct WebVideoConfiguration;
} // namespace blink
namespace webrtc {
class VideoEncoderFactory;
} // namespace webrtc
namespace blink {
// blink::WebTransmissionEncodingInfoHandler implementation.
class PLATFORM_EXPORT TransmissionEncodingInfoHandler {
public:
static TransmissionEncodingInfoHandler* Instance();
TransmissionEncodingInfoHandler();
// Constructor for unittest to inject VideoEncodeFactory instance and
// |cpu_hd_smooth|.
explicit TransmissionEncodingInfoHandler(
std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
bool cpu_hd_smooth);
~TransmissionEncodingInfoHandler();
// Queries the capabilities of the given encoding configuration and passes
// WebMediaCapabilitiesInfo result via callbacks.
// It implements WICG Media Capabilities encodingInfo() call for transmission
// encoding.
// https://wicg.github.io/media-capabilities/#media-capabilities-interface
using OnMediaCapabilitiesEncodingInfoCallback =
base::OnceCallback<void(std::unique_ptr<WebMediaCapabilitiesInfo>)>;
void EncodingInfo(const blink::WebMediaConfiguration& configuration,
OnMediaCapabilitiesEncodingInfoCallback cb) const;
private:
// Extracts supported video/audio codec name from |mime_type|. Returns "" if
// it is not supported.
String ExtractSupportedCodecFromMimeType(const String& mime_type) const;
// True if it can encode |configuration| smoothly via CPU.
bool CanCpuEncodeSmoothly(
const blink::WebVideoConfiguration& configuration) const;
// List of hardware accelerated codecs.
HashSet<String> hardware_accelerated_video_codecs_;
// List of supported audio codecs.
HashSet<String> supported_audio_codecs_;
// True if CPU is capable to encode 720p video smoothly.
bool cpu_hd_smooth_;
DISALLOW_COPY_AND_ASSIGN(TransmissionEncodingInfoHandler);
};
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_PEERCONNECTION_TRANSMISSION_ENCODING_INFO_HANDLER_H_

@@ -1,281 +0,0 @@
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/peerconnection/transmission_encoding_info_handler.h"
#include <utility>
#include <vector>
#include "base/bind.h"
#include "media/base/video_codecs.h"
#include "media/video/video_encode_accelerator.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/web_string.h"
#include "third_party/blink/renderer/platform/media_capabilities/web_audio_configuration.h"
#include "third_party/blink/renderer/platform/media_capabilities/web_media_configuration.h"
#include "third_party/blink/renderer/platform/media_capabilities/web_video_configuration.h"
#include "third_party/webrtc/api/video_codecs/sdp_video_format.h"
#include "third_party/webrtc/api/video_codecs/video_encoder.h"
#include "third_party/webrtc/api/video_codecs/video_encoder_factory.h"
#include "ui/gfx/geometry/size.h"
namespace blink {
namespace {
class FakeVideoEncoderFactory : public webrtc::VideoEncoderFactory {
public:
FakeVideoEncoderFactory() = default;
~FakeVideoEncoderFactory() override = default;
void AddSupportedFormat(const webrtc::SdpVideoFormat& video_format) {
supported_video_formats_.push_back(video_format);
}
std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override {
return supported_video_formats_;
}
std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
const webrtc::SdpVideoFormat& format) override {
return std::unique_ptr<webrtc::VideoEncoder>();
}
private:
std::vector<webrtc::SdpVideoFormat> supported_video_formats_;
};
} // namespace
// Stores WebMediaCapabilitiesEncodingInfoCallbacks' result for verify.
class EncodingInfoObserver {
public:
EncodingInfoObserver() = default;
~EncodingInfoObserver() = default;
void OnSuccess(std::unique_ptr<blink::WebMediaCapabilitiesInfo> info) {
info_.swap(info);
is_success_ = true;
}
void OnError() { is_error_ = true; }
const blink::WebMediaCapabilitiesInfo* info() const { return info_.get(); }
bool IsCalled() const { return is_success_ || is_error_; }
bool is_success() const { return is_success_; }
bool is_error() const { return is_error_; }
private:
std::unique_ptr<blink::WebMediaCapabilitiesInfo> info_;
bool is_success_;
bool is_error_;
};
// It places callback's result to EncodingInfoObserver for testing code
// to verify. Because blink::WebMediaCapabilitiesEncodingInfoCallbacks instance
// is handed to TransmissionEncodingInfoHandler, we cannot directly inspect
// OnSuccess() received argument. So it moves OnSuccess()'s received argument,
// WebMediaCapabilitiesInfo instance, to EncodingInfoObserver instance for
// inspection.
class WebMediaCapabilitiesEncodingInfoCallbacksForTest {
public:
WebMediaCapabilitiesEncodingInfoCallbacksForTest(
EncodingInfoObserver* observer)
: observer_(observer) {
DCHECK(observer_);
}
virtual ~WebMediaCapabilitiesEncodingInfoCallbacksForTest() = default;
void OnSuccess(std::unique_ptr<blink::WebMediaCapabilitiesInfo> info) {
observer_->OnSuccess(std::move(info));
}
void OnError() { observer_->OnError(); }
private:
EncodingInfoObserver* observer_;
};
class TransmissionEncodingInfoHandlerTest : public testing::Test {
protected:
blink::WebVideoConfiguration ComposeVideoConfiguration(
const std::string& mime_type,
const std::string& codec,
unsigned int width = 1920,
unsigned int height = 1080,
double framerate = 30.0) {
constexpr int kBitrate = 2661034;
return blink::WebVideoConfiguration{blink::WebString::FromASCII(mime_type),
blink::WebString::FromASCII(codec),
width,
height,
kBitrate,
framerate};
}
blink::WebAudioConfiguration ComposeAudioConfiguration(
const std::string& mime_type,
const std::string& codec) {
return blink::WebAudioConfiguration{blink::WebString::FromASCII(mime_type),
blink::WebString::FromASCII(codec),
blink::WebString(), absl::nullopt,
absl::nullopt};
}
blink::WebMediaConfiguration ComposeWebMediaConfigurationForVideo(
const std::string& mime_type,
const std::string& codec) {
return blink::WebMediaConfiguration(
blink::MediaConfigurationType::kTransmission, absl::nullopt,
ComposeVideoConfiguration(mime_type, codec));
}
blink::WebMediaConfiguration ComposeWebMediaConfigurationForAudio(
const std::string& mime_type,
const std::string& codec) {
return blink::WebMediaConfiguration(
blink::MediaConfigurationType::kTransmission,
ComposeAudioConfiguration(mime_type, codec), absl::nullopt);
}
void VerifyEncodingInfo(const TransmissionEncodingInfoHandler& handler,
const blink::WebMediaConfiguration& configuration,
bool expect_supported,
bool expect_smooth,
bool expect_power_efficient) {
EncodingInfoObserver observer;
auto callbacks =
std::make_unique<WebMediaCapabilitiesEncodingInfoCallbacksForTest>(
&observer);
handler.EncodingInfo(
configuration,
base::BindOnce(
&WebMediaCapabilitiesEncodingInfoCallbacksForTest::OnSuccess,
base::Unretained(callbacks.get())));
EXPECT_TRUE(observer.IsCalled());
EXPECT_TRUE(observer.is_success());
const blink::WebMediaCapabilitiesInfo* encoding_info = observer.info();
ASSERT_TRUE(encoding_info);
EXPECT_EQ(expect_supported, encoding_info->supported);
EXPECT_EQ(expect_smooth, encoding_info->smooth);
EXPECT_EQ(expect_power_efficient, encoding_info->power_efficient);
}
};
TEST_F(TransmissionEncodingInfoHandlerTest, SupportedVideoCodec) {
TransmissionEncodingInfoHandler handler(nullptr, false);
VerifyEncodingInfo(handler,
ComposeWebMediaConfigurationForVideo("video/vp8", ""),
true, false, false);
// Temporarily unsupported: "video/vp9" and "video/h264".
// TODO(crbug.com/941320): "video/vp9" and "video/h264" should be supported
// once their MIME type parsers are implemented.
VerifyEncodingInfo(handler,
ComposeWebMediaConfigurationForVideo("video/vp9", ""),
false, false, false);
VerifyEncodingInfo(handler,
ComposeWebMediaConfigurationForVideo("video/h264", ""),
false, false, false);
// "video/webm" is not a "transmission" MIME type.
VerifyEncodingInfo(handler,
ComposeWebMediaConfigurationForVideo("video/webm", "vp8"),
false, false, false);
}
TEST_F(TransmissionEncodingInfoHandlerTest, SupportedAudioCodec) {
TransmissionEncodingInfoHandler handler;
for (const char* mime_type :
{"audio/g722", "audio/isac", "audio/opus", "audio/pcma", "audio/pcmu"}) {
// For audio codec, if it is supported, it is smooth.
VerifyEncodingInfo(handler,
ComposeWebMediaConfigurationForAudio(mime_type, ""),
true, true, true);
}
}
TEST_F(TransmissionEncodingInfoHandlerTest, HardwareAcceleratedVideoCodec) {
auto video_encoder_factory = std::make_unique<FakeVideoEncoderFactory>();
video_encoder_factory->AddSupportedFormat(webrtc::SdpVideoFormat("vp8"));
TransmissionEncodingInfoHandler handler(std::move(video_encoder_factory),
false);
VerifyEncodingInfo(handler,
ComposeWebMediaConfigurationForVideo("video/vp8", ""),
true, true, true);
}
TEST_F(TransmissionEncodingInfoHandlerTest, SmoothVideoCodecPowerfulCpu) {
// Assume no HW vp8 encoder.
// Assume powerful CPU.
TransmissionEncodingInfoHandler handler(nullptr, true);
VerifyEncodingInfo(handler,
ComposeWebMediaConfigurationForVideo("video/vp8", ""),
true, true, false);
}
TEST_F(TransmissionEncodingInfoHandlerTest, SmoothVideoCodecVgaResolution) {
// Assume no HW vp8 encoder.
// Assume no powerful CPU.
TransmissionEncodingInfoHandler handler(nullptr, false);
// VP8 encoding for 640x480 video.
blink::WebMediaConfiguration config(
blink::MediaConfigurationType::kTransmission, absl::nullopt,
ComposeVideoConfiguration("video/vp8", "", 640, 480));
VerifyEncodingInfo(handler, config, true, true, false);
}
TEST_F(TransmissionEncodingInfoHandlerTest, SmoothVideoCodecBelowHdResolution) {
// Assume no HW vp8 encoder.
// Assume no powerful CPU.
TransmissionEncodingInfoHandler handler(nullptr, false);
// VP8 encoding for 1024x768 video. Note its area is below that of 1280x720.
blink::WebMediaConfiguration config(
blink::MediaConfigurationType::kTransmission, absl::nullopt,
ComposeVideoConfiguration("video/vp8", "", 1024, 768));
VerifyEncodingInfo(handler, config, true, true, false);
}
TEST_F(TransmissionEncodingInfoHandlerTest, AudioAndVideoCodec) {
// Both video/vp8 and audio/opus are given.
blink::WebMediaConfiguration config(
blink::MediaConfigurationType::kTransmission,
ComposeAudioConfiguration("audio/opus", ""),
ComposeVideoConfiguration("video/vp8", ""));
TransmissionEncodingInfoHandler handler(nullptr, false);
VerifyEncodingInfo(handler, config, true, false, false);
}
TEST_F(TransmissionEncodingInfoHandlerTest,
AudioAndVideoCodecWithVideoHardwareEncoder) {
// Both video/vp8 and audio/opus are given.
blink::WebMediaConfiguration config(
blink::MediaConfigurationType::kTransmission,
ComposeAudioConfiguration("audio/opus", ""),
ComposeVideoConfiguration("video/vp8", ""));
auto video_encoder_factory = std::make_unique<FakeVideoEncoderFactory>();
video_encoder_factory->AddSupportedFormat(webrtc::SdpVideoFormat("vp8"));
TransmissionEncodingInfoHandler handler(std::move(video_encoder_factory),
false);
VerifyEncodingInfo(handler, config, true, true, true);
}
TEST_F(TransmissionEncodingInfoHandlerTest, AudioAndVideoCodecWithPowerfulCpu) {
// Both video/vp8 and audio/opus are given.
blink::WebMediaConfiguration config(
blink::MediaConfigurationType::kTransmission,
ComposeAudioConfiguration("audio/opus", ""),
ComposeVideoConfiguration("video/vp8", ""));
// Assume no HW vp8 encoder.
// Assume powerful CPU.
TransmissionEncodingInfoHandler handler(nullptr, true);
VerifyEncodingInfo(handler, config, true, true, false);
}
} // namespace blink

@@ -15,6 +15,7 @@
#include "third_party/blink/renderer/platform/network/parsed_content_type.h"
#include "third_party/blink/renderer/platform/peerconnection/audio_codec_factory.h"
#include "third_party/blink/renderer/platform/peerconnection/video_codec_factory.h"
#include "third_party/blink/renderer/platform/peerconnection/webrtc_util.h"
#include "third_party/blink/renderer/platform/wtf/text/string_hash.h"
#include "third_party/webrtc/api/audio_codecs/audio_decoder_factory.h"
#include "third_party/webrtc/api/audio_codecs/audio_format.h"
@@ -23,42 +24,6 @@
#include "third_party/webrtc/api/video_codecs/video_decoder_factory.h"
namespace blink {
namespace {
String ExtractCodecNameFromMimeType(const String& mime_type,
const char* prefix) {
if (mime_type.StartsWith(prefix)) {
size_t length = mime_type.length() - strlen(prefix) - 1;
const String codec_name = mime_type.Right(length);
return codec_name;
}
return "";
}
webrtc::SdpVideoFormat::Parameters ConvertToSdpVideoFormatParameters(
const ParsedContentHeaderFieldParameters& parameters) {
webrtc::SdpVideoFormat::Parameters sdp_parameters;
for (const auto& parameter : parameters) {
sdp_parameters[parameter.name.Utf8()] = parameter.value.Utf8();
}
return sdp_parameters;
}
// Composes elements of set<string> to a string with ", " delimiter.
String StringHashSetToString(const HashSet<String>& string_set) {
String result;
String delim;
for (auto& s : string_set) {
result = result + delim + s;
if (delim.IsEmpty())
delim = ", ";
}
return result;
}
} // namespace
WebrtcDecodingInfoHandler* WebrtcDecodingInfoHandler::Instance() {
DEFINE_STATIC_LOCAL(WebrtcDecodingInfoHandler, instance, ());
return &instance;
@@ -84,9 +49,6 @@ WebrtcDecodingInfoHandler::WebrtcDecodingInfoHandler(
supported_audio_codecs_.insert(
String::FromUTF8(audio_spec.format.name).LowerASCII());
}
DVLOG(2) << String::Format(
"supported_audio_codecs_:[%s]",
StringHashSetToString(supported_audio_codecs_).Utf8().c_str());
}
WebrtcDecodingInfoHandler::~WebrtcDecodingInfoHandler() = default;
@@ -105,7 +67,7 @@ void WebrtcDecodingInfoHandler::DecodingInfo(
ParsedContentType audio_content_type(audio_mime_type->LowerASCII());
DCHECK(audio_content_type.IsValid());
const String codec_name =
ExtractCodecNameFromMimeType(audio_content_type.MimeType(), "audio");
WebrtcCodecNameFromMimeType(audio_content_type.MimeType(), "audio");
supported = base::Contains(supported_audio_codecs_, codec_name);
// Audio is always assumed to be power efficient whenever it is
// supported.
@@ -121,7 +83,7 @@ void WebrtcDecodingInfoHandler::DecodingInfo(
ParsedContentType video_content_type(video_mime_type->LowerASCII());
DCHECK(video_content_type.IsValid());
const String codec_name =
ExtractCodecNameFromMimeType(video_content_type.MimeType(), "video");
WebrtcCodecNameFromMimeType(video_content_type.MimeType(), "video");
const webrtc::SdpVideoFormat::Parameters parameters =
ConvertToSdpVideoFormatParameters(video_content_type.GetParameters());
webrtc::SdpVideoFormat sdp_video_format(codec_name.Utf8(), parameters);

@@ -35,7 +35,7 @@ class PLATFORM_EXPORT WebrtcDecodingInfoHandler {
~WebrtcDecodingInfoHandler();
// Queries the capabilities of the given decoding configuration and passes
// WebMediaCapabilitiesInfo result via callbacks.
// the result via callbacks.
// It implements WICG Media Capabilities decodingInfo() call for webrtc
// decoding.
// https://wicg.github.io/media-capabilities/#media-capabilities-interface

@@ -0,0 +1,104 @@
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/peerconnection/webrtc_encoding_info_handler.h"
#include <utility>
#include <vector>
#include "base/containers/contains.h"
#include "base/cpu.h"
#include "base/logging.h"
#include "base/system/sys_info.h"
#include "third_party/blink/public/platform/platform.h"
#include "third_party/blink/renderer/platform/network/parsed_content_type.h"
#include "third_party/blink/renderer/platform/peerconnection/audio_codec_factory.h"
#include "third_party/blink/renderer/platform/peerconnection/video_codec_factory.h"
#include "third_party/blink/renderer/platform/peerconnection/webrtc_util.h"
#include "third_party/blink/renderer/platform/wtf/text/string_hash.h"
#include "third_party/webrtc/api/audio_codecs/audio_encoder_factory.h"
#include "third_party/webrtc/api/audio_codecs/audio_format.h"
#include "third_party/webrtc/api/scoped_refptr.h"
#include "third_party/webrtc/api/video_codecs/sdp_video_format.h"
#include "third_party/webrtc/api/video_codecs/video_encoder_factory.h"
namespace blink {
WebrtcEncodingInfoHandler* WebrtcEncodingInfoHandler::Instance() {
DEFINE_STATIC_LOCAL(WebrtcEncodingInfoHandler, instance, ());
return &instance;
}
WebrtcEncodingInfoHandler::WebrtcEncodingInfoHandler()
: WebrtcEncodingInfoHandler(blink::CreateWebrtcVideoEncoderFactory(
Platform::Current()->GetGpuFactories()),
blink::CreateWebrtcAudioEncoderFactory()) {}
WebrtcEncodingInfoHandler::WebrtcEncodingInfoHandler(
std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory)
: video_encoder_factory_(std::move(video_encoder_factory)),
audio_encoder_factory_(std::move(audio_encoder_factory)) {
std::vector<webrtc::AudioCodecSpec> supported_audio_specs =
audio_encoder_factory_->GetSupportedEncoders();
for (const auto& audio_spec : supported_audio_specs) {
supported_audio_codecs_.insert(
String::FromUTF8(audio_spec.format.name).LowerASCII());
}
}
WebrtcEncodingInfoHandler::~WebrtcEncodingInfoHandler() = default;
void WebrtcEncodingInfoHandler::EncodingInfo(
const absl::optional<String> audio_mime_type,
const absl::optional<String> video_mime_type,
const absl::optional<String> video_scalability_mode,
OnMediaCapabilitiesEncodingInfoCallback callback) const {
DCHECK(audio_mime_type || video_mime_type);
// Set default values to true in case an audio configuration is not specified.
bool supported = true;
bool power_efficient = true;
if (audio_mime_type) {
ParsedContentType audio_content_type(audio_mime_type->LowerASCII());
DCHECK(audio_content_type.IsValid());
const String codec_name =
WebrtcCodecNameFromMimeType(audio_content_type.MimeType(), "audio");
supported = base::Contains(supported_audio_codecs_, codec_name);
// Audio is always assumed to be power efficient whenever it is
// supported.
power_efficient = supported;
DVLOG(1) << "Audio MIME type:" << codec_name << " supported:" << supported
<< " power_efficient:" << power_efficient;
}
// Only check video configuration if the audio configuration was supported (or
// not specified).
if (video_mime_type && supported) {
// Convert video_configuration to SdpVideoFormat.
ParsedContentType video_content_type(video_mime_type->LowerASCII());
DCHECK(video_content_type.IsValid());
const String codec_name =
WebrtcCodecNameFromMimeType(video_content_type.MimeType(), "video");
const webrtc::SdpVideoFormat::Parameters parameters =
ConvertToSdpVideoFormatParameters(video_content_type.GetParameters());
webrtc::SdpVideoFormat sdp_video_format(codec_name.Utf8(), parameters);
absl::optional<std::string> scalability_mode =
video_scalability_mode
? absl::make_optional(video_scalability_mode->Utf8())
: absl::nullopt;
webrtc::VideoEncoderFactory::CodecSupport support =
video_encoder_factory_->QueryCodecSupport(sdp_video_format,
scalability_mode);
supported = support.is_supported;
power_efficient = support.is_power_efficient;
DVLOG(1) << "Video MIME type:" << codec_name << " supported:" << supported
<< " power_efficient:" << power_efficient;
}
std::move(callback).Run(supported, power_efficient);
}
} // namespace blink

@@ -0,0 +1,58 @@
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_PEERCONNECTION_WEBRTC_ENCODING_INFO_HANDLER_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_PEERCONNECTION_WEBRTC_ENCODING_INFO_HANDLER_H_
#include <memory>
#include "base/callback_forward.h"
#include "base/macros.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
#include "third_party/blink/renderer/platform/peerconnection/audio_codec_factory.h"
#include "third_party/blink/renderer/platform/peerconnection/video_codec_factory.h"
#include "third_party/blink/renderer/platform/platform_export.h"
#include "third_party/blink/renderer/platform/wtf/hash_set.h"
#include "third_party/blink/renderer/platform/wtf/text/wtf_string.h"
namespace blink {
class PLATFORM_EXPORT WebrtcEncodingInfoHandler {
public:
static WebrtcEncodingInfoHandler* Instance();
WebrtcEncodingInfoHandler();
// Constructor for unittest to inject video and audio encoder factory
// instances.
WebrtcEncodingInfoHandler(
std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory,
rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory);
// Not copyable or movable.
WebrtcEncodingInfoHandler(const WebrtcEncodingInfoHandler&) = delete;
WebrtcEncodingInfoHandler& operator=(const WebrtcEncodingInfoHandler&) =
delete;
~WebrtcEncodingInfoHandler();
// Queries the capabilities of the given encoding configuration and passes
// the result via callbacks.
// It implements WICG Media Capabilities encodingInfo() call for webrtc
// encoding.
// https://wicg.github.io/media-capabilities/#media-capabilities-interface
using OnMediaCapabilitiesEncodingInfoCallback =
base::OnceCallback<void(bool, bool)>;
void EncodingInfo(const absl::optional<String> audio_mime_type,
const absl::optional<String> video_mime_type,
const absl::optional<String> video_scalability_mode,
OnMediaCapabilitiesEncodingInfoCallback callback) const;
private:
std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory_;
rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory_;
// List of supported audio codecs.
HashSet<String> supported_audio_codecs_;
};
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_PEERCONNECTION_WEBRTC_ENCODING_INFO_HANDLER_H_

@@ -0,0 +1,195 @@
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/peerconnection/webrtc_encoding_info_handler.h"
#include <memory>
#include <utility>
#include <vector>
#include "base/bind.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/web_string.h"
#include "third_party/webrtc/api/audio_codecs/audio_encoder_factory.h"
#include "third_party/webrtc/api/video_codecs/sdp_video_format.h"
#include "third_party/webrtc/api/video_codecs/video_encoder.h"
#include "third_party/webrtc/api/video_codecs/video_encoder_factory.h"
#include "ui/gfx/geometry/size.h"
namespace blink {
namespace {
class MockVideoEncoderFactory : public webrtc::VideoEncoderFactory {
public:
// webrtc::VideoEncoderFactory implementation:
MOCK_METHOD(std::unique_ptr<webrtc::VideoEncoder>,
CreateVideoEncoder,
(const webrtc::SdpVideoFormat& format),
(override));
MOCK_METHOD(std::vector<webrtc::SdpVideoFormat>,
GetSupportedFormats,
(),
(const));
MOCK_METHOD(webrtc::VideoEncoderFactory::CodecSupport,
QueryCodecSupport,
(const webrtc::SdpVideoFormat& format,
absl::optional<std::string> scalability_mode),
(const, override));
};
class MediaCapabilitiesEncodingInfoCallback {
public:
void OnWebrtcEncodingInfoSupport(bool is_supported, bool is_power_efficient) {
is_success_ = true;
is_supported_ = is_supported;
is_power_efficient_ = is_power_efficient;
}
void OnError() { is_error_ = true; }
bool IsCalled() const { return is_success_ || is_error_; }
bool IsSuccess() const { return is_success_; }
bool IsError() const { return is_error_; }
bool IsSupported() const { return is_supported_; }
bool IsPowerEfficient() const { return is_power_efficient_; }
private:
bool is_success_ = false;
bool is_error_ = false;
bool is_supported_ = false;
bool is_power_efficient_ = false;
};
} // namespace
typedef webrtc::VideoEncoderFactory::CodecSupport CodecSupport;
class WebrtcEncodingInfoHandlerTests : public ::testing::Test {
public:
WebrtcEncodingInfoHandlerTests()
: mock_video_encoder_factory_(new MockVideoEncoderFactory()),
video_encoder_factory_(mock_video_encoder_factory_),
audio_encoder_factory_(blink::CreateWebrtcAudioEncoderFactory()) {}
void SetUp() override {}
void VerifyEncodingInfo(
const absl::optional<String> audio_mime_type,
const absl::optional<String> video_mime_type,
const absl::optional<String> video_scalability_mode,
const absl::optional<webrtc::SdpVideoFormat> expected_format,
const CodecSupport support) {
if (expected_format) {
const absl::optional<std::string> expected_scalability_mode =
video_scalability_mode
? absl::make_optional(video_scalability_mode->Utf8())
: absl::nullopt;
ON_CALL(*mock_video_encoder_factory_, QueryCodecSupport)
.WillByDefault(testing::Invoke(
[expected_format, expected_scalability_mode, support](
const webrtc::SdpVideoFormat& format,
absl::optional<std::string> scalability_mode) {
EXPECT_TRUE(format.IsSameCodec(*expected_format));
EXPECT_EQ(scalability_mode, expected_scalability_mode);
return support;
}));
EXPECT_CALL(*mock_video_encoder_factory_, QueryCodecSupport)
.Times(::testing::AtMost(1));
}
WebrtcEncodingInfoHandler encoding_info_handler(
std::move(video_encoder_factory_), audio_encoder_factory_);
MediaCapabilitiesEncodingInfoCallback encoding_info_callback;
encoding_info_handler.EncodingInfo(
audio_mime_type, video_mime_type, video_scalability_mode,
base::BindOnce(
&MediaCapabilitiesEncodingInfoCallback::OnWebrtcEncodingInfoSupport,
base::Unretained(&encoding_info_callback)));
EXPECT_TRUE(encoding_info_callback.IsCalled());
EXPECT_TRUE(encoding_info_callback.IsSuccess());
EXPECT_EQ(encoding_info_callback.IsSupported(), support.is_supported);
EXPECT_EQ(encoding_info_callback.IsPowerEfficient(),
support.is_power_efficient);
}
protected:
std::vector<webrtc::AudioCodecSpec> kSupportedAudioCodecs;
MockVideoEncoderFactory* mock_video_encoder_factory_;
std::unique_ptr<webrtc::VideoEncoderFactory> video_encoder_factory_;
rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory_;
};
TEST_F(WebrtcEncodingInfoHandlerTests, BasicAudio) {
VerifyEncodingInfo(
"audio/opus", /*video_mime_type=*/absl::nullopt,
/*video_scalability_mode=*/absl::nullopt,
/*expected_format=*/absl::nullopt,
CodecSupport{/*is_supported=*/true, /*is_power_efficient=*/true});
}
TEST_F(WebrtcEncodingInfoHandlerTests, UnsupportedAudio) {
VerifyEncodingInfo(
"audio/foo", /*video_mime_type=*/absl::nullopt,
/*video_scalability_mode=*/absl::nullopt,
/*expected_format=*/absl::nullopt,
CodecSupport{/*is_supported=*/false, /*is_power_efficient=*/false});
}
// These tests verify that the video MIME type is correctly parsed into
// SdpVideoFormat and that the return value from
// VideoEncoderFactory::QueryCodecSupport is correctly returned through the
// callback.
TEST_F(WebrtcEncodingInfoHandlerTests, BasicVideo) {
const webrtc::SdpVideoFormat kExpectedFormat("VP9");
VerifyEncodingInfo(
/*audio_mime_type=*/absl::nullopt, "video/VP9",
/*video_scalability_mode=*/absl::nullopt, kExpectedFormat,
CodecSupport{/*is_supported=*/true, /*is_power_efficient=*/false});
}
TEST_F(WebrtcEncodingInfoHandlerTests, BasicVideoPowerEfficient) {
const webrtc::SdpVideoFormat kExpectedFormat("VP9");
VerifyEncodingInfo(
/*audio_mime_type=*/absl::nullopt, "video/VP9",
/*video_scalability_mode=*/absl::nullopt, kExpectedFormat,
CodecSupport{/*is_supported=*/true, /*is_power_efficient=*/true});
}
TEST_F(WebrtcEncodingInfoHandlerTests, UnsupportedVideo) {
const webrtc::SdpVideoFormat kExpectedFormat(
"VP9", webrtc::SdpVideoFormat::Parameters{{"profile-level", "5"}});
VerifyEncodingInfo(
/*audio_mime_type=*/absl::nullopt, "video/VP9; profile-level=5",
/*video_scalability_mode=*/absl::nullopt, kExpectedFormat,
CodecSupport{/*is_supported=*/false, /*is_power_efficient=*/false});
}
TEST_F(WebrtcEncodingInfoHandlerTests, VideoWithScalabilityMode) {
const webrtc::SdpVideoFormat kExpectedFormat("VP9");
VerifyEncodingInfo(
/*audio_mime_type=*/absl::nullopt, "video/VP9", "L1T3", kExpectedFormat,
CodecSupport{/*is_supported=*/true, /*is_power_efficient=*/false});
}
TEST_F(WebrtcEncodingInfoHandlerTests, SupportedAudioUnsupportedVideo) {
const webrtc::SdpVideoFormat kExpectedFormat("foo");
VerifyEncodingInfo(
"audio/opus", "video/foo", /*video_scalability_mode=*/absl::nullopt,
kExpectedFormat,
CodecSupport{/*is_supported=*/false, /*is_power_efficient=*/false});
}
TEST_F(WebrtcEncodingInfoHandlerTests, SupportedVideoUnsupportedAudio) {
const webrtc::SdpVideoFormat kExpectedFormat("VP9");
VerifyEncodingInfo(
"audio/foo", "video/VP9", /*video_scalability_mode=*/absl::nullopt,
kExpectedFormat,
CodecSupport{/*is_supported=*/false, /*is_power_efficient=*/false});
}
} // namespace blink
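
The tests above pin down how per-codec results must surface through the callback: a combined audio+video configuration is reported as supported only when both parts are encodable, and powerEfficient follows the same conjunction, with audio treated as power efficient whenever it is supported. The snippet below is a sketch of that reduction only, written to be consistent with the expectations asserted above; it is not a copy of the real logic in webrtc_encoding_info_handler.cc (which is not part of this excerpt), and the helper name is invented.

// Sketch only: a combination rule consistent with the test expectations above.
#include "third_party/webrtc/api/video_codecs/video_encoder_factory.h"

namespace {

using CodecSupport = webrtc::VideoEncoderFactory::CodecSupport;

// Audio support is a plain codec lookup, so it is treated as power efficient
// whenever the codec is supported; the video part comes straight from
// VideoEncoderFactory::QueryCodecSupport().
CodecSupport CombineAudioAndVideo(bool audio_supported,
                                  CodecSupport video_support) {
  return {audio_supported && video_support.is_supported,
          audio_supported && video_support.is_power_efficient};
}

}  // namespace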

@@ -0,0 +1,34 @@
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/peerconnection/webrtc_util.h"
#include <cstring>
#include "third_party/blink/renderer/platform/network/parsed_content_type.h"
#include "third_party/blink/renderer/platform/wtf/text/wtf_string.h"
#include "third_party/webrtc/api/video_codecs/sdp_video_format.h"
namespace blink {
String WebrtcCodecNameFromMimeType(const String& mime_type,
const char* prefix) {
if (mime_type.StartsWith(prefix)) {
size_t length = mime_type.length() - strlen(prefix) - 1;
const String codec_name = mime_type.Right(length);
return codec_name;
}
return "";
}
webrtc::SdpVideoFormat::Parameters ConvertToSdpVideoFormatParameters(
const ParsedContentHeaderFieldParameters& parameters) {
webrtc::SdpVideoFormat::Parameters sdp_parameters;
for (const auto& parameter : parameters) {
sdp_parameters[parameter.name.Utf8()] = parameter.value.Utf8();
}
return sdp_parameters;
}
} // namespace blink
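
The two helpers above split a MediaCapabilities contentType into what WebRTC needs: the codec name (the MIME subtype) and the fmtp-style parameters. The sketch below shows one way they could be combined into an SdpVideoFormat suitable for VideoEncoderFactory::QueryCodecSupport(); the wrapper name is invented for illustration, and it assumes ParsedContentType exposes its MIME type and parameters via MimeType() and GetParameters().

// Illustrative helper -- not part of this CL.
#include "third_party/blink/renderer/platform/network/parsed_content_type.h"
#include "third_party/blink/renderer/platform/peerconnection/webrtc_util.h"
#include "third_party/webrtc/api/video_codecs/sdp_video_format.h"

namespace blink {

webrtc::SdpVideoFormat SdpVideoFormatFromContentType(const String& content_type) {
  // e.g. "video/VP9; profile-id=0" -> codec "VP9", parameters {"profile-id": "0"}.
  ParsedContentType parsed(content_type);
  const String codec_name =
      WebrtcCodecNameFromMimeType(parsed.MimeType(), "video");
  return webrtc::SdpVideoFormat(
      codec_name.Utf8(),
      ConvertToSdpVideoFormatParameters(parsed.GetParameters()));
}

}  // namespace blink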

@@ -6,6 +6,9 @@
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_PEERCONNECTION_WEBRTC_UTIL_H_
#include "third_party/abseil-cpp/absl/types/optional.h"
#include "third_party/blink/renderer/platform/network/parsed_content_type.h"
#include "third_party/blink/renderer/platform/wtf/text/wtf_string.h"
#include "third_party/webrtc/api/video_codecs/sdp_video_format.h"
namespace blink {
@@ -32,6 +35,11 @@ bool OptionalEquals(const OptionalT1& lhs, const OptionalT2& rhs) {
return *lhs == *rhs;
}
String WebrtcCodecNameFromMimeType(const String& mime_type, const char* prefix);
webrtc::SdpVideoFormat::Parameters ConvertToSdpVideoFormatParameters(
const ParsedContentHeaderFieldParameters& parameters);
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_PEERCONNECTION_WEBRTC_UTIL_H_

@@ -14,12 +14,28 @@ var minimalVideoConfiguration = {
framerate: 24,
};
// Minimal WebRTC VideoConfiguration that will be allowed per spec. All optional
// properties are missing.
var minimalWebrtcVideoConfiguration = {
contentType: 'video/VP9',
width: 800,
height: 600,
bitrate: 3000,
framerate: 24,
};
// Minimal AudioConfiguration that will be allowed per spec. All optional
// properties are missing.
var minimalAudioConfiguration = {
contentType: 'audio/webm; codecs="opus"',
};
// Minimal WebRTC AudioConfiguration that will be allowed per spec. All optional
// properties are missing.
var minimalWebrtcAudioConfiguration = {
contentType: 'audio/opus',
};
promise_test(t => {
return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo());
}, "Test that encodingInfo rejects if it doesn't get a configuration");
@@ -263,20 +279,8 @@ promise_test(t => {
});
}, "Test that encodingInfo returns a valid MediaCapabilitiesInfo objects for record type");
promise_test(t => {
return navigator.mediaCapabilities.encodingInfo({
type: 'transmission',
video: minimalVideoConfiguration,
audio: minimalAudioConfiguration,
}).then(ability => {
assert_equals(typeof ability.supported, "boolean");
assert_equals(typeof ability.smooth, "boolean");
assert_equals(typeof ability.powerEfficient, "boolean");
});
}, "Test that encodingInfo returns a valid MediaCapabilitiesInfo objects for transmission type");
async_test(t => {
var validTypes = [ 'record', 'transmission' ];
var validTypes = [ 'record', 'webrtc' ];
var invalidTypes = [ undefined, null, '', 'foobar', 'mse', 'MediaSource',
'file', 'media-source', ];
@@ -286,8 +290,8 @@ async_test(t => {
validTypes.forEach(type => {
validPromises.push(navigator.mediaCapabilities.encodingInfo({
type: type,
video: minimalVideoConfiguration,
audio: minimalAudioConfiguration,
video: type != "webrtc" ? minimalVideoConfiguration : minimalWebrtcVideoConfiguration,
audio: type != "webrtc" ? minimalAudioConfiguration : minimalWebrtcAudioConfiguration,
}));
});

@@ -0,0 +1,217 @@
<!DOCTYPE html>
<title>MediaCapabilities.encodingInfo() for webrtc</title>
<script src=/resources/testharness.js></script>
<script src="/resources/testharnessreport.js"></script>
<script>
// Minimal VideoConfiguration that will be allowed per spec. All optional
// properties are missing.
var minimalVideoConfiguration = {
contentType: 'video/VP9; profile-level="0"',
width: 800,
height: 600,
bitrate: 3000,
framerate: 24,
};
// Minimal AudioConfiguration that will be allowed per spec. All optional
// properties are missing.
var minimalAudioConfiguration = {
contentType: 'audio/opus',
};
promise_test(t => {
return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
}));
}, "Test that encodingInfo rejects if the configuration doesn't have an audio or video field");
promise_test(t => {
return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
video: {
contentType: 'video/VP9',
width: 800,
height: 600,
bitrate: 3000,
framerate: -1,
},
}));
}, "Test that encodingInfo rejects if the video configuration has a negative framerate");
promise_test(t => {
return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
video: {
contentType: 'video/VP9',
width: 800,
height: 600,
bitrate: 3000,
framerate: 0,
},
}));
}, "Test that encodingInfo rejects if the video configuration has a framerate set to 0");
promise_test(t => {
return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
video: {
contentType: 'video/VP9',
width: 800,
height: 600,
bitrate: 3000,
framerate: Infinity,
},
}));
}, "Test that encodingInfo rejects if the video configuration has a framerate set to Infinity");
promise_test(t => {
return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
video: {
contentType: 'fgeoa',
width: 800,
height: 600,
bitrate: 3000,
framerate: 24,
},
}));
}, "Test that encodingInfo rejects if the video configuration contentType doesn't parse");
promise_test(t => {
return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
video: {
contentType: 'audio/fgeoa',
width: 800,
height: 600,
bitrate: 3000,
framerate: 24,
},
}));
}, "Test that encodingInfo rejects if the video configuration contentType isn't of type video");
promise_test(t => {
return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
audio: { contentType: 'fgeoa' },
}));
}, "Test that encodingInfo rejects if the audio configuration contentType doesn't parse");
promise_test(t => {
return promise_rejects_js(t, TypeError, navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
audio: { contentType: 'video/fgeoa' },
}));
}, "Test that encodingInfo rejects if the audio configuration contentType isn't of type audio");
promise_test(t => {
return navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
video: minimalVideoConfiguration,
audio: minimalAudioConfiguration,
}).then(ability => {
assert_equals(typeof ability.supported, "boolean");
assert_equals(typeof ability.smooth, "boolean");
assert_equals(typeof ability.powerEfficient, "boolean");
});
}, "Test that encodingInfo returns a valid MediaCapabilitiesInfo objects");
promise_test(t => {
return navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
video: {
contentType: 'video/webm; codecs="vp09.00.10.08"',
width: 800,
height: 600,
bitrate: 3000,
framerate: 24,
},
audio: minimalAudioConfiguration,
}).then(ability => {
assert_false(ability.supported);
assert_false(ability.smooth);
assert_false(ability.powerEfficient);
});
}, "Test that encodingInfo returns supported, smooth, and powerEfficient set to false for non-webrtc video content type.");
promise_test(t => {
return navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
video: minimalVideoConfiguration,
audio: {
contentType: 'audio/webm; codecs="opus"',
},
}).then(ability => {
assert_false(ability.supported);
assert_false(ability.smooth);
assert_false(ability.powerEfficient);
});
}, "Test that encodingInfo returns supported, smooth, and powerEfficient set to false for non-webrtc audio content type.");
var validAudioCodecs = (() => {
// Some codecs that are returned by getCapabilities() are not real codecs;
// exclude these from the test.
var excludeList = [ 'audio/CN', 'audio/telephone-event' ];
var audioCodecs = [];
RTCRtpSender.getCapabilities("audio")['codecs'].forEach(codec => {
if (excludeList.indexOf(codec.mimeType) < 0 &&
audioCodecs.indexOf(codec.mimeType) < 0) {
audioCodecs.push(codec.mimeType);
}
});
return audioCodecs;
})();
validAudioCodecs.forEach(codec => {
promise_test(t => {
return navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
audio: {
contentType: codec
}
}).then(ability => {
assert_true(ability.supported);
});
}, "Test that encodingInfo returns supported true for the codec " + codec + " returned by RTCRtpSender.getCapabilities()")}
);
var validVideoCodecs = (() => {
// Some codecs that are returned by getCapabilities() are not real codecs but
// are only used for error correction; exclude these from the test.
var excludeList = [ 'video/rtx', 'video/red', 'video/ulpfec',
'video/flexfec-03' ];
var videoCodecs = [];
RTCRtpSender.getCapabilities("video")['codecs'].forEach(codec => {
if (excludeList.indexOf(codec.mimeType) < 0) {
var mimeType = codec.mimeType;
if ('sdpFmtpLine' in codec) {
mimeType += "; " + codec.sdpFmtpLine;
}
if (videoCodecs.indexOf(mimeType) < 0) {
videoCodecs.push(mimeType);
}
}
});
return videoCodecs;
})();
validVideoCodecs.forEach(codec => {
promise_test(t => {
return navigator.mediaCapabilities.encodingInfo({
type: 'webrtc',
video: {
contentType: codec,
width: 800,
height: 600,
bitrate: 3000,
framerate: 24,
}
}).then(ability => {
assert_true(ability.supported);
});
}, "Test that encodingInfo returns supported true for the codec " + codec + " returned by RTCRtpSender.getCapabilities()")}
);
</script>

@@ -28,20 +28,6 @@ const recordTypeTestCases = [
[ 'audio/ogg', false],
[ 'audio/webm;codecs=vorbis', false]];
const transmissionTypeTestCases = [
[ 'video/vp8', true ],
[ 'audio/opus', true ],
// Temporary unsupported.
[ 'video/h264', false ],
[ 'video/vp9', false ],
// Rejected MIME types
[ 'video/invalid', false ],
[ 'video/mpeg4', false ],
[ 'video/webm;codecs=daala', false ],
[ 'video/webm;codecs=vp8', false ],
[ 'audio/invalid', false ],
[ 'audio/ogg', false ]];
function composeQuery(mimeType, queryType) {
const media = mimeType.split('/')[0];
assert_true(media == 'video' || media == 'audio', 'Unsupported media type');
@@ -72,18 +58,4 @@ recordTypeTestCases.forEach((testCase) => {
testName);
});
// Tests encodingInfo()'s response for "transmission" type request.
transmissionTypeTestCases.forEach((testCase) => {
const queryType = 'transmission'
const mimeType = testCase[0];
const isSupported = testCase[1];
const testName = 'MediaEncodingConfiguration {type:' + queryType + ' video:{contentType:' + mimeType + '}}';
promise_test(() => navigator.mediaCapabilities.encodingInfo(
composeQuery(mimeType, queryType)).then(
(result) => {
assert_equals(result.supported, isSupported, mimeType + ' supported?');
}),
testName);
});
</script>