[MediaCapture Extensions] Eye gaze correction constraints and settings
Some platforms or User Agents may provide built-in support for automatic Eye Gaze Correction, in particular for camera video streams. Web applications may either want to control the feature, which can be as simple as ON/OFF, or at least be aware that it is active. This may allow the web application to update its UI or to not apply Eye Gaze Correction on its own.

Intent to Prototype: https://groups.google.com/a/chromium.org/g/blink-dev/c/EMiN_hyfZ0Y
Spec: https://w3c.github.io/mediacapture-extensions/#exposing-mediastreamtrack-source-eye-gaze-correction-support
Explainer: https://github.com/riju/eyeGazeCorrection/blob/main/explainer.md

Bug: 1506956
Change-Id: Id4bf18ee8e8e92969d6476cee24d617b08a0b7ec
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/5075004
Reviewed-by: Rijubrata Bhaumik <rijubrata.bhaumik@intel.com>
Reviewed-by: Guido Urdaneta <guidou@chromium.org>
Reviewed-by: danakj <danakj@chromium.org>
Commit-Queue: Eero Hakkinen <eero.hakkinen@intel.com>
Cr-Commit-Position: refs/heads/main@{#1239079}
commit 0b9090f669
parent 2ce6751569
committed by: Chromium LUCI CQ
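For illustration only (not part of this change): a minimal TypeScript sketch of how a web application might feature-detect, read, and request the eyeGazeCorrection constraint described above. The names follow the linked spec and the IDL added below, but the interface extensions and the demoEyeGazeCorrection helper are hypothetical, and actual support depends on the platform and User Agent.

// Sketch: the eyeGazeCorrection members are not yet in the standard
// TypeScript DOM library, so they are modeled here as hypothetical extensions.
interface ExtendedSupportedConstraints extends MediaTrackSupportedConstraints {
  eyeGazeCorrection?: boolean;
}
interface ExtendedCapabilities extends MediaTrackCapabilities {
  eyeGazeCorrection?: boolean[];
}
interface ExtendedSettings extends MediaTrackSettings {
  eyeGazeCorrection?: boolean;
}
interface ExtendedConstraintSet extends MediaTrackConstraintSet {
  eyeGazeCorrection?: ConstrainBoolean;
}

async function demoEyeGazeCorrection(): Promise<void> {
  // Feature detection via the supported-constraints dictionary.
  const supported = navigator.mediaDevices.getSupportedConstraints() as
      ExtendedSupportedConstraints;
  if (!supported.eyeGazeCorrection) {
    console.log('eyeGazeCorrection is not exposed by this User Agent.');
    return;
  }

  const stream = await navigator.mediaDevices.getUserMedia({ video: true });
  const [track] = stream.getVideoTracks();

  // Capabilities list the boolean modes the source supports (e.g. [false] or
  // [false, true]); settings report the mode currently in effect.
  const capabilities = track.getCapabilities() as ExtendedCapabilities;
  const settings = track.getSettings() as ExtendedSettings;
  console.log('supported modes:', capabilities.eyeGazeCorrection);
  console.log('current mode:', settings.eyeGazeCorrection);

  // Ask the platform to enable correction if it can. An application that
  // applies its own correction would instead constrain this to false.
  if (capabilities.eyeGazeCorrection?.includes(true)) {
    const constraintSet: ExtendedConstraintSet = { eyeGazeCorrection: true };
    await track.applyConstraints({ advanced: [constraintSet] });
  }
}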
Changed paths:
media/capture/mojom
media/capture/video
third_party/blink/renderer/modules/imagecapture
third_party/blink/renderer/modules/mediastream
identifiability_metrics.cc
image_capture_device_settings.h
media_constraints.cc
media_constraints.h
media_constraints_impl.cc
media_stream_constraints_util_video_device.cc
media_stream_constraints_util_video_device_test.cc
media_stream_track_impl.cc
media_track_capabilities.idl
media_track_constraint_set.idl
media_track_settings.idl
media_track_supported_constraints.idl
test
user_media_request.cc

@@ -22,6 +22,14 @@ enum BackgroundBlurMode {
  BLUR
};

[Stable, Extensible]
enum EyeGazeCorrectionMode {
  [Default]
  OFF,
  ON,
  STARE
};

// https://w3c.github.io/mediacapture-image/#meteringmode-section
[Stable, Extensible]
enum MeteringMode { NONE, MANUAL, SINGLE_SHOT, CONTINUOUS };

@@ -174,6 +182,15 @@ struct PhotoState {
  // Only meaningful if |supported_face_framing_modes| is non-empty.
  [MinVersion=2]
  MeteringMode current_face_framing_mode;
  // An array of unique supported values for
  // |PhotoSettings.current_eye_gaze_correction_mode|.
  // Should not be null (but is nullable for stable backward compatibility).
  [MinVersion=3]
  array<EyeGazeCorrectionMode>? supported_eye_gaze_correction_modes;
  // The current eye gaze correction mode.
  // Only meaningful if |supported_eye_gaze_correction_modes| is non-empty.
  [MinVersion=3]
  EyeGazeCorrectionMode current_eye_gaze_correction_mode;
};

// Equivalent to idl Point2D.

@@ -186,7 +203,8 @@ struct Point2D {
};

// Equivalent to idl PhotoSettings + MediaTrackConstraintSet.
// PODs cannot be nullable, i.e. uint32? bla doesn't work, use |has_bla| flags.
// Primitive types were not nullable in the past. This is the reason why older
// fields in this struct have has_ flags, and newer fields are nullable.
[Stable]
struct PhotoSettings {
  // https://w3c.github.io/mediacapture-image/#mediatracksettings-section and

@@ -249,6 +267,8 @@ struct PhotoSettings {
  bool has_face_framing_mode;
  [MinVersion=2]
  MeteringMode face_framing_mode;
  [MinVersion=3]
  EyeGazeCorrectionMode? eye_gaze_correction_mode;
};

// This is a mojo move-only equivalent of a Blob, i.e. MIME type and Data.
@@ -26,6 +26,7 @@ media::mojom::PhotoStatePtr CreateEmptyPhotoState() {
  photo_capabilities->torch = false;
  photo_capabilities->red_eye_reduction = media::mojom::RedEyeReduction::NEVER;
  photo_capabilities->supported_background_blur_modes = {};
  photo_capabilities->supported_eye_gaze_correction_modes = {};
  photo_capabilities->supported_face_framing_modes = {};
  return photo_capabilities;
}
@@ -179,6 +179,25 @@ gfx::ColorSpace GetDefaultColorSpace(VideoPixelFormat format) {

}  // anonymous namespace

FakeDeviceState::FakeDeviceState(double pan,
                                 double tilt,
                                 double zoom,
                                 double exposure_time,
                                 double focus_distance,
                                 float frame_rate,
                                 VideoPixelFormat pixel_format)
    : pan(pan),
      tilt(tilt),
      zoom(zoom),
      exposure_time(exposure_time),
      focus_distance(focus_distance),
      format(gfx::Size(), frame_rate, pixel_format) {
  exposure_mode = (exposure_time >= 0.0f) ? mojom::MeteringMode::MANUAL
                                          : mojom::MeteringMode::CONTINUOUS;
  focus_mode = (focus_distance >= 0.0f) ? mojom::MeteringMode::MANUAL
                                        : mojom::MeteringMode::CONTINUOUS;
}

// Paints and delivers frames to a client, which is set via Initialize().
class FrameDeliverer {
 public:

@@ -679,6 +698,13 @@ void FakePhotoDevice::GetPhotoState(
          ? mojom::BackgroundBlurMode::BLUR
          : mojom::BackgroundBlurMode::OFF;

  photo_state->supported_eye_gaze_correction_modes = {
      mojom::EyeGazeCorrectionMode::OFF, mojom::EyeGazeCorrectionMode::ON};
  photo_state->current_eye_gaze_correction_mode =
      fake_device_state_->eye_gaze_correction
          ? mojom::EyeGazeCorrectionMode::ON
          : mojom::EyeGazeCorrectionMode::OFF;

  std::move(callback).Run(std::move(photo_state));
}

@@ -729,6 +755,19 @@ void FakePhotoDevice::SetPhotoOptions(
    }
  }

  if (settings->eye_gaze_correction_mode.has_value()) {
    switch (settings->eye_gaze_correction_mode.value()) {
      case mojom::EyeGazeCorrectionMode::OFF:
        device_state_write_access->eye_gaze_correction = false;
        break;
      case mojom::EyeGazeCorrectionMode::ON:
        device_state_write_access->eye_gaze_correction = true;
        break;
      case mojom::EyeGazeCorrectionMode::STARE:
        return;  // Not a supported fake eye gaze correction mode.
    }
  }

  std::move(callback).Run(true);
}
@@ -123,18 +123,7 @@ struct FakeDeviceState {
                  double exposure_time,
                  double focus_distance,
                  float frame_rate,
                  VideoPixelFormat pixel_format)
      : pan(pan),
        tilt(tilt),
        zoom(zoom),
        exposure_time(exposure_time),
        focus_distance(focus_distance),
        format(gfx::Size(), frame_rate, pixel_format) {
    exposure_mode = (exposure_time >= 0.0f) ? mojom::MeteringMode::MANUAL
                                            : mojom::MeteringMode::CONTINUOUS;
    focus_mode = (focus_distance >= 0.0f) ? mojom::MeteringMode::MANUAL
                                          : mojom::MeteringMode::CONTINUOUS;
  }
                  VideoPixelFormat pixel_format);

  double pan;
  double tilt;

@@ -145,6 +134,7 @@ struct FakeDeviceState {
  mojom::MeteringMode focus_mode;
  VideoCaptureFormat format;
  bool background_blur = false;
  bool eye_gaze_correction = false;
};

// A dependency needed by FakeVideoCaptureDevice.
@@ -367,6 +367,15 @@ TEST_F(FakeVideoCaptureDeviceTest, GetAndSetCapabilities) {
                               mojom::BackgroundBlurMode::BLUR));
  EXPECT_EQ(mojom::BackgroundBlurMode::OFF, state->background_blur_mode);

  ASSERT_TRUE(state->supported_eye_gaze_correction_modes);
  EXPECT_EQ(2u, state->supported_eye_gaze_correction_modes->size());
  EXPECT_EQ(1, base::ranges::count(*state->supported_eye_gaze_correction_modes,
                                   mojom::EyeGazeCorrectionMode::OFF));
  EXPECT_EQ(1, base::ranges::count(*state->supported_eye_gaze_correction_modes,
                                   mojom::EyeGazeCorrectionMode::ON));
  EXPECT_EQ(mojom::EyeGazeCorrectionMode::OFF,
            state->current_eye_gaze_correction_mode);

  // Set options: zoom to the maximum value.
  const int max_zoom_value = state->zoom->max;
  VideoCaptureDevice::SetPhotoOptionsCallback scoped_set_callback =
@@ -65,6 +65,7 @@ enum class ImageCapture::MediaTrackConstraintSetType {
namespace {

using BackgroundBlurMode = media::mojom::blink::BackgroundBlurMode;
using EyeGazeCorrectionMode = media::mojom::blink::EyeGazeCorrectionMode;
using FillLightMode = media::mojom::blink::FillLightMode;
using MeteringMode = media::mojom::blink::MeteringMode;
using RedEyeReduction = media::mojom::blink::RedEyeReduction;

@@ -197,6 +198,9 @@ void CopyCommonMembers(const T* source,
  if (source->hasBackgroundBlur()) {
    destination->setBackgroundBlur(source->backgroundBlur());
  }
  if (source->hasEyeGazeCorrection()) {
    destination->setEyeGazeCorrection(source->eyeGazeCorrection());
  }
  if (source->hasFaceFraming()) {
    destination->setFaceFraming(source->faceFraming());
  }

@@ -514,6 +518,10 @@ BackgroundBlurMode ParseBackgroundBlur(bool blink_mode) {
  return blink_mode ? BackgroundBlurMode::BLUR : BackgroundBlurMode::OFF;
}

EyeGazeCorrectionMode ParseEyeGazeCorrection(bool blink_mode) {
  return blink_mode ? EyeGazeCorrectionMode::ON : EyeGazeCorrectionMode::OFF;
}

MeteringMode ParseFaceFraming(bool blink_mode) {
  return blink_mode ? MeteringMode::CONTINUOUS : MeteringMode::NONE;
}

@@ -550,6 +558,17 @@ bool ToBooleanMode(BackgroundBlurMode mode) {
  NOTREACHED_NORETURN();
}

bool ToBooleanMode(EyeGazeCorrectionMode mode) {
  switch (mode) {
    case EyeGazeCorrectionMode::OFF:
      return false;
    case EyeGazeCorrectionMode::ON:
    case EyeGazeCorrectionMode::STARE:
      return true;
  }
  NOTREACHED_NORETURN();
}

WebString ToString(MeteringMode value) {
  switch (value) {
    case MeteringMode::NONE:

@@ -2130,6 +2149,18 @@ void ImageCapture::ApplyMediaTrackConstraintSetToSettings(
      settings->background_blur_mode = ParseBackgroundBlur(setting);
    }
  }
  if (constraint_set->hasEyeGazeCorrection() &&
      effective_capabilities->hasEyeGazeCorrection()) {
    bool has_setting = false;
    bool setting;
    effective_capabilities->setEyeGazeCorrection(ApplyValueConstraint(
        &has_setting, &setting, effective_capabilities->eyeGazeCorrection(),
        constraint_set->eyeGazeCorrection(), constraint_set_type));
    if (has_setting) {
      settings->eye_gaze_correction_mode.emplace(
          ParseEyeGazeCorrection(setting));
    }
  }
  if (constraint_set->hasFaceFraming() &&
      effective_capabilities->hasFaceFraming()) {
    bool has_setting = false;

@@ -2314,6 +2345,16 @@ bool ImageCapture::CheckMediaTrackConstraintSet(
        "backgroundBlur setting value not supported");
    return false;
  }
  if (constraint_set->hasEyeGazeCorrection() &&
      effective_capabilities->hasEyeGazeCorrection() &&
      !CheckValueConstraint(effective_capabilities->eyeGazeCorrection(),
                            constraint_set->eyeGazeCorrection(),
                            constraint_set_type)) {
    MaybeRejectWithOverconstrainedError(
        resolver, "eyeGazeCorrection",
        "eyeGazeCorrection setting value not supported");
    return false;
  }
  if (constraint_set->hasFaceFraming() &&
      effective_capabilities->hasFaceFraming() &&
      !CheckValueConstraint(effective_capabilities->faceFraming(),

@@ -2591,14 +2632,33 @@ void ImageCapture::UpdateMediaTrackSettingsAndCapabilities(
  if (photo_state->supported_background_blur_modes &&
      !photo_state->supported_background_blur_modes->empty()) {
    Vector<bool> supported_background_blur_modes;
    for (auto mode : *photo_state->supported_background_blur_modes)
      supported_background_blur_modes.push_back(ToBooleanMode(mode));
    for (auto mode : *photo_state->supported_background_blur_modes) {
      bool boolean_mode = ToBooleanMode(mode);
      if (!base::Contains(supported_background_blur_modes, boolean_mode)) {
        supported_background_blur_modes.push_back(boolean_mode);
      }
    }
    capabilities_->setBackgroundBlur(
        std::move(supported_background_blur_modes));
    settings_->setBackgroundBlur(
        ToBooleanMode(photo_state->background_blur_mode));
  }

  if (photo_state->supported_eye_gaze_correction_modes &&
      !photo_state->supported_eye_gaze_correction_modes->empty()) {
    Vector<bool> supported_eye_gaze_correction_modes;
    for (const auto& mode : *photo_state->supported_eye_gaze_correction_modes) {
      bool boolean_mode = ToBooleanMode(mode);
      if (!base::Contains(supported_eye_gaze_correction_modes, boolean_mode)) {
        supported_eye_gaze_correction_modes.push_back(boolean_mode);
      }
    }
    capabilities_->setEyeGazeCorrection(
        std::move(supported_eye_gaze_correction_modes));
    settings_->setEyeGazeCorrection(
        ToBooleanMode(photo_state->current_eye_gaze_correction_mode));
  }

  if (photo_state->supported_face_framing_modes &&
      !photo_state->supported_face_framing_modes->empty()) {
    Vector<bool> supported_face_framing_modes;

@@ -2789,6 +2849,13 @@ ImageCapture::GetConstraintWithCapabilityExistenceMismatch(
          constraint_set_type)) {
    return "backgroundBlur";
  }
  if (constraint_set->hasEyeGazeCorrection() &&
      !CheckIfCapabilityExistenceSatisfiesConstraint(
          constraint_set->eyeGazeCorrection(),
          CapabilityExists(capabilities_->hasEyeGazeCorrection()),
          constraint_set_type)) {
    return "eyeGazeCorrection";
  }
  if (constraint_set->hasFaceFraming() &&
      !CheckIfCapabilityExistenceSatisfiesConstraint(
          constraint_set->faceFraming(),
@@ -352,6 +352,9 @@ void CheckExactValues(
  EXPECT_TRUE(settings->has_background_blur_mode);
  EXPECT_EQ(settings->background_blur_mode,
            media::mojom::blink::BackgroundBlurMode::BLUR);
  EXPECT_TRUE(settings->eye_gaze_correction_mode.has_value());
  EXPECT_EQ(settings->eye_gaze_correction_mode.value(),
            media::mojom::blink::EyeGazeCorrectionMode::OFF);
  EXPECT_TRUE(settings->has_face_framing_mode);
  EXPECT_EQ(settings->face_framing_mode,
            media::mojom::blink::MeteringMode::CONTINUOUS);

@@ -425,6 +428,7 @@ void CheckMaxValues(const media::mojom::blink::PhotoSettingsPtr& settings,
  }
  EXPECT_FALSE(settings->has_torch);
  EXPECT_FALSE(settings->has_background_blur_mode);
  EXPECT_FALSE(settings->eye_gaze_correction_mode.has_value());
  EXPECT_FALSE(settings->has_face_framing_mode);
}

@@ -496,6 +500,7 @@ void CheckMinValues(const media::mojom::blink::PhotoSettingsPtr& settings,
  }
  EXPECT_FALSE(settings->has_torch);
  EXPECT_FALSE(settings->has_background_blur_mode);
  EXPECT_FALSE(settings->eye_gaze_correction_mode.has_value());
  EXPECT_FALSE(settings->has_face_framing_mode);
}

@@ -520,6 +525,7 @@ void CheckNoValues(const media::mojom::blink::PhotoSettingsPtr& settings,
  EXPECT_FALSE(settings->has_zoom);
  EXPECT_FALSE(settings->has_torch);
  EXPECT_FALSE(settings->has_background_blur_mode);
  EXPECT_FALSE(settings->eye_gaze_correction_mode.has_value());
  EXPECT_FALSE(settings->has_face_framing_mode);
}

@@ -604,6 +610,9 @@ void PopulateConstraintSet(
  constraint_set->setBackgroundBlur(
      MakeGarbageCollected<V8UnionBooleanOrConstrainBooleanParameters>(
          ConstraintCreator::Create(all_capabilities->backgroundBlur()[0])));
  constraint_set->setEyeGazeCorrection(
      MakeGarbageCollected<V8UnionBooleanOrConstrainBooleanParameters>(
          ConstraintCreator::Create(all_capabilities->eyeGazeCorrection()[0])));
  constraint_set->setFaceFraming(
      MakeGarbageCollected<V8UnionBooleanOrConstrainBooleanParameters>(
          ConstraintCreator::Create(all_capabilities->faceFraming()[0])));

@@ -726,8 +735,10 @@ class ImageCaptureConstraintTest : public ImageCaptureTest {
    all_capabilities_->setZoom(CreateMediaSettingsRange("zo"));
    all_capabilities_->setTorch(true);
    all_capabilities_->setBackgroundBlur({true});
    all_capabilities_->setEyeGazeCorrection({false});
    all_capabilities_->setFaceFraming({true, false});
    all_non_capabilities_->setBackgroundBlur({false});
    all_non_capabilities_->setEyeGazeCorrection({true});
    default_settings_ = MediaTrackSettings::Create();
    default_settings_->setWhiteBalanceMode(
        all_capabilities_->whiteBalanceMode()[0]);

@@ -752,7 +763,8 @@ class ImageCaptureConstraintTest : public ImageCaptureTest {
    default_settings_->setTilt(RangeMean(all_capabilities_->tilt()));
    default_settings_->setZoom(RangeMean(all_capabilities_->zoom()));
    default_settings_->setTorch(false);
    default_settings_->setBackgroundBlur(false);
    default_settings_->setBackgroundBlur(true);
    default_settings_->setEyeGazeCorrection(false);
    default_settings_->setFaceFraming(false);
    // Capabilities and default settings must be chosen so that at least
    // the constraint set {exposureCompensation: {max: ...}} with
@@ -226,6 +226,7 @@ void Visit(IdentifiableTokenBuilder& builder,
  Visit(builder, set.getZoomOr(nullptr));
  Visit(builder, set.getTorchOr(nullptr));
  Visit(builder, set.getBackgroundBlurOr(nullptr));
  Visit(builder, set.getEyeGazeCorrectionOr(nullptr));
  Visit(builder, set.getFaceFramingOr(nullptr));
}
@@ -16,6 +16,8 @@ struct MODULES_EXPORT ImageCaptureDeviceSettings {
  absl::optional<double> zoom;
  absl::optional<bool> torch;
  absl::optional<bool> background_blur;
  absl::optional<bool> eye_gaze_correction;
  absl::optional<bool> face_framing;
};

}  // namespace blink
@@ -387,6 +387,8 @@ MediaTrackConstraintSetPlatform::MediaTrackConstraintSetPlatform()
      zoom("zoom"),
      torch("torch"),
      background_blur("backgroundBlur"),
      eye_gaze_correction("eyeGazeCorrection"),
      face_framing("faceFraming"),
      media_stream_source("mediaStreamSource"),
      render_to_associated_sink("chromeRenderToAssociatedSink"),
      goog_echo_cancellation("googEchoCancellation"),

@@ -425,6 +427,8 @@ Vector<const BaseConstraint*> MediaTrackConstraintSetPlatform::AllConstraints()
      &zoom,
      &torch,
      &background_blur,
      &eye_gaze_correction,
      &face_framing,
      &render_to_associated_sink,
      &goog_echo_cancellation,
      &goog_experimental_echo_cancellation,
@@ -256,6 +256,8 @@ struct MediaTrackConstraintSetPlatform {

  // W3C Media Capture Extensions
  BooleanConstraint background_blur;
  BooleanConstraint eye_gaze_correction;
  BooleanConstraint face_framing;

  // Constraints not exposed in Blink at the moment, only through
  // the legacy name interface.
@@ -618,6 +618,16 @@ bool ValidateAndCopyConstraintSet(
                          constraint_buffer.background_blur);
  }

  if (constraints_in->hasEyeGazeCorrection()) {
    CopyBooleanConstraint(constraints_in->eyeGazeCorrection(), naked_treatment,
                          constraint_buffer.eye_gaze_correction);
  }

  if (constraints_in->hasFaceFraming()) {
    CopyBooleanConstraint(constraints_in->faceFraming(), naked_treatment,
                          constraint_buffer.face_framing);
  }

  if (constraints_in->hasDisplaySurface()) {
    if (!ValidateAndCopyStringConstraint(
            constraints_in->displaySurface(), naked_treatment,

@@ -840,6 +850,13 @@ void ConvertConstraintSet(const MediaTrackConstraintSetPlatform& input,
    output->setBackgroundBlur(
        ConvertBoolean(input.background_blur, naked_treatment));
  }
  if (!input.eye_gaze_correction.IsUnconstrained()) {
    output->setEyeGazeCorrection(
        ConvertBoolean(input.eye_gaze_correction, naked_treatment));
  }
  if (!input.face_framing.IsUnconstrained()) {
    output->setFaceFraming(ConvertBoolean(input.face_framing, naked_treatment));
  }
  if (!input.suppress_local_audio_playback.IsUnconstrained()) {
    output->setSuppressLocalAudioPlayback(
        ConvertBoolean(input.suppress_local_audio_playback, naked_treatment));
third_party/blink/renderer/modules/mediastream/media_stream_constraints_util_video_device.cc
@@ -607,6 +607,8 @@ class ImageCaptureDeviceState {

    absl::optional<BoolSet> torch_intersection_;
    absl::optional<BoolSet> background_blur_intersection_;
    absl::optional<BoolSet> eye_gaze_correction_intersection_;
    absl::optional<BoolSet> face_framing_intersection_;
  };

  explicit ImageCaptureDeviceState(const DeviceInfo& device) {}

@@ -621,7 +623,14 @@ class ImageCaptureDeviceState {
                                failed_constraint_name) &&
          TryToApplyConstraint(
              constraint_set.background_blur, background_blur_set_,
              result->background_blur_intersection_, failed_constraint_name))) {
              result->background_blur_intersection_, failed_constraint_name) &&
          TryToApplyConstraint(constraint_set.eye_gaze_correction,
                               eye_gaze_correction_set_,
                               result->eye_gaze_correction_intersection_,
                               failed_constraint_name) &&
          TryToApplyConstraint(constraint_set.face_framing, face_framing_set_,
                               result->face_framing_intersection_,
                               failed_constraint_name))) {
      result.reset();
    }

@@ -635,13 +644,22 @@ class ImageCaptureDeviceState {
    if (result.background_blur_intersection_.has_value()) {
      background_blur_set_ = *result.background_blur_intersection_;
    }
    if (result.eye_gaze_correction_intersection_.has_value()) {
      eye_gaze_correction_set_ = *result.eye_gaze_correction_intersection_;
    }
    if (result.face_framing_intersection_.has_value()) {
      face_framing_set_ = *result.face_framing_intersection_;
    }
  }

  double Fitness(
      const MediaTrackConstraintSetPlatform& basic_constraint_set) const {
    return BoolSetFitness(basic_constraint_set.torch, torch_set_) +
           BoolSetFitness(basic_constraint_set.background_blur,
                          background_blur_set_);
                          background_blur_set_) +
           BoolSetFitness(basic_constraint_set.eye_gaze_correction,
                          eye_gaze_correction_set_) +
           BoolSetFitness(basic_constraint_set.face_framing, face_framing_set_);
  }

  absl::optional<ImageCaptureDeviceSettings> SelectSettings(

@@ -656,9 +674,14 @@ class ImageCaptureDeviceState {
    settings->torch = SelectSetting(basic_constraint_set.torch, torch_set_);
    settings->background_blur = SelectSetting(
        basic_constraint_set.background_blur, background_blur_set_);
    settings->eye_gaze_correction = SelectSetting(
        basic_constraint_set.eye_gaze_correction, eye_gaze_correction_set_);
    settings->face_framing =
        SelectSetting(basic_constraint_set.face_framing, face_framing_set_);

    if (!(settings->pan || settings->tilt || settings->zoom ||
          settings->torch || settings->background_blur)) {
          settings->torch || settings->background_blur ||
          settings->eye_gaze_correction || settings->face_framing)) {
      settings.reset();
    }

@@ -700,6 +723,8 @@ class ImageCaptureDeviceState {

  BoolSet torch_set_;
  BoolSet background_blur_set_;
  BoolSet eye_gaze_correction_set_;
  BoolSet face_framing_set_;
};

// Returns true if |constraint_set| can be satisfied by |device|. Otherwise,
third_party/blink/renderer/modules/mediastream/media_stream_constraints_util_video_device_test.cc
@@ -207,6 +207,8 @@ class MediaStreamConstraintsUtilVideoDeviceTest : public testing::Test {
    return {
        &MediaTrackConstraintSetPlatform::torch,
        &MediaTrackConstraintSetPlatform::background_blur,
        &MediaTrackConstraintSetPlatform::eye_gaze_correction,
        &MediaTrackConstraintSetPlatform::face_framing,
    };
  }

@@ -2620,6 +2622,9 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest,
    // ignored because it contradicts the third set.
    EXPECT_EQ(result.image_capture_device_settings()->torch.has_value(),
              constraint == &MediaTrackConstraintSetPlatform::torch);
    if (result.image_capture_device_settings()->torch.has_value()) {
      EXPECT_FALSE(result.image_capture_device_settings()->torch.value());
    }
    EXPECT_EQ(
        result.image_capture_device_settings()->background_blur.has_value(),
        constraint == &MediaTrackConstraintSetPlatform::background_blur);

@@ -2627,8 +2632,19 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest,
      EXPECT_FALSE(
          result.image_capture_device_settings()->background_blur.value());
    }
    if (result.image_capture_device_settings()->torch.has_value()) {
      EXPECT_FALSE(result.image_capture_device_settings()->torch.value());
    EXPECT_EQ(
        result.image_capture_device_settings()->eye_gaze_correction.has_value(),
        constraint == &MediaTrackConstraintSetPlatform::eye_gaze_correction);
    if (result.image_capture_device_settings()
            ->eye_gaze_correction.has_value()) {
      EXPECT_FALSE(
          result.image_capture_device_settings()->eye_gaze_correction.value());
    }
    EXPECT_EQ(result.image_capture_device_settings()->face_framing.has_value(),
              constraint == &MediaTrackConstraintSetPlatform::face_framing);
    if (result.image_capture_device_settings()->face_framing.has_value()) {
      EXPECT_FALSE(
          result.image_capture_device_settings()->face_framing.value());
    }
  }
}

@@ -2764,6 +2780,9 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, BasicImageCapture) {
    ASSERT_TRUE(result.image_capture_device_settings().has_value());
    EXPECT_EQ(result.image_capture_device_settings()->torch.has_value(),
              constraint == &MediaTrackConstraintSetPlatform::torch);
    if (result.image_capture_device_settings()->torch.has_value()) {
      EXPECT_FALSE(result.image_capture_device_settings()->torch.value());
    }
    EXPECT_EQ(
        result.image_capture_device_settings()->background_blur.has_value(),
        constraint == &MediaTrackConstraintSetPlatform::background_blur);

@@ -2771,8 +2790,19 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, BasicImageCapture) {
      EXPECT_FALSE(
          result.image_capture_device_settings()->background_blur.value());
    }
    if (result.image_capture_device_settings()->torch.has_value()) {
      EXPECT_FALSE(result.image_capture_device_settings()->torch.value());
    EXPECT_EQ(
        result.image_capture_device_settings()->eye_gaze_correction.has_value(),
        constraint == &MediaTrackConstraintSetPlatform::eye_gaze_correction);
    if (result.image_capture_device_settings()
            ->eye_gaze_correction.has_value()) {
      EXPECT_FALSE(
          result.image_capture_device_settings()->eye_gaze_correction.value());
    }
    EXPECT_EQ(result.image_capture_device_settings()->face_framing.has_value(),
              constraint == &MediaTrackConstraintSetPlatform::face_framing);
    if (result.image_capture_device_settings()->face_framing.has_value()) {
      EXPECT_FALSE(
          result.image_capture_device_settings()->face_framing.value());
    }
  }
}
@@ -90,6 +90,7 @@ bool ConstraintSetHasImageCapture(
         constraint_set->hasFocusDistance() || constraint_set->hasPan() ||
         constraint_set->hasTilt() || constraint_set->hasZoom() ||
         constraint_set->hasTorch() || constraint_set->hasBackgroundBlur() ||
         constraint_set->hasEyeGazeCorrection() ||
         constraint_set->hasFaceFraming();
}
@@ -40,6 +40,7 @@ dictionary MediaTrackCapabilities {
    boolean torch;
    // W3C Media Capture Extensions
    [RuntimeEnabled=MediaCaptureBackgroundBlur] sequence<boolean> backgroundBlur;
    [RuntimeEnabled=MediaCaptureCameraControls] sequence<boolean> eyeGazeCorrection;
    [RuntimeEnabled=MediaCaptureCameraControls] sequence<boolean> faceFraming;
    // Screen Capture API
    // https://w3c.github.io/mediacapture-screen-share
@@ -50,6 +50,7 @@ dictionary MediaTrackConstraintSet {

    // W3C Media Capture Extensions
    [RuntimeEnabled=MediaCaptureBackgroundBlur] ConstrainBoolean backgroundBlur;
    [RuntimeEnabled=MediaCaptureCameraControls] ConstrainBoolean eyeGazeCorrection;
    [RuntimeEnabled=MediaCaptureCameraControls] ConstrainBoolean faceFraming;

    // Screen Capture API
@@ -45,6 +45,7 @@ dictionary MediaTrackSettings {

    // W3C Media Capture Extensions
    [RuntimeEnabled=MediaCaptureBackgroundBlur] boolean backgroundBlur;
    [RuntimeEnabled=MediaCaptureCameraControls] boolean eyeGazeCorrection;
    [RuntimeEnabled=MediaCaptureCameraControls] boolean faceFraming;

    // Screen Capture API
@@ -48,6 +48,7 @@ dictionary MediaTrackSupportedConstraints {

    // W3C Media Capture Extensions
    [RuntimeEnabled=MediaCaptureBackgroundBlur] boolean backgroundBlur = true;
    [RuntimeEnabled=MediaCaptureCameraControls] boolean eyeGazeCorrection = true;
    [RuntimeEnabled=MediaCaptureCameraControls] boolean faceFraming = true;

    // Screen Capture API
@@ -41,10 +41,12 @@ void FakeImageCapture::GetPhotoState(const WTF::String& source_id,
  photo_capabilities->tilt = media::mojom::blink::Range::New();
  photo_capabilities->zoom = media::mojom::blink::Range::New();
  photo_capabilities->focus_distance = media::mojom::blink::Range::New();
  photo_capabilities->torch = false;
  photo_capabilities->supports_torch = false;
  photo_capabilities->red_eye_reduction =
      media::mojom::blink::RedEyeReduction::NEVER;
  photo_capabilities->supported_background_blur_modes = {};
  photo_capabilities->supported_eye_gaze_correction_modes = {};
  photo_capabilities->supported_face_framing_modes = {};
  std::move(callback).Run(std::move(photo_capabilities));
}
@@ -419,6 +419,8 @@ UserMediaRequest* UserMediaRequest::Create(
      &video_basic.tilt,
      &video_basic.zoom,
      &video_basic.background_blur,
      &video_basic.eye_gaze_correction,
      &video_basic.face_framing,
  };
  for (const BaseConstraint* constraint : constraints) {
    if (constraint->HasMandatory()) {