
Remove Media Capture Depth Stream Extensions videoKind

As the Media Capture Depth Stream Extensions spec has been
discontinued [1], this CL cleans up the Chromium implementation by
removing the videoKind constraint, capability, and setting.

[1]: https://lists.w3.org/Archives/Public/public-webrtc/2022Jan/0027.html

Bug: 1241003
Change-Id: If08555ac7cf2191b50d3932b9b4f325ad62e1970
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2621812
Reviewed-by: Elad Alon <eladalon@chromium.org>
Reviewed-by: Jochen Eisinger <jochen@chromium.org>
Reviewed-by: Brian Sheedy <bsheedy@chromium.org>
Commit-Queue: François Beaufort <beaufort.francois@gmail.com>
Cr-Commit-Position: refs/heads/main@{#963954}
François Beaufort
2022-01-27 09:53:46 +00:00
committed by Chromium LUCI CQ
parent 105716ef02
commit df80d46d91
34 changed files with 12 additions and 3172 deletions
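For reference, here is a minimal sketch (not part of this CL) of the web-exposed surface being removed, assuming a pre-CL Chromium build started with --enable-blink-features=MediaCaptureDepthVideoKind and a depth-capable (16-bit) capture device; after this CL, videoKind is no longer reported by getSupportedConstraints(), is ignored as a constraint, and no longer appears in getSettings().

// Illustrative sketch only (not from this CL): how a page used the removed
// videoKind surface in a pre-CL build with the MediaCaptureDepthVideoKind
// Blink feature enabled.
async function logDepthVideoKind() {
  // Removed from MediaTrackSupportedConstraints: reported true pre-CL.
  console.log(navigator.mediaDevices.getSupportedConstraints().videoKind);

  // Removed constraint: select a depth camera instead of a color one.
  const stream = await navigator.mediaDevices.getUserMedia({
    video: { videoKind: { exact: 'depth' } },
  });

  // Removed setting: pre-CL this reported "depth" or "color".
  const [track] = stream.getVideoTracks();
  console.log(track.getSettings().videoKind);
  track.stop();
}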

@ -1,102 +0,0 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stddef.h>
#include "base/command_line.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "content/browser/webrtc/webrtc_content_browsertest_base.h"
#include "content/public/common/content_switches.h"
#include "content/public/test/browser_test.h"
#include "content/public/test/content_browser_test_utils.h"
#include "media/base/media_switches.h"
#include "net/test/embedded_test_server/embedded_test_server.h"
namespace {
static const char kGetDepthStreamAndCallCreateImageBitmap[] =
"getDepthStreamAndCallCreateImageBitmap";
static const char kGetStreamsByVideoKind[] = "getStreamsByVideoKind";
static const char kGetStreamsByVideoKindNoDepth[] =
"getStreamsByVideoKindNoDepth";
} // namespace
namespace content {
template <int device_count, bool enable_video_kind>
class WebRtcDepthCaptureBrowserTest : public WebRtcContentBrowserTestBase {
public:
WebRtcDepthCaptureBrowserTest() {
// Automatically grant device permission.
AppendUseFakeUIForMediaStreamFlag();
}
~WebRtcDepthCaptureBrowserTest() override {}
void SetUp() override {
base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
ASSERT_FALSE(
command_line->HasSwitch(switches::kUseFakeDeviceForMediaStream));
command_line->AppendSwitchASCII(
switches::kUseFakeDeviceForMediaStream,
base::StringPrintf("device-count=%d", device_count));
if (enable_video_kind) {
command_line->AppendSwitchASCII(switches::kEnableBlinkFeatures,
"MediaCaptureDepthVideoKind");
}
WebRtcContentBrowserTestBase::SetUp();
}
};
// Command lines must be configured in SetUpCommandLine, before the test is
// multi-threaded, so any variations must be embedded in the test fixture.
// Test using two video capture devices - a color and a 16-bit depth device.
using WebRtcTwoDeviceDepthCaptureBrowserTest =
WebRtcDepthCaptureBrowserTest<2, false>;
using WebRtcTwoDeviceDepthCaptureVideoKindBrowserTest =
WebRtcDepthCaptureBrowserTest<2, true>;
// Test using only a color device.
using WebRtcOneDeviceDepthCaptureVideoKindBrowserTest =
WebRtcDepthCaptureBrowserTest<1, true>;
IN_PROC_BROWSER_TEST_F(WebRtcTwoDeviceDepthCaptureBrowserTest,
GetDepthStreamAndCallCreateImageBitmap) {
ASSERT_TRUE(embedded_test_server()->Start());
GURL url(
embedded_test_server()->GetURL("/media/getusermedia-depth-capture.html"));
EXPECT_TRUE(NavigateToURL(shell(), url));
ExecuteJavascriptAndWaitForOk(base::StringPrintf(
"%s({video: true});", kGetDepthStreamAndCallCreateImageBitmap));
}
IN_PROC_BROWSER_TEST_F(WebRtcTwoDeviceDepthCaptureVideoKindBrowserTest,
GetStreamsByVideoKind) {
ASSERT_TRUE(embedded_test_server()->Start());
GURL url(
embedded_test_server()->GetURL("/media/getusermedia-depth-capture.html"));
EXPECT_TRUE(NavigateToURL(shell(), url));
ExecuteJavascriptAndWaitForOk(
base::StringPrintf("%s({video: true});", kGetStreamsByVideoKind));
}
IN_PROC_BROWSER_TEST_F(WebRtcOneDeviceDepthCaptureVideoKindBrowserTest,
GetStreamsByVideoKindNoDepth) {
ASSERT_TRUE(embedded_test_server()->Start());
GURL url(
embedded_test_server()->GetURL("/media/getusermedia-depth-capture.html"));
EXPECT_TRUE(NavigateToURL(shell(), url));
ExecuteJavascriptAndWaitForOk(
base::StringPrintf("%s({video: true});", kGetStreamsByVideoKindNoDepth));
}
} // namespace content

@ -748,11 +748,6 @@ group("telemetry_gpu_integration_test_support") {
# added in the future.
"//tools/perf/page_sets/maps_perf_test/",
# For depth_capture
"//content/test/data/media/depth_stream_test_utilities.js",
"//content/test/data/media/getusermedia-depth-capture.html",
"//content/test/data/media/webrtc_test_utilities.js",
# For GpuProcess.video
"//content/test/data/media/bear.ogv",
@ -1336,7 +1331,6 @@ test("content_browsertests") {
"../browser/webrtc/webrtc_content_browsertest_base.h",
"../browser/webrtc/webrtc_data_browsertest.cc",
"../browser/webrtc/webrtc_datachannel_browsertest.cc",
"../browser/webrtc/webrtc_depth_capture_browsertest.cc",
"../browser/webrtc/webrtc_getusermedia_browsertest.cc",
"../browser/webrtc/webrtc_image_capture_browsertest.cc",
"../browser/webrtc/webrtc_internals_browsertest.cc",

@ -1,76 +0,0 @@
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
function getConstraintsForDevice(deviceLabel) {
return new Promise(function(resolve, reject) {
navigator.mediaDevices.enumerateDevices()
.then(function(devices) {
for (var i = 0; i < devices.length; ++i) {
if (deviceLabel == devices[i].label) {
return resolve({video:{deviceId: {exact: devices[i].deviceId},
width: {exact:96},
height: {exact:96}}
});
}
}
return reject("Expected to have a device with label:" + deviceLabel);
})
});
}
function getFake16bitStream() {
return new Promise(function(resolve, reject) {
getConstraintsForDevice("fake_device_1")
.then(function(constraints) {
if (!constraints)
return reject("No fake device found");
return navigator.mediaDevices.getUserMedia(constraints);
}).then(function(stream) {
return resolve(stream);
});
});
}
function getStreamOfVideoKind(constraint_kind) {
var constraints = {
video:{
videoKind: constraint_kind
}
}
return navigator.mediaDevices.getUserMedia(constraints);
}
// Data is RGBA array data and could be used with different formats:
// e.g. Uint8Array, Uint8ClampedArray, Float32Array...
// Value at point (row, column) is calculated as
// (top_left_value + (row + column) * step) % wrap_around. wrap_around is 255
// (for Uint8) or 1.0 for float. See FakeVideoCaptureDevice for details.
function verifyPixels(
data, width, height, flip_y, step, wrap_around, tolerance, test_name) {
var rowsColumnsToCheck = [[1, 1],
[0, width - 1],
[height - 1, 0],
[height - 1, width - 1],
[height - 3, width - 3]];
// Calculate all reference points based on top left and compare.
for (var j = 0; j < rowsColumnsToCheck.length; ++j) {
var row = rowsColumnsToCheck[j][0];
var column = rowsColumnsToCheck[j][1];
var i = (width * row + column) * 4;
var calculated = (data[0] + wrap_around +
step * ((flip_y ? -row : row) + column)) % wrap_around;
var diff = Math.abs(calculated - data[i]);
// We reconstruct the values based on top left value. When the reconstructed
// value is near |wrap_around|, read value, data[i], could be wrapped
// around, and vice versa - "diff > wrap_around - tolerance", in that case.
if (diff >= tolerance && diff <= wrap_around - tolerance) {
return Promise.reject(test_name + ": reference value " + data[i] +
" differs from calculated: " + calculated + " at index (row, column) "
+ i + " (" + row + ", " + column + "). TopLeft value:" + data[0] +
", step:" + step + ", flip_y:" + flip_y);
}
}
return true;
}

@ -1,444 +0,0 @@
<html>
<head>
<script type="text/javascript" src="webrtc_test_utilities.js"></script>
<script type="text/javascript" src="depth_stream_test_utilities.js"></script>
<script type="text/javascript">
function cubemapFaces(gl) {
return [gl.TEXTURE_CUBE_MAP_POSITIVE_X,
gl.TEXTURE_CUBE_MAP_NEGATIVE_X,
gl.TEXTURE_CUBE_MAP_POSITIVE_Y,
gl.TEXTURE_CUBE_MAP_NEGATIVE_Y,
gl.TEXTURE_CUBE_MAP_POSITIVE_Z,
gl.TEXTURE_CUBE_MAP_NEGATIVE_Z];
}
$ = function(id) {
return document.getElementById(id);
};
// testVideoToImageBitmap and the tests below are web tests that we
// run here because they require --use-fake-device-for-media-capture.
function getDepthStreamAndCallCreateImageBitmap() {
console.log('Calling getDepthStreamAndCallCreateImageBitmap');
getFake16bitStream().then((stream) => {
detectVideoInLocalView1(stream).then(() => {
testVideoToImageBitmap('local-view-1', function() {
stream.getVideoTracks()[0].stop();
detectVideoStopped('local-view-1')
.then(reportTestSuccess);
}, failedCallback);
});
}, failedCallback);
}
function getBothStreamsAndCheckForFeaturesPresence() {
console.log('Calling getBothStreamsAndCheckForFeaturesPresence');
getConstraintsForDevice("fake_device_0")
.then(function(constraints) {
if (!constraints)
return failTest("No fake video device found.");
return navigator.mediaDevices.getUserMedia(constraints);
}).then(function(video_stream) {
getFake16bitStream().then(function(depth_stream) {
if (video_stream.getVideoTracks().length != 1) {
return failTest("Expected one video track, got " +
video_stream.getVideoTracks().length);
}
if (depth_stream.getVideoTracks().length != 1) {
return failTest("Expected one depth track, got " +
depth_stream.getVideoTracks().length);
}
var video_track = video_stream.getVideoTracks()[0];
var depth_track = depth_stream.getVideoTracks()[0];
// We have specified the fields in getUserMedia constraints. Expect that
// both tracks have them in constraints and settings.
var expected_fields = ["deviceId", "height", "width"];
for (var field in expected_fields) {
var expected_field = expected_fields[field];
if (video_track.getSettings()[expected_field] === undefined) {
return failTest(expected_field +
" missing from video track getSettings().");
}
if (video_track.getConstraints()[expected_field] === undefined) {
return failTest(expected_field +
" missing from video track getConstraints().");
}
if (depth_track.getSettings()[expected_field] === undefined) {
return failTest(expected_field +
" missing from depth track getSettings().");
}
if (depth_track.getConstraints()[expected_field] === undefined) {
return failTest(expected_field +
" missing from depth track getConstraints().");
}
}
reportTestSuccess();
},
failedCallback);
},
failedCallback);
}
function testGetStreamByVideoKindConstraint(constraint, kind) {
return new Promise(function(resolve, reject) {
getStreamOfVideoKind(constraint).then(function(stream) {
if (stream.getVideoTracks().length != 1) {
return reject("Expected one " + kind + " track, got " +
stream.getVideoTracks().length +
" when using constraint " + JSON.stringify(constraint));
}
var track = stream.getVideoTracks()[0];
if (track.getSettings().videoKind != kind) {
return reject("Expected " + kind + " track, got " +
track.getSettings().videoKind +
" when using constraint " + JSON.stringify(constraint));
}
return resolve();
},
failedCallback);
});
}
function getStreamsByVideoKind() {
console.log('Calling getStreamsByVideoKind');
var cases = [{constraint: {exact: "depth"}, kind: "depth"},
{constraint: {exact: "color"}, kind: "color"}];
var tests = [];
for (var i in cases) {
var test_case = cases[i];
tests.push(testGetStreamByVideoKindConstraint(test_case.constraint,
test_case.kind));
}
Promise.all(tests).then(reportTestSuccess, reason => {
failedCallback({name: reason});
});
}
function getStreamsByVideoKindNoDepth() {
console.log('Calling getStreamsByVideoKindNoDepth');
testGetStreamByVideoKindConstraint({exact: "color"}, "color")
.then(function() {
// Getting a depth stream should fail.
getStreamOfVideoKind({exact: "depth"}).then(function(stream) {
return failedCallback({name: "Expected to fail, got depth instead."});
}, function() {
// Getting a random stream should fail.
getStreamOfVideoKind({exact: "fisheye"}).then(function(stream) {
return failedCallback(
{name: "Expected to fail, got fisheye instead."});
}, reportTestSuccess);
});
}, reason => {
failedCallback({name: reason});
});
}
function depthStreamToRGBAUint8Texture() {
console.log('Calling depthStreamToRGBAUint8Texture');
getFake16bitStream().then((stream) => {
detectVideoInLocalView1(stream).then(() => {
testVideoToRGBA8Texture('local-view-1', function() {
stream.getVideoTracks()[0].stop();
detectVideoStopped('local-view-1')
.then(reportTestSuccess);
}, failedCallback);
});
}, failedCallback);
}
function depthStreamToRGBAFloatTexture() {
console.log('Calling depthStreamToRGBAFloatTexture');
getFake16bitStream().then((stream) => {
detectVideoInLocalView1(stream).then(() => {
testVideoToRGBA32FTexture('local-view-1', function() {
stream.getVideoTracks()[0].stop();
detectVideoStopped('local-view-1')
.then(reportTestSuccess);
}, failedCallback);
});
}, failedCallback);
}
function depthStreamToR32FloatTexture() {
console.log('Calling depthStreamToR32FloatTexture');
getFake16bitStream().then((stream) => {
detectVideoInLocalView1(stream).then(() => {
testVideoToR32FTexture('local-view-1', function() {
stream.getVideoTracks()[0].stop();
detectVideoStopped('local-view-1')
.then(reportTestSuccess);
}, failedCallback);
});
}, failedCallback);
}
function failedCallback(error) {
failTest('GetUserMedia call failed with error name ' + error.name);
}
function attachMediaStream(stream, videoElement) {
$(videoElement).srcObject = stream;
}
function detectVideoInLocalView1(stream) {
attachMediaStream(stream, 'local-view-1');
return detectVideoPlaying('local-view-1');
}
function testVideoToImageBitmap(videoElementName, success, error) {
var bitmaps = {};
var video = $(videoElementName);
var canvas = document.createElement('canvas');
canvas.width = 96;
canvas.height = 96;
document.body.appendChild(canvas);
var p1 = createImageBitmap(video).then(function(imageBitmap) {
return runImageBitmapTest(imageBitmap, canvas, false); });
var p2 = createImageBitmap(video,
{imageOrientation: "none", premultiplyAlpha: "premultiply"}).then(
function(imageBitmap) {
return runImageBitmapTest(imageBitmap, canvas, false); });
var p3 = createImageBitmap(video,
{imageOrientation: "none", premultiplyAlpha: "default"}).then(
function(imageBitmap) {
return runImageBitmapTest(imageBitmap, canvas, false); });
var p4 = createImageBitmap(video,
{imageOrientation: "none", premultiplyAlpha: "none"}).then(
function(imageBitmap) {
return runImageBitmapTest(imageBitmap, canvas, false); });
var p5 = createImageBitmap(video,
{imageOrientation: "flipY", premultiplyAlpha: "premultiply"}).then(
function(imageBitmap) {
return runImageBitmapTest(imageBitmap, canvas, true); });
var p6 = createImageBitmap(video,
{imageOrientation: "flipY", premultiplyAlpha: "default"}).then(
function(imageBitmap) {
return runImageBitmapTest(imageBitmap, canvas, true); });
var p7 = createImageBitmap(video,
{imageOrientation: "flipY", premultiplyAlpha: "none"}).then(
function(imageBitmap) {
return runImageBitmapTest(imageBitmap, canvas, true); });
return Promise.all([p1, p2, p3, p4, p5, p6, p7]).then(success(), reason => {
return error({name: reason});
});
}
function runImageBitmapTest(bitmap, canvas, flip_y) {
var context = canvas.getContext('2d');
context.drawImage(bitmap, 0, 0);
var imageData = context.getImageData(0, 0, canvas.width, canvas.height);
// Fake capture device 96x96 depth image is gradient. See also
// Draw16BitGradient in fake_video_capture_device.cc.
var color_step = 255.0 / (canvas.width + canvas.height);
return verifyPixels(imageData.data, canvas.width, canvas.height, flip_y,
color_step, 255, 2, "ImageBitmap");
}
function testVideoToRGBA32FTexture(videoElementName, success, error) {
var video = $(videoElementName);
var canvas = document.createElement('canvas');
canvas.width = 96;
canvas.height = 96;
var gl = canvas.getContext('webgl');
if (!gl)
return error({name:"WebGL is not available."});
if (!gl.getExtension("OES_texture_float"))
return error({name:"OES_texture_float extension is not available."});
return testVideoToTexture(gl, video, gl.RGBA, gl.RGBA, gl.FLOAT,
readAndVerifyRGBA32F, success, error);
}
function testVideoToRGBA8Texture(videoElementName, success, error) {
var video = $(videoElementName);
var canvas = document.createElement('canvas');
canvas.width = 96;
canvas.height = 96;
var gl = canvas.getContext('webgl');
if (!gl)
return error({name:"WebGL is not available."});
return testVideoToTexture(gl, video, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE,
readAndVerifyRGBA8, success, error);
}
function testVideoToR32FTexture(videoElementName, success, error) {
var video = $(videoElementName);
var canvas = document.createElement('canvas');
canvas.width = 96;
canvas.height = 96;
var gl = canvas.getContext('webgl2');
if (!gl)
return error({name:"WebGL2 is not available."});
if (!gl.getExtension('EXT_color_buffer_float'))
return error({name:"EXT_color_buffer_float extension is not available."});
return testVideoToTexture(gl, video, gl.R32F, gl.RED, gl.FLOAT,
readAndVerifyR32F, success, error);
}
function testVideoToTexture(gl, video, internalformat, format, type,
readAndVerifyFunction, success, error) {
// Create framebuffer that we will use for reading back the texture.
var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
var tests = [];
// Premultiply alpha is ignored but we just test both values.
var cases = [
{flip_y: false, premultiply_alpha: true},
{flip_y: true, premultiply_alpha: false}
];
for (var i in cases) {
var flip_y = cases[i].flip_y;
var premultiply = cases[i].premultiply_alpha;
uploadVideoToTexture2D(gl, video, internalformat, format, type, flip_y,
premultiply);
tests.push(readAndVerifyFunction(gl, video.width, video.height, flip_y,
"TexImage_TEXTURE_2D"));
uploadVideoToSubTexture2D(gl, video, internalformat, format, type, flip_y,
premultiply);
tests.push(readAndVerifyFunction(gl, video.width, video.height, flip_y,
"TexSubImage_TEXTURE_2D"));
// cubemap texImage2D.
var tex = uploadVideoToTextureCubemap(gl, video, internalformat, format,
type, flip_y, premultiply);
for (var i = 0; i < cubemapFaces(gl).length; ++i) {
// Attach the texture to framebuffer for readback.
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
cubemapFaces(gl)[i], tex, 0);
tests.push(readAndVerifyFunction(gl, video.width, video.height,
flip_y,
"TexImage_" + cubemapFaces(gl)[i]));
}
// cubemap texSubImage2D.
tex = uploadVideoToSubTextureCubemap(gl, video, internalformat, format,
type, flip_y, premultiply);
for (var i = 0; i < cubemapFaces(gl).length; ++i) {
// Attach the texture to framebuffer for readback.
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
cubemapFaces(gl)[i], tex, 0);
tests.push(readAndVerifyFunction(gl, video.width, video.height,
flip_y,
"TexSubImage_" + cubemapFaces(gl)[i]));
}
}
return Promise.all(tests).then(success(), reason => {
return error({name: reason});
});
}
// Test setup helper method: create the texture and set texture parameters.
// For cubemap, target is gl.TEXTURE_CUBE_MAP. For gl.TEXTURE_2D, it is
// gl.TEXTURE_2D.
function createTexture(gl, target, video, flip_y, premultiply_alpha) {
var tex = gl.createTexture();
gl.bindTexture(target, tex);
gl.texParameteri(target, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(target, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, flip_y);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, premultiply_alpha);
return tex;
}
function uploadVideoToTexture2D(gl, video, internalformat, format, type,
flip_y, premultiply_alpha) {
var tex = createTexture(gl, gl.TEXTURE_2D, video, flip_y,
premultiply_alpha);
// Attach the texture to framebuffer for readback.
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D,
tex, 0);
gl.texImage2D(gl.TEXTURE_2D, 0, internalformat, format, type, video);
return tex;
}
function uploadVideoToSubTexture2D(gl, video, internalformat, format, type,
flip_y, premultiply_alpha) {
var tex = createTexture(gl, gl.TEXTURE_2D, video, flip_y,
premultiply_alpha);
// Attach the texture to framebuffer for readback.
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D,
tex, 0);
gl.texImage2D(gl.TEXTURE_2D, 0, internalformat, video.width, video.height,
0, format, type, null);
gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, format, type, video);
return tex;
}
function uploadVideoToTextureCubemap(gl, video, internalformat, format, type,
flip_y, premultiply_alpha) {
var tex = createTexture(gl, gl.TEXTURE_CUBE_MAP, video, flip_y,
premultiply_alpha);
for (var i = 0; i < cubemapFaces(gl).length; ++i) {
gl.texImage2D(cubemapFaces(gl)[i], 0, internalformat, format, type,
video);
}
return tex;
}
function uploadVideoToSubTextureCubemap(gl, video, internalformat, format,
type, flip_y, premultiply_alpha) {
var tex = createTexture(gl, gl.TEXTURE_CUBE_MAP, video, flip_y,
premultiply_alpha);
for (var i = 0; i < cubemapFaces(gl).length; ++i) {
gl.texImage2D(cubemapFaces(gl)[i], 0, internalformat, video.width,
video.height, 0, format, type, null);
gl.texSubImage2D(cubemapFaces(gl)[i], 0, 0, 0, format, type, video);
}
return tex;
}
function readAndVerifyRGBA8(gl, width, height, flip_y, test_name) {
var arr = new Uint8Array(width * height * 4);
gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, arr);
var color_step = 255.0 / (width + height);
return verifyPixels(arr, width, height, flip_y, color_step,
255 /*wrap_around*/, 2 /*tolerance*/, test_name);
}
function readAndVerifyRGBA32F(gl, width, height, flip_y, test_name) {
var arr = new Float32Array(width * height * 4);
gl.readPixels(0, 0, width, height, gl.RGBA, gl.FLOAT, arr);
var color_step = 1.0 / (width + height);
return verifyPixels(arr, width, height, flip_y, color_step,
1.0 /*wrap_around*/, 1.5/65535 /*tolerance*/,
test_name);
}
function readAndVerifyR32F(gl, width, height, flip_y, test_name) {
var arr = new Float32Array(width * height * 4);
gl.readPixels(0, 0, width, height, gl.RGBA, gl.FLOAT, arr);
var color_step = 1.0 / (width + height);
return verifyPixels(arr, width, height, flip_y, color_step,
1.0 /*wrap_around*/, 1.5 / 65535 /*tolerance*/,
test_name);
}
function onLoad() {
var query = /query=(.*)/.exec(window.location.href);
if (!query)
return;
if (query[1] == "RGBAUint8")
depthStreamToRGBAUint8Texture();
else if (query[1] == "RGBAFloat")
depthStreamToRGBAFloatTexture();
else if (query[1] == "R32Float")
depthStreamToR32FloatTexture();
}
</script>
</head>
<body onload="onLoad()">
<table border="0">
<tr>
<td><video id="local-view-1" width="96" height="96" autoplay
style="display:none"></video></td>
<!-- The canvas is used to detect when video starts and stops. -->
<td><canvas id="local-view-1-canvas" width="96" height="96"
style="display:none"></canvas></td>
</tr>
</table>
</body>
</html>

@ -1,100 +0,0 @@
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import os
import sys
from gpu_tests import common_browser_args as cba
from gpu_tests import gpu_integration_test
from gpu_tests import path_util
data_path = os.path.join(path_util.GetChromiumSrcDir(), 'content', 'test',
'data', 'media')
wait_timeout = 60 # seconds
harness_script = r"""
var domAutomationController = {};
domAutomationController._succeeded = false;
domAutomationController._finished = false;
domAutomationController._error_msg = "";
domAutomationController.send = function(msg) {
if (msg == "OK") {
if (!domAutomationController._finished) {
domAutomationController._succeeded = true;
}
domAutomationController._finished = true;
} else {
domAutomationController._succeeded = false;
domAutomationController._finished = true;
domAutomationController._error_msg = msg;
}
}
domAutomationController.reset = function() {
domAutomationController._succeeded = false;
domAutomationController._finished = false;
}
window.domAutomationController = domAutomationController;
console.log("Harness injected.");
"""
class DepthCaptureIntegrationTest(gpu_integration_test.GpuIntegrationTest):
@classmethod
def Name(cls):
return 'depth_capture'
@classmethod
def GenerateGpuTests(cls, options):
tests = (('DepthCapture_depthStreamToRGBAUint8Texture',
'getusermedia-depth-capture.html?query=RGBAUint8'),
('DepthCapture_depthStreamToRGBAFloatTexture',
'getusermedia-depth-capture.html?query=RGBAFloat'),
('DepthCapture_depthStreamToR32FloatTexture',
'getusermedia-depth-capture.html?query=R32Float'))
for t in tests:
yield (t[0], t[1], ('_' + t[0]))
def RunActualGpuTest(self, test_path, *args):
url = self.UrlOfStaticFilePath(test_path)
tab = self.tab
tab.Navigate(url, script_to_evaluate_on_commit=harness_script)
tab.action_runner.WaitForJavaScriptCondition(
'domAutomationController._finished', timeout=60)
if not tab.EvaluateJavaScript('domAutomationController._succeeded'):
self.fail('page indicated test failure:' +
tab.EvaluateJavaScript('domAutomationController._error_msg'))
@classmethod
def SetUpProcess(cls):
super(DepthCaptureIntegrationTest, cls).SetUpProcess()
cls.CustomizeBrowserArgs([
cba.DISABLE_DOMAIN_BLOCKING_FOR_3D_APIS,
'--enable-es3-apis',
'--use-fake-ui-for-media-stream',
'--use-fake-device-for-media-stream=device-count=2',
# Required for about:gpucrash handling from Telemetry.
cba.ENABLE_GPU_BENCHMARKING,
])
cls.StartBrowser()
cls.SetStaticServerDirs([data_path])
@classmethod
def ExpectationsFiles(cls):
return [
os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'test_expectations',
'depth_capture_expectations.txt')
]
def load_tests(loader, tests, pattern):
del loader, tests, pattern # Unused.
return gpu_integration_test.LoadAllTestsInModule(sys.modules[__name__])

@ -1,92 +0,0 @@
# BEGIN TAG HEADER (autogenerated, see validate_tag_consistency.py)
# OS
# tags: [ android android-lollipop android-marshmallow android-nougat
# android-pie android-r android-s
# chromeos
# fuchsia
# linux ubuntu
# mac bigsur catalina lion highsierra mac-10.12 mojave monterey
# mountainlion sierra
# win win7 win8 win10 ]
# Devices
# tags: [ android-nexus-5 android-nexus-5x android-nexus-9 android-pixel-2
# android-pixel-4 android-pixel-6 android-shield-android-tv
# chromeos-board-amd64-generic chromeos-board-kevin chromeos-board-eve
# fuchsia-board-astro fuchsia-board-sherlock fuchsia-board-qemu-x64 ]
# Platform
# tags: [ desktop
# mobile ]
# Browser
# tags: [ android-chromium android-webview-instrumentation
# debug debug-x64
# release release-x64 ]
# GPU
# tags: [ amd amd-0x6613 amd-0x679e amd-0x6821 amd-0x7340
# apple apple-apple-m1 apple-angle-metal-renderer:-apple-m1
# arm
# google google-0xffff
# intel intel-0xa2e intel-0xd26 intel-0xa011 intel-0x3e92 intel-0x3e9b
# intel-0x5912
# nvidia nvidia-0xfe9 nvidia-0x1cb3 nvidia-0x2184
# qualcomm ]
# Decoder
# tags: [ passthrough no-passthrough ]
# ANGLE Backend
# tags: [ angle-disabled
# angle-d3d9 angle-d3d11
# angle-metal
# angle-opengl angle-opengles
# angle-swiftshader
# angle-vulkan ]
# Skia Renderer
# tags: [ skia-renderer-dawn
# skia-renderer-disabled
# skia-renderer-gl
# skia-renderer-vulkan ]
# SwiftShader
# tags: [ swiftshader-gl no-swiftshader-gl ]
# Driver
# tags: [ intel_lt_25.20.100.6444 intel_lt_25.20.100.6577
# intel_lt_26.20.100.7000 intel_lt_26.20.100.7870
# intel_lt_26.20.100.7323 intel_lt_26.20.100.8141
# intel_lt_27.20.100.8280
# mesa_lt_19.1 mesa_ge_20.1 ]
# ASan
# tags: [ asan no-asan ]
# Display Server
# tags: [ display-server-wayland display-server-x ]
# OOP-Canvas
# tags: [ oop-c no-oop-c ]
# results: [ Failure RetryOnFailure Skip ]
# END TAG HEADER
###############################
# Permanent Skip Expectations #
###############################
# The "Skip" expectations in this section are expected to never be removed.
# This is for things like tests that will never be supported on a particular
# platform/configuration.
###############################
# Temporary Skip Expectations #
###############################
# The "Skip" expectations in this section are expected to be removable at some
# point. This is for things like tests that fail in a way that negatively and
# significantly impacts other tests, e.g. killing the test device.
###################
# Failures/Flakes #
###################
# Non-"Skip" expectations go here to suppress regular flakes/failures.
crbug.com/765913 [ android android-nexus-5 ] DepthCapture_depthStreamToR32FloatTexture [ Failure ]
crbug.com/1080375 [ chromeos chromeos-board-kevin ] DepthCapture_depthStreamToRGBAFloatTexture [ Failure ]
crbug.com/1159394 [ chromeos chromeos-board-kevin ] DepthCapture_depthStreamToR32FloatTexture [ Failure ]
crbug.com/1159394 [ chromeos chromeos-board-kevin ] DepthCapture_depthStreamToRGBAUint8Texture [ Failure ]
crbug.com/1286830 [ android android-pixel-6 ] DepthCapture_depthStreamToRGBAFloatTexture [ Failure ]
#######################################################################
# Automated Entries After This Point - Do Not Manually Add Below Here #
#######################################################################

@ -86,7 +86,6 @@ def ParseArgs():
# distinguish WebGL 1 from WebGL 2.
choices=[
'context_lost',
'depth_capture',
'hardware_accelerated_feature',
'gpu_process',
'info_collection',

@ -604,7 +604,6 @@ class GetSuiteFilterClauseUnittest(unittest.TestCase):
"""Tests that no filter is returned for non-WebGL suites."""
for suite in [
'context_lost',
'depth_capture',
'hardware_accelerated_feature',
'gpu_process',
'info_collection',

@ -258,9 +258,6 @@ of all suites and resulting step names as of April 15th 2021:
* `context_lost_tests`
* `context_lost_validating_tests`
* `gl_renderer_context_lost_tests`
* `depth_capture`
* `depth_capture_tests`
* `gl_renderer_depth_capture_tests`
* `hardware_accelerated_feature`
* `gl_renderer_hardware_accelerated_feature_tests`
* `hardware_accelerated_feature_tests`

@ -10023,39 +10023,6 @@
},
"test_id_prefix": "ninja://content/test:fuchsia_telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=web-engine-shell",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "fuchsia_telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1",
"os": "Ubuntu-16.04"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://content/test:fuchsia_telemetry_gpu_integration_test/"
},
{
"args": [
"gpu_process",

@ -24411,39 +24411,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test_fuchsia/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=web-engine-shell",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test_fuchsia",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1",
"os": "Ubuntu-18.04"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test_fuchsia/"
},
{
"args": [
"gpu_process",
@ -24908,40 +24875,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test_fuchsia/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=fuchsia-chrome",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc",
"--custom-image=workstation.qemu-x64-release"
],
"isolate_name": "telemetry_gpu_integration_test_fuchsia",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1",
"os": "Ubuntu-18.04"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test_fuchsia/"
},
{
"args": [
"gpu_process",

File diff suppressed because it is too large

@ -46,49 +46,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test_android_chrome/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=android-chromium",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test_android_chrome",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"cipd_packages": [
{
"cipd_package": "infra/tools/luci/logdog/butler/${platform}",
"location": "bin",
"revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c"
}
],
"containment_type": "AUTO",
"dimension_sets": [
{
"device_os": "MMB29Q",
"device_os_type": "userdebug",
"device_type": "bullhead",
"os": "Android"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test_android_chrome/"
},
{
"args": [
"gpu_process",
@ -614,41 +571,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=debug",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"containment_type": "AUTO",
"dimension_sets": [
{
"gpu": "10de:2184-440.100",
"os": "Ubuntu-18.04.5|Ubuntu-18.04.6",
"pool": "chromium.tests.gpu"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"screenshot_sync",
@ -1107,41 +1029,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"containment_type": "AUTO",
"dimension_sets": [
{
"gpu": "10de:2184-440.100",
"os": "Ubuntu-18.04.5|Ubuntu-18.04.6",
"pool": "chromium.tests.gpu"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"screenshot_sync",
@ -1628,42 +1515,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=debug",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"containment_type": "AUTO",
"dimension_sets": [
{
"cpu": "x86-64",
"display_attached": "1",
"gpu": "8086:3e9b",
"os": "Mac-11.5.2"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"gpu_process",
@ -2150,42 +2001,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"containment_type": "AUTO",
"dimension_sets": [
{
"cpu": "x86-64",
"display_attached": "1",
"gpu": "8086:3e9b",
"os": "Mac-11.5.2"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"gpu_process",
@ -2652,44 +2467,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=debug",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"containment_type": "AUTO",
"dimension_sets": [
{
"cpu": "x86-64",
"display_attached": "1",
"gpu": "1002:6821",
"hidpi": "1",
"os": "Mac-11.4|Mac-12.1",
"pool": "chromium.tests.gpu"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"gpu_process",
@ -3176,44 +2953,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"containment_type": "AUTO",
"dimension_sets": [
{
"cpu": "x86-64",
"display_attached": "1",
"gpu": "1002:6821",
"hidpi": "1",
"os": "Mac-11.4|Mac-12.1",
"pool": "chromium.tests.gpu"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"gpu_process",
@ -3738,41 +3477,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=debug_x64",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"containment_type": "AUTO",
"dimension_sets": [
{
"gpu": "10de:2184-27.21.14.5638",
"os": "Windows-10-18363",
"pool": "chromium.tests.gpu"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"gpu_process",
@ -4244,41 +3948,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release_x64",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"containment_type": "AUTO",
"dimension_sets": [
{
"gpu": "10de:2184-27.21.14.5638",
"os": "Windows-10-18363",
"pool": "chromium.tests.gpu"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"gpu_process",

@ -45,48 +45,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test_android_chrome/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=android-chromium",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test_android_chrome",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"cipd_packages": [
{
"cipd_package": "infra/tools/luci/logdog/butler/${platform}",
"location": "bin",
"revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c"
}
],
"dimension_sets": [
{
"device_os": "MMB29Q",
"device_os_type": "userdebug",
"device_type": "bullhead",
"os": "Android"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test_android_chrome/"
},
{
"args": [
"gpu_process",
@ -491,40 +449,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "10de:2184-440.100",
"os": "Ubuntu-18.04.5|Ubuntu-18.04.6",
"pool": "chromium.tests.gpu"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"gpu_process",
@ -859,40 +783,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "10de:2184-440.100",
"os": "Ubuntu-18.04.5|Ubuntu-18.04.6",
"pool": "chromium.tests.gpu"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"gpu_process",
@ -1228,41 +1118,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"cpu": "x86-64",
"display_attached": "1",
"gpu": "8086:3e9b",
"os": "Mac-11.5.2"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"gpu_process",
@ -1925,40 +1780,6 @@
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"depth_capture",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "depth_capture_tests",
"resultdb": {
"enable": true,
"has_native_resultdb_integration": true
},
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "10de:2184-27.21.14.5638",
"os": "Windows-10-18363",
"pool": "chromium.tests.gpu"
}
],
"idempotent": false,
"service_account": "chromium-tester@chops-service-accounts.iam.gserviceaccount.com"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
},
{
"args": [
"gpu_process",

@ -1518,13 +1518,6 @@
},
},
},
'depth_capture_tests': {
'remove_from': [
# TODO(https://crbug.com/850107): Remove the Android FYI Release (Pixel 2)
# exception once there is enough capacity to run these tests.
'Android FYI Release (Pixel 2)',
],
},
'device_unittests': {
'modifications': {
'android-12-x64-fyi-rel': {
@ -1637,12 +1630,6 @@
'Lacros FYI x64 Release (Intel)',
],
},
'gl_renderer_depth_capture_tests': {
'remove_from': [
'ChromeOS FYI Release (kevin)',
'Lacros FYI x64 Release (Intel)',
],
},
'gl_renderer_gpu_process_launch_tests': {
'remove_from': [
'ChromeOS FYI Release (kevin)',

@ -1788,12 +1788,6 @@
'has_native_resultdb_integration',
],
},
'depth_capture': {
'isolate_name': 'fuchsia_telemetry_gpu_integration_test',
'mixins': [
'has_native_resultdb_integration',
],
},
'gpu_process': {
'name': 'gpu_process_launch_tests',
'isolate_name': 'fuchsia_telemetry_gpu_integration_test',
@ -2886,18 +2880,6 @@
'has_native_resultdb_integration',
],
},
'gl_renderer_depth_capture_tests': {
'telemetry_test_name': 'depth_capture',
'args': [
'--extra-browser-args=--disable-features=UseSkiaRenderer',
],
'chromeos_args': [
'$$MAGIC_SUBSTITUTION_ChromeOSTelemetryRemote',
],
'mixins': [
'has_native_resultdb_integration',
],
},
'gl_renderer_gpu_process_launch_tests': {
'telemetry_test_name': 'gpu_process',
'args': [
@ -3095,21 +3077,6 @@
'has_native_resultdb_integration',
],
},
'depth_capture': {
'chromeos_args': [
'$$MAGIC_SUBSTITUTION_ChromeOSTelemetryRemote',
],
'lacros_args': [
'--extra-browser-args=--enable-features=UseOzonePlatform --ozone-platform=wayland',
'--xvfb',
'--no-xvfb',
'--use-weston',
'--weston-use-gl',
],
'mixins': [
'has_native_resultdb_integration',
],
},
'gpu_process': {
'name': 'gpu_process_launch_tests',
'chromeos_args': [
@ -3322,14 +3289,6 @@
'has_native_resultdb_integration',
],
},
'depth_capture': {
'args': [
'--extra-browser-args=--enable-features=UseSkiaRenderer',
],
'mixins': [
'has_native_resultdb_integration',
],
},
'gpu_process': {
'name': 'gpu_process_launch_tests',
'args': [
@ -3466,21 +3425,6 @@
'has_native_resultdb_integration',
],
},
'depth_capture': {
'chromeos_args': [
'$$MAGIC_SUBSTITUTION_ChromeOSTelemetryRemote',
],
'lacros_args': [
'--extra-browser-args=--enable-features=UseOzonePlatform --ozone-platform=wayland',
'--xvfb',
'--no-xvfb',
'--use-weston',
'--weston-use-gl',
],
'mixins': [
'has_native_resultdb_integration',
],
},
'gpu_process': {
'name': 'gpu_process_launch_tests',
'chromeos_args': [

@ -1389,7 +1389,7 @@ enum WebFeature {
kMediaStreamConstraintsDisableLocalEcho = 1935,
kMediaStreamConstraintsGroupIdAudio = 1936,
kMediaStreamConstraintsGroupIdVideo = 1937,
kMediaStreamConstraintsVideoKind = 1938,
kOBSOLETE_MediaStreamConstraintsVideoKind = 1938,
kMediaStreamConstraintsMediaStreamSourceAudio = 1943,
kMediaStreamConstraintsMediaStreamSourceVideo = 1944,
kMediaStreamConstraintsRenderToAssociatedSink = 1945,

@ -208,7 +208,6 @@ void Visit(IdentifiableTokenBuilder& builder,
Visit(builder, set.getAutoGainControlOr(nullptr));
Visit(builder, set.getLatencyOr(nullptr));
Visit(builder, set.getChannelCountOr(nullptr));
Visit(builder, set.getVideoKindOr(nullptr));
Visit(builder, set.getWhiteBalanceModeOr(nullptr));
Visit(builder, set.getExposureModeOr(nullptr));
Visit(builder, set.getFocusModeOr(nullptr));

@ -148,10 +148,6 @@ const char kAudioLatency[] = "latencyMs";
// https://crbug.com/579729
const char kGoogLeakyBucket[] = "googLeakyBucket";
const char kPowerLineFrequency[] = "googPowerLineFrequency";
// mediacapture-depth: videoKind key and VideoKindEnum values.
const char kVideoKind[] = "videoKind";
const char kVideoKindColor[] = "color";
const char kVideoKindDepth[] = "depth";
// Names used for testing.
const char kTestConstraint1[] = "valid_and_supported_1";
const char kTestConstraint2[] = "valid_and_supported_2";
@ -436,14 +432,6 @@ static void ParseOldStyleNames(
mojom::ConsoleMessageLevel::kWarning,
"Obsolete constraint named " + String(constraint.name_) +
" is ignored. Please stop using it."));
} else if (constraint.name_.Equals(kVideoKind)) {
if (!constraint.value_.Equals(kVideoKindColor) &&
!constraint.value_.Equals(kVideoKindDepth)) {
error_state.ThrowConstraintError("Illegal value for constraint",
constraint.name_);
} else {
result.video_kind.SetExact(constraint.value_);
}
} else if (constraint.name_.Equals(kTestConstraint1) ||
constraint.name_.Equals(kTestConstraint2)) {
// These constraints are only for testing parsing.
@ -843,14 +831,6 @@ bool ValidateAndCopyConstraintSet(
return false;
}
}
if (constraints_in->hasVideoKind()) {
if (!ValidateAndCopyStringConstraint(
constraints_in->videoKind(), naked_treatment,
constraint_buffer.video_kind, error_state)) {
DCHECK(error_state.HadException());
return false;
}
}
if (constraints_in->hasPan()) {
CopyBooleanOrDoubleConstraint(constraints_in->pan(), naked_treatment,
constraint_buffer.pan);
@ -1130,8 +1110,6 @@ void ConvertConstraintSet(const MediaTrackConstraintSetPlatform& input,
output->setDeviceId(ConvertString(input.device_id, naked_treatment));
if (!input.group_id.IsUnconstrained())
output->setGroupId(ConvertString(input.group_id, naked_treatment));
if (!input.video_kind.IsUnconstrained())
output->setVideoKind(ConvertString(input.video_kind, naked_treatment));
if (!input.pan.IsUnconstrained())
output->setPan(ConvertBooleanOrDouble(input.pan, naked_treatment));
if (!input.tilt.IsUnconstrained())

@ -43,10 +43,6 @@ using DistanceVector = WTF::Vector<double>;
// device ID, noise reduction, resolution and frame rate.
const int kNumDefaultDistanceEntries = 4;
// VideoKind enum values. See https://w3c.github.io/mediacapture-depth.
const char kVideoKindColor[] = "color";
const char kVideoKindDepth[] = "depth";
WebString ToWebString(mojom::blink::FacingMode facing_mode) {
switch (facing_mode) {
case mojom::blink::FacingMode::USER:
@ -213,9 +209,6 @@ class CandidateFormat {
return kMinDeviceCaptureFrameRate;
}
// Convenience accessor for video kind using Blink type.
WebString VideoKind() const { return GetVideoKindForFormat(format_); }
// This function tries to apply |constraint_set| to this candidate format
// and returns true if successful. If |constraint_set| cannot be satisfied,
// false is returned, and the name of one of the constraints that
@ -259,12 +252,6 @@ class CandidateFormat {
return false;
}
if (!constraint_set.video_kind.Matches(VideoKind())) {
UpdateFailedConstraintName(constraint_set.video_kind,
failed_constraint_name);
return false;
}
resolution_set_ = resolution_intersection;
rescale_set_ = rescale_intersection;
constrained_frame_rate_ = constrained_frame_rate_.Intersection(
@ -358,19 +345,16 @@ class CandidateFormat {
track_fitness_with_rescale += 1.0;
}
}
double fitness = StringConstraintFitnessDistance(
VideoKind(), basic_constraint_set.video_kind);
// If rescaling and not rescaling have the same fitness, prefer not
// rescaling.
if (track_fitness_without_rescale <= track_fitness_with_rescale) {
fitness += track_fitness_without_rescale;
*track_settings = track_settings_without_rescale;
} else {
fitness += track_fitness_with_rescale;
*track_settings = track_settings_with_rescale;
return track_fitness_without_rescale;
}
return fitness;
*track_settings = track_settings_with_rescale;
return track_fitness_with_rescale;
}
// Returns a custom "native" fitness distance that expresses how close the
@ -710,12 +694,6 @@ VideoInputDeviceCapabilities& VideoInputDeviceCapabilities::operator=(
VideoInputDeviceCapabilities::~VideoInputDeviceCapabilities() = default;
WebString GetVideoKindForFormat(const media::VideoCaptureFormat& format) {
return (format.pixel_format == media::PIXEL_FORMAT_Y16)
? WebString::FromASCII(kVideoKindDepth)
: WebString::FromASCII(kVideoKindColor);
}
MediaStreamTrackPlatform::FacingMode ToPlatformFacingMode(
mojom::blink::FacingMode video_facing) {
switch (video_facing) {

@ -15,16 +15,10 @@
namespace blink {
class MediaConstraints;
class WebString;
} // namespace blink
namespace blink {
// Calculates and returns videoKind value for |format|.
// See https://w3c.github.io/mediacapture-depth.
MODULES_EXPORT WebString
GetVideoKindForFormat(const media::VideoCaptureFormat& format);
MODULES_EXPORT MediaStreamTrackPlatform::FacingMode ToPlatformFacingMode(
mojom::blink::FacingMode video_facing);

@ -296,16 +296,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest,
result.failed_constraint_name());
}
TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, OverconstrainedOnVideoKind) {
constraint_factory_.Reset();
// No device in |capabilities_| has video kind infrared.
constraint_factory_.basic().video_kind.SetExact("infrared");
auto result = SelectSettings();
EXPECT_FALSE(result.HasValue());
EXPECT_EQ(constraint_factory_.basic().video_kind.GetName(),
result.failed_constraint_name());
}
TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, OverconstrainedOnHeight) {
constraint_factory_.Reset();
constraint_factory_.basic().height.SetExact(123467890);
@ -525,22 +515,6 @@ TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryFacingMode) {
CheckTrackAdapterSettingsEqualsFormat(result);
}
TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryVideoKind) {
constraint_factory_.Reset();
constraint_factory_.basic().video_kind.SetExact("depth");
auto result = SelectSettings();
EXPECT_TRUE(result.HasValue());
EXPECT_EQ(kDeviceID4, result.device_id());
EXPECT_EQ(media::PIXEL_FORMAT_Y16, result.Format().pixel_format);
CheckTrackAdapterSettingsEqualsFormat(result);
constraint_factory_.basic().video_kind.SetExact("color");
result = SelectSettings();
EXPECT_TRUE(result.HasValue());
EXPECT_EQ(default_device_->device_id.Utf8(), result.device_id());
CheckTrackAdapterSettingsEqualsFormat(result);
}
TEST_F(MediaStreamConstraintsUtilVideoDeviceTest, MandatoryNoiseReduction) {
constraint_factory_.Reset();
const bool kNoiseReductionValues[] = {true, false};

@ -111,7 +111,7 @@ bool ConstraintSetHasNonImageCapture(
constraint_set->hasFrameRate() || constraint_set->hasGroupId() ||
constraint_set->hasHeight() || constraint_set->hasLatency() ||
constraint_set->hasSampleRate() || constraint_set->hasSampleSize() ||
constraint_set->hasVideoKind() || constraint_set->hasWidth();
constraint_set->hasWidth();
}
bool ConstraintSetHasImageAndNonImageCapture(
@ -609,11 +609,6 @@ MediaTrackSettings* MediaStreamTrack::getSettings() const {
settings->setHeight(platform_settings.height);
if (platform_settings.HasAspectRatio())
settings->setAspectRatio(platform_settings.aspect_ratio);
if (RuntimeEnabledFeatures::MediaCaptureDepthVideoKindEnabled() &&
component_->Source()->GetType() == MediaStreamSource::kTypeVideo) {
if (platform_settings.HasVideoKind())
settings->setVideoKind(platform_settings.video_kind);
}
settings->setDeviceId(platform_settings.device_id);
if (!platform_settings.group_id.IsNull())
settings->setGroupId(platform_settings.group_id);

@ -693,7 +693,6 @@ void MediaStreamVideoTrack::GetSettings(
if (format) {
if (frame_rate_ == 0.0)
settings.frame_rate = format->frame_rate;
settings.video_kind = GetVideoKindForFormat(*format);
} else {
// Format is only set for local tracks. For other tracks, use the frame rate
// reported through settings callback SetSizeAndComputedFrameRate().

@ -26,11 +26,6 @@ dictionary MediaTrackConstraintSet {
ConstrainLong channelCount;
ConstrainDOMString deviceId;
ConstrainDOMString groupId;
// Media Capture Depth Stream Extensions
// https://w3c.github.io/mediacapture-depth/#mediatrackconstraints
// TODO(riju): videoKind attribute should be declared as partial
// dictionary but IDL parser can't support it yet. http://crbug.com/579896.
[RuntimeEnabled=MediaCaptureDepthVideoKind] ConstrainDOMString videoKind;
// W3C Image Capture API
// https://w3c.github.io/mediacapture-image/#mediatrackconstraintset-section
// TODO(mcasas) move out when partial dictionaries are supported

@ -20,11 +20,6 @@ dictionary MediaTrackSettings {
long channelCount;
DOMString deviceId;
DOMString groupId;
// Media Capture Depth Stream Extensions
// https://w3c.github.io/mediacapture-depth/#mediatracksettings-dictionary
// TODO(riju): videoKind attribute should be declared as partial
// dictionary but IDL parser can't support it yet. http://crbug.com/579896.
[RuntimeEnabled=MediaCaptureDepthVideoKind] DOMString videoKind;
// W3C Image Capture API
// https://w3c.github.io/mediacapture-image/#mediatracksettings-section

@ -24,12 +24,6 @@ dictionary MediaTrackSupportedConstraints {
boolean deviceId = true;
boolean groupId = true;
// Media Capture Depth Stream Extensions
// https://w3c.github.io/mediacapture-depth
// TODO(riju): videoKind attribute should be declared as partial
// dictionary but IDL parser can't support it yet. http://crbug.com/579896.
[RuntimeEnabled=MediaCaptureDepthVideoKind] boolean videoKind = true;
// W3C Image Capture API
// https://w3c.github.io/mediacapture-image/#mediatracksupportedconstraints-section
// TODO(mcasas) move out when partial dictionaries are supported

@ -262,10 +262,6 @@ void CountVideoConstraintUses(ExecutionContext* context,
constraints, &MediaTrackConstraintSetPlatform::group_id)) {
counter.Count(WebFeature::kMediaStreamConstraintsGroupIdVideo);
}
if (RequestUsesDiscreteConstraint(
constraints, &MediaTrackConstraintSetPlatform::video_kind)) {
counter.Count(WebFeature::kMediaStreamConstraintsVideoKind);
}
if (RequestUsesDiscreteConstraint(
constraints, &MediaTrackConstraintSetPlatform::media_stream_source)) {
counter.Count(WebFeature::kMediaStreamConstraintsMediaStreamSourceVideo);

@ -354,7 +354,6 @@ MediaTrackConstraintSetPlatform::MediaTrackConstraintSetPlatform()
tilt("tilt"),
zoom("zoom"),
group_id("groupId"),
video_kind("videoKind"),
media_stream_source("mediaStreamSource"),
render_to_associated_sink("chromeRenderToAssociatedSink"),
goog_echo_cancellation("googEchoCancellation"),
@ -403,7 +402,6 @@ Vector<const BaseConstraint*> MediaTrackConstraintSetPlatform::AllConstraints()
&channel_count,
&device_id,
&group_id,
&video_kind,
&media_stream_source,
&disable_local_echo,
&pan,

@ -248,8 +248,6 @@ struct MediaTrackConstraintSetPlatform {
DoubleConstraint tilt;
DoubleConstraint zoom;
StringConstraint group_id;
// https://w3c.github.io/mediacapture-depth/#mediatrackconstraints
StringConstraint video_kind;
// Constraints not exposed in Blink at the moment, only through
// the legacy name interface.
StringConstraint media_stream_source; // tab, screen, desktop, system

@ -29,7 +29,6 @@ class PLATFORM_EXPORT MediaStreamTrackPlatform {
bool HasSampleSize() const { return sample_size >= 0; }
bool HasChannelCount() const { return channel_count >= 0; }
bool HasLatency() const { return latency >= 0; }
bool HasVideoKind() const { return !video_kind.IsNull(); }
// The variables are read from
// MediaStreamTrack::GetSettings only.
double frame_rate = -1.0;
@ -49,9 +48,6 @@ class PLATFORM_EXPORT MediaStreamTrackPlatform {
int32_t channel_count = -1;
double latency = -1.0;
// Media Capture Depth Stream Extensions.
String video_kind;
// Screen Capture extensions
absl::optional<media::mojom::DisplayCaptureSurfaceType> display_surface;
absl::optional<bool> logical_surface;
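The settings block above mixes two conventions for "value not set": sentinel values read through HasFoo() helpers (a negative number, a null string) and absl::optional members for newer fields. A small standalone sketch of both conventions, using plain standard-library types rather than the Blink or Abseil ones:

#include <iostream>
#include <optional>
#include <string>

// Sentinel style: a reserved value means "not set".
struct SentinelSettings {
  double latency = -1.0;   // Negative means unset.
  std::string video_kind;  // Empty means unset.
  bool HasLatency() const { return latency >= 0; }
  bool HasVideoKind() const { return !video_kind.empty(); }
};

// Optional style: the type itself carries the "unset" state.
struct OptionalSettings {
  std::optional<double> latency;
  std::optional<std::string> video_kind;
};

int main() {
  SentinelSettings s;
  OptionalSettings o;
  std::cout << s.HasLatency() << " " << o.latency.has_value() << "\n";  // 0 0
  return 0;
}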

@ -1416,10 +1416,6 @@
name: "MediaCapture",
status: {"Android": "stable"},
},
{
name: "MediaCaptureDepthVideoKind",
status: "experimental",
},
// Set to reflect the MediaCastOverlayButton feature.
{
name: "MediaCastOverlayButton",

@ -1,10 +1,10 @@
 This is a testharness.js-based test.
-PASS MediaTrackSupportedConstraints dictionary include attributes are correct
+FAIL MediaTrackSupportedConstraints dictionary include attributes are correct assert_equals: Expect dictionary.videoKind to be boolean expected "boolean" but got "undefined"
 FAIL MediaTrackCapabilities dictionary of depth include attributes are correct assert_equals: Expect dictionary.videoKind to be string expected "string" but got "undefined"
-PASS MediaTrackConstraintSet dictionary of depth include attributes are correct
-PASS MediaTrackSettings dictionary of depth include attributes are correct
+FAIL MediaTrackConstraintSet dictionary of depth include attributes are correct promise_test: Unhandled rejection with value: object "TypeError: Cannot read properties of undefined (reading '0')"
+FAIL MediaTrackSettings dictionary of depth include attributes are correct assert_equals: Expect dictionary.videoKind to be string expected "string" but got "undefined"
 FAIL MediaTrackCapabilities dictionary of color include attributes are correct assert_equals: Expect dictionary.videoKind to be string expected "string" but got "undefined"
-PASS MediaTrackConstraintSet dictionary of color include attributes are correct
-PASS MediaTrackSettings dictionary of color include attributes are correct
+FAIL MediaTrackConstraintSet dictionary of color include attributes are correct promise_test: Unhandled rejection with value: object "TypeError: Cannot read properties of undefined (reading '0')"
+FAIL MediaTrackSettings dictionary of color include attributes are correct assert_equals: Expect dictionary.videoKind to be string expected "string" but got "undefined"
 Harness: the test ran to completion.

@ -34576,7 +34576,7 @@ Called by update_use_counter_feature_enum.py.-->
<int value="1935" label="MediaStreamConstraintsDisableLocalEcho"/>
<int value="1936" label="MediaStreamConstraintsGroupIdAudio"/>
<int value="1937" label="MediaStreamConstraintsGroupIdVideo"/>
<int value="1938" label="MediaStreamConstraintsVideoKind"/>
<int value="1938" label="OBSOLETE_MediaStreamConstraintsVideoKind"/>
<int value="1939" label="OBSOLETE_MediaStreamConstraintsDepthNear"/>
<int value="1940" label="OBSOLETE_MediaStreamConstraintsDepthFar"/>
<int value="1941" label="OBSOLETE_MediaStreamConstraintsFocalLengthX"/>