[GPU] Remove usage of GLImage::GetSize() in CmdDecoderImpl
We changed the validating decoder to always get the size from the source texture with a Finch killswitch in 112. There have been no problems with the rollout and the old NaCL GPU codepaths are now definitively gone, so we can rip out the unused code here (note: I was waiting until the old NaCL GPU codepaths were definitively gone just to avoid any potential interactions between those codepaths being re-enabled and somehow being dependent on this behavior). This CL does so. Bug: 1412075 Change-Id: Ic937e99a2460d6298f46f189d1240e78da62250a Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/4551253 Reviewed-by: Vasiliy Telezhnikov <vasilyt@chromium.org> Commit-Queue: Colin Blundell <blundell@chromium.org> Cr-Commit-Position: refs/heads/main@{#1148939}
This commit is contained in:

committed by
Chromium LUCI CQ

parent
c0b3d069a1
commit
a558ec20a5
chrome/browser
gpu
@@ -9973,12 +9973,6 @@ const FeatureEntry kFeatureEntries[] = {
          policy::features::kSafeSitesFilterBehaviorPolicyAndroid)},
 #endif

-    {"cmd-decoder-always-get-size-from-source-texture",
-     flag_descriptions::kCmdDecoderAlwaysGetSizeFromSourceTextureName,
-     flag_descriptions::kCmdDecoderAlwaysGetSizeFromSourceTextureDescription,
-     kOsAll,
-     FEATURE_VALUE_TYPE(features::kCmdDecoderAlwaysGetSizeFromSourceTexture)},
-
 #if BUILDFLAG(IS_WIN)
     {"cloud-ap-auth", flag_descriptions::kCloudApAuthName,
      flag_descriptions::kCloudApAuthDescription, kOsWin,
|
@@ -1119,11 +1119,6 @@
     "owners": [ "igorruvinov", "zmin" ],
     "expiry_milestone": 120
   },
-  {
-    "name": "cmd-decoder-always-get-size-from-source-texture",
-    "owners": [ "blundell" ],
-    "expiry_milestone": 128
-  },
   {
     "name": "colr-v1-fonts",
     "owners": [ "drott", "layout-dev@chromium.org" ],
@@ -204,14 +204,6 @@ const char kEnableBenchmarkingDescription[] =
     "after 3 restarts. On the third restart, the flag will appear to be off "
     "but the effect is still active.";

-extern const char kCmdDecoderAlwaysGetSizeFromSourceTextureName[] =
-    "Controls whether the GLES2 validating decoder always gets size from the "
-    "source texture when copying textures";
-extern const char kCmdDecoderAlwaysGetSizeFromSourceTextureDescription[] =
-    "When enabled, the GLES2 validating decoder will obtain size from the "
-    "source texture rather than the GLImage when copying textures even if a "
-    "GLImage is present";
-
 const char kPrivacyIndicatorsName[] = "Enable Privacy Indicators";
 const char kPrivacyIndicatorsDescription[] =
     "While screen sharing or camera/microphone is being accessed, show a green "
@@ -151,9 +151,6 @@ extern const char kPasspointARCSupportDescription[];
 extern const char kPasspointSettingsName[];
 extern const char kPasspointSettingsDescription[];

-extern const char kCmdDecoderAlwaysGetSizeFromSourceTextureName[];
-extern const char kCmdDecoderAlwaysGetSizeFromSourceTextureDescription[];
-
 extern const char kPrivacyIndicatorsName[];
 extern const char kPrivacyIndicatorsDescription[];

@@ -193,11 +193,6 @@ struct TexSubCoord3D {
   int depth;
 };

-bool AlwaysGetSizeFromSourceTexture() {
-  return base::FeatureList::IsEnabled(
-      features::kCmdDecoderAlwaysGetSizeFromSourceTexture);
-}
-
 // Check if all |ref| bits are set in |bits|.
 bool AllBitsSet(GLbitfield bits, GLbitfield ref) {
   DCHECK_NE(0u, ref);
@@ -17861,30 +17856,18 @@ void GLES2DecoderImpl::DoCopyTextureCHROMIUM(

   int source_width = 0;
   int source_height = 0;
-  gl::GLImage* image =
-      source_texture->GetLevelImage(source_target, source_level);
-  if (image && !AlwaysGetSizeFromSourceTexture()) {
-    gfx::Size size = image->GetSize();
-    source_width = size.width();
-    source_height = size.height();
-    if (source_width <= 0 || source_height <= 0) {
-      LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, kFunctionName, "invalid image size");
-      return;
-    }
-  } else {
-    if (!source_texture->GetLevelSize(source_target, source_level,
-                                      &source_width, &source_height, nullptr)) {
-      LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, kFunctionName,
-                         "source texture has no data for level");
-      return;
-    }
-
-    // Check that this type of texture is allowed.
-    if (!texture_manager()->ValidForTextureTarget(
-            source_texture, source_level, source_width, source_height, 1)) {
-      LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, kFunctionName, "Bad dimensions");
-      return;
-    }
-  }
+  if (!source_texture->GetLevelSize(source_target, source_level, &source_width,
+                                    &source_height, nullptr)) {
+    LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, kFunctionName,
+                       "source texture has no data for level");
+    return;
+  }
+
+  // Check that this type of texture is allowed.
+  if (!texture_manager()->ValidForTextureTarget(
+          source_texture, source_level, source_width, source_height, 1)) {
+    LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, kFunctionName, "Bad dimensions");
+    return;
+  }

   if (dest_texture->IsImmutable()) {
@@ -18002,54 +17985,26 @@ void GLES2DecoderImpl::CopySubTextureHelper(const char* function_name,
   GLenum dest_binding_target = dest_texture->target();
   int source_width = 0;
   int source_height = 0;
-  gl::GLImage* image =
-      source_texture->GetLevelImage(source_target, source_level);
-  if (image && !AlwaysGetSizeFromSourceTexture()) {
-    gfx::Size size = image->GetSize();
-    source_width = size.width();
-    source_height = size.height();
-    if (source_width <= 0 || source_height <= 0) {
-      LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, function_name, "invalid image size");
-      return;
-    }
-
-    // Ideally we should not need to check that the sub-texture copy rectangle
-    // is valid in two different ways, here and below. However currently there
-    // is no guarantee that a texture backed by a GLImage will have sensible
-    // level info. If this synchronization were to be enforced then this and
-    // other functions in this file could be cleaned up.
-    // See: https://crbug.com/586476
-    int32_t max_x;
-    int32_t max_y;
-    if (!base::CheckAdd(x, width).AssignIfValid(&max_x) ||
-        !base::CheckAdd(y, height).AssignIfValid(&max_y) || x < 0 || y < 0 ||
-        max_x > source_width || max_y > source_height) {
-      LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, function_name,
-                         "source texture bad dimensions");
-      return;
-    }
-  } else {
-    if (!source_texture->GetLevelSize(source_target, source_level,
-                                      &source_width, &source_height, nullptr)) {
-      LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, function_name,
-                         "source texture has no data for level");
-      return;
-    }
-
-    // Check that this type of texture is allowed.
-    if (!texture_manager()->ValidForTextureTarget(
-            source_texture, source_level, source_width, source_height, 1)) {
-      LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, function_name,
-                         "source texture bad dimensions");
-      return;
-    }
-
-    if (!source_texture->ValidForTexture(source_target, source_level, x, y, 0,
-                                         width, height, 1)) {
-      LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, function_name,
-                         "source texture bad dimensions.");
-      return;
-    }
-  }
+  if (!source_texture->GetLevelSize(source_target, source_level, &source_width,
+                                    &source_height, nullptr)) {
+    LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, function_name,
+                       "source texture has no data for level");
+    return;
+  }
+
+  // Check that this type of texture is allowed.
+  if (!texture_manager()->ValidForTextureTarget(
+          source_texture, source_level, source_width, source_height, 1)) {
+    LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, function_name,
+                       "source texture bad dimensions");
+    return;
+  }
+
+  if (!source_texture->ValidForTexture(source_target, source_level, x, y, 0,
+                                       width, height, 1)) {
+    LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, function_name,
+                       "source texture bad dimensions.");
+    return;
+  }

   GLenum source_type = 0;
@@ -389,13 +389,6 @@ BASE_FEATURE(kPassthroughYuvRgbConversion,
              "PassthroughYuvRgbConversion",
              base::FEATURE_ENABLED_BY_DEFAULT);

-// When enabled, the validating command decoder always obtains the size to use
-// from the source texture when copying textures, rather than first checking if
-// there is a GLImage present and using its size if so.
-BASE_FEATURE(kCmdDecoderAlwaysGetSizeFromSourceTexture,
-             "CmdDecoderAlwaysGetSizeFromSourceTexture",
-             base::FEATURE_ENABLED_BY_DEFAULT);
-
 // When the application is in background, whether to perform immediate GPU
 // cleanup when executing deferred requests.
 BASE_FEATURE(kGpuCleanupInBackground,
@@ -94,8 +94,6 @@ GPU_EXPORT BASE_DECLARE_FEATURE(kIncreasedCmdBufferParseSlice);

 GPU_EXPORT BASE_DECLARE_FEATURE(kPassthroughYuvRgbConversion);

-GPU_EXPORT BASE_DECLARE_FEATURE(kCmdDecoderAlwaysGetSizeFromSourceTexture);
-
 GPU_EXPORT BASE_DECLARE_FEATURE(kGpuCleanupInBackground);

 GPU_EXPORT bool UseGles2ForOopR();
Reference in New Issue
Block a user