Delete build/lacros/*
Lacros is not getting built anymore.

Bug: b:370356213
Change-Id: I130e4ed91cff48316604c89e803a186a95824a01
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/6054826
Reviewed-by: Sven Zheng <svenzheng@chromium.org>
Reviewed-by: Dirk Pranke <dpranke@google.com>
Commit-Queue: Georg Neis <neis@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1390756}
Committed by: Chromium LUCI CQ
Parent: fab2cd739c
Commit: a805d21567
PRESUBMIT.py
@@ -2281,7 +2281,6 @@ _GENERIC_PYDEPS_FILES = [
     'build/android/resource_sizes.pydeps',
     'build/android/test_runner.pydeps',
     'build/android/test_wrapper/logdog_wrapper.pydeps',
-    'build/lacros/lacros_resource_sizes.pydeps',
     'build/protoc_java.pydeps',
     'chrome/android/monochrome/scripts/monochrome_python_tests.pydeps',
     'chrome/test/chromedriver/log_replay/client_replay_unittest.pydeps',
build/.gitignore (vendored, 1 line removed)
@@ -12,7 +12,6 @@ ciopfs
 /Debug_x64
 /fuchsia/internal/
 /ipch/
-/lacros/prebuilt_ash_chrome/
 /Release
 /Release_x64
 /win_toolchain.json
build/lacros/BUILD.gn (deleted)
@@ -1,28 +0,0 @@
# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("//build/config/python.gni")
import("//build/util/process_version.gni")

python_library("lacros_resource_sizes_py") {
  pydeps_file = "lacros_resource_sizes.pydeps"
  data = [ "//buildtools/third_party/eu-strip/bin/eu-strip" ]
  data_deps = [
    "//build/util:test_results",
    "//third_party/catapult/tracing:convert_chart_json",
  ]
}

# Lacros is built with "{arch}-generic" configuration. However, in Chrome
# OS, it is just "one board variation", so the libraries on the *-generic
# boards may not be compatible with the ones on the actual DUTs.
# One of the common pattern recently we hit is symbols exposed by libgcc.
# The symbols start to be exposed recently because of libunwind transition
# and along with it they are or are not re-exposed by other libraries, too,
# depending on per-board implementation.
# To mitigate the situation, marking -shared-libgcc to look up the system
# libgcc always.
config("optional_shared_libgcc") {
  ldflags = [ "-shared-libgcc" ]
}
build/lacros/OWNERS (deleted)
@@ -1,4 +0,0 @@
erikchen@chromium.org
jennyz@chromium.org
kuanhuang@chromium.org
svenzheng@chromium.org
build/lacros/PRESUBMIT.py (deleted)
@@ -1,22 +0,0 @@
# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for changes affecting //build/lacros"""


def _CommonChecks(input_api, output_api):
  # Don't run lacros tests on Windows.
  if input_api.is_windows:
    return []
  tests = input_api.canned_checks.GetUnitTestsInDirectory(
      input_api, output_api, '.', [r'^.+_test\.py$'])
  return input_api.RunTests(tests)


def CheckChangeOnUpload(input_api, output_api):
  return _CommonChecks(input_api, output_api)


def CheckChangeOnCommit(input_api, output_api):
  return _CommonChecks(input_api, output_api)
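For context, the presubmit above discovers and runs every file in //build/lacros whose name matches ^.+_test\.py$. A minimal sketch of such a test file (the file name and test body below are hypothetical, purely for illustration, and were not part of the tree):

#!/usr/bin/env python3
# Hypothetical example of a file the presubmit pattern would pick up,
# e.g. build/lacros/lacros_resource_sizes_test.py (illustrative only).
import unittest


class ExampleTest(unittest.TestCase):
  def test_trivial(self):
    # Any ordinary unittest case works; the presubmit simply runs the file.
    self.assertEqual(1 + 1, 2)


if __name__ == '__main__':
  unittest.main()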
build/lacros/README.md (deleted)
@@ -1,11 +0,0 @@
This folder contains code for running lacros in tests.

This includes:
* test_runner.py
  Run linux-lacros related tests.

* mojo_connection_lacros_launcher
  Script for launching lacros for debugging.

* lacros_resource_sizes.py
  Monitoring lacros binary size script used by builders.
build/lacros/lacros_resource_sizes.gni (deleted)
@@ -1,21 +0,0 @@
# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("//build/util/generate_wrapper.gni")

# Generates a script in the bin directory that runs
# //build/lacros/lacros_resource_sizes.py for the provided configuration.
template("lacros_resource_sizes_test") {
  generate_wrapper(target_name) {
    forward_variables_from(invoker, [ "data_deps" ])
    executable = "//build/lacros/lacros_resource_sizes.py"
    wrapper_script = "$root_out_dir/bin/run_${target_name}"

    deps = [ "//build/lacros:lacros_resource_sizes_py" ]
    executable_args = [
      "--chromium-output-directory",
      "@WrappedPath(.)",
    ]
  }
}
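For orientation, the wrapper this template generates ends up invoking lacros_resource_sizes.py with --chromium-output-directory pointed at the build directory (its only instantiation, resource_sizes_lacros_chrome, is removed further down in this change). A rough Python equivalent of that invocation, as a sketch rather than the actual generate_wrapper output, with the --arch value being an assumption here since the script requires it but the template itself does not supply it:

# Rough equivalent of what the generated bin/run_<target> wrapper does:
# run lacros_resource_sizes.py against a given Chromium output directory.
import subprocess
import sys


def run_resource_sizes(out_dir, arch='amd64'):
  # arch is assumed for illustration; in practice the caller supplied it.
  cmd = [
      sys.executable, 'build/lacros/lacros_resource_sizes.py',
      '--chromium-output-directory', out_dir,
      '--arch', arch,
  ]
  return subprocess.call(cmd)


if __name__ == '__main__':
  sys.exit(run_resource_sizes(sys.argv[1] if len(sys.argv) > 1 else '.'))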
build/lacros/lacros_resource_sizes.py (deleted)
@@ -1,401 +0,0 @@
#!/usr/bin/env python3
# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Reports binary size metrics for LaCrOS build artifacts.

More information at //docs/speed/binary_size/metrics.md.
"""

import argparse
import collections
import contextlib
import json
import logging
import os
import subprocess
import sys
import tempfile

SRC_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
sys.path.insert(0, os.path.join(SRC_DIR, 'build', 'util'))
from lib.results import result_sink
from lib.results import result_types


@contextlib.contextmanager
def _SysPath(path):
  """Library import context that temporarily appends |path| to |sys.path|."""
  if path and path not in sys.path:
    sys.path.insert(0, path)
  else:
    path = None  # Indicates that |sys.path| is not modified.
  try:
    yield
  finally:
    if path:
      sys.path.pop(0)


DIR_SOURCE_ROOT = os.environ.get(
    'CHECKOUT_SOURCE_ROOT',
    os.path.abspath(
        os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)))

BUILD_UTIL_PATH = os.path.join(DIR_SOURCE_ROOT, 'build', 'util')

TRACING_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'catapult',
                            'tracing')

EU_STRIP_PATH = os.path.join(DIR_SOURCE_ROOT, 'buildtools', 'third_party',
                             'eu-strip', 'bin', 'eu-strip')

with _SysPath(BUILD_UTIL_PATH):
  from lib.common import perf_tests_results_helper

with _SysPath(TRACING_PATH):
  from tracing.value import convert_chart_json  # pylint: disable=import-error

_BASE_CHART = {
    'format_version': '0.1',
    'benchmark_name': 'resource_sizes',
    'trace_rerun_options': [],
    'charts': {}
}

_KEY_RAW = 'raw'
_KEY_GZIPPED = 'gzipped'
_KEY_STRIPPED = 'stripped'
_KEY_STRIPPED_GZIPPED = 'stripped_then_gzipped'


class _Group:
  """A group of build artifacts whose file sizes are summed and tracked.

  Build artifacts for size tracking fall under these categories:
  * File: A single file.
  * Group: A collection of files.
  * Dir: All files under a directory.

  Attributes:
    paths: A list of files or directories to be tracked together.
    title: The display name of the group.
    track_stripped: Whether to also track summed stripped ELF sizes.
    track_compressed: Whether to also track summed compressed sizes.
  """

  def __init__(self, paths, title, track_stripped=False,
               track_compressed=False):
    self.paths = paths
    self.title = title
    self.track_stripped = track_stripped
    self.track_compressed = track_compressed

  def __eq__(self, other):
    """Overrides the default implementation"""
    if isinstance(other, _Group):
      return (self.paths == other.paths) & (self.title == other.title) & (
          self.track_stripped == other.track_stripped) & (
              self.track_compressed == other.track_compressed)
    return False


# Common artifacts in official builder lacros-arm32 and lacros64 in
# src-internal. The artifcts can be found in
# chromium/src-internal/testing/buildbot/archive/lacros64.json and
# chromium/src-internal/testing/buildbot/archive/lacros-arm32.json
# chromium/src-internal/testing/buildbot/archive/lacros-arm64.json
_TRACKED_GROUPS = [
    _Group(paths=['chrome'],
           title='File: chrome',
           track_stripped=True,
           track_compressed=True),
    _Group(paths=['chrome_crashpad_handler'],
           title='File: chrome_crashpad_handler'),
    _Group(paths=['icudtl.dat'], title='File: icudtl.dat'),
    _Group(paths=['icudtl.dat.hash'], title='File: icudtl.dat.hash'),
    _Group(paths=['libEGL.so'], title='File: libEGL.so'),
    _Group(paths=['libGLESv2.so'], title='File: libGLESv2.so'),
    _Group(paths=['nacl_helper'], title='File: nacl_helper'),
    _Group(paths=['resources.pak'], title='File: resources.pak'),
    _Group(paths=[
        'chrome_100_percent.pak', 'chrome_200_percent.pak',
        'headless_lib_data.pak', 'headless_lib_strings.pak'
    ],
           title='Group: Other PAKs'),
    _Group(paths=['snapshot_blob.bin'], title='Group: Misc'),
    _Group(paths=['locales/'], title='Dir: locales'),
    _Group(paths=['PrivacySandboxAttestationsPreloaded/'],
           title='Dir: PrivacySandboxAttestationsPreloaded'),
    _Group(paths=['resources/accessibility/'],
           title='Dir: resources/accessibility'),
    _Group(paths=['WidevineCdm/'], title='Dir: WidevineCdm'),
]


def _visit_paths(base_dir, paths):
  """Itemizes files specified by a list of paths.

  Args:
    base_dir: Base directory for all elements in |paths|.
    paths: A list of filenames or directory names to specify files whose sizes
      to be counted. Directories are recursed. There's no de-duping effort.
      Non-existing files or directories are ignored (with warning message).
  """
  for path in paths:
    full_path = os.path.join(base_dir, path)
    if os.path.exists(full_path):
      if os.path.isdir(full_path):
        for dirpath, _, filenames in os.walk(full_path):
          for filename in filenames:
            yield os.path.join(dirpath, filename)
      else:  # Assume is file.
        yield full_path
    else:
      logging.critical('Not found: %s', path)


def _is_probably_elf(filename):
  """Heuristically decides whether |filename| is ELF via magic signature."""
  with open(filename, 'rb') as fh:
    return fh.read(4) == '\x7FELF'


def _is_unstrippable_elf(filename):
  """Identifies known-unstrippable ELF files to denoise the system."""
  return filename.endswith('.nexe') or filename.endswith('libwidevinecdm.so')


def _get_filesize(filename):
  """Returns the size of a file, or 0 if file is not found."""
  try:
    return os.path.getsize(filename)
  except OSError:
    logging.critical('Failed to get size: %s', filename)
  return 0


def _get_gzipped_filesize(filename):
  """Returns the gzipped size of a file, or 0 if file is not found."""
  BUFFER_SIZE = 65536
  if not os.path.isfile(filename):
    return 0
  try:
    # Call gzip externally instead of using gzip package since it's > 2x faster.
    cmd = ['gzip', '-c', filename]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    # Manually counting bytes instead of using len(p.communicate()[0]) to avoid
    # buffering the entire compressed data (can be ~100 MB).
    ret = 0
    while True:
      chunk = len(p.stdout.read(BUFFER_SIZE))
      if chunk == 0:
        break
      ret += chunk
    return ret
  except OSError:
    logging.critical('Failed to get gzipped size: %s', filename)
  return 0


def _get_catagorized_filesizes(filename):
  """Measures |filename| sizes under various transforms.

  Returns: A Counter (keyed by _Key_* constants) that stores measured sizes.
  """
  sizes = collections.Counter()
  sizes[_KEY_RAW] = _get_filesize(filename)
  sizes[_KEY_GZIPPED] = _get_gzipped_filesize(filename)

  # Pre-assign values for non-ELF, or in case of failure for ELF.
  sizes[_KEY_STRIPPED] = sizes[_KEY_RAW]
  sizes[_KEY_STRIPPED_GZIPPED] = sizes[_KEY_GZIPPED]

  if _is_probably_elf(filename) and not _is_unstrippable_elf(filename):
    try:
      fd, temp_file = tempfile.mkstemp()
      os.close(fd)
      cmd = [EU_STRIP_PATH, filename, '-o', temp_file]
      subprocess.check_output(cmd)
      sizes[_KEY_STRIPPED] = _get_filesize(temp_file)
      sizes[_KEY_STRIPPED_GZIPPED] = _get_gzipped_filesize(temp_file)
      if sizes[_KEY_STRIPPED] > sizes[_KEY_RAW]:
        # This weird case has been observed for libwidevinecdm.so.
        logging.critical('Stripping made things worse for %s' % filename)
    except subprocess.CalledProcessError:
      logging.critical('Failed to strip file: %s' % filename)
    finally:
      os.unlink(temp_file)
  return sizes


def _dump_chart_json(output_dir, chartjson):
  """Writes chart histogram to JSON files.

  Output files:
    results-chart.json contains the chart JSON.
    perf_results.json contains histogram JSON for Catapult.

  Args:
    output_dir: Directory to place the JSON files.
    chartjson: Source JSON data for output files.
  """
  results_path = os.path.join(output_dir, 'results-chart.json')
  logging.critical('Dumping chartjson to %s', results_path)
  with open(results_path, 'w') as json_file:
    json.dump(chartjson, json_file, indent=2)

  # We would ideally generate a histogram set directly instead of generating
  # chartjson then converting. However, perf_tests_results_helper is in
  # //build, which doesn't seem to have any precedent for depending on
  # anything in Catapult. This can probably be fixed, but since this doesn't
  # need to be super fast or anything, converting is a good enough solution
  # for the time being.
  histogram_result = convert_chart_json.ConvertChartJson(results_path)
  if histogram_result.returncode != 0:
    raise Exception('chartjson conversion failed with error: ' +
                    histogram_result.stdout)

  histogram_path = os.path.join(output_dir, 'perf_results.json')
  logging.critical('Dumping histograms to %s', histogram_path)
  with open(histogram_path, 'wb') as json_file:
    json_file.write(histogram_result.stdout)


def _run_resource_sizes(args):
  """Main flow to extract and output size data."""
  chartjson = _BASE_CHART.copy()
  chartjson.update({
      'benchmark_description':
      ('LaCrOS %s resource size information.' % args.arch)
  })
  report_func = perf_tests_results_helper.ReportPerfResult
  total_sizes = collections.Counter()

  def report_sizes(sizes, title, track_stripped, track_compressed):
    report_func(chart_data=chartjson,
                graph_title=title,
                trace_title='size',
                value=sizes[_KEY_RAW],
                units='bytes')

    if track_stripped:
      report_func(chart_data=chartjson,
                  graph_title=title + ' (Stripped)',
                  trace_title='size',
                  value=sizes[_KEY_STRIPPED],
                  units='bytes')

    if track_compressed:
      report_func(chart_data=chartjson,
                  graph_title=title + ' (Gzipped)',
                  trace_title='size',
                  value=sizes[_KEY_GZIPPED],
                  units='bytes')

    if track_stripped and track_compressed:
      report_func(chart_data=chartjson,
                  graph_title=title + ' (Stripped, Gzipped)',
                  trace_title='size',
                  value=sizes[_KEY_STRIPPED_GZIPPED],
                  units='bytes')

  tracked_groups = _TRACKED_GROUPS.copy()
  # Architecture amd64 requires artifact nacl_irt_x86_64.nexe.
  if args.arch == 'amd64':
    tracked_groups.append(
        _Group(paths=['nacl_irt_x86_64.nexe'],
               title='File: nacl_irt_x86_64.nexe'))
  # Architecture arm32 requires artifact nacl_irt_arm.nexe.
  elif args.arch == 'arm32':
    tracked_groups.append(
        _Group(paths=['nacl_irt_arm.nexe'], title='File: nacl_irt_arm.nexe'))
    tracked_groups.append(
        _Group(paths=['nacl_helper_bootstrap'],
               title='File: nacl_helper_bootstrap'))
  # TODO(crbug.com/40236427): remove the following part once nacl files
  # are available.
  elif args.arch == 'arm64':
    tracked_groups.remove(
        _Group(paths=['nacl_helper'], title='File: nacl_helper'))
  for g in tracked_groups:
    sizes = sum(
        map(_get_catagorized_filesizes, _visit_paths(args.out_dir, g.paths)),
        collections.Counter())
    report_sizes(sizes, g.title, g.track_stripped, g.track_compressed)

    # Total compressed size is summed over individual compressed sizes, instead
    # of concatanating first, then compress everything. This is done for
    # simplicity. It also gives a conservative size estimate (assuming file
    # metadata and overheads are negligible).
    total_sizes += sizes

  report_sizes(total_sizes, 'Total', True, True)

  _dump_chart_json(args.output_dir, chartjson)


def main():
  """Parses arguments and runs high level flows."""
  argparser = argparse.ArgumentParser(description='Writes LaCrOS size metrics.')

  argparser.add_argument('--chromium-output-directory',
                         dest='out_dir',
                         required=True,
                         type=os.path.realpath,
                         help='Location of the build artifacts.')
  argparser.add_argument('--arch',
                         required=True,
                         type=str,
                         help='The architecture of lacros, valid values: amd64,'
                         ' arm32, arm64')

  output_group = argparser.add_mutually_exclusive_group()

  output_group.add_argument('--output-dir',
                            default='.',
                            help='Directory to save chartjson to.')

  # Accepted to conform to the isolated script interface, but ignored.
  argparser.add_argument('--isolated-script-test-filter',
                         help=argparse.SUPPRESS)
  argparser.add_argument('--isolated-script-test-perf-output',
                         type=os.path.realpath,
                         help=argparse.SUPPRESS)

  output_group.add_argument(
      '--isolated-script-test-output',
      type=os.path.realpath,
      help='File to which results will be written in the simplified JSON '
      'output format.')

  args = argparser.parse_args()

  isolated_script_output = {'valid': False, 'failures': []}
  if args.isolated_script_test_output:
    test_name = 'lacros_resource_sizes'
    args.output_dir = os.path.join(
        os.path.dirname(args.isolated_script_test_output), test_name)
    if not os.path.exists(args.output_dir):
      os.makedirs(args.output_dir)

  try:
    _run_resource_sizes(args)
    isolated_script_output = {'valid': True, 'failures': []}
  finally:
    if args.isolated_script_test_output:
      results_path = os.path.join(args.output_dir, 'test_results.json')
      with open(results_path, 'w') as output_file:
        json.dump(isolated_script_output, output_file)
      with open(args.isolated_script_test_output, 'w') as output_file:
        json.dump(isolated_script_output, output_file)
      result_sink_client = result_sink.TryInitClient()
      if result_sink_client:
        status = result_types.PASS
        if not isolated_script_output['valid']:
          status = result_types.UNKNOWN
        elif isolated_script_output['failures']:
          status = result_types.FAIL
        result_sink_client.Post(test_name, status, None, None, None)


if __name__ == '__main__':
  main()
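To make the measurement flow above concrete: for each _Group the script walks the listed paths, measures every file, and sums the per-file Counters into one result. A stripped-down sketch of that pattern, raw sizes only and with made-up paths (this is an illustration, not part of the deleted file):

# Minimal sketch of the group-summation pattern used above: walk each
# group's paths, measure every file, and sum the results with Counter.
# The paths and group names here are invented for illustration.
import collections
import os


def visit_paths(base_dir, paths):
  for path in paths:
    full_path = os.path.join(base_dir, path)
    if os.path.isdir(full_path):
      for dirpath, _, filenames in os.walk(full_path):
        for filename in filenames:
          yield os.path.join(dirpath, filename)
    elif os.path.exists(full_path):
      yield full_path


def group_size(base_dir, paths):
  sizes = collections.Counter()
  for filename in visit_paths(base_dir, paths):
    sizes['raw'] += os.path.getsize(filename)
  return sizes


if __name__ == '__main__':
  print(group_size('.', ['chrome', 'locales/']))  # hypothetical artifacts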
build/lacros/lacros_resource_sizes.pydeps (deleted)
@@ -1,17 +0,0 @@
# Generated by running:
#   build/print_python_deps.py --root build/lacros --output build/lacros/lacros_resource_sizes.pydeps build/lacros/lacros_resource_sizes.py
../../third_party/catapult/third_party/six/six.py
../../third_party/catapult/third_party/vinn/vinn/__init__.py
../../third_party/catapult/third_party/vinn/vinn/_vinn.py
../../third_party/catapult/tracing/tracing/__init__.py
../../third_party/catapult/tracing/tracing/value/__init__.py
../../third_party/catapult/tracing/tracing/value/convert_chart_json.py
../../third_party/catapult/tracing/tracing_project.py
../util/lib/__init__.py
../util/lib/common/__init__.py
../util/lib/common/perf_result_data_type.py
../util/lib/common/perf_tests_results_helper.py
../util/lib/results/__init__.py
../util/lib/results/result_sink.py
../util/lib/results/result_types.py
lacros_resource_sizes.py
build/lacros/mojo_connection_lacros_launcher.py (deleted)
@@ -1,210 +0,0 @@
#!/usr/bin/env vpython3
#
# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Helps launch lacros-chrome with mojo connection established on Linux
or Chrome OS. Use on Chrome OS is for dev purposes.

The main use case is to be able to launch lacros-chrome in a debugger.

Please first launch an ash-chrome in the background as usual except without
the '--lacros-chrome-path' argument and with an additional
'--lacros-mojo-socket-for-testing' argument pointing to a socket path:

XDG_RUNTIME_DIR=/tmp/ash_chrome_xdg_runtime ./out/ash/chrome \\
--user-data-dir=/tmp/ash-chrome --enable-wayland-server \\
--no-startup-window --enable-features=LacrosOnly \\
--lacros-mojo-socket-for-testing=/tmp/lacros.sock

Then, run this script with '-s' pointing to the same socket path used to
launch ash-chrome, followed by a command one would use to launch lacros-chrome
inside a debugger:

EGL_PLATFORM=surfaceless XDG_RUNTIME_DIR=/tmp/ash_chrome_xdg_runtime \\
./build/lacros/mojo_connection_lacros_launcher.py -s /tmp/lacros.sock
gdb --args ./out/lacros-release/chrome --user-data-dir=/tmp/lacros-chrome
"""

import argparse
import array
import contextlib
import getpass
import grp
import os
import pathlib
import pwd
import resource
import socket
import sys
import subprocess


_NUM_FDS_MAX = 3


# contextlib.nullcontext is introduced in 3.7, while Python version on
# CrOS is still 3.6. This is for backward compatibility.
class NullContext:
  def __init__(self, enter_ret=None):
    self.enter_ret = enter_ret

  def __enter__(self):
    return self.enter_ret

  def __exit__(self, exc_type, exc_value, trace):
    pass


def _ReceiveFDs(sock):
  """Receives FDs from ash-chrome that will be used to launch lacros-chrome.

  Args:
    sock: A connected unix domain socket.

  Returns:
    File objects for the mojo connection and maybe startup data file.
  """
  # This function is borrowed from with modifications:
  # https://docs.python.org/3/library/socket.html#socket.socket.recvmsg
  fds = array.array("i")  # Array of ints
  # Along with the file descriptor, ash-chrome also sends the version in the
  # regular data.
  version, ancdata, _, _ = sock.recvmsg(
      1, socket.CMSG_LEN(fds.itemsize * _NUM_FDS_MAX))
  for cmsg_level, cmsg_type, cmsg_data in ancdata:
    if cmsg_level == socket.SOL_SOCKET and cmsg_type == socket.SCM_RIGHTS:
      # There are three versions currently this script supports.
      # The oldest one: ash-chrome returns one FD, the mojo connection of
      # old bootstrap procedure (i.e., it will be BrowserService).
      # The middle one: ash-chrome returns two FDs, the mojo connection of
      # old bootstrap procedure, and the second for the start up data FD.
      # The newest one: ash-chrome returns three FDs, the mojo connection of
      # old bootstrap procedure, the second for the start up data FD, and
      # the third for another mojo connection of new bootstrap procedure.
      # TODO(crbug.com/40735724): Clean up the code to drop the support of
      # oldest one after M91.
      # TODO(crbug.com/40170079): Clean up the mojo procedure support of the
      # the middle one after M92.
      cmsg_len_candidates = [(i + 1) * fds.itemsize
                             for i in range(_NUM_FDS_MAX)]
      assert len(cmsg_data) in cmsg_len_candidates, (
          'CMSG_LEN is unexpected: %d' % (len(cmsg_data), ))
      fds.frombytes(cmsg_data[:])

  if version == b'\x01':
    assert len(fds) == 2, 'Expecting exactly 2 FDs'
    startup_fd = os.fdopen(fds[0])
    mojo_fd = os.fdopen(fds[1])
  elif version:
    raise AssertionError('Unknown version: \\x%s' % version.hex())
  else:
    raise AssertionError('Failed to receive startup message from ash-chrome. '
                         'Make sure you\'re logged in to Chrome OS.')
  return startup_fd, mojo_fd


def _MaybeClosing(fileobj):
  """Returns closing context manager, if given fileobj is not None.

  If the given fileobj is none, return nullcontext.
  """
  return (contextlib.closing if fileobj else NullContext)(fileobj)


def _ApplyCgroups():
  """Applies cgroups used in ChromeOS to lacros chrome as well."""
  # Cgroup directories taken from ChromeOS session_manager job configuration.
  UI_FREEZER_CGROUP_DIR = '/sys/fs/cgroup/freezer/ui'
  UI_CPU_CGROUP_DIR = '/sys/fs/cgroup/cpu/ui'
  pid = os.getpid()
  with open(os.path.join(UI_CPU_CGROUP_DIR, 'tasks'), 'a') as f:
    f.write(str(pid) + '\n')
  with open(os.path.join(UI_FREEZER_CGROUP_DIR, 'cgroup.procs'), 'a') as f:
    f.write(str(pid) + '\n')


def _PreExec(uid, gid, groups):
  """Set environment up for running the chrome binary."""
  # Nice and realtime priority values taken ChromeOSs session_manager job
  # configuration.
  resource.setrlimit(resource.RLIMIT_NICE, (40, 40))
  resource.setrlimit(resource.RLIMIT_RTPRIO, (10, 10))
  os.setgroups(groups)
  os.setgid(gid)
  os.setuid(uid)


def Main():
  arg_parser = argparse.ArgumentParser()
  arg_parser.usage = __doc__
  arg_parser.add_argument(
      '-r',
      '--root-env-setup',
      action='store_true',
      help='Set typical cgroups and environment for chrome. '
      'If this is set, this script must be run as root.')
  arg_parser.add_argument(
      '-s',
      '--socket-path',
      type=pathlib.Path,
      required=True,
      help='Absolute path to the socket that were used to start ash-chrome, '
      'for example: "/tmp/lacros.socket"')
  flags, args = arg_parser.parse_known_args()

  assert 'XDG_RUNTIME_DIR' in os.environ
  assert os.environ.get('EGL_PLATFORM') == 'surfaceless'

  if flags.root_env_setup:
    # Check if we are actually root and error otherwise.
    assert getpass.getuser() == 'root', \
        'Root required environment flag specified, but user is not root.'
    # Apply necessary cgroups to our own process, so they will be inherited by
    # lacros chrome.
    _ApplyCgroups()
  else:
    print('WARNING: Running chrome without appropriate environment. '
          'This may affect performance test results. '
          'Set -r and run as root to avoid this.')

  with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
    sock.connect(flags.socket_path.as_posix())
    startup_connection, mojo_connection = (_ReceiveFDs(sock))

  with _MaybeClosing(startup_connection), _MaybeClosing(mojo_connection):
    cmd = args[:]
    pass_fds = []
    if startup_connection:
      cmd.append('--cros-startup-data-fd=%d' % startup_connection.fileno())
      pass_fds.append(startup_connection.fileno())
    if mojo_connection:
      cmd.append('--crosapi-mojo-platform-channel-handle=%d' %
                 mojo_connection.fileno())
      pass_fds.append(mojo_connection.fileno())

    env = os.environ.copy()
    if flags.root_env_setup:
      username = 'chronos'
      p = pwd.getpwnam(username)
      uid = p.pw_uid
      gid = p.pw_gid
      groups = [g.gr_gid for g in grp.getgrall() if username in g.gr_mem]
      env['HOME'] = p.pw_dir
      env['LOGNAME'] = username
      env['USER'] = username

      def fn():
        return _PreExec(uid, gid, groups)
    else:

      def fn():
        return None

    proc = subprocess.Popen(cmd, pass_fds=pass_fds, preexec_fn=fn)

  return proc.wait()


if __name__ == '__main__':
  sys.exit(Main())
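The launcher's _ReceiveFDs expects ash-chrome to send a one-byte version as regular data (b'\x01' for the two-FD variant, with the startup-data FD first and the mojo FD second) and the file descriptors attached as SCM_RIGHTS ancillary data. A minimal sketch of the sending side of that protocol, purely for illustration since ash-chrome implements it in C++:

# Hypothetical sender for the protocol _ReceiveFDs expects: one version
# byte as regular data, plus the FDs as SCM_RIGHTS ancillary data.
import array
import os
import socket


def send_fds(sock, fds, version=b'\x01'):
  # With version b'\x01' the receiver asserts exactly two FDs.
  sock.sendmsg([version],
               [(socket.SOL_SOCKET, socket.SCM_RIGHTS, array.array('i', fds))])


if __name__ == '__main__':
  parent, child = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
  r, w = os.pipe()  # stand-ins for the startup-data and mojo FDs
  send_fds(parent, [r, w])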
@@ -4,7 +4,6 @@
 
 import("//build/config/chromeos/ui_mode.gni")
 import("//build/config/linux/gtk/gtk.gni")
-import("//build/lacros/lacros_resource_sizes.gni")
 import("//chromeos/tast_control.gni")
 
 if (is_chromeos_device) {
@@ -78,10 +77,6 @@ source_set("test_support") {
   ]
 }
 
-lacros_resource_sizes_test("resource_sizes_lacros_chrome") {
-  data_deps = [ "//chrome:chrome" ]
-}
-
 source_set("unit_tests") {
   testonly = true
   deps = [
@@ -21,12 +21,6 @@ if (is_chromeos_device) {
       deploy_lacros = true
       skip_generating_board_args = true
     }
-    if (!is_chromeos_ash) {
-      data = [
-        # A script needed to launch Lacros in testing environment.
-        "//build/lacros/mojo_connection_lacros_launcher.py",
-      ]
-    }
   }
 }