[telemetry] Refactoring script.
- For refactor v0, add the ability to move modules but not rename them.
- Merge count script into unified refactor script that handles all our ghetto grepping needs.

Also:
- Fix InDirectory and move it out of find_dependencies to telemetry.util.path.
- Change find_dependencies to use telemetry.util.path.

BUG=449308
TEST=tools/telemetry$ mkdir telemetry/internal; ./refactor mv telemetry/core/ telemetry/internal/; # To clean up, do "rm -r telemetry/internal/core && git reset --hard"

Review URL: https://codereview.chromium.org/1033053002

Cr-Commit-Position: refs/heads/master@{#323064}
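Illustration (not part of the commit): for the TEST invocation above, the mv subcommand first records a map from old to new module names and then rewrites matching imports in every Python file under BASE_DIRS. Assuming telemetry/core/util.py is among the moved files, the map would contain an entry like

    moved_modules = {'telemetry.core.util': 'telemetry.internal.core.util'}

so a line such as "from telemetry.core import util" is rewritten to "from telemetry.internal.core import util" and its import group is re-sorted.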
tools/telemetry
@@ -1,40 +0,0 @@
#! /usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import imp
import inspect
import os

from telemetry.util import path


def IncludeDir(dir_name):
  return (dir_name[0] != '.' and dir_name[0] != '_' and
          not dir_name.startswith('internal') and not dir_name == 'third_party')


def IncludeFile(file_name):
  root, ext = os.path.splitext(file_name)
  return (file_name[0] != '.' and
          not root.endswith('_unittest') and ext == '.py')


def ListFiles(directory):
  matching_files = []
  for root, dirs, files in os.walk(directory):
    dirs[:] = [dir_name for dir_name in dirs if IncludeDir(dir_name)]
    matching_files += [
        os.path.relpath(os.path.join(root, file_name), directory)
        for file_name in files if IncludeFile(file_name)]
  return sorted(matching_files)


def main():
  modules = ListFiles(path.GetTelemetryDir())
  print len(modules)


if __name__ == '__main__':
  main()

tools/telemetry/refactor (executable file, 289 lines)
@@ -0,0 +1,289 @@
#! /usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import cStringIO
import imp
import inspect
import os
import re
import sys

from telemetry.core import command_line
from telemetry.util import path


# All folders dependent on Telemetry, found using a code search.
BASE_DIRS = (
    path.GetTelemetryDir(),
    os.path.join(path.GetChromiumSrcDir(), 'chrome', 'test', 'telemetry'),
    os.path.join(path.GetChromiumSrcDir(), 'content', 'test', 'gpu'),
    os.path.join(path.GetChromiumSrcDir(), 'tools', 'bisect-manual-test.py'),
    os.path.join(path.GetChromiumSrcDir(), 'tools', 'chrome_proxy'),
    os.path.join(path.GetChromiumSrcDir(), 'tools', 'perf'),
    os.path.join(path.GetChromiumSrcDir(),
                 'tools', 'profile_chrome', 'perf_controller.py'),
    os.path.join(path.GetChromiumSrcDir(), 'tools', 'run-bisect-manual-test.py'),
    os.path.join(path.GetChromiumSrcDir(),
                 'third_party', 'skia', 'tools', 'skp', 'page_sets'),
    os.path.join(path.GetChromiumSrcDir(), 'third_party', 'trace-viewer'),
)


def SortImportGroups(module_path):
  """Sort each group of imports in the given Python module.

  A group is a collection of adjacent import statements, with no non-import
  lines in between. Groups are sorted according to the Google Python Style
  Guide: "lexicographically, ignoring case, according to each module's full
  package path."
  """
  _TransformImportGroups(module_path, _SortImportGroup)


def _SortImportGroup(import_group):
  def _ImportComparator(import1, import2):
    _, root1, module1, _, _ = import1
    _, root2, module2, _, _ = import2
    full_module1 = (root1 + '.' + module1 if root1 else module1).lower()
    full_module2 = (root2 + '.' + module2 if root2 else module2).lower()
    return cmp(full_module1, full_module2)
  return sorted(import_group, cmp=_ImportComparator)


def _TransformImportGroups(module_path, transformation):
  """Apply a transformation to each group of imports in the given module.

  An import is a tuple of (indent, root, module, alias, suffix),
  serialized as <indent>from <root> import <module> as <alias><suffix>.

  Args:
    module_path: The module to apply transformations on.
    transformation: A function that takes in an import group and returns a
      modified import group. An import group is a list of import tuples.

  Returns:
    True iff the module was modified, and False otherwise.
  """
  def _WriteImports(output_stream, import_group):
    for indent, root, module, alias, suffix in transformation(import_group):
      output_stream.write(indent)
      if root:
        output_stream.write('from ')
        output_stream.write(root)
        output_stream.write(' ')
      output_stream.write('import ')
      output_stream.write(module)
      if alias:
        output_stream.write(' as ')
        output_stream.write(alias)
      output_stream.write(suffix)
      output_stream.write('\n')

  # Read the file so we can diff it later to determine if we made any changes.
  with open(module_path, 'r') as module_file:
    original_file = module_file.read()

  # Locate imports using regex, group them, and transform each one.
  # This regex produces a tuple of (indent, root, module, alias, suffix).
  regex = (r'(\s*)(?:from ((?:[a-z0-9_]+\.)*[a-z0-9_]+) )?'
           r'import ((?:[a-z0-9_]+\.)*[A-Za-z0-9_]+)(?: as ([A-Za-z0-9_]+))?(.*)')
  pattern = re.compile(regex)

  updated_file = cStringIO.StringIO()
  with open(module_path, 'r') as module_file:
    import_group = []
    for line in module_file:
      import_match = pattern.match(line)
      if import_match:
        import_group.append(list(import_match.groups()))
        continue

      if not import_group:
        updated_file.write(line)
        continue

      _WriteImports(updated_file, import_group)
      import_group = []

      updated_file.write(line)

  if import_group:
    _WriteImports(updated_file, import_group)
    import_group = []

  if original_file == updated_file.getvalue():
    return False

  with open(module_path, 'w') as module_file:
    module_file.write(updated_file.getvalue())
  return True


def _ListFiles(base_directory, should_include_dir, should_include_file):
  matching_files = []
  for root, dirs, files in os.walk(base_directory):
    dirs[:] = [dir_name for dir_name in dirs if should_include_dir(dir_name)]
    matching_files += [os.path.join(root, file_name)
                       for file_name in files if should_include_file(file_name)]
  return sorted(matching_files)


class Count(command_line.Command):
  """Print the number of public modules."""

  def Run(self, args):
    modules = _ListFiles(path.GetTelemetryDir(),
                         self._IsPublicApiDir, self._IsPublicApiFile)
    print len(modules)
    return 0

  @staticmethod
  def _IsPublicApiDir(dir_name):
    return (dir_name[0] != '.' and dir_name[0] != '_' and
            not dir_name.startswith('internal') and not dir_name == 'third_party')

  @staticmethod
  def _IsPublicApiFile(file_name):
    root, ext = os.path.splitext(file_name)
    return (file_name[0] != '.' and
            not root.endswith('_unittest') and ext == '.py')


class Mv(command_line.Command):
  """Move modules or packages."""

  @classmethod
  def AddCommandLineArgs(cls, parser):
    parser.add_argument('source', nargs='+')
    parser.add_argument('destination')

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    for source in args.source:
      # Ensure source path exists.
      if not os.path.exists(source):
        parser.error('"%s" not found.' % source)

      # Ensure source path is in one of the BASE_DIRS.
      for base_dir in BASE_DIRS:
        if path.IsSubpath(source, base_dir):
          break
      else:
        parser.error('Source path "%s" is not in any of the base dirs.')

    # Ensure destination path exists.
    if not os.path.exists(args.destination):
      parser.error('"%s" not found.' % args.destination)

    # Ensure destination path is in one of the BASE_DIRS.
    for base_dir in BASE_DIRS:
      if path.IsSubpath(args.destination, base_dir):
        break
    else:
      parser.error('Destination path "%s" is not in any of the base dirs.')

    # If there are multiple source paths, ensure destination is a directory.
    if len(args.source) > 1 and not os.path.isdir(args.destination):
      parser.error('Target "%s" is not a directory.' % args.destination)

    # Ensure destination is not in any of the source paths.
    for source in args.source:
      if path.IsSubpath(args.destination, source):
        parser.error('Cannot move "%s" to a subdirectory of itself, "%s".' %
                     (source, args.destination))

  def Run(self, args):
    for dest_base_dir in BASE_DIRS:
      if path.IsSubpath(args.destination, dest_base_dir):
        break

    # Get a list of old and new module names for renaming imports.
    moved_modules = {}
    for source in args.source:
      for source_base_dir in BASE_DIRS:
        if path.IsSubpath(source, source_base_dir):
          break

      source_dir = os.path.dirname(os.path.normpath(source))

      if os.path.isdir(source):
        source_files = _ListFiles(source,
                                  self._IsSourceDir, self._IsPythonModule)
      else:
        source_files = (source,)

      for source_file_path in source_files:
        source_rel_path = os.path.relpath(source_file_path, source_base_dir)
        source_module_name = os.path.splitext(
            source_rel_path)[0].replace(os.sep, '.')

        source_tree = os.path.relpath(source_file_path, source_dir)
        dest_path = os.path.join(args.destination, source_tree)
        dest_rel_path = os.path.relpath(dest_path, dest_base_dir)
        dest_module_name = os.path.splitext(
            dest_rel_path)[0].replace(os.sep, '.')

        moved_modules[source_module_name] = dest_module_name

    # Move things!
    if os.path.isdir(args.destination):
      for source in args.source:
        destination_path = os.path.join(
            args.destination, os.path.split(os.path.normpath(source))[1])
        os.rename(source, destination_path)
    else:
      assert len(args.source) == 1
      os.rename(args.source.pop(), args.destination)

    # Update imports!
    def _UpdateImportGroup(import_group):
      modified = False
      for import_line in import_group:
        _, root, module, _, _ = import_line
        full_module = root + '.' + module if root else module

        if full_module not in moved_modules:
          continue

        modified = True

        # Update import line.
        new_root, _, new_module = moved_modules[full_module].rpartition('.')
        import_line[1] = new_root
        import_line[2] = new_module

      if modified:
        return _SortImportGroup(import_group)
      else:
        return import_group

    for base_dir in BASE_DIRS:
      for module_path in _ListFiles(base_dir,
                                    self._IsSourceDir, self._IsPythonModule):
        if not _TransformImportGroups(module_path, _UpdateImportGroup):
          continue

        # TODO(dtu): Update occurrences.

    print moved_modules

    return 0

  @staticmethod
  def _IsSourceDir(dir_name):
    return dir_name[0] != '.' and dir_name != 'third_party'

  @staticmethod
  def _IsPythonModule(file_name):
    _, ext = os.path.splitext(file_name)
    return ext == '.py'


class RefactorCommand(command_line.SubcommandCommand):
  commands = (Count, Mv,)


if __name__ == '__main__':
  sys.exit(RefactorCommand.main())
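Illustration (not part of the commit): the regex in _TransformImportGroups splits an import statement into the (indent, root, module, alias, suffix) tuple that _WriteImports later re-serializes. A minimal sketch of that decomposition, using a made-up input line:

    import re

    REGEX = (r'(\s*)(?:from ((?:[a-z0-9_]+\.)*[a-z0-9_]+) )?'
             r'import ((?:[a-z0-9_]+\.)*[A-Za-z0-9_]+)(?: as ([A-Za-z0-9_]+))?(.*)')

    match = re.match(REGEX, 'from telemetry.core import util as core_util  # alias')
    print(match.groups())
    # -> ('', 'telemetry.core', 'util', 'core_util', '  # alias')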
@@ -14,29 +14,22 @@ import zipfile
 from telemetry import benchmark
 from telemetry.core import command_line
 from telemetry.core import discover
-from telemetry.core import util
 from telemetry.util import bootstrap
 from telemetry.util import cloud_storage
+from telemetry.util import path
 from telemetry.util import path_set

 DEPS_FILE = 'bootstrap_deps'


-def _InDirectory(subdirectory, directory):
-  subdirectory = os.path.realpath(subdirectory)
-  directory = os.path.realpath(directory)
-  common_prefix = os.path.commonprefix([subdirectory, directory])
-  return common_prefix == directory
-
-
 def FindBootstrapDependencies(base_dir):
   deps_file = os.path.join(base_dir, DEPS_FILE)
   if not os.path.exists(deps_file):
     return []
   deps_paths = bootstrap.ListAllDepsPaths(deps_file)
-  return set(
-      os.path.realpath(os.path.join(util.GetChromiumSrcDir(), os.pardir, path))
-      for path in deps_paths)
+  return set(os.path.realpath(os.path.join(
+      path.GetChromiumSrcDir(), os.pardir, deps_path))
+      for deps_path in deps_paths)


 def FindPythonDependencies(module_path):
@@ -58,7 +51,7 @@ def FindPythonDependencies(module_path):
       continue

     module_path = os.path.realpath(module_path)
-    if not _InDirectory(module_path, util.GetChromiumSrcDir()):
+    if not path.IsSubpath(module_path, path.GetChromiumSrcDir()):
       continue

     yield module_path
@@ -83,7 +76,7 @@ def FindPageSetDependencies(base_dir):
       options.ensure_value(k, v)

     # Page set paths are relative to their runner script, not relative to us.
-    util.GetBaseDir = lambda: base_dir
+    path.GetBaseDir = lambda: base_dir
     # TODO: Loading the page set will automatically download its Cloud Storage
     # deps. This is really expensive, and we don't want to do this by default.
     page_set = test_obj.CreatePageSet(options)
@@ -94,26 +87,20 @@ def FindPageSetDependencies(base_dir):


 def FindExcludedFiles(files, options):
-  def MatchesConditions(path, conditions):
-    for condition in conditions:
-      if condition(path):
-        return True
-    return False
-
   # Define some filters for files.
-  def IsHidden(path):
-    for pathname_component in path.split(os.sep):
+  def IsHidden(path_string):
+    for pathname_component in path_string.split(os.sep):
       if pathname_component.startswith('.'):
         return True
     return False
-  def IsPyc(path):
-    return os.path.splitext(path)[1] == '.pyc'
-  def IsInCloudStorage(path):
-    return os.path.exists(path + '.sha1')
-  def MatchesExcludeOptions(path):
+  def IsPyc(path_string):
+    return os.path.splitext(path_string)[1] == '.pyc'
+  def IsInCloudStorage(path_string):
+    return os.path.exists(path_string + '.sha1')
+  def MatchesExcludeOptions(path_string):
     for pattern in options.exclude:
-      if (fnmatch.fnmatch(path, pattern) or
-          fnmatch.fnmatch(os.path.basename(path), pattern)):
+      if (fnmatch.fnmatch(path_string, pattern) or
+          fnmatch.fnmatch(os.path.basename(path_string), pattern)):
         return True
     return False

@@ -126,32 +113,32 @@ def FindExcludedFiles(files, options):
   ]

   # Check all the files against the filters.
-  for path in files:
-    if MatchesConditions(path, exclude_conditions):
-      yield path
+  for file_path in files:
+    if any(condition(file_path) for condition in exclude_conditions):
+      yield file_path


-def FindDependencies(paths, options):
+def FindDependencies(target_paths, options):
   # Verify arguments.
-  for path in paths:
-    if not os.path.exists(path):
-      raise ValueError('Path does not exist: %s' % path)
+  for target_path in target_paths:
+    if not os.path.exists(target_path):
+      raise ValueError('Path does not exist: %s' % target_path)

   dependencies = path_set.PathSet()

   # Including __init__.py will include Telemetry and its dependencies.
   # If the user doesn't pass any arguments, we just have Telemetry.
   dependencies |= FindPythonDependencies(os.path.realpath(
-      os.path.join(util.GetTelemetryDir(), 'telemetry', '__init__.py')))
-  dependencies |= FindBootstrapDependencies(util.GetTelemetryDir())
+      os.path.join(path.GetTelemetryDir(), 'telemetry', '__init__.py')))
+  dependencies |= FindBootstrapDependencies(path.GetTelemetryDir())

   # Add dependencies.
-  for path in paths:
-    base_dir = os.path.dirname(os.path.realpath(path))
+  for target_path in target_paths:
+    base_dir = os.path.dirname(os.path.realpath(target_path))

     dependencies.add(base_dir)
     dependencies |= FindBootstrapDependencies(base_dir)
-    dependencies |= FindPythonDependencies(path)
+    dependencies |= FindPythonDependencies(target_path)
     if options.include_page_set_data:
       dependencies |= FindPageSetDependencies(base_dir)

@@ -161,25 +148,25 @@ def FindDependencies(paths, options):
   return dependencies


-def ZipDependencies(paths, dependencies, options):
-  base_dir = os.path.dirname(os.path.realpath(util.GetChromiumSrcDir()))
+def ZipDependencies(target_paths, dependencies, options):
+  base_dir = os.path.dirname(os.path.realpath(path.GetChromiumSrcDir()))

   with zipfile.ZipFile(options.zip, 'w', zipfile.ZIP_DEFLATED) as zip_file:
     # Add dependencies to archive.
-    for path in dependencies:
+    for dependency_path in dependencies:
       path_in_archive = os.path.join(
-          'telemetry', os.path.relpath(path, base_dir))
-      zip_file.write(path, path_in_archive)
+          'telemetry', os.path.relpath(dependency_path, base_dir))
+      zip_file.write(dependency_path, path_in_archive)

     # Add symlinks to executable paths, for ease of use.
-    for path in paths:
+    for target_path in target_paths:
       link_info = zipfile.ZipInfo(
-          os.path.join('telemetry', os.path.basename(path)))
+          os.path.join('telemetry', os.path.basename(target_path)))
       link_info.create_system = 3  # Unix attributes.
       # 010 is regular file, 0111 is the permission bits rwxrwxrwx.
       link_info.external_attr = 0100777 << 16  # Octal.

-      relative_path = os.path.relpath(path, base_dir)
+      relative_path = os.path.relpath(target_path, base_dir)
       link_script = (
           '#!/usr/bin/env python\n\n'
           'import os\n'
@@ -213,11 +200,11 @@ def ZipDependencies(paths, dependencies, options):
       # also have gsutil, which is why this is inside the gsutil block.
       gsutil_dependencies.add(os.path.join(gsutil_base_dir, 'upload.py'))

-      for path in gsutil_dependencies:
+      for dependency_path in gsutil_dependencies:
         path_in_archive = os.path.join(
-            'telemetry', os.path.relpath(util.GetTelemetryDir(), base_dir),
-            'third_party', os.path.relpath(path, gsutil_base_dir))
-        zip_file.write(path, path_in_archive)
+            'telemetry', os.path.relpath(path.GetTelemetryDir(), base_dir),
+            'third_party', os.path.relpath(dependency_path, gsutil_base_dir))
+        zip_file.write(dependency_path, path_in_archive)


 class FindDependenciesCommand(command_line.OptparseCommand):
@@ -251,10 +238,10 @@ class FindDependenciesCommand(command_line.OptparseCommand):
       logging.getLogger().setLevel(logging.WARNING)

   def Run(self, args):
-    paths = args.positional_args
-    dependencies = FindDependencies(paths, args)
+    target_paths = args.positional_args
+    dependencies = FindDependencies(target_paths, args)
     if args.zip:
-      ZipDependencies(paths, dependencies, args)
+      ZipDependencies(target_paths, dependencies, args)
       print 'Zip archive written to %s.' % args.zip
     else:
       print '\n'.join(sorted(dependencies))
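Editorial aside (not part of the commit): the renames above (path to path_string, target_path, and dependency_path) keep local variables from shadowing the telemetry.util.path module that the file now imports as path. A minimal sketch of the failure mode they avoid, with a hypothetical function name:

    from telemetry.util import path

    def Broken(paths):
      for path in paths:  # rebinding 'path' hides the imported module
        # AttributeError: the loop variable is a string, not the module.
        print(path.GetChromiumSrcDir())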
@@ -41,3 +41,15 @@ def FindInstalledWindowsApplication(application_path):
       return path

   return None
+
+
+def IsSubpath(subpath, superpath):
+  """Returns True iff subpath is or is in superpath."""
+  subpath = os.path.realpath(subpath)
+  superpath = os.path.realpath(superpath)
+
+  while len(subpath) >= len(superpath):
+    if subpath == superpath:
+      return True
+    subpath = os.path.split(subpath)[0]
+  return False
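Illustration (not part of the commit): expected behavior of the new IsSubpath helper with hypothetical absolute paths; per its docstring, it returns True when subpath equals superpath or lies anywhere beneath it.

    from telemetry.util import path

    print(path.IsSubpath('/src/tools/telemetry/telemetry/core', '/src/tools/telemetry'))  # True
    print(path.IsSubpath('/src/tools/telemetry', '/src/tools/telemetry'))  # True
    print(path.IsSubpath('/src/tools/perf', '/src/tools/telemetry'))  # False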