
Refactor serving_dirs with two goals in mind:

1) Move the serving_dir-related logic into one place and add comments, to improve the hackability and stability of cloud_storage in telemetry.

2) Make serving_dirs available for user stories as part of the refactor to remove page_sets.

This is the second part of https://codereview.chromium.org/794493004/, broken out for reviewer ease.

BUG=454531
BUG=435063

Committed: https://crrev.com/eed596dced7b9e3f7d21c84f695a39b946c62bf2
Cr-Commit-Position: refs/heads/master@{#320523}

Review URL: https://codereview.chromium.org/838253005

Cr-Commit-Position: refs/heads/master@{#320990}
aiolos
2015-03-17 15:07:58 -07:00
committed by Commit bot
parent a13b511596
commit cd55fdb2cf
10 changed files with 288 additions and 118 deletions
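
The hunks below replace the runner-private _UpdateUserStoryArchivesIfChanged helper with a public cloud_storage.GetFilesInDirectoryIfChanged function and fold each page's serving directory into UserStorySet.serving_dirs. A minimal sketch of the resulting flow follows; the helper name fetch_serving_dir_data and the story_set argument are hypothetical, while the telemetry APIs it calls are the ones added or changed in this diff.

from telemetry.util import cloud_storage

def fetch_serving_dir_data(story_set):
  # serving_dirs now merges the set-level directories with each user story's
  # own serving_dir (Page reports one only for file:// stories; the base
  # UserStory property returns None).
  for directory in story_set.serving_dirs:
    # Scans the directory for *.sha1 files and downloads any data file whose
    # local copy is missing or whose hash differs from the .sha1 entry.
    cloud_storage.GetFilesInDirectoryIfChanged(directory, story_set.bucket)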

@@ -202,6 +202,8 @@ class Page(user_story.UserStory):
   @property
   def serving_dir(self):
+    if not self.is_file:
+      return None
     file_path = os.path.realpath(self.file_path)
     if os.path.isdir(file_path):
       return file_path

@@ -24,7 +24,9 @@ class TestPageSet(unittest.TestCase):
     os.rmdir(directory_path)
     real_directory_path = os.path.realpath(directory_path)
-    expected_serving_dirs = set([os.path.join(real_directory_path, 'a', 'b')])
+    expected_serving_dirs = set([os.path.join(real_directory_path, 'a', 'b'),
+                                 os.path.join(real_directory_path, 'c'),
+                                 os.path.join(real_directory_path, 'd')])
     self.assertEquals(ps.serving_dirs, expected_serving_dirs)
     self.assertEquals(ps[0].serving_dir, os.path.join(real_directory_path, 'c'))
     self.assertEquals(ps[2].serving_dir, os.path.join(real_directory_path, 'd'))

@@ -291,6 +291,17 @@ class CloudStorageModuleStub(object):
         self, self.INTERNAL_BUCKET, remote_path, local_path, True)
     return result
 
+  def GetFilesInDirectoryIfChanged(self, directory, bucket):
+    if os.path.dirname(directory) == directory: # If in the root dir.
+      raise ValueError('Trying to serve root directory from HTTP server.')
+    for dirpath, _, filenames in os.walk(directory):
+      for filename in filenames:
+        path, extension = os.path.splitext(
+            os.path.join(dirpath, filename))
+        if extension != '.sha1':
+          continue
+        self.GetIfChanged(path, bucket)
+
   def CalculateHash(self, file_path):
     return self.local_file_hashes[file_path]
@@ -355,6 +366,7 @@ class OsModuleStub(object):
     def __init__(self, sys_module):
       self.sys = sys_module
       self.files = []
+      self.dirs = []
 
     def exists(self, path):
       return path in self.files
@@ -362,6 +374,9 @@ class OsModuleStub(object):
     def isfile(self, path):
       return path in self.files
 
+    def isdir(self, path):
+      return path in self.dirs
+
     def join(self, *paths):
       def IsAbsolutePath(path):
         if self.sys.platform.startswith('win'):
@@ -383,6 +398,10 @@ class OsModuleStub(object):
       tmp = os.path.join(*paths)
       return tmp.replace('\\', '/')
 
+    @staticmethod
+    def abspath(path):
+      return os.path.abspath(path)
+
     @staticmethod
     def expanduser(path):
       return os.path.expanduser(path)
@@ -395,8 +414,13 @@ class OsModuleStub(object):
     def splitext(path):
       return os.path.splitext(path)
 
+    @staticmethod
+    def splitdrive(path):
+      return os.path.splitdrive(path)
+
   X_OK = os.X_OK
 
+  sep = os.sep
   pathsep = os.pathsep
 
   def __init__(self, sys_module=sys):
@@ -408,6 +432,7 @@ class OsModuleStub(object):
     self.program_files = None
     self.program_files_x86 = None
     self.devnull = os.devnull
+    self._directory = {}
 
   def access(self, path, _):
     return path in self.path.files
@@ -430,6 +455,10 @@ class OsModuleStub(object):
   def chdir(self, path):
     pass
 
+  def walk(self, top):
+    for dir_name in self._directory:
+      yield top, dir_name, self._directory[dir_name]
+
 
 class PerfControlModuleStub(object):
   class PerfControlStub(object):

@@ -32,28 +32,38 @@ class CloudStorageTest(unittest.TestCase):
         {'preset_internal_file.wpr':CloudStorageTest.INTERNAL_FILE_HASH}}
 
     # Local data files and hashes.
-    self.data_files = ['/path/to/success.wpr',
-                       '/path/to/wrong_hash.wpr',
-                       '/path/to/preset_public_file.wpr',
-                       '/path/to/preset_partner_file.wpr',
-                       '/path/to/preset_internal_file.wpr']
+    self.data_files = [
+        os.path.join(os.path.sep, 'path', 'to', 'success.wpr'),
+        os.path.join(os.path.sep, 'path', 'to', 'wrong_hash.wpr'),
+        os.path.join(os.path.sep, 'path', 'to', 'preset_public_file.wpr'),
+        os.path.join(os.path.sep, 'path', 'to', 'preset_partner_file.wpr'),
+        os.path.join(os.path.sep, 'path', 'to', 'preset_internal_file.wpr')]
     self.local_file_hashes = {
-        '/path/to/success.wpr': CloudStorageTest.SUCCESS_FILE_HASH,
-        '/path/to/wrong_hash.wpr': CloudStorageTest.SUCCESS_FILE_HASH,
-        '/path/to/preset_public_file.wpr':CloudStorageTest.PUBLIC_FILE_HASH,
-        '/path/to/preset_partner_file.wpr':CloudStorageTest.PARTNER_FILE_HASH,
-        '/path/to/preset_internal_file.wpr':CloudStorageTest.INTERNAL_FILE_HASH,
+        os.path.join(os.path.sep, 'path', 'to', 'success.wpr'):
+            CloudStorageTest.SUCCESS_FILE_HASH,
+        os.path.join(os.path.sep, 'path', 'to', 'wrong_hash.wpr'):
+            CloudStorageTest.SUCCESS_FILE_HASH,
+        os.path.join(os.path.sep, 'path', 'to', 'preset_public_file.wpr'):
+            CloudStorageTest.PUBLIC_FILE_HASH,
+        os.path.join(os.path.sep, 'path', 'to', 'preset_partner_file.wpr'):
+            CloudStorageTest.PARTNER_FILE_HASH,
+        os.path.join(os.path.sep, 'path', 'to', 'preset_internal_file.wpr'):
+            CloudStorageTest.INTERNAL_FILE_HASH,
     }
     self.cloud_storage.SetCalculatedHashesForTesting(self.local_file_hashes)
     # Local hash files and their contents.
     local_hash_files = {
-        '/path/to/success.wpr.sha1': CloudStorageTest.SUCCESS_FILE_HASH,
-        '/path/to/wrong_hash.wpr.sha1': 'wronghash'.zfill(40),
-        '/path/to/preset_public_file.wpr.sha1': CloudStorageTest.PUBLIC_FILE_HASH,
-        '/path/to/preset_partner_file.wpr.sha1':
-            CloudStorageTest.PARTNER_FILE_HASH,
-        '/path/to/preset_internal_file.wpr.sha1':
-            CloudStorageTest.INTERNAL_FILE_HASH,
+        os.path.join(os.path.sep, 'path', 'to', 'success.wpr.sha1'):
+            CloudStorageTest.SUCCESS_FILE_HASH,
+        os.path.join(os.path.sep, 'path', 'to', 'wrong_hash.wpr.sha1'):
+            'wronghash'.zfill(40),
+        os.path.join(os.path.sep, 'path', 'to', 'preset_public_file.wpr.sha1'):
+            CloudStorageTest.PUBLIC_FILE_HASH,
+        os.path.join(os.path.sep, 'path', 'to', 'preset_partner_file.wpr.sha1'):
+            CloudStorageTest.PARTNER_FILE_HASH,
+        os.path.join(os.path.sep, 'path', 'to',
+                     'preset_internal_file.wpr.sha1'):
+            CloudStorageTest.INTERNAL_FILE_HASH,
    }
     self.cloud_storage.SetHashFileContentsForTesting(local_hash_files)
@@ -73,25 +83,25 @@ class CloudStorageTest(unittest.TestCase):
     self.assertFalse(self.cloud_storage.Exists(self.cloud_storage.PUBLIC_BUCKET,
                                                'preset_public_file.wpr'))
     self.assertFalse(self.cloud_storage.Exists(
         self.cloud_storage.PARTNER_BUCKET, 'preset_partner_file.wpr'))
     self.assertFalse(self.cloud_storage.Exists(
         self.cloud_storage.INTERNAL_BUCKET, 'preset_internal_file.wpr'))
 
   def testExistsNonEmptyCloudStorage(self):
     # Test non-empty remote files dictionary.
     self.cloud_storage.SetRemotePathsForTesting(self.remote_paths)
-    self.assertTrue(self.cloud_storage.Exists(self.cloud_storage.PUBLIC_BUCKET,
-                                              'preset_public_file.wpr'))
-    self.assertTrue(self.cloud_storage.Exists(self.cloud_storage.PARTNER_BUCKET,
-                                              'preset_partner_file.wpr'))
-    self.assertTrue(self.cloud_storage.Exists(
-        self.cloud_storage.INTERNAL_BUCKET, 'preset_internal_file.wpr'))
-    self.assertFalse(self.cloud_storage.Exists(self.cloud_storage.PUBLIC_BUCKET,
-                                               'fake_file'))
-    self.assertFalse(self.cloud_storage.Exists(
-        self.cloud_storage.PARTNER_BUCKET, 'fake_file'))
-    self.assertFalse(self.cloud_storage.Exists(
-        self.cloud_storage.INTERNAL_BUCKET, 'fake_file'))
+    self.assertTrue(self.cloud_storage.Exists(
+        self.cloud_storage.PUBLIC_BUCKET, 'preset_public_file.wpr'))
+    self.assertTrue(self.cloud_storage.Exists(
+        self.cloud_storage.PARTNER_BUCKET, 'preset_partner_file.wpr'))
+    self.assertTrue(self.cloud_storage.Exists(
+        self.cloud_storage.INTERNAL_BUCKET, 'preset_internal_file.wpr'))
+    self.assertFalse(self.cloud_storage.Exists(
+        self.cloud_storage.PUBLIC_BUCKET, 'fake_file'))
+    self.assertFalse(self.cloud_storage.Exists(
+        self.cloud_storage.PARTNER_BUCKET, 'fake_file'))
+    self.assertFalse(self.cloud_storage.Exists(
+        self.cloud_storage.INTERNAL_BUCKET, 'fake_file'))
     # Reset state.
     self.cloud_storage.SetRemotePathsForTesting()
@@ -100,64 +110,72 @@ class CloudStorageTest(unittest.TestCase):
     self.cloud_storage.SetRemotePathsForTesting(self.remote_paths)
     self.assertFalse(self.cloud_storage.Exists(self.cloud_storage.PUBLIC_BUCKET,
                                                'success.wpr'))
-    self.cloud_storage.Insert(self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
-                              '/path/to/success.wpr')
-    self.assertTrue(self.cloud_storage.Exists(self.cloud_storage.PUBLIC_BUCKET,
-                                              'success.wpr'))
+    self.cloud_storage.Insert(
+        self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
+        os.path.join(os.path.sep, 'path', 'to', 'success.wpr'))
+    self.assertTrue(self.cloud_storage.Exists(
+        self.cloud_storage.PUBLIC_BUCKET, 'success.wpr'))
     # Reset state.
     self.cloud_storage.SetRemotePathsForTesting()
 
   def testEmptyInsertAndExistsPublic(self):
     # Test empty remote files dictionary.
-    self.assertFalse(self.cloud_storage.Exists(self.cloud_storage.PUBLIC_BUCKET,
-                                               'success.wpr'))
-    self.cloud_storage.Insert(self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
-                              '/path/to/success.wpr')
-    self.assertTrue(self.cloud_storage.Exists(self.cloud_storage.PUBLIC_BUCKET,
-                                              'success.wpr'))
+    self.assertFalse(self.cloud_storage.Exists(
+        self.cloud_storage.PUBLIC_BUCKET, 'success.wpr'))
+    self.cloud_storage.Insert(
+        self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
+        os.path.join(os.path.sep, 'path', 'to', 'success.wpr'))
+    self.assertTrue(self.cloud_storage.Exists(
+        self.cloud_storage.PUBLIC_BUCKET, 'success.wpr'))
 
   def testEmptyInsertAndGet(self):
     self.assertRaises(self.cloud_storage.NotFoundError, self.cloud_storage.Get,
                       self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
-                      '/path/to/success.wpr')
+                      os.path.join(os.path.sep, 'path', 'to', 'success.wpr'))
     self.cloud_storage.Insert(self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
-                              '/path/to/success.wpr')
-    self.assertTrue(self.cloud_storage.Exists(self.cloud_storage.PUBLIC_BUCKET,
-                                              'success.wpr'))
-    self.assertEqual(CloudStorageTest.SUCCESS_FILE_HASH,
-                     self.cloud_storage.Get(self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
-                                            '/path/to/success.wpr'))
+                              os.path.join(os.path.sep, 'path', 'to',
                                           'success.wpr'))
+    self.assertTrue(self.cloud_storage.Exists(
+        self.cloud_storage.PUBLIC_BUCKET, 'success.wpr'))
+    self.assertEqual(CloudStorageTest.SUCCESS_FILE_HASH, self.cloud_storage.Get(
+        self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
+        os.path.join(os.path.sep, 'path', 'to', 'success.wpr')))
 
   def testNonEmptyInsertAndGet(self):
     self.cloud_storage.SetRemotePathsForTesting(self.remote_paths)
     self.assertRaises(self.cloud_storage.NotFoundError, self.cloud_storage.Get,
                       self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
-                      '/path/to/success.wpr')
+                      os.path.join(os.path.sep, 'path', 'to', 'success.wpr'))
     self.cloud_storage.Insert(self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
-                              '/path/to/success.wpr')
+                              os.path.join(os.path.sep, 'path', 'to',
+                                           'success.wpr'))
     self.assertTrue(self.cloud_storage.Exists(self.cloud_storage.PUBLIC_BUCKET,
                                               'success.wpr'))
-    self.assertEqual(CloudStorageTest.SUCCESS_FILE_HASH,
-                     self.cloud_storage.Get(self.cloud_storage.PUBLIC_BUCKET,
-                                            'success.wpr',
-                                            '/path/to/success.wpr'))
+    self.assertEqual(
+        CloudStorageTest.SUCCESS_FILE_HASH, self.cloud_storage.Get(
+            self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
+            os.path.join(os.path.sep, 'path', 'to', 'success.wpr')))
     # Reset state.
     self.cloud_storage.SetRemotePathsForTesting()
 
   def testGetIfChanged(self):
     self.cloud_storage.SetRemotePathsForTesting(self.remote_paths)
-    self.assertRaises(self.cloud_storage.NotFoundError, self.cloud_storage.Get,
-                      self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
-                      '/path/to/success.wpr')
+    self.assertRaises(
+        self.cloud_storage.NotFoundError, self.cloud_storage.Get,
+        self.cloud_storage.PUBLIC_BUCKET, 'success.wpr',
+        os.path.join(os.path.sep, 'path', 'to', 'success.wpr'))
     self.assertFalse(self.cloud_storage.GetIfChanged(
-        '/path/to/preset_public_file.wpr', self.cloud_storage.PUBLIC_BUCKET))
+        os.path.join(os.path.sep, 'path', 'to', 'preset_public_file.wpr'),
+        self.cloud_storage.PUBLIC_BUCKET))
     self.cloud_storage.ChangeRemoteHashForTesting(
         self.cloud_storage.PUBLIC_BUCKET, 'preset_public_file.wpr',
         CloudStorageTest.UPDATED_HASH)
     self.assertTrue(self.cloud_storage.GetIfChanged(
-        '/path/to/preset_public_file.wpr', self.cloud_storage.PUBLIC_BUCKET))
+        os.path.join(os.path.sep, 'path', 'to', 'preset_public_file.wpr'),
+        self.cloud_storage.PUBLIC_BUCKET))
     self.assertFalse(self.cloud_storage.GetIfChanged(
-        '/path/to/preset_public_file.wpr', self.cloud_storage.PUBLIC_BUCKET))
+        os.path.join(os.path.sep, 'path', 'to', 'preset_public_file.wpr'),
+        self.cloud_storage.PUBLIC_BUCKET))
     # Reset state.
     self.cloud_storage.SetRemotePathsForTesting()
@@ -173,47 +191,50 @@ class CloudStorageTest(unittest.TestCase):
   def testPermissionError(self):
     self.cloud_storage.SetRemotePathsForTesting(self.remote_paths)
     self.cloud_storage.SetPermissionLevelForTesting(
         self.cloud_storage.PUBLIC_PERMISSION)
     self.assertRaises(
         self.cloud_storage.PermissionError, self.cloud_storage.Get,
         self.cloud_storage.INTERNAL_BUCKET, 'preset_internal_file.wpr',
-        '/path/to/preset_internal_file.wpr')
+        os.path.join(os.path.sep, 'path', 'to', 'preset_internal_file.wpr'))
     self.assertRaises(
         self.cloud_storage.PermissionError, self.cloud_storage.GetIfChanged,
-        '/path/to/preset_internal_file.wpr', self.cloud_storage.INTERNAL_BUCKET)
+        os.path.join(os.path.sep, 'path', 'to', 'preset_internal_file.wpr'),
+        self.cloud_storage.INTERNAL_BUCKET)
     self.assertRaises(
         self.cloud_storage.PermissionError, self.cloud_storage.List,
         self.cloud_storage.INTERNAL_BUCKET)
     self.assertRaises(
         self.cloud_storage.PermissionError, self.cloud_storage.Exists,
         self.cloud_storage.INTERNAL_BUCKET, 'preset_internal_file.wpr')
     self.assertRaises(
         self.cloud_storage.PermissionError, self.cloud_storage.Insert,
-        self.cloud_storage.INTERNAL_BUCKET, 'success.wpr', '/path/to/success.wpr')
+        self.cloud_storage.INTERNAL_BUCKET, 'success.wpr',
+        os.path.join(os.path.sep, 'path', 'to', 'success.wpr'))
     # Reset state.
     self.cloud_storage.SetRemotePathsForTesting()
 
   def testCredentialsError(self):
     self.cloud_storage.SetRemotePathsForTesting(self.remote_paths)
     self.cloud_storage.SetPermissionLevelForTesting(
         self.cloud_storage.CREDENTIALS_ERROR_PERMISSION)
     self.assertRaises(
         self.cloud_storage.CredentialsError, self.cloud_storage.Get,
         self.cloud_storage.INTERNAL_BUCKET, 'preset_internal_file.wpr',
-        '/path/to/preset_internal_file.wpr')
+        os.path.join(os.path.sep, 'path', 'to', 'preset_internal_file.wpr'))
     self.assertRaises(
         self.cloud_storage.CredentialsError, self.cloud_storage.GetIfChanged,
-        self.cloud_storage.INTERNAL_BUCKET, '/path/to/preset_internal_file.wpr')
+        self.cloud_storage.INTERNAL_BUCKET,
+        os.path.join(os.path.sep, 'path', 'to', 'preset_internal_file.wpr'))
     self.assertRaises(
         self.cloud_storage.CredentialsError, self.cloud_storage.List,
         self.cloud_storage.INTERNAL_BUCKET)
     self.assertRaises(
         self.cloud_storage.CredentialsError, self.cloud_storage.Exists,
         self.cloud_storage.INTERNAL_BUCKET, 'preset_internal_file.wpr')
     self.assertRaises(
         self.cloud_storage.CredentialsError, self.cloud_storage.Insert,
         self.cloud_storage.INTERNAL_BUCKET, 'success.wpr',
-        '/path/to/success.wpr')
+        os.path.join(os.path.sep, 'path', 'to', 'success.wpr'))
     # Reset state.
     self.cloud_storage.SetRemotePathsForTesting()

@@ -98,6 +98,14 @@ class UserStory(object):
     """Returns True iff this user story does not require network."""
     return self._is_local
 
+  @property
+  def serving_dir(self):
+    """Returns the absolute path to a directory with hash files to data that
+    should be updated from cloud storage, or None if no files need to be
+    updated.
+    """
+    return None
+
   @property
   def make_javascript_deterministic(self):
     return self._make_javascript_deterministic

@@ -9,7 +9,6 @@ import random
 import sys
 import time
 
-from telemetry import decorators
 from telemetry import page as page_module
 from telemetry.core import exceptions
 from telemetry.core import wpr_modes
@@ -116,28 +115,6 @@ def _RunUserStoryAndProcessErrorIfNeeded(expectations, user_story, results,
     exception_formatter.PrintFormattedException(
         msg='Exception from DidRunUserStory: ')
 
-@decorators.Cache
-def _UpdateUserStoryArchivesIfChanged(user_story_set):
-  # Scan every serving directory for .sha1 files
-  # and download them from Cloud Storage. Assume all data is public.
-  all_serving_dirs = user_story_set.serving_dirs.copy()
-  # Add individual page dirs to all serving dirs.
-  for user_story in user_story_set:
-    if isinstance(user_story, page_module.Page) and user_story.is_file:
-      all_serving_dirs.add(user_story.serving_dir)
-  # Scan all serving dirs.
-  for serving_dir in all_serving_dirs:
-    if os.path.splitdrive(serving_dir)[1] == '/':
-      raise ValueError('Trying to serve root directory from HTTP server.')
-    for dirpath, _, filenames in os.walk(serving_dir):
-      for filename in filenames:
-        path, extension = os.path.splitext(
-            os.path.join(dirpath, filename))
-        if extension != '.sha1':
-          continue
-        cloud_storage.GetIfChanged(path, user_story_set.bucket)
-
 
 class UserStoryGroup(object):
   def __init__(self, shared_user_story_state_class):
     self._shared_user_story_state_class = shared_user_story_state_class
@@ -207,7 +184,10 @@ def Run(test, user_story_set, expectations, finder_options, results,
 
   if (not finder_options.use_live_sites and user_story_set.bucket and
       finder_options.browser_options.wpr_mode != wpr_modes.WPR_RECORD):
-    _UpdateUserStoryArchivesIfChanged(user_story_set)
+    serving_dirs = user_story_set.serving_dirs
+    for directory in serving_dirs:
+      cloud_storage.GetFilesInDirectoryIfChanged(directory,
+                                                 user_story_set.bucket)
     if not _UpdateAndCheckArchives(
         user_story_set.archive_data_file, user_story_set.wpr_archive_info,
         user_stories):

@@ -32,6 +32,9 @@ class UserStorySet(object):
           Web Page Replay's archive data. Valid values are: None,
           PUBLIC_BUCKET, PARTNER_BUCKET, or INTERNAL_BUCKET (defined
           in telemetry.util.cloud_storage).
+      serving_dirs: A set of paths, relative to self.base_dir, to directories
+          containing hash files for non-wpr archive data stored in cloud
+          storage.
     """
     self.user_stories = []
     self._archive_data_file = archive_data_file
@@ -76,7 +79,11 @@ class UserStorySet(object):
 
   @property
   def serving_dirs(self):
-    return self._serving_dirs
+    all_serving_dirs = self._serving_dirs.copy()
+    for user_story in self.user_stories:
+      if user_story.serving_dir:
+        all_serving_dirs.add(user_story.serving_dir)
+    return all_serving_dirs
 
   @property
   def archive_data_file(self):

@@ -15,6 +15,7 @@ import sys
 import tarfile
 import urllib2
 
+from telemetry import decorators
 from telemetry.core import util
 from telemetry.util import path
@@ -220,10 +221,8 @@ def Insert(bucket, remote_path, local_path, publicly_readable=False):
 def GetIfChanged(file_path, bucket):
-  """Gets the file at file_path if it has a hash file that doesn't match.
-  If the file is not in Cloud Storage, log a warning instead of raising an
-  exception. We assume that the user just hasn't uploaded the file yet.
+  """Gets the file at file_path if it has a hash file that doesn't match or
+  if there is no local copy of file_path, but there is a hash file for it.
 
   Returns:
     True if the binary was changed.
@@ -244,6 +243,25 @@ def GetIfChanged(file_path, bucket):
   Get(bucket, expected_hash, file_path)
   return True
 
+# TODO(aiolos): remove @decorators.Cache for http://crbug.com/459787
+@decorators.Cache
+def GetFilesInDirectoryIfChanged(directory, bucket):
+  """ Scan the directory for .sha1 files, and download them from the given
+  bucket in cloud storage if the local and remote hash don't match or
+  there is no local copy.
+  """
+  if not os.path.isdir(directory):
+    raise ValueError('Must provide a valid directory.')
+  # Don't allow the root directory to be a serving_dir.
+  if directory == os.path.abspath(os.sep):
+    raise ValueError('Trying to serve root directory from HTTP server.')
+  for dirpath, _, filenames in os.walk(directory):
+    for filename in filenames:
+      path_name, extension = os.path.splitext(
+          os.path.join(dirpath, filename))
+      if extension != '.sha1':
+        continue
+      GetIfChanged(path_name, bucket)
 
 def CalculateHash(file_path):
   """Calculates and returns the hash of the file at file_path."""

@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import os
 import unittest
 
 from telemetry import decorators
@@ -13,13 +14,61 @@ from telemetry.util import cloud_storage
 def _FakeFindGsutil():
   return 'fake gsutil path'
 
+def _FakeReadHash(_):
+  return 'hashthis!'
+
+def _FakeCalulateHashMatchesRead(_):
+  return 'hashthis!'
+
+def _FakeCalulateHashNewHash(_):
+  return 'omgnewhash'
+
 
 class CloudStorageUnitTest(unittest.TestCase):
 
   def _FakeRunCommand(self, cmd):
     pass
 
-  def testValidCloudUrl(self):
+  def _FakeGet(self, bucket, remote_path, local_path):
+    pass
+
+  def _assertRunCommandRaisesError(self, communicate_strs, error):
+    stubs = system_stub.Override(cloud_storage, ['open', 'subprocess'])
+    orig_find_gs_util = cloud_storage.FindGsutil
+    cloud_storage.FindGsutil = _FakeFindGsutil
+    stubs.open.files = {'fake gsutil path':''}
+    stubs.subprocess.Popen.returncode_result = 1
+    try:
+      for string in communicate_strs:
+        stubs.subprocess.Popen.communicate_result = ('', string)
+        self.assertRaises(error, cloud_storage._RunCommand, [])
+    finally:
+      stubs.Restore()
+      cloud_storage.FindGsutil = orig_find_gs_util
+
+  def testRunCommandCredentialsError(self):
+    strs = ['You are attempting to access protected data with no configured',
+            'Failure: No handler was ready to authenticate.']
+    self._assertRunCommandRaisesError(strs, cloud_storage.CredentialsError)
+
+  def testRunCommandPermissionError(self):
+    strs = ['status=403', 'status 403', '403 Forbidden']
+    self._assertRunCommandRaisesError(strs, cloud_storage.PermissionError)
+
+  def testRunCommandNotFoundError(self):
+    strs = ['InvalidUriError', 'No such object', 'No URLs matched',
+            'One or more URLs matched no', 'InvalidUriError']
+    self._assertRunCommandRaisesError(strs, cloud_storage.NotFoundError)
+
+  def testRunCommandServerError(self):
+    strs = ['500 Internal Server Error']
+    self._assertRunCommandRaisesError(strs, cloud_storage.ServerError)
+
+  def testRunCommandGenericError(self):
+    strs = ['Random string']
+    self._assertRunCommandRaisesError(strs, cloud_storage.CloudStorageError)
+
+  def testInsertCreatesValidCloudUrl(self):
     orig_run_command = cloud_storage._RunCommand
     try:
       cloud_storage._RunCommand = self._FakeRunCommand
@@ -47,3 +96,60 @@ class CloudStorageUnitTest(unittest.TestCase):
     finally:
       stubs.Restore()
       cloud_storage.FindGsutil = orig_find_gs_util
+
+  def testGetIfChanged(self):
+    stubs = system_stub.Override(cloud_storage, ['os', 'open'])
+    stubs.open.files[_FakeFindGsutil()] = ''
+    orig_get = cloud_storage.Get
+    orig_read_hash = cloud_storage.ReadHash
+    orig_calculate_hash = cloud_storage.CalculateHash
+    cloud_storage.ReadHash = _FakeReadHash
+    cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
+    file_path = 'test-file-path.wpr'
+    hash_path = file_path + '.sha1'
+    try:
+      cloud_storage.Get = self._FakeGet
+      # hash_path doesn't exist.
+      self.assertFalse(cloud_storage.GetIfChanged(file_path,
+                                                  cloud_storage.PUBLIC_BUCKET))
+      # hash_path exists, but file_path doesn't.
+      stubs.os.path.files.append(hash_path)
+      self.assertTrue(cloud_storage.GetIfChanged(file_path,
+                                                 cloud_storage.PUBLIC_BUCKET))
+      # hash_path and file_path exist, and have same hash.
+      stubs.os.path.files.append(file_path)
+      self.assertFalse(cloud_storage.GetIfChanged(file_path,
+                                                  cloud_storage.PUBLIC_BUCKET))
+      # hash_path and file_path exist, and have different hashes.
+      cloud_storage.CalculateHash = _FakeCalulateHashNewHash
+      self.assertTrue(cloud_storage.GetIfChanged(file_path,
+                                                 cloud_storage.PUBLIC_BUCKET))
+    finally:
+      stubs.Restore()
+      cloud_storage.Get = orig_get
+      cloud_storage.CalculateHash = orig_calculate_hash
+      cloud_storage.ReadHash = orig_read_hash
+
+  def testGetFilesInDirectoryIfChanged(self):
+    stubs = system_stub.Override(cloud_storage, ['os'])
+    stubs.os._directory = {'dir1':['1file1.sha1', '1file2.txt', '1file3.sha1'],
+                           'dir2':['2file.txt'], 'dir3':['3file1.sha1']}
+    stubs.os.path.dirs = ['real_dir_path']
+    def IncrementFilesUpdated(*_):
+      IncrementFilesUpdated.files_updated +=1
+    IncrementFilesUpdated.files_updated = 0
+    orig_get_if_changed = cloud_storage.GetIfChanged
+    cloud_storage.GetIfChanged = IncrementFilesUpdated
+    try:
+      self.assertRaises(ValueError, cloud_storage.GetFilesInDirectoryIfChanged,
+                        os.path.abspath(os.sep), cloud_storage.PUBLIC_BUCKET)
+      self.assertEqual(0, IncrementFilesUpdated.files_updated)
+      self.assertRaises(ValueError, cloud_storage.GetFilesInDirectoryIfChanged,
+                        'fake_dir_path', cloud_storage.PUBLIC_BUCKET)
+      self.assertEqual(0, IncrementFilesUpdated.files_updated)
+      cloud_storage.GetFilesInDirectoryIfChanged('real_dir_path',
+                                                 cloud_storage.PUBLIC_BUCKET)
+      self.assertEqual(3, IncrementFilesUpdated.files_updated)
+    finally:
+      cloud_storage.GetIfChanged = orig_get_if_changed
+      stubs.Restore()

@@ -91,9 +91,6 @@ def FindPageSetDependencies(base_dir):
       # Add all of its serving_dirs as dependencies.
       for serving_dir in page_set.serving_dirs:
         yield serving_dir
-      for page in page_set:
-        if page.is_file:
-          yield page.serving_dir
 
 
 def FindExcludedFiles(files, options):