
Remove extension_key_ from ContentHash

ExtensionKey is a relatively complex record, but ContentHash does not
need it for its whole lifetime: only the extension ID and the extension
root path are used after construction; the rest is needed only during
initialization. Keeping the whole record instead of two fields doesn't
reduce complexity; on the contrary, without it accessing the ID and the
root becomes simpler, as this commit shows.

This change also makes it possible to later move other flags/data (for
example, |fetch_params|) into ExtensionKey, simplifying other code
further. It also saves a small amount of memory, since we no longer
store, for example, the verification public key in ContentHash.

Change-Id: I014b1a32dd2d1a6c0fb71f47c588d823dd2dd253
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/1738448
Commit-Queue: Oleg Davydov <burunduk@chromium.org>
Reviewed-by: Nikita Podguzov <nikitapodguzov@google.com>
Reviewed-by: Istiaque Ahmed <lazyboy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#686864}
Oleg Davydov authored on 2019-08-14 17:07:44 +00:00, committed by Commit Bot
parent fdd419a1f0, commit 11ab27ca30
5 changed files with 24 additions and 19 deletions
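For orientation, here is a minimal standalone sketch of the interface change described in the commit message. It is an assumption-laden illustration, not the real Chromium code: std::string and std::filesystem::path stand in for ExtensionId and base::FilePath, and ExtensionKey's initialization-only fields (only the verification public key is named in the commit message) are reduced to a comment.

// Sketch only: std types stand in for Chromium's ExtensionId / base::FilePath.
#include <filesystem>
#include <string>

using ExtensionId = std::string;         // stand-in
using FilePath = std::filesystem::path;  // stand-in for base::FilePath

// The record used to *create* a ContentHash. Besides the two fields below it
// carries initialization-only data (e.g. the verification public key).
struct ExtensionKey {
  ExtensionId extension_id;
  FilePath extension_root;
};

// After this commit, ContentHash stores only what it needs for its lifetime.
class ContentHash {
 public:
  ContentHash(const ExtensionId& id, const FilePath& root)
      : extension_id_(id), extension_root_(root) {}

  const ExtensionId& extension_id() const { return extension_id_; }
  const FilePath& extension_root() const { return extension_root_; }

 private:
  const ExtensionId extension_id_;
  const FilePath extension_root_;
};

// Call sites pass the key's fields instead of the whole key:
//   new ContentHash(key.extension_id, key.extension_root, ...)
// and read content_hash->extension_id() instead of
// content_hash->extension_key().extension_id.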

@@ -74,7 +74,7 @@ class ContentHashWaiter {
     }
     result_ = std::make_unique<ContentHashFetcherResult>();
-    result_->extension_id = content_hash->extension_key().extension_id;
+    result_->extension_id = content_hash->extension_id();
     result_->success = content_hash->succeeded();
     result_->was_cancelled = was_cancelled;
     result_->mismatch_paths = content_hash->hash_mismatch_unix_paths();

@@ -28,8 +28,6 @@ std::unique_ptr<const ContentHashReader> ContentHashReader::Create(
     const scoped_refptr<const ContentHash>& content_hash) {
   base::ElapsedTimer timer;
-  const ContentHash::ExtensionKey& extension_key =
-      content_hash->extension_key();
   auto hash_reader = base::WrapUnique(new ContentHashReader);
   if (!content_hash->succeeded())
@@ -44,7 +42,7 @@ std::unique_ptr<const ContentHashReader> ContentHashReader::Create(
   // resource.
   if (!verified_contents.HasTreeHashRoot(relative_path)) {
     base::FilePath full_path =
-        extension_key.extension_root.Append(relative_path);
+        content_hash->extension_root().Append(relative_path);
     // Making a request to a non-existent file or to a directory should not
     // result in content verification failure.
     // TODO(proberge): This logic could be simplified if |content_verify_job|

@@ -586,7 +586,7 @@ void ContentVerifier::OnExtensionUnloadedOnIO(
 void ContentVerifier::OnFetchComplete(
     const scoped_refptr<const ContentHash>& content_hash) {
   DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
-  ExtensionId extension_id = content_hash->extension_key().extension_id;
+  ExtensionId extension_id = content_hash->extension_id();
   if (g_content_verifier_test_observer) {
     g_content_verifier_test_observer->OnFetchComplete(
         extension_id, content_hash->has_verified_contents());
@@ -595,9 +595,9 @@ void ContentVerifier::OnFetchComplete(
VLOG(1) << "OnFetchComplete " << extension_id
<< " success:" << content_hash->succeeded();
const bool did_hash_mismatch = ShouldVerifyAnyPaths(
extension_id, content_hash->extension_key().extension_root,
content_hash->hash_mismatch_unix_paths());
const bool did_hash_mismatch =
ShouldVerifyAnyPaths(extension_id, content_hash->extension_root(),
content_hash->hash_mismatch_unix_paths());
if (!did_hash_mismatch)
return;

@@ -109,7 +109,8 @@ void ContentHash::Create(const ExtensionKey& key,
   // Step 2/2: computed_hashes.json:
   scoped_refptr<ContentHash> hash =
-      new ContentHash(key, std::move(verified_contents), nullptr);
+      new ContentHash(key.extension_id, key.extension_root,
+                      std::move(verified_contents), nullptr);
   const bool did_fetch_verified_contents = false;
   hash->BuildComputedHashes(did_fetch_verified_contents,
                             false /* force_build */, is_cancelled);
@@ -135,10 +136,12 @@ const ComputedHashes::Reader& ContentHash::computed_hashes() const {
 }
 
 ContentHash::ContentHash(
-    const ExtensionKey& key,
+    const ExtensionId& id,
+    const base::FilePath& root,
     std::unique_ptr<VerifiedContents> verified_contents,
     std::unique_ptr<ComputedHashes::Reader> computed_hashes)
-    : key_(key),
+    : extension_id_(id),
+      extension_root_(root),
       verified_contents_(std::move(verified_contents)),
       computed_hashes_(std::move(computed_hashes)) {
   if (!verified_contents_)
@@ -218,7 +221,8 @@ void ContentHash::DidFetchVerifiedContents(
   RecordFetchResult(true);
   scoped_refptr<ContentHash> hash =
-      new ContentHash(key, std::move(verified_contents), nullptr);
+      new ContentHash(key.extension_id, key.extension_root,
+                      std::move(verified_contents), nullptr);
   const bool did_fetch_verified_contents = true;
   hash->BuildComputedHashes(did_fetch_verified_contents,
                             false /* force_build */, is_cancelled);
@@ -233,7 +237,7 @@ void ContentHash::DispatchFetchFailure(
   RecordFetchResult(false);
   // NOTE: bare new because ContentHash constructor is private.
   scoped_refptr<ContentHash> content_hash =
-      new ContentHash(key, nullptr, nullptr);
+      new ContentHash(key.extension_id, key.extension_root, nullptr, nullptr);
   std::move(created_callback)
       .Run(content_hash, is_cancelled && is_cancelled.Run());
 }
@@ -251,7 +255,7 @@ bool ContentHash::CreateHashes(const base::FilePath& hashes_file,
   if (!base::CreateDirectoryAndGetError(hashes_file.DirName(), nullptr))
     return false;
-  base::FileEnumerator enumerator(key_.extension_root, true, /* recursive */
+  base::FileEnumerator enumerator(extension_root_, true, /* recursive */
                                   base::FileEnumerator::FILES);
   // First discover all the file paths and put them in a sorted set.
   SortedFilePathSet paths;
@@ -274,7 +278,7 @@ bool ContentHash::CreateHashes(const base::FilePath& hashes_file,
     const base::FilePath& full_path = *i;
     base::FilePath relative_unix_path;
-    key_.extension_root.AppendRelativePath(full_path, &relative_unix_path);
+    extension_root_.AppendRelativePath(full_path, &relative_unix_path);
     relative_unix_path = relative_unix_path.NormalizePathSeparatorsTo('/');
     if (!verified_contents_->HasTreeHashRoot(relative_unix_path))
@@ -314,7 +318,7 @@ void ContentHash::BuildComputedHashes(bool attempted_fetching_verified_contents,
                                        bool force_build,
                                        const IsCancelledCallback& is_cancelled) {
   base::FilePath computed_hashes_path =
-      file_util::GetComputedHashesPath(key_.extension_root);
+      file_util::GetComputedHashesPath(extension_root_);
   // Create computed_hashes.json file if any of the following is true:
   // - We just fetched and wrote a verified_contents.json (i.e.

@@ -114,7 +114,8 @@ class ContentHash : public base::RefCountedThreadSafe<ContentHash> {
   const std::set<base::FilePath>& hash_mismatch_unix_paths() const {
     return hash_mismatch_unix_paths_;
   }
-  const ExtensionKey extension_key() const { return key_; }
+  const ExtensionId& extension_id() const { return extension_id_; }
+  const base::FilePath& extension_root() const { return extension_root_; }
 
   // Returns whether or not computed_hashes.json re-creation might be required
   // for |this| to succeed.
@@ -138,7 +139,8 @@ class ContentHash : public base::RefCountedThreadSafe<ContentHash> {
     kSucceeded,
   };
 
-  ContentHash(const ExtensionKey& key,
+  ContentHash(const ExtensionId& id,
+              const base::FilePath& root,
               std::unique_ptr<VerifiedContents> verified_contents,
               std::unique_ptr<ComputedHashes::Reader> computed_hashes);
   ~ContentHash();
@@ -177,7 +179,8 @@ class ContentHash : public base::RefCountedThreadSafe<ContentHash> {
                            bool force_build,
                            const IsCancelledCallback& is_cancelled);
 
-  ExtensionKey key_;
+  const ExtensionId extension_id_;
+  const base::FilePath extension_root_;
 
   Status status_ = Status::kInvalid;