
Bump the macOS deployment target to 10.15

Bug: 1410851
Change-Id: I66d118d772c1bdebe0e8cc669b3dd7e6f91ffcea
Cq-Include-Trybots: luci.chrome.try:mac-chrome
Low-Coverage-Reason: Revving the deployment target requires a few isolated changes to code; it’s not my code and should not block revving the deployment target.
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/4628901
Reviewed-by: Mark Mentovai <mark@chromium.org>
Reviewed-by: Dale Curtis <dalecurtis@chromium.org>
Commit-Queue: Avi Drissman <avi@chromium.org>
Reviewed-by: David Benjamin <davidben@chromium.org>
Reviewed-by: Brian Osman <brianosman@google.com>
Cr-Commit-Position: refs/heads/main@{#1160724}
Author: Avi Drissman
Date: 2023-06-21 17:20:50 +00:00
Committed by: Chromium LUCI CQ
Parent: 7cc50e8b31
Commit: 94eb86e7ce

9 changed files with 120 additions and 280 deletions
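The hunks below all follow one pattern: code that guarded 10.14/10.15-only APIs behind @available or __builtin_available, and kept a pre-10.15 fallback, can now call the newer API unconditionally, because the toolchain is told the binary never runs on anything older than 10.15. A minimal standalone Objective-C++ sketch of that shape, using a hypothetical helper rather than code from this CL:

    #import <AVFoundation/AVFoundation.h>

    // Hypothetical helper. AVCaptureDeviceDiscoverySession is 10.15+ on macOS,
    // so before this CL a caller had to branch on @available(macOS 10.15, *)
    // and fall back to the deprecated [AVCaptureDevice devices]. With the
    // deployment target at 10.15, the guard and the fallback are dead code.
    NSArray<AVCaptureDevice*>* DiscoverCameras(void) {
      AVCaptureDeviceDiscoverySession* session = [AVCaptureDeviceDiscoverySession
          discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
                                mediaType:AVMediaTypeVideo
                                 position:AVCaptureDevicePositionUnspecified];
      return session.devices;
    }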

@@ -18,7 +18,7 @@ declare_args() {
   # `mac_min_system_version` when dropping support for older macOSes but where
   # additional code changes are required to be compliant with the availability
   # rules.
-  mac_deployment_target = "10.13"
+  mac_deployment_target = "10.15"

   # The value of the `LSMinimumSystemVersion` in `Info.plist` files. This
   # partially controls the minimum supported version of macOS for Chromium by

@@ -198,14 +198,6 @@ bool CanShare() {
   service.subject = title;
   NSArray* itemsToShare = @[ url ];
-  if (@available(macOS 10.14, *)) {
-  } else {
-    if ([[service name] isEqual:NSSharingServiceNamePostOnTwitter]) {
-      // The Twitter share service expects the title as an additional share
-      // item. This is the same approach system apps use.
-      itemsToShare = @[ url, title ];
-    }
-  }
   if ([[service name] isEqual:kRemindersSharingServiceName]) {
     _activity = [[NSUserActivity alloc]
         initWithActivityType:NSUserActivityTypeBrowsingWeb];

@@ -4,7 +4,6 @@
 #include "media/base/mac/video_capture_device_avfoundation_helpers.h"

-#include "base/notreached.h"
 #include "build/build_config.h"

 #if !defined(__has_feature) || !__has_feature(objc_arc)
@@ -14,32 +13,21 @@
 namespace media {

 NSArray<AVCaptureDevice*>* GetVideoCaptureDevices() {
-  NSArray<AVCaptureDevice*>* devices = nil;
-  if (@available(macOS 10.15, iOS 10.0, *)) {
-    // Query for all camera device types available on apple platform. The
-    // others in the enum are only supported on iOS/iPadOS.
-    NSArray* captureDeviceType = @[
-      AVCaptureDeviceTypeBuiltInWideAngleCamera,
-#if BUILDFLAG(IS_MAC)
-      AVCaptureDeviceTypeExternalUnknown
-#endif
-    ];
-    AVCaptureDeviceDiscoverySession* deviceDiscoverySession =
-        [AVCaptureDeviceDiscoverySession
-            discoverySessionWithDeviceTypes:captureDeviceType
-                                  mediaType:AVMediaTypeVideo
-                                   position:AVCaptureDevicePositionUnspecified];
-    devices = deviceDiscoverySession.devices;
-  }
-#if BUILDFLAG(IS_MAC) || (!defined(__IPHONE_10_0) || \
-     __IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_10_0)
-  if (!devices) {
-    devices = [AVCaptureDevice devices];
-  }
-#endif
-  return devices;
+  // Query for all camera device types available on apple platform. The
+  // others in the enum are only supported on iOS/iPadOS.
+  NSArray* captureDeviceType = @[
+    AVCaptureDeviceTypeBuiltInWideAngleCamera,
+#if BUILDFLAG(IS_MAC)
+    AVCaptureDeviceTypeExternalUnknown
+#endif
+  ];
+  AVCaptureDeviceDiscoverySession* deviceDiscoverySession =
+      [AVCaptureDeviceDiscoverySession
+          discoverySessionWithDeviceTypes:captureDeviceType
+                                mediaType:AVMediaTypeVideo
+                                 position:AVCaptureDevicePositionUnspecified];
+  return deviceDiscoverySession.devices;
 }

 } // namespace media

@@ -100,7 +100,6 @@ CAPTURE_EXPORT
 - (void)setOnPhotoOutputStoppedForTesting:
     (base::RepeatingCallback<void()>)onPhotoOutputStopped;
-- (void)setForceLegacyStillImageApiForTesting:(bool)forceLegacyApi;

 // Use the below only for test.
 - (void)callLocked:(base::OnceClosure)lambda;

@@ -212,10 +212,7 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
   // When enabled, converts captured frames to NV12.
   std::unique_ptr<media::SampleBufferTransformer> _sampleBufferTransformer;
-  // On macOS 10.15 or later, this has type AVCapturePhotoOutput.
-  // On earlier versions, this has type AVCaptureStillImageOutput.
-  // You say tomato, I say potato.
-  id __strong _photoOutput;
+  AVCapturePhotoOutput* __strong _photoOutput;

   // Only accessed on the main thread. The takePhoto() operation is considered
   // pending until we're ready to take another photo, which involves a PostTask
@@ -225,7 +222,6 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
   // For testing.
   base::RepeatingCallback<void()> _onPhotoOutputStopped;
-  bool _forceLegacyStillImageApi;
   absl::optional<bool> _isPortraitEffectSupportedForTesting;
   absl::optional<bool> _isPortraitEffectActiveForTesting;
@@ -487,17 +483,6 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
   [NSNotificationCenter.defaultCenter removeObserver:self];
 }

-- (bool)useLegacyStillImageApi {
-  if (@available(macOS 10.15, *)) {
-    return _forceLegacyStillImageApi;
-  }
-  return true;
-}
-
-- (void)setForceLegacyStillImageApiForTesting:(bool)forceLegacyApi {
-  _forceLegacyStillImageApi = forceLegacyApi;
-}
-
 - (void)takePhoto {
   DCHECK(_mainThreadTaskRunner->BelongsToCurrentThread());
   DCHECK(_captureSession.running);
@@ -528,17 +513,7 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
   {
     // `_lock` is needed since `_photoOutput` may be read from non-main thread.
     base::AutoLock lock(_lock);
-#if (!defined(__IPHONE_10_0) || \
-     __IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_10_0)
-    if ([self useLegacyStillImageApi]) {
-      _photoOutput = [[AVCaptureStillImageOutput alloc] init];
-    } else
-#endif
-    if (@available(macOS 10.15, iOS 10.0, *)) {
-      _photoOutput = [[AVCapturePhotoOutput alloc] init];
-    } else {
-      NOTREACHED();
-    }
+    _photoOutput = [[AVCapturePhotoOutput alloc] init];
   }
   if (![_captureSession canAddOutput:_photoOutput]) {
     {
@@ -581,88 +556,24 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
   // takePhotoInternal() can only happen when we have a `_photoOutput` because
   // stopPhotoOutput() cancels in-flight operations by invalidating weak ptrs.
   DCHECK(_photoOutput);
-#if (!defined(__IPHONE_10_0) || \
-     __IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_10_0)
-  if ([self useLegacyStillImageApi]) {
-    // `_photoOutput` is of type AVCaptureStillImageOutput. Note that this block
-    // retains `self` but that's fine because it's called one time and then
-    // discarded, not kept around.
-    const auto handler = ^(CMSampleBufferRef sampleBuffer, NSError* error) {
-      {
-        base::AutoLock lock(self->_lock);
-        if (self->_frameReceiver) {
-          if (error != nil) {
-            self->_frameReceiver->OnPhotoError();
-          } else {
-            // Recommended compressed pixel format is JPEG, we don't expect
-            // surprises.
-            // TODO(mcasas): Consider using [1] for merging EXIF output
-            // information:
-            // [1]
-            // +(NSData*)jpegStillImageNSDataRepresentation:jpegSampleBuffer;
-            DCHECK_EQ(kCMVideoCodecType_JPEG,
-                      CMFormatDescriptionGetMediaSubType(
-                          CMSampleBufferGetFormatDescription(sampleBuffer)));
-            char* baseAddress = nullptr;
-            size_t length = 0;
-            const bool sample_buffer_addressable =
-                media::ExtractBaseAddressAndLength(&baseAddress, &length,
-                                                   sampleBuffer);
-            DCHECK(sample_buffer_addressable);
-            if (sample_buffer_addressable) {
-              self->_frameReceiver->OnPhotoTaken(
-                  reinterpret_cast<uint8_t*>(baseAddress), length,
-                  "image/jpeg");
-            }
-          }
-        }
-      }
-      // Whether we succeeded or failed, we need to resolve the pending
-      // takePhoto() operation.
-      self->_mainThreadTaskRunner->PostTask(
-          FROM_HERE,
-          base::BindOnce(
-              [](base::WeakPtr<SelfHolder> weakSelf) {
-                if (!weakSelf.get()) {
-                  return;
-                }
-                [weakSelf.get()->the_self takePhotoResolved];
-              },
-              self->_weakPtrHolderForTakePhoto.weak_ptr_factory.GetWeakPtr()));
-    };
-    AVCaptureStillImageOutput* image_output =
-        static_cast<AVCaptureStillImageOutput*>(_photoOutput);
-    DCHECK(image_output.connections.count == 1);
-    AVCaptureConnection* const connection =
-        image_output.connections.firstObject;
-    DCHECK(connection);
-    [image_output captureStillImageAsynchronouslyFromConnection:connection
-                                               completionHandler:handler];
-  } else
-#endif
-  if (@available(macOS 10.15, iOS 10.0, *)) {
-    // `_photoOutput` is of type AVCapturePhotoOutput.
-    @try {
-      // Asynchronous success or failure is handled inside
-      // captureOutput:didFinishProcessingPhoto:error on an unknown thread.
-      // Synchronous failures are handled in the catch clause below.
-      [_photoOutput
-          capturePhotoWithSettings:[AVCapturePhotoSettings
-                                       photoSettingsWithFormat:@{
-                                         AVVideoCodecKey : AVVideoCodecTypeJPEG
-                                       }]
-                           delegate:self];
-    } @catch (id exception) {
-      {
-        base::AutoLock lock(_lock);
-        if (_frameReceiver) {
-          _frameReceiver->OnPhotoError();
-        }
-      }
-      [self takePhotoResolved];
-    }
-  } else {
-    NOTREACHED();
-  }
+  @try {
+    // Asynchronous success or failure is handled inside
+    // captureOutput:didFinishProcessingPhoto:error on an unknown thread.
+    // Synchronous failures are handled in the catch clause below.
+    [_photoOutput
+        capturePhotoWithSettings:[AVCapturePhotoSettings
+                                     photoSettingsWithFormat:@{
+                                       AVVideoCodecKey : AVVideoCodecTypeJPEG
+                                     }]
+                        delegate:self];
+  } @catch (id exception) {
+    {
+      base::AutoLock lock(_lock);
+      if (_frameReceiver) {
+        _frameReceiver->OnPhotoError();
+      }
+    }
+    [self takePhotoResolved];
+  }
 }
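The replacement path above submits the request to AVCapturePhotoOutput and handles synchronous failures in the @catch block; the asynchronous result arrives through the AVCapturePhotoCaptureDelegate callback, which is not part of this hunk. A sketch of what such a callback typically looks like for a JPEG-configured capture (hypothetical code, not taken from this CL; the CL keeps its existing handler):

    // Sketch only: the rough shape of a macOS 10.15-era
    // AVCapturePhotoCaptureDelegate callback.
    - (void)captureOutput:(AVCapturePhotoOutput*)output
        didFinishProcessingPhoto:(AVCapturePhoto*)photo
                           error:(NSError*)error {
      if (error) {
        // Report the failure to whoever requested the photo.
        return;
      }
      // JPEG bytes, matching AVVideoCodecTypeJPEG in the settings above.
      NSData* jpegData = [photo fileDataRepresentation];
      (void)jpegData;
    }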

@@ -75,23 +75,9 @@ TEST(VideoCaptureDeviceAVFoundationMacTest,
   }));
 }

-class VideoCaptureDeviceAVFoundationMacTakePhotoTest
-    : public testing::TestWithParam<bool> {
- public:
-  VideoCaptureDeviceAVFoundation* CreateCaptureDevice(
-      testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>*
-          frame_receiver) {
-    VideoCaptureDeviceAVFoundation* captureDevice =
-        [[VideoCaptureDeviceAVFoundation alloc]
-            initWithFrameReceiver:frame_receiver];
-    [captureDevice setForceLegacyStillImageApiForTesting:GetParam()];
-    return captureDevice;
-  }
-};
-
-TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest, TakePhoto) {
-  RunTestCase(base::BindOnce(
-      [](VideoCaptureDeviceAVFoundationMacTakePhotoTest* thiz) {
+TEST(VideoCaptureDeviceAVFoundationMacTest, TakePhoto) {
+  RunTestCase(
+      base::BindOnce([] {
         NSString* deviceId = GetFirstDeviceId();
         if (!deviceId) {
           DVLOG(1) << "No camera available. Exiting test.";
@@ -101,7 +87,8 @@ TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest, TakePhoto) {
         testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
             frame_receiver;
         VideoCaptureDeviceAVFoundation* captureDevice =
-            thiz->CreateCaptureDevice(&frame_receiver);
+            [[VideoCaptureDeviceAVFoundation alloc]
+                initWithFrameReceiver:&frame_receiver];
         NSString* errorMessage = nil;
         ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
@@ -121,14 +108,12 @@ TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest, TakePhoto) {
         });
         [captureDevice takePhoto];
         run_loop.Run();
-      },
-      this));
+      }));
 }

-TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
-       StopCaptureWhileTakingPhoto) {
-  RunTestCase(base::BindOnce(
-      [](VideoCaptureDeviceAVFoundationMacTakePhotoTest* thiz) {
+TEST(VideoCaptureDeviceAVFoundationMacTest, StopCaptureWhileTakingPhoto) {
+  RunTestCase(
+      base::BindOnce([] {
         NSString* deviceId = GetFirstDeviceId();
         if (!deviceId) {
           DVLOG(1) << "No camera available. Exiting test.";
@@ -138,7 +123,8 @@ TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
         testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
            frame_receiver;
         VideoCaptureDeviceAVFoundation* captureDevice =
-            thiz->CreateCaptureDevice(&frame_receiver);
+            [[VideoCaptureDeviceAVFoundation alloc]
+                initWithFrameReceiver:&frame_receiver];
         NSString* errorMessage = nil;
         ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
@@ -150,18 +136,16 @@ TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
             .WillOnce(base::test::RunClosure(run_loop.QuitClosure()));
         [captureDevice takePhoto];
         // There is no risk that takePhoto() has successfully finishes before
-        // stopCapture() because the takePhoto() call involes a
+        // stopCapture() because the takePhoto() call involves a
         // PostDelayedTask() that cannot run until RunLoop::Run() below.
         [captureDevice stopCapture];
         run_loop.Run();
-      },
-      this));
+      }));
 }

-TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
-       MultiplePendingTakePhotos) {
-  RunTestCase(base::BindOnce(
-      [](VideoCaptureDeviceAVFoundationMacTakePhotoTest* thiz) {
+TEST(VideoCaptureDeviceAVFoundationMacTest, MultiplePendingTakePhotos) {
+  RunTestCase(
+      base::BindOnce([] {
         NSString* deviceId = GetFirstDeviceId();
         if (!deviceId) {
           DVLOG(1) << "No camera available. Exiting test.";
@@ -171,7 +155,8 @@ TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
         testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
            frame_receiver;
         VideoCaptureDeviceAVFoundation* captureDevice =
-            thiz->CreateCaptureDevice(&frame_receiver);
+            [[VideoCaptureDeviceAVFoundation alloc]
+                initWithFrameReceiver:&frame_receiver];
         NSString* errorMessage = nil;
         ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
@@ -187,14 +172,13 @@ TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
         [captureDevice takePhoto];
         [captureDevice takePhoto];
         run_loop.Run();
-      },
-      this));
+      }));
 }

-TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
+TEST(VideoCaptureDeviceAVFoundationMacTest,
      StopCaptureWhileMultiplePendingTakePhotos) {
-  RunTestCase(base::BindOnce(
-      [](VideoCaptureDeviceAVFoundationMacTakePhotoTest* thiz) {
+  RunTestCase(
+      base::BindOnce([] {
         NSString* deviceId = GetFirstDeviceId();
         if (!deviceId) {
           DVLOG(1) << "No camera available. Exiting test.";
@@ -204,7 +188,8 @@ TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
         testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
            frame_receiver;
         VideoCaptureDeviceAVFoundation* captureDevice =
-            thiz->CreateCaptureDevice(&frame_receiver);
+            [[VideoCaptureDeviceAVFoundation alloc]
+                initWithFrameReceiver:&frame_receiver];
         NSString* errorMessage = nil;
         ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
@@ -224,79 +209,68 @@ TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
         // PostDelayedTask() that cannot run until RunLoop::Run() below.
         [captureDevice stopCapture];
         run_loop.Run();
-      },
-      this));
+      }));
 }

-TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
+TEST(VideoCaptureDeviceAVFoundationMacTest,
      StopPhotoOutputWhenNoLongerTakingPhotos) {
-  RunTestCase(base::BindOnce(
-      [](VideoCaptureDeviceAVFoundationMacTakePhotoTest* thiz) {
+  RunTestCase(base::BindOnce([] {
     NSString* deviceId = GetFirstDeviceId();
     if (!deviceId) {
       DVLOG(1) << "No camera available. Exiting test.";
       return;
     }
     testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
         frame_receiver;
     VideoCaptureDeviceAVFoundation* captureDevice =
-        thiz->CreateCaptureDevice(&frame_receiver);
+        [[VideoCaptureDeviceAVFoundation alloc]
+            initWithFrameReceiver:&frame_receiver];
     NSString* errorMessage = nil;
     ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
                                    errorMessage:&errorMessage]);
     ASSERT_TRUE([captureDevice startCapture]);
     base::RunLoop run_loop(base::RunLoop::Type::kNestableTasksAllowed);
-    [captureDevice
-        setOnPhotoOutputStoppedForTesting:run_loop.QuitClosure()];
+    [captureDevice setOnPhotoOutputStoppedForTesting:run_loop.QuitClosure()];
     base::TimeTicks start_time = base::TimeTicks::Now();
     [captureDevice takePhoto];
     // The RunLoop automatically advances mocked time when there are delayed
     // tasks pending. This allows the test to run fast and still assert how
     // much mocked time has elapsed.
     run_loop.Run();
     auto time_elapsed = base::TimeTicks::Now() - start_time;
     // Still image output is not stopped until 60 seconds of inactivity, so
     // the mocked time must have advanced at least this much.
     EXPECT_GE(time_elapsed.InSeconds(), 60);
-      },
-      this));
+  }));
 }

-TEST_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
+TEST(VideoCaptureDeviceAVFoundationMacTest,
      TakePhotoAndShutDownWithoutWaiting) {
-  RunTestCase(base::BindOnce(
-      [](VideoCaptureDeviceAVFoundationMacTakePhotoTest* thiz) {
+  RunTestCase(base::BindOnce([] {
     NSString* deviceId = GetFirstDeviceId();
     if (!deviceId) {
       DVLOG(1) << "No camera available. Exiting test.";
       return;
     }
     testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
         frame_receiver;
     VideoCaptureDeviceAVFoundation* captureDevice =
-        thiz->CreateCaptureDevice(&frame_receiver);
+        [[VideoCaptureDeviceAVFoundation alloc]
+            initWithFrameReceiver:&frame_receiver];
     NSString* errorMessage = nil;
     ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
                                    errorMessage:&errorMessage]);
     ASSERT_TRUE([captureDevice startCapture]);
     [captureDevice takePhoto];
-      },
-      this));
+  }));
 }

-// When not forcing legacy API, AVCapturePhotoOutput is used if available
-// (macOS 10.15+). Otherwise AVCaptureStillImageOutput is used.
-INSTANTIATE_TEST_SUITE_P(VideoCaptureDeviceAVFoundationMacTakePhotoTest,
-                         VideoCaptureDeviceAVFoundationMacTakePhotoTest,
-                         // Force legacy API?
-                         testing::Values(false, true));
-
 TEST(VideoCaptureDeviceAVFoundationMacTest, ForwardsOddPixelBufferResolution) {
   // See crbug/1168112.
   RunTestCase(base::BindOnce([] {
@@ -323,7 +297,7 @@ TEST(VideoCaptureDeviceAVFoundationMacTest, ForwardsOddPixelBufferResolution) {
 }

 TEST(VideoCaptureDeviceAVFoundationMacTest, FrameRateFloatInaccuracyIsHandled) {
-  // See crbug/1299812.
+  // See https://crbug.com/1299812.
   RunTestCase(base::BindOnce([] {
     double max_frame_rate = 30.000030;
     AVCaptureDeviceFormat* format1 =

@@ -379,16 +379,7 @@ TEST_P(TrustStoreMacImplTest, SystemCerts) {
       // Cert is only in the system domain. It should be untrusted.
       EXPECT_FALSE(is_trusted);
     } else {
-      bool trusted;
-      if (__builtin_available(macOS 10.14, *)) {
-        trusted = SecTrustEvaluateWithError(trust, nullptr);
-      } else {
-        SecTrustResultType trust_result;
-        ASSERT_EQ(noErr, SecTrustEvaluate(trust, &trust_result));
-        trusted = (trust_result == kSecTrustResultProceed) ||
-                  (trust_result == kSecTrustResultUnspecified);
-      }
+      bool trusted = SecTrustEvaluateWithError(trust, nullptr);
       bool expected_trust_anchor =
           trusted && (SecTrustGetCertificateCount(trust) == 1);
       EXPECT_EQ(expected_trust_anchor, is_trusted);
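Both this hunk and the CopyCertChain hunk that follows collapse the same 10.14 branch: SecTrustEvaluateWithError() (available since macOS 10.14) returns a single bool plus an optional CFErrorRef, replacing the SecTrustEvaluate()/SecTrustResultType sequence. A small standalone sketch of the newer call, including the error out-parameter that this code deliberately ignores (hypothetical wrapper, not from this CL):

    #include <Security/Security.h>

    // Hypothetical wrapper around the 10.14+ API the CL now uses unconditionally.
    bool EvaluateTrust(SecTrustRef trust) {
      CFErrorRef error = nullptr;
      bool trusted = SecTrustEvaluateWithError(trust, &error);
      if (error) {
        // The out-parameter follows the CF ownership convention; release it.
        CFRelease(error);
      }
      return trusted;
    }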

@@ -76,17 +76,10 @@ OSStatus CopyCertChain(SecCertificateRef cert_handle,
   // Evaluate trust, which creates the cert chain.
   {
     base::AutoLock lock(crypto::GetMacSecurityServicesLock());
-    if (__builtin_available(macOS 10.14, *)) {
-      // The return value is intentionally ignored since we only care about
-      // building a cert chain, not whether it is trusted (the server is the
-      // only one that can decide that.)
-      std::ignore = SecTrustEvaluateWithError(trust, nullptr);
-    } else {
-      SecTrustResultType status;
-      result = SecTrustEvaluate(trust, &status);
-      if (result)
-        return result;
-    }
+    // The return value is intentionally ignored since we only care about
+    // building a cert chain, not whether it is trusted (the server is the
+    // only one that can decide that.)
+    std::ignore = SecTrustEvaluateWithError(trust, nullptr);
     *out_cert_chain = x509_util::CertificateChainFromSecTrust(trust);
   }
   return result;

@@ -30,22 +30,14 @@ void InitializeSkFontMgrForTest() {
     [font_urls addObject:font_url.absoluteURL];
   }

-  if (@available(macOS 10.15, *)) {
-    CTFontManagerRegisterFontURLs(
-        base::apple::NSToCFPtrCast(font_urls), kCTFontManagerScopeProcess,
-        /*enabled=*/true, ^bool(CFArrayRef errors, bool done) {
-          if (CFArrayGetCount(errors)) {
-            DLOG(FATAL) << "Failed to activate fonts.";
-          }
-          return true;
-        });
-  } else {
-    if (!CTFontManagerRegisterFontsForURLs(
-            base::apple::NSToCFPtrCast(font_urls), kCTFontManagerScopeProcess,
-            /*errors=*/nullptr)) {
-      DLOG(FATAL) << "Failed to activate fonts.";
-    }
-  }
+  CTFontManagerRegisterFontURLs(
+      base::apple::NSToCFPtrCast(font_urls), kCTFontManagerScopeProcess,
+      /*enabled=*/true, ^bool(CFArrayRef errors, bool done) {
+        if (CFArrayGetCount(errors)) {
+          DLOG(FATAL) << "Failed to activate fonts.";
+        }
+        return true;
+      });
 }

 } // namespace skia