From 975097d848acf8113d35df8b13548547bd315232 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Wed, 4 Dec 2024 15:20:58 +0100 Subject: [PATCH 01/16] Refactor permissions logic and remove OCMock from tests --- .../ios/RunnerTests/CameraPermissionTests.m | 163 +++++++++--------- .../camera_avfoundation/CameraPlugin.m | 15 +- ...onUtils.m => FLTCameraPermissionManager.m} | 38 ++-- .../FLTPermissionService.m | 17 ++ .../include/CameraPlugin.modulemap | 3 +- ...onUtils.h => FLTCameraPermissionManager.h} | 23 ++- .../FLTPermissionService.h | 19 ++ 7 files changed, 177 insertions(+), 101 deletions(-) rename packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/{CameraPermissionUtils.m => FLTCameraPermissionManager.m} (75%) create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTPermissionService.m rename packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/{CameraPermissionUtils.h => FLTCameraPermissionManager.h} (71%) create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTPermissionService.h diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m index 02a610affaa5..3a1e8bc88dda 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m @@ -8,31 +8,68 @@ #endif @import AVFoundation; @import XCTest; -#import #import "CameraTestUtils.h" +#import "FLTPermissionService.h" +#import "FLTCameraPermissionManager.h" -@interface CameraPermissionTests : XCTestCase +@interface MockPermissionService : NSObject +@property (nonatomic, assign) AVAuthorizationStatus cameraAuthorizationStatusStub; +@property (nonatomic, assign) AVAuthorizationStatus audioAuthorizationStatusStub; + +@property (nonatomic, assign) BOOL cameraGrantAccessStub; +@property (nonatomic, assign) BOOL audioGrantAccessStub; +@end +@implementation MockPermissionService +- (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType { + if (mediaType == AVMediaTypeVideo) { + return self.cameraAuthorizationStatusStub; + } else if (mediaType == AVMediaTypeAudio) { + return self.audioAuthorizationStatusStub; + } + @throw [NSException exceptionWithName:@"UnexpectedMediaType" + reason:@"Unexpected media type was used" + userInfo:nil]; +} + +- (void)requestAccessForMediaType:(AVMediaType)mediaType + completionHandler:(void (^)(BOOL granted))handler { + if (mediaType == AVMediaTypeVideo) { + handler(self.cameraGrantAccessStub); + } else if (mediaType == AVMediaTypeAudio) { + handler(self.audioGrantAccessStub); + } +} +@end + +@interface CameraPermissionTests : XCTestCase +@property (nonatomic, strong) FLTCameraPermissionManager *permissionManager; +@property (nonatomic, strong) MockPermissionService *mockService; @end @implementation CameraPermissionTests +- (void)setUp { + [super setUp]; + self.mockService = [[MockPermissionService alloc] init]; + self.permissionManager = [[FLTCameraPermissionManager alloc] + initWithPermissionService:self.mockService]; +} + #pragma mark - camera permissions -- (void)testRequestCameraPermission_completeWithoutErrorIfPrevoiuslyAuthorized { +- (void)testRequestCameraPermission_completeWithoutErrorIfPreviouslyAuthorized { 
XCTestExpectation *expectation = [self expectationWithDescription: @"Must copmlete without error if camera access was previously authorized."]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo]) - .andReturn(AVAuthorizationStatusAuthorized); + self.mockService.cameraAuthorizationStatusStub = AVAuthorizationStatusAuthorized; - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } - (void)testRequestCameraPermission_completeWithErrorIfPreviouslyDenied { @@ -45,14 +82,13 @@ - (void)testRequestCameraPermission_completeWithErrorIfPreviouslyDenied { @"Settings to enable camera access." details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo]) - .andReturn(AVAuthorizationStatusDenied); - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { + self.mockService.cameraAuthorizationStatusStub = AVAuthorizationStatusDenied; + + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -63,15 +99,13 @@ - (void)testRequestCameraPermission_completeWithErrorIfRestricted { message:@"Camera access is restricted. " details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo]) - .andReturn(AVAuthorizationStatusRestricted); + self.mockService.cameraAuthorizationStatusStub = AVAuthorizationStatusRestricted; - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -79,21 +113,16 @@ - (void)testRequestCameraPermission_completeWithoutErrorIfUserGrantAccess { XCTestExpectation *grantedExpectation = [self expectationWithDescription:@"Must complete without error if user choose to grant access"]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo]) - .andReturn(AVAuthorizationStatusNotDetermined); + self.mockService.cameraAuthorizationStatusStub = AVAuthorizationStatusNotDetermined; + // Mimic user choosing "allow" in permission dialog. - OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeVideo - completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) { - block(YES); - return YES; - }]]); + self.mockService.cameraGrantAccessStub = YES; - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { - if (error == nil) { - [grantedExpectation fulfill]; - } - }); + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { + if (error == nil) { + [grantedExpectation fulfill]; + } + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -105,21 +134,16 @@ - (void)testRequestCameraPermission_completeWithErrorIfUserDenyAccess { message:@"User denied the camera access request." 
details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeVideo]) - .andReturn(AVAuthorizationStatusNotDetermined); + self.mockService.cameraAuthorizationStatusStub = AVAuthorizationStatusNotDetermined; // Mimic user choosing "deny" in permission dialog. - OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeVideo - completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) { - block(NO); - return YES; - }]]); - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { + self.mockService.cameraGrantAccessStub = NO; + + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -131,17 +155,16 @@ - (void)testRequestAudioPermission_completeWithoutErrorIfPrevoiuslyAuthorized { [self expectationWithDescription: @"Must copmlete without error if audio access was previously authorized."]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio]) - .andReturn(AVAuthorizationStatusAuthorized); + self.mockService.audioAuthorizationStatusStub = AVAuthorizationStatusAuthorized; - FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } + - (void)testRequestAudioPermission_completeWithErrorIfPreviouslyDenied { XCTestExpectation *expectation = [self expectationWithDescription: @@ -152,14 +175,13 @@ - (void)testRequestAudioPermission_completeWithErrorIfPreviouslyDenied { @"Settings to enable audio access." details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio]) - .andReturn(AVAuthorizationStatusDenied); - FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { + self.mockService.audioAuthorizationStatusStub = AVAuthorizationStatusDenied; + + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -170,15 +192,13 @@ - (void)testRequestAudioPermission_completeWithErrorIfRestricted { message:@"Audio access is restricted. 
" details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio]) - .andReturn(AVAuthorizationStatusRestricted); + self.mockService.audioAuthorizationStatusStub = AVAuthorizationStatusRestricted; - FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -186,21 +206,16 @@ - (void)testRequestAudioPermission_completeWithoutErrorIfUserGrantAccess { XCTestExpectation *grantedExpectation = [self expectationWithDescription:@"Must complete without error if user choose to grant access"]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio]) - .andReturn(AVAuthorizationStatusNotDetermined); + self.mockService.audioAuthorizationStatusStub = AVAuthorizationStatusNotDetermined; + // Mimic user choosing "allow" in permission dialog. - OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeAudio - completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) { - block(YES); - return YES; - }]]); + self.mockService.audioGrantAccessStub = YES; - FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { [grantedExpectation fulfill]; } - }); + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -211,22 +226,16 @@ - (void)testRequestAudioPermission_completeWithErrorIfUserDenyAccess { message:@"User denied the audio access request." details:nil]; - id mockDevice = OCMClassMock([AVCaptureDevice class]); - OCMStub([mockDevice authorizationStatusForMediaType:AVMediaTypeAudio]) - .andReturn(AVAuthorizationStatusNotDetermined); + self.mockService.audioAuthorizationStatusStub = AVAuthorizationStatusNotDetermined; // Mimic user choosing "deny" in permission dialog. 
- OCMStub([mockDevice requestAccessForMediaType:AVMediaTypeAudio - completionHandler:[OCMArg checkWithBlock:^BOOL(void (^block)(BOOL)) { - block(NO); - return YES; - }]]); - FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { + self.mockService.audioGrantAccessStub = NO; + + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }); - + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m index de208fd560ef..ca004b97d605 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m @@ -8,7 +8,7 @@ @import AVFoundation; @import Flutter; -#import "./include/camera_avfoundation/CameraPermissionUtils.h" +#import "./include/camera_avfoundation/FLTCameraPermissionManager.h" #import "./include/camera_avfoundation/CameraProperties.h" #import "./include/camera_avfoundation/FLTCam.h" #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" @@ -25,6 +25,7 @@ @interface CameraPlugin () @property(readonly, nonatomic) id registry; @property(readonly, nonatomic) NSObject *messenger; @property(nonatomic) FCPCameraGlobalEventApi *globalEventAPI; +@property(readonly, nonatomic) FLTCameraPermissionManager *permissionManager; @end @implementation CameraPlugin @@ -52,6 +53,10 @@ - (instancetype)initWithRegistry:(NSObject *)registry _messenger = messenger; _globalEventAPI = globalAPI; _captureSessionQueue = dispatch_queue_create("io.flutter.camera.captureSessionQueue", NULL); + + id permissionService = [[FLTDefaultPermissionService alloc] init]; + _permissionManager = [[FLTCameraPermissionManager alloc] initWithPermissionService:permissionService]; + dispatch_queue_set_specific(_captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); @@ -145,7 +150,7 @@ - (void)createCameraWithName:(nonnull NSString *)cameraName // Create FLTCam only if granted camera access (and audio access if audio is enabled) __weak typeof(self) weakSelf = self; dispatch_async(self.captureSessionQueue, ^{ - FLTRequestCameraPermissionWithCompletionHandler(^(FlutterError *error) { + [self->_permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { typeof(self) strongSelf = weakSelf; if (!strongSelf) return; @@ -157,7 +162,7 @@ - (void)createCameraWithName:(nonnull NSString *)cameraName // optional, and used as a workaround to fix a missing frame issue on iOS. if (settings.enableAudio) { // Setup audio capture session only if granted audio access. 
- FLTRequestAudioPermissionWithCompletionHandler(^(FlutterError *error) { + [self->_permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { // cannot use the outter `strongSelf` typeof(self) strongSelf = weakSelf; if (!strongSelf) return; @@ -168,14 +173,14 @@ - (void)createCameraWithName:(nonnull NSString *)cameraName settings:settings completion:completion]; } - }); + }]; } else { [strongSelf createCameraOnSessionQueueWithName:cameraName settings:settings completion:completion]; } } - }); + }]; }); } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPermissionUtils.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m similarity index 75% rename from packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPermissionUtils.m rename to packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m index b63a1d684e00..c3e326e5fcbd 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPermissionUtils.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m @@ -3,9 +3,29 @@ // found in the LICENSE file. @import AVFoundation; -#import "./include/camera_avfoundation/CameraPermissionUtils.h" +#import "./include/camera_avfoundation/FLTCameraPermissionManager.h" +#import "./include/camera_avfoundation/FLTPermissionService.h" -void FLTRequestPermission(BOOL forAudio, FLTCameraPermissionRequestCompletionHandler handler) { +@implementation FLTCameraPermissionManager + +- (instancetype)initWithPermissionService:(id)service { + self = [super init]; + if (self) { + _permissionService = service ?: [[FLTDefaultPermissionService alloc] init]; + } + return self; +} + +- (void)requestAudioPermissionWithCompletionHandler:(__strong FLTCameraPermissionRequestCompletionHandler)handler { + [self requestPermissionForAudio:YES handler:handler]; +} + +- (void)requestCameraPermissionWithCompletionHandler:(__strong FLTCameraPermissionRequestCompletionHandler)handler { + [self requestPermissionForAudio:NO handler:handler]; +} + +- (void)requestPermissionForAudio:(BOOL)forAudio + handler:(FLTCameraPermissionRequestCompletionHandler)handler { AVMediaType mediaType; if (forAudio) { mediaType = AVMediaTypeAudio; @@ -13,7 +33,7 @@ void FLTRequestPermission(BOOL forAudio, FLTCameraPermissionRequestCompletionHan mediaType = AVMediaTypeVideo; } - switch ([AVCaptureDevice authorizationStatusForMediaType:mediaType]) { + switch ([_permissionService authorizationStatusForMediaType:mediaType]) { case AVAuthorizationStatusAuthorized: handler(nil); break; @@ -50,7 +70,7 @@ void FLTRequestPermission(BOOL forAudio, FLTCameraPermissionRequestCompletionHan break; } case AVAuthorizationStatusNotDetermined: { - [AVCaptureDevice requestAccessForMediaType:mediaType + [_permissionService requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) { // handler can be invoked on an arbitrary dispatch queue. 
if (granted) { @@ -76,12 +96,4 @@ void FLTRequestPermission(BOOL forAudio, FLTCameraPermissionRequestCompletionHan } } -void FLTRequestCameraPermissionWithCompletionHandler( - FLTCameraPermissionRequestCompletionHandler handler) { - FLTRequestPermission(/*forAudio*/ NO, handler); -} - -void FLTRequestAudioPermissionWithCompletionHandler( - FLTCameraPermissionRequestCompletionHandler handler) { - FLTRequestPermission(/*forAudio*/ YES, handler); -} +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTPermissionService.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTPermissionService.m new file mode 100644 index 000000000000..151524e4ff11 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTPermissionService.m @@ -0,0 +1,17 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "./include/camera_avfoundation/FLTPermissionService.h" + +@implementation FLTDefaultPermissionService +- (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType { + return [AVCaptureDevice authorizationStatusForMediaType:mediaType]; +} + +- (void)requestAccessForMediaType:(AVMediaType)mediaType + completionHandler:(void (^)(BOOL granted))handler { + [AVCaptureDevice requestAccessForMediaType:mediaType + completionHandler:handler]; +} +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap index bc864d174927..c2487e24704f 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap @@ -6,12 +6,13 @@ framework module camera_avfoundation { explicit module Test { header "CameraPlugin_Test.h" - header "CameraPermissionUtils.h" header "CameraProperties.h" header "FLTCam.h" header "FLTCam_Test.h" header "FLTSavePhotoDelegate_Test.h" header "FLTThreadSafeEventChannel.h" + header "FLTPermissionService.h" + header "FLTCameraPermissionManager.h" header "QueueUtils.h" } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPermissionUtils.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCameraPermissionManager.h similarity index 71% rename from packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPermissionUtils.h rename to packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCameraPermissionManager.h index 5cbbab055f34..a7e859aebdde 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPermissionUtils.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCameraPermissionManager.h @@ -5,7 +5,16 @@ @import Foundation; #import -typedef void (^FLTCameraPermissionRequestCompletionHandler)(FlutterError 
*); +#import "FLTPermissionService.h" + +NS_ASSUME_NONNULL_BEGIN + +typedef void (^FLTCameraPermissionRequestCompletionHandler)(FlutterError *_Nullable); + +@interface FLTCameraPermissionManager : NSObject +@property (nonatomic, strong) id permissionService; + +- (instancetype)initWithPermissionService:(id)service; /// Requests camera access permission. /// @@ -16,8 +25,8 @@ typedef void (^FLTCameraPermissionRequestCompletionHandler)(FlutterError *); /// @param handler if access permission is (or was previously) granted, completion handler will be /// called without error; Otherwise completion handler will be called with error. Handler can be /// called on an arbitrary dispatch queue. -extern void FLTRequestCameraPermissionWithCompletionHandler( - FLTCameraPermissionRequestCompletionHandler handler); +- (void)requestCameraPermissionWithCompletionHandler:( + FLTCameraPermissionRequestCompletionHandler)handler; /// Requests audio access permission. /// @@ -28,5 +37,9 @@ extern void FLTRequestCameraPermissionWithCompletionHandler( /// @param handler if access permission is (or was previously) granted, completion handler will be /// called without error; Otherwise completion handler will be called with error. Handler can be /// called on an arbitrary dispatch queue. -extern void FLTRequestAudioPermissionWithCompletionHandler( - FLTCameraPermissionRequestCompletionHandler handler); +- (void)requestAudioPermissionWithCompletionHandler:( + FLTCameraPermissionRequestCompletionHandler)handler; + +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTPermissionService.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTPermissionService.h new file mode 100644 index 000000000000..af31e415e995 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTPermissionService.h @@ -0,0 +1,19 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import AVFoundation; +@import Foundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTPermissionService +- (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType; +- (void)requestAccessForMediaType:(AVMediaType)mediaType + completionHandler:(void (^)(BOOL granted))handler; +@end + +@interface FLTDefaultPermissionService : NSObject +@end + +NS_ASSUME_NONNULL_END From d8232b1f0248d31b9ab01b947b043fca605605f0 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Fri, 6 Dec 2024 13:38:12 +0100 Subject: [PATCH 02/16] Introduce protocols and remove OCMock from CameraFocusTests --- .../ios/Runner.xcodeproj/project.pbxproj | 132 +++++++++----- .../ios/RunnerTests/CameraFocusTests.m | 135 ++++++++------ .../ios/RunnerTests/CameraPreviewPauseTests.m | 1 - .../example/ios/RunnerTests/CameraTestUtils.m | 9 +- .../ios/RunnerTests/FLTCamPhotoCaptureTests.m | 7 +- ...ts.m => FLTCameraPermissionManagerTests.m} | 125 ++++++++----- .../RunnerTests/MockCaptureDeviceController.h | 80 +++++++++ .../RunnerTests/MockCaptureDeviceController.m | 127 +++++++++++++ .../MockDeviceOrientationProvider.h | 17 ++ .../MockDeviceOrientationProvider.m | 14 ++ .../RunnerTests/Mocks/MockDeviceController.m | 6 + .../Sources/camera_avfoundation/FLTCam.m | 21 ++- .../FLTCamMediaSettingsAVWrapper.m | 9 +- .../FLTCameraPermissionManager.m | 2 +- .../Protocols/FLTCaptureDeviceControlling.m | 170 ++++++++++++++++++ .../Protocols/FLTDeviceOrientationProviding.m | 13 ++ .../{ => Protocols}/FLTPermissionService.m | 2 +- .../include/CameraPlugin.modulemap | 2 + .../include/camera_avfoundation/FLTCam.h | 5 +- .../FLTCamMediaSettingsAVWrapper.h | 10 +- .../include/camera_avfoundation/FLTCam_Test.h | 3 +- .../Protocols/FLTCaptureDeviceControlling.h | 72 ++++++++ .../Protocols/FLTDeviceOrientationProviding.h | 17 ++ .../{ => Protocols}/FLTPermissionService.h | 0 24 files changed, 801 insertions(+), 178 deletions(-) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{CameraPermissionTests.m => FLTCameraPermissionManagerTests.m} (68%) create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.h create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.m create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceController.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.m rename packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/{ => Protocols}/FLTPermissionService.m (89%) create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h rename packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/{ => Protocols}/FLTPermissionService.h 
(100%) diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index 041d918da5b9..6f24df26c737 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -10,8 +10,6 @@ 033B94BE269C40A200B4DF97 /* CameraMethodChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 033B94BD269C40A200B4DF97 /* CameraMethodChannelTests.m */; }; 03BB766B2665316900CE5A93 /* CameraFocusTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB766A2665316900CE5A93 /* CameraFocusTests.m */; }; 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; - 236906D1621AE863A5B2E770 /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 89D82918721FABF772705DB0 /* libPods-Runner.a */; }; - 25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */; }; 334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */; }; 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; 408D7A792C3C9CD000B71F9A /* OCMock in Frameworks */ = {isa = PBXBuildFile; productRef = 408D7A782C3C9CD000B71F9A /* OCMock */; }; @@ -19,18 +17,22 @@ 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; }; 78A318202AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage in Frameworks */ = {isa = PBXBuildFile; productRef = 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */; }; 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */; }; + 7F87E8022D01FD6F00A3549C /* MockCaptureDeviceController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E8012D01FD5600A3549C /* MockCaptureDeviceController.m */; }; + 7F87E80C2D0325D900A3549C /* MockDeviceOrientationProvider.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80B2D0325D700A3549C /* MockDeviceOrientationProvider.m */; }; 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; + A513EFD72CD1B8802B44FD82 /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = C5BBE9E0BEF3DC97699CB764 /* libPods-Runner.a */; }; + B283936252DB6663B9EC9A05 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 5E3D63425720360F63D4921B /* libPods-RunnerTests.a */; }; CEF6611A2B5E36A500D33FD4 /* 
CameraSessionPresetsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = CEF661192B5E36A500D33FD4 /* CameraSessionPresetsTests.m */; }; E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */; }; E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */; }; E04F108627A87CA600573D0C /* FLTSavePhotoDelegateTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */; }; E071CF7227B3061B006EF3BA /* FLTCamPhotoCaptureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */; }; E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */; }; - E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */; }; + E0B0D2BB27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0B0D2BA27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m */; }; E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */; }; E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */; }; E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */; }; @@ -68,17 +70,19 @@ 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraOrientationTests.m; sourceTree = ""; }; 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; }; 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; }; - 14AE82C910C2A12F2ECB2094 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; - 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; }; 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; 43ED1536282570DE00EB00DE /* AvailableCamerasTest.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AvailableCamerasTest.m; sourceTree = ""; }; - 59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; + 5E3D63425720360F63D4921B /* 
libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + 60F3C27E53F2AF8B81A89EA9 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; 788A065927B0E02900533D74 /* StreamingTest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamingTest.m; sourceTree = ""; }; 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraSettingsTests.m; sourceTree = ""; }; - 89D82918721FABF772705DB0 /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + 7F87E8012D01FD5600A3549C /* MockCaptureDeviceController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureDeviceController.m; sourceTree = ""; }; + 7F87E8032D02FF8C00A3549C /* MockCaptureDeviceController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureDeviceController.h; sourceTree = ""; }; + 7F87E80A2D0325B200A3549C /* MockDeviceOrientationProvider.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockDeviceOrientationProvider.h; sourceTree = ""; }; + 7F87E80B2D0325D700A3549C /* MockDeviceOrientationProvider.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockDeviceOrientationProvider.m; sourceTree = ""; }; 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; }; 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -87,19 +91,21 @@ 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; - 9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; 
path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = ""; }; - A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = ""; }; + C5BBE9E0BEF3DC97699CB764 /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + CB65379B3085E03D11D2786A /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = ""; }; CEF661192B5E36A500D33FD4 /* CameraSessionPresetsTests.m */ = {isa = PBXFileReference; indentWidth = 2; lastKnownFileType = sourcecode.c.objc; path = CameraSessionPresetsTests.m; sourceTree = ""; }; E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = QueueUtilsTests.m; sourceTree = ""; }; E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraCaptureSessionQueueRaceConditionTests.m; sourceTree = ""; }; E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTSavePhotoDelegateTests.m; sourceTree = ""; }; E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTCamPhotoCaptureTests.m; sourceTree = ""; }; E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FLTCamSampleBufferTests.m; sourceTree = ""; }; - E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPermissionTests.m; sourceTree = ""; }; + E0B0D2BA27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FLTCameraPermissionManagerTests.m; sourceTree = ""; }; E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeEventChannelTests.m; sourceTree = ""; }; E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CameraTestUtils.h; sourceTree = ""; }; E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraTestUtils.m; sourceTree = ""; }; E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPropertiesTests.m; sourceTree = ""; }; + E27055DF15226B1DFE032420 /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = ""; }; + E28469137832D102541045F6 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = 
"Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPreviewPauseTests.m; sourceTree = ""; }; /* End PBXFileReference section */ @@ -109,7 +115,7 @@ buildActionMask = 2147483647; files = ( 408D7A792C3C9CD000B71F9A /* OCMock in Frameworks */, - 25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */, + B283936252DB6663B9EC9A05 /* libPods-RunnerTests.a in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -128,6 +134,10 @@ 03BB76692665316900CE5A93 /* RunnerTests */ = { isa = PBXGroup; children = ( + 7F87E80B2D0325D700A3549C /* MockDeviceOrientationProvider.m */, + 7F87E80A2D0325B200A3549C /* MockDeviceOrientationProvider.h */, + 7F87E8032D02FF8C00A3549C /* MockCaptureDeviceController.h */, + 7F87E8012D01FD5600A3549C /* MockCaptureDeviceController.m */, 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */, 03BB766A2665316900CE5A93 /* CameraFocusTests.m */, 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */, @@ -137,7 +147,7 @@ E04F108527A87CA600573D0C /* FLTSavePhotoDelegateTests.m */, E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */, E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */, - E0B0D2BA27DFF2AF00E71E4B /* CameraPermissionTests.m */, + E0B0D2BA27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m */, E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */, E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */, E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */, @@ -151,15 +161,6 @@ path = RunnerTests; sourceTree = ""; }; - 3242FD2B467C15C62200632F /* Frameworks */ = { - isa = PBXGroup; - children = ( - 89D82918721FABF772705DB0 /* libPods-Runner.a */, - 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */, - ); - name = Frameworks; - sourceTree = ""; - }; 9740EEB11CF90186004384FC /* Flutter */ = { isa = PBXGroup; children = ( @@ -179,7 +180,7 @@ 03BB76692665316900CE5A93 /* RunnerTests */, 97C146EF1CF9000F007C117D /* Products */, FD386F00E98D73419C929072 /* Pods */, - 3242FD2B467C15C62200632F /* Frameworks */, + C004519075D29BB6F75ED9E6 /* Frameworks */, ); sourceTree = ""; }; @@ -216,13 +217,22 @@ name = "Supporting Files"; sourceTree = ""; }; + C004519075D29BB6F75ED9E6 /* Frameworks */ = { + isa = PBXGroup; + children = ( + C5BBE9E0BEF3DC97699CB764 /* libPods-Runner.a */, + 5E3D63425720360F63D4921B /* libPods-RunnerTests.a */, + ); + name = Frameworks; + sourceTree = ""; + }; FD386F00E98D73419C929072 /* Pods */ = { isa = PBXGroup; children = ( - 59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */, - 14AE82C910C2A12F2ECB2094 /* Pods-Runner.release.xcconfig */, - 9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */, - A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */, + 60F3C27E53F2AF8B81A89EA9 /* Pods-Runner.debug.xcconfig */, + E28469137832D102541045F6 /* Pods-Runner.release.xcconfig */, + E27055DF15226B1DFE032420 /* Pods-RunnerTests.debug.xcconfig */, + CB65379B3085E03D11D2786A /* Pods-RunnerTests.release.xcconfig */, ); path = Pods; sourceTree = ""; @@ -234,7 +244,7 @@ isa = PBXNativeTarget; buildConfigurationList = 03BB76712665316900CE5A93 /* Build configuration list for PBXNativeTarget "RunnerTests" */; buildPhases = ( - 422786A96136AA9087A2041B /* [CP] Check Pods Manifest.lock */, + D4954E02020248A842E1E7B3 /* [CP] Check Pods Manifest.lock */, 03BB76642665316900CE5A93 /* Sources */, 03BB76652665316900CE5A93 /* 
Frameworks */, 03BB76662665316900CE5A93 /* Resources */, @@ -256,13 +266,14 @@ isa = PBXNativeTarget; buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */; buildPhases = ( - 9872F2A25E8A171A111468CD /* [CP] Check Pods Manifest.lock */, + 3770D57B1AB91BF2FCC5DE94 /* [CP] Check Pods Manifest.lock */, 9740EEB61CF901F6004384FC /* Run Script */, 97C146EA1CF9000F007C117D /* Sources */, 97C146EB1CF9000F007C117D /* Frameworks */, 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + BC4B4B1DFD7B8E3FA1EB91B9 /* [CP] Copy Pods Resources */, ); buildRules = ( ); @@ -287,6 +298,7 @@ TargetAttributes = { 03BB76672665316900CE5A93 = { CreatedOnToolsVersion = 12.5; + LastSwiftMigration = 1610; ProvisioningStyle = Automatic; TestTargetID = 97C146ED1CF9000F007C117D; }; @@ -340,6 +352,28 @@ /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ + 3770D57B1AB91BF2FCC5DE94 /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { isa = PBXShellScriptBuildPhase; alwaysOutOfDate = 1; @@ -356,44 +390,44 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; - 422786A96136AA9087A2041B /* [CP] Check Pods Manifest.lock */ = { + 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); - inputFileListPaths = ( - ); inputPaths = ( - "${PODS_PODFILE_DIR_PATH}/Podfile.lock", - "${PODS_ROOT}/Manifest.lock", - ); - name = "[CP] Check Pods Manifest.lock"; - outputFileListPaths = ( ); + name = "Run Script"; outputPaths = ( - "$(DERIVED_FILE_DIR)/Pods-RunnerTests-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; - showEnvVarsInLog = 0; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; }; - 9740EEB61CF901F6004384FC /* Run Script */ = { + BC4B4B1DFD7B8E3FA1EB91B9 /* [CP] Copy Pods Resources */ = { isa = PBXShellScriptBuildPhase; - alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); inputPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh", + "${PODS_CONFIGURATION_BUILD_DIR}/camera_avfoundation/camera_avfoundation_privacy.bundle", + "${PODS_CONFIGURATION_BUILD_DIR}/path_provider_foundation/path_provider_foundation_privacy.bundle", + "${PODS_CONFIGURATION_BUILD_DIR}/video_player_avfoundation/video_player_avfoundation_privacy.bundle", ); - name = "Run Script"; + name = "[CP] Copy Pods Resources"; outputPaths = ( + "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/camera_avfoundation_privacy.bundle", + "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/path_provider_foundation_privacy.bundle", + "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/video_player_avfoundation_privacy.bundle", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n"; + showEnvVarsInLog = 0; }; - 9872F2A25E8A171A111468CD /* [CP] Check Pods Manifest.lock */ = { + D4954E02020248A842E1E7B3 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( @@ -408,7 +442,7 @@ outputFileListPaths = ( ); outputPaths = ( - "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", + "$(DERIVED_FILE_DIR)/Pods-RunnerTests-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; @@ -426,8 +460,10 @@ E071CF7227B3061B006EF3BA /* FLTCamPhotoCaptureTests.m in Sources */, E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */, 03BB766B2665316900CE5A93 /* CameraFocusTests.m in Sources */, + 7F87E8022D01FD6F00A3549C /* MockCaptureDeviceController.m in Sources */, 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */, E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */, + 7F87E80C2D0325D900A3549C /* MockDeviceOrientationProvider.m in Sources */, E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */, E04F108627A87CA600573D0C /* FLTSavePhotoDelegateTests.m in Sources */, 43ED1537282570DE00EB00DE /* AvailableCamerasTest.m in Sources */, @@ -437,7 +473,7 @@ E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */, 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */, E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */, - E0B0D2BB27DFF2AF00E71E4B /* CameraPermissionTests.m in Sources */, + E0B0D2BB27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m in Sources */, E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -484,7 +520,7 @@ /* Begin XCBuildConfiguration section */ 03BB766F2665316900CE5A93 /* Debug */ = { isa = XCBuildConfiguration; - baseConfigurationReference = 9C5CC6CAD53AD388B2694F3A /* Pods-RunnerTests.debug.xcconfig */; + baseConfigurationReference = E27055DF15226B1DFE032420 /* 
Pods-RunnerTests.debug.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; @@ -513,7 +549,7 @@ }; 03BB76702665316900CE5A93 /* Release */ = { isa = XCBuildConfiguration; - baseConfigurationReference = A24F9E418BA48BCC7409B117 /* Pods-RunnerTests.release.xcconfig */; + baseConfigurationReference = CB65379B3085E03D11D2786A /* Pods-RunnerTests.release.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m index 0cb8333345ab..edcd7c4795c7 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m @@ -8,120 +8,149 @@ #endif @import XCTest; @import AVFoundation; -#import + +#import "MockCaptureDeviceController.h" +#import "MockDeviceOrientationProvider.h" @interface CameraFocusTests : XCTestCase @property(readonly, nonatomic) FLTCam *camera; -@property(readonly, nonatomic) id mockDevice; -@property(readonly, nonatomic) id mockUIDevice; +@property(readonly, nonatomic) MockCaptureDeviceController *mockDevice; +@property(readonly, nonatomic) MockDeviceOrientationProvider *mockDeviceOrientationProvider; @end @implementation CameraFocusTests - (void)setUp { _camera = [[FLTCam alloc] init]; - _mockDevice = OCMClassMock([AVCaptureDevice class]); - _mockUIDevice = OCMPartialMock([UIDevice currentDevice]); -} - -- (void)tearDown { - [_mockDevice stopMocking]; - [_mockUIDevice stopMocking]; + _mockDevice = [[MockCaptureDeviceController alloc] init]; + _mockDeviceOrientationProvider = [[MockDeviceOrientationProvider alloc] init]; } - (void)testAutoFocusWithContinuousModeSupported_ShouldSetContinuousAutoFocus { - // AVCaptureFocusModeContinuousAutoFocus is supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]).andReturn(true); - // AVCaptureFocusModeContinuousAutoFocus is supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(true); - - // Don't expect setFocusMode:AVCaptureFocusModeAutoFocus - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus]; + // AVCaptureFocusModeContinuousAutoFocus and AVCaptureFocusModeContinuousAutoFocus are supported + _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { + return mode == AVCaptureFocusModeContinuousAutoFocus || mode == AVCaptureFocusModeAutoFocus; + }; + + __block BOOL setFocusModeContinuousAutoFocusCalled = NO; + + _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { + // Don't expect setFocusMode:AVCaptureFocusModeAutoFocus + if (mode == AVCaptureFocusModeAutoFocus) { + XCTFail(@"Unexpected call to setFocusMode"); + } else if (mode == AVCaptureFocusModeContinuousAutoFocus) { + setFocusModeContinuousAutoFocusCalled = YES; + } + }; // Run test [_camera applyFocusMode:FCPPlatformFocusModeAuto onDevice:_mockDevice]; // Expect setFocusMode:AVCaptureFocusModeContinuousAutoFocus - OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]); + XCTAssertTrue(setFocusModeContinuousAutoFocusCalled); } - (void)testAutoFocusWithContinuousModeNotSupported_ShouldSetAutoFocus { // AVCaptureFocusModeContinuousAutoFocus is not supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) - 
.andReturn(false); - // AVCaptureFocusModeContinuousAutoFocus is supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(true); - + // AVCaptureFocusModeAutoFocus is supported + _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { + return mode == AVCaptureFocusModeAutoFocus; + }; + + __block BOOL setFocusModeAutoFocusCalled = NO; + // Don't expect setFocusMode:AVCaptureFocusModeContinuousAutoFocus - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; - + _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { + if (mode == AVCaptureFocusModeContinuousAutoFocus) { + XCTFail(@"Unexpected call to setFocusMode"); + } else if (mode == AVCaptureFocusModeAutoFocus) { + setFocusModeAutoFocusCalled = YES; + } + }; + // Run test [_camera applyFocusMode:FCPPlatformFocusModeAuto onDevice:_mockDevice]; - + // Expect setFocusMode:AVCaptureFocusModeAutoFocus - OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeAutoFocus]); + XCTAssertTrue(setFocusModeAutoFocusCalled); } - (void)testAutoFocusWithNoModeSupported_ShouldSetNothing { - // AVCaptureFocusModeContinuousAutoFocus is not supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) - .andReturn(false); - // AVCaptureFocusModeContinuousAutoFocus is not supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(false); + // No modes are supported + _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { + return NO; + }; // Don't expect any setFocus - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus]; + _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { + XCTFail(@"Unexpected call to setFocusMode"); + }; // Run test [_camera applyFocusMode:FCPPlatformFocusModeAuto onDevice:_mockDevice]; } - (void)testLockedFocusWithModeSupported_ShouldSetModeAutoFocus { - // AVCaptureFocusModeContinuousAutoFocus is supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]).andReturn(true); - // AVCaptureFocusModeContinuousAutoFocus is supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(true); - - // Don't expect any setFocus - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; - + // AVCaptureFocusModeContinuousAutoFocus and AVCaptureFocusModeAutoFocus are supported + _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { + return mode == AVCaptureFocusModeContinuousAutoFocus || mode == AVCaptureFocusModeAutoFocus; + }; + + __block BOOL setFocusModeAutoFocusCalled = NO; + + // Expect only setFocusMode:AVCaptureFocusModeAutoFocus + _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { + if (mode == AVCaptureFocusModeContinuousAutoFocus) { + XCTFail(@"Unexpected call to setFocusMode"); + } else if (mode == AVCaptureFocusModeAutoFocus) { + setFocusModeAutoFocusCalled = YES; + } + }; + // Run test [_camera applyFocusMode:FCPPlatformFocusModeLocked onDevice:_mockDevice]; - // Expect setFocusMode:AVCaptureFocusModeAutoFocus - OCMVerify([_mockDevice setFocusMode:AVCaptureFocusModeAutoFocus]); + XCTAssertTrue(setFocusModeAutoFocusCalled); } - (void)testLockedFocusWithModeNotSupported_ShouldSetNothing { - // AVCaptureFocusModeContinuousAutoFocus is supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]).andReturn(true); - // 
AVCaptureFocusModeContinuousAutoFocus is not supported - OCMStub([_mockDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]).andReturn(false); + _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { + return mode == AVCaptureFocusModeContinuousAutoFocus; + }; // Don't expect any setFocus - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; - [[_mockDevice reject] setFocusMode:AVCaptureFocusModeAutoFocus]; + _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { + XCTFail(@"Unexpected call to setFocusMode"); + }; // Run test [_camera applyFocusMode:FCPPlatformFocusModeLocked onDevice:_mockDevice]; } +// TODO(mchudy): replace setValue with proper DI - (void)testSetFocusPointWithResult_SetsFocusPointOfInterest { // UI is currently in landscape left orientation - OCMStub([(UIDevice *)_mockUIDevice orientation]).andReturn(UIDeviceOrientationLandscapeLeft); + [_camera setValue:_mockDeviceOrientationProvider forKey:@"deviceOrientationProvider"]; + _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; // Focus point of interest is supported - OCMStub([_mockDevice isFocusPointOfInterestSupported]).andReturn(true); + _mockDevice.isFocusPointOfInterestSupported = YES; // Set mock device as the current capture device [_camera setValue:_mockDevice forKey:@"captureDevice"]; + __block BOOL setFocusPointOfInterestCalled = NO; + _mockDevice.setFocusPointOfInterestStub = ^(CGPoint point) { + if (point.x == 1 && point.y == 1) { + setFocusPointOfInterestCalled = YES; + } + }; + // Run test [_camera setFocusPoint:[FCPPlatformPoint makeWithX:1 y:1] withCompletion:^(FlutterError *_Nullable error){ }]; // Verify the focus point of interest has been set - OCMVerify([_mockDevice setFocusPointOfInterest:CGPointMake(1, 1)]); + XCTAssertTrue(setFocusPointOfInterestCalled); } @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m index 04bdd0795dab..60dfca541683 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPreviewPauseTests.m @@ -8,7 +8,6 @@ #endif @import XCTest; @import AVFoundation; -#import @interface CameraPreviewPauseTests : XCTestCase @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m index 503a5c255c59..501498d6382b 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m @@ -88,8 +88,8 @@ videoCaptureSession:videoSessionMock audioCaptureSession:audioSessionMock captureSessionQueue:captureSessionQueue - captureDeviceFactory:captureDeviceFactory ?: ^AVCaptureDevice *(void) { - return captureDeviceMock; + captureDeviceFactory:captureDeviceFactory ?: ^id(void) { + return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:captureDeviceMock]; } videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { return CMVideoFormatDescriptionGetDimensions(format.formatDescription); @@ -161,9 +161,8 @@ videoCaptureSession:captureSession audioCaptureSession:audioSessionMock captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) - captureDeviceFactory:^AVCaptureDevice *(void) { - return captureDevice; - } + 
captureDeviceFactory:^id(void) { + return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:captureDevice]; } videoDimensionsForFormat:videoDimensionsForFormat error:nil]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m index 7f17e39a6b0b..31711f065efd 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m @@ -171,7 +171,7 @@ - (void)testCaptureToFile_mustReportFileExtensionWithJpgWhenHEVCNotAvailableAndF }); [self waitForExpectationsWithTimeout:1 handler:nil]; } - +// - (void)testCaptureToFile_handlesTorchMode { XCTestExpectation *pathExpectation = [self expectationWithDescription: @@ -188,8 +188,9 @@ - (void)testCaptureToFile_handlesTorchMode { (void *)FLTCaptureSessionQueueSpecific, NULL); FLTCam *cam = FLTCreateCamWithCaptureSessionQueueAndMediaSettings(captureSessionQueue, nil, nil, - ^AVCaptureDevice *(void) { - return captureDeviceMock; + ^id(void) { + return + [[FLTDefaultCaptureDeviceController alloc] initWithDevice:captureDeviceMock]; }); AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m similarity index 68% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m index 3a1e8bc88dda..32c3fe0537bc 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPermissionTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m @@ -13,41 +13,29 @@ #import "FLTCameraPermissionManager.h" @interface MockPermissionService : NSObject -@property (nonatomic, assign) AVAuthorizationStatus cameraAuthorizationStatusStub; -@property (nonatomic, assign) AVAuthorizationStatus audioAuthorizationStatusStub; - -@property (nonatomic, assign) BOOL cameraGrantAccessStub; -@property (nonatomic, assign) BOOL audioGrantAccessStub; +@property(nonatomic, copy) AVAuthorizationStatus (^authorizationStatusStub)(AVMediaType mediaType); +@property(nonatomic, copy) void (^requestAccessStub)(AVMediaType mediaType, void (^handler)(BOOL)); @end @implementation MockPermissionService - (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType { - if (mediaType == AVMediaTypeVideo) { - return self.cameraAuthorizationStatusStub; - } else if (mediaType == AVMediaTypeAudio) { - return self.audioAuthorizationStatusStub; - } - @throw [NSException exceptionWithName:@"UnexpectedMediaType" - reason:@"Unexpected media type was used" - userInfo:nil]; + return self.authorizationStatusStub ? 
self.authorizationStatusStub(mediaType) : AVAuthorizationStatusNotDetermined; } - (void)requestAccessForMediaType:(AVMediaType)mediaType - completionHandler:(void (^)(BOOL granted))handler { - if (mediaType == AVMediaTypeVideo) { - handler(self.cameraGrantAccessStub); - } else if (mediaType == AVMediaTypeAudio) { - handler(self.audioGrantAccessStub); - } + completionHandler:(void (^)(BOOL))handler { + if (self.requestAccessStub) { + self.requestAccessStub(mediaType, handler); + } } @end -@interface CameraPermissionTests : XCTestCase +@interface FLTCameraPermissionManagerTests : XCTestCase @property (nonatomic, strong) FLTCameraPermissionManager *permissionManager; @property (nonatomic, strong) MockPermissionService *mockService; @end -@implementation CameraPermissionTests +@implementation FLTCameraPermissionManagerTests - (void)setUp { [super setUp]; @@ -63,8 +51,11 @@ - (void)testRequestCameraPermission_completeWithoutErrorIfPreviouslyAuthorized { [self expectationWithDescription: @"Must copmlete without error if camera access was previously authorized."]; - self.mockService.cameraAuthorizationStatusStub = AVAuthorizationStatusAuthorized; - + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusAuthorized; + }; + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { [expectation fulfill]; @@ -82,7 +73,10 @@ - (void)testRequestCameraPermission_completeWithErrorIfPreviouslyDenied { @"Settings to enable camera access." details:nil]; - self.mockService.cameraAuthorizationStatusStub = AVAuthorizationStatusDenied; + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusDenied; + }; [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { @@ -99,8 +93,11 @@ - (void)testRequestCameraPermission_completeWithErrorIfRestricted { message:@"Camera access is restricted. " details:nil]; - self.mockService.cameraAuthorizationStatusStub = AVAuthorizationStatusRestricted; - + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusRestricted; + }; + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; @@ -113,10 +110,17 @@ - (void)testRequestCameraPermission_completeWithoutErrorIfUserGrantAccess { XCTestExpectation *grantedExpectation = [self expectationWithDescription:@"Must complete without error if user choose to grant access"]; - self.mockService.cameraAuthorizationStatusStub = AVAuthorizationStatusNotDetermined; - + + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusNotDetermined; + }; + // Mimic user choosing "allow" in permission dialog. 
- self.mockService.cameraGrantAccessStub = YES; + self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + handler(YES); + }; [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { @@ -134,11 +138,17 @@ - (void)testRequestCameraPermission_completeWithErrorIfUserDenyAccess { message:@"User denied the camera access request." details:nil]; - self.mockService.cameraAuthorizationStatusStub = AVAuthorizationStatusNotDetermined; - - // Mimic user choosing "deny" in permission dialog. - self.mockService.cameraGrantAccessStub = NO; - + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusNotDetermined; + }; + + // Mimic user choosing "deny" in permission dialog. + self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + handler(NO); + }; + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; @@ -155,8 +165,11 @@ - (void)testRequestAudioPermission_completeWithoutErrorIfPrevoiuslyAuthorized { [self expectationWithDescription: @"Must copmlete without error if audio access was previously authorized."]; - self.mockService.audioAuthorizationStatusStub = AVAuthorizationStatusAuthorized; - + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + return AVAuthorizationStatusAuthorized; + }; + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { [expectation fulfill]; @@ -175,8 +188,11 @@ - (void)testRequestAudioPermission_completeWithErrorIfPreviouslyDenied { @"Settings to enable audio access." details:nil]; - self.mockService.audioAuthorizationStatusStub = AVAuthorizationStatusDenied; - + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + return AVAuthorizationStatusDenied; + }; + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; @@ -192,8 +208,11 @@ - (void)testRequestAudioPermission_completeWithErrorIfRestricted { message:@"Audio access is restricted. 
" details:nil]; - self.mockService.audioAuthorizationStatusStub = AVAuthorizationStatusRestricted; - + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + return AVAuthorizationStatusRestricted; + }; + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; @@ -206,10 +225,16 @@ - (void)testRequestAudioPermission_completeWithoutErrorIfUserGrantAccess { XCTestExpectation *grantedExpectation = [self expectationWithDescription:@"Must complete without error if user choose to grant access"]; - self.mockService.audioAuthorizationStatusStub = AVAuthorizationStatusNotDetermined; - + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + return AVAuthorizationStatusNotDetermined; + }; + // Mimic user choosing "allow" in permission dialog. - self.mockService.audioGrantAccessStub = YES; + self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + handler(YES); + }; [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { @@ -226,11 +251,17 @@ - (void)testRequestAudioPermission_completeWithErrorIfUserDenyAccess { message:@"User denied the audio access request." details:nil]; - self.mockService.audioAuthorizationStatusStub = AVAuthorizationStatusNotDetermined; - + self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + return AVAuthorizationStatusNotDetermined; + }; + // Mimic user choosing "deny" in permission dialog. - self.mockService.audioGrantAccessStub = NO; - + self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { + XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); + handler(NO); + }; + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h new file mode 100644 index 000000000000..8723b1910244 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h @@ -0,0 +1,80 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera_avfoundation; +#if __has_include() +@import camera_avfoundation.Test; +#endif +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@interface MockCaptureDeviceController : NSObject +// Position/Orientation +@property(nonatomic, assign) AVCaptureDevicePosition position; + +// Format/Configuration +@property(nonatomic, strong) AVCaptureDeviceFormat *activeFormat; +@property(nonatomic, strong) NSArray *formats; +@property(nonatomic, copy) void (^setActiveFormatStub)(AVCaptureDeviceFormat *format); + +// Flash/Torch +@property(nonatomic, assign) BOOL hasFlash; +@property(nonatomic, assign) BOOL hasTorch; +@property(nonatomic, assign) BOOL isTorchAvailable; +@property(nonatomic, assign) AVCaptureTorchMode torchMode; +@property(nonatomic, copy) void (^setTorchModeStub)(AVCaptureTorchMode mode); +@property(nonatomic, assign) BOOL flashModeSupported; + +// Focus +@property(nonatomic, assign) BOOL focusPointOfInterestSupported; +@property(nonatomic, copy) BOOL (^isFocusModeSupportedStub)(AVCaptureFocusMode mode); +@property(nonatomic, assign) AVCaptureFocusMode focusMode; +@property(nonatomic, copy) void (^setFocusModeStub)(AVCaptureFocusMode mode); +@property(nonatomic, assign) CGPoint focusPointOfInterest; +@property(nonatomic, copy) void (^setFocusPointOfInterestStub)(CGPoint point); + +// Exposure +@property(nonatomic, assign) BOOL exposurePointOfInterestSupported; +@property(nonatomic, assign) AVCaptureExposureMode exposureMode; +@property(nonatomic, assign) BOOL exposureModeSupported; +@property(nonatomic, copy) void (^setExposureModeStub)(AVCaptureExposureMode mode); +@property(nonatomic, assign) CGPoint exposurePointOfInterest; +@property(nonatomic, copy) void (^setExposurePointOfInterestStub)(CGPoint point); +@property(nonatomic, assign) float minExposureTargetBias; +@property(nonatomic, assign) float maxExposureTargetBias; +@property(nonatomic, copy) void (^setExposureTargetBiasStub)(float bias, void (^_Nullable handler)(CMTime)); + +// Zoom +@property(nonatomic, assign) float maxAvailableVideoZoomFactor; +@property(nonatomic, assign) float minAvailableVideoZoomFactor; +@property(nonatomic, assign) float videoZoomFactor; +@property(nonatomic, copy) void (^setVideoZoomFactorStub)(float factor); + +// Camera Properties +@property(nonatomic, assign) float lensAperture; +@property(nonatomic, assign) CMTime exposureDuration; +@property(nonatomic, assign) float ISO; + +// Configuration Lock +@property(nonatomic, assign) BOOL shouldFailConfiguration; +@property(nonatomic, copy) void (^lockForConfigurationStub)(NSError **error); +@property(nonatomic, copy) void (^unlockForConfigurationStub)(void); + +// Frame Duration +@property(nonatomic, assign) CMTime activeVideoMinFrameDuration; +@property(nonatomic, assign) CMTime activeVideoMaxFrameDuration; +@property(nonatomic, copy) void (^setActiveVideoMinFrameDurationStub)(CMTime duration); +@property(nonatomic, copy) void (^setActiveVideoMaxFrameDurationStub)(CMTime duration); + +// Input Creation +@property(nonatomic, strong) AVCaptureInput *inputToReturn; +@property(nonatomic, copy) void (^createInputStub)(NSError **error); + +@property(nonatomic, assign) BOOL isExposurePointOfInterestSupported; +@property(nonatomic, assign) BOOL isFocusPointOfInterestSupported; + +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m new file mode 100644 index 
000000000000..c1659d36f07d --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m @@ -0,0 +1,127 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +#if __has_include() +@import camera_avfoundation.Test; +#endif +@import AVFoundation; + +#import "MockCaptureDeviceController.h" + +@implementation MockCaptureDeviceController + +- (void)setActiveFormat:(AVCaptureDeviceFormat *)format { + _activeFormat = format; + if (self.setActiveFormatStub) { + self.setActiveFormatStub(format); + } +} + +- (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode { + return self.flashModeSupported; +} + +- (void)setTorchMode:(AVCaptureTorchMode)mode { + _torchMode = mode; + if (self.setTorchModeStub) { + self.setTorchModeStub(mode); + } +} + +- (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode { + if (self.isFocusModeSupportedStub) { + return self.isFocusModeSupportedStub(mode); + } + return NO; +} + +- (void)setFocusMode:(AVCaptureFocusMode)mode { + _focusMode = mode; + if (self.setFocusModeStub) { + self.setFocusModeStub(mode); + } +} + +- (void)setFocusPointOfInterest:(CGPoint)point { + _focusPointOfInterest = point; + if (self.setFocusPointOfInterestStub) { + self.setFocusPointOfInterestStub(point); + } +} + +- (void)setExposureMode:(AVCaptureExposureMode)mode { + _exposureMode = mode; + if (self.setExposureModeStub) { + self.setExposureModeStub(mode); + } +} + +- (void)setExposurePointOfInterest:(CGPoint)point { + _exposurePointOfInterest = point; + if (self.setExposurePointOfInterestStub) { + self.setExposurePointOfInterestStub(point); + } +} + +- (void)setExposureTargetBias:(float)bias completionHandler:(void (^)(CMTime))handler { + if (self.setExposureTargetBiasStub) { + self.setExposureTargetBiasStub(bias, handler); + } +} + +- (void)setVideoZoomFactor:(float)factor { + _videoZoomFactor = factor; + if (self.setVideoZoomFactorStub) { + self.setVideoZoomFactorStub(factor); + } +} + +- (BOOL)lockForConfiguration:(NSError **)error { + if (self.lockForConfigurationStub) { + self.lockForConfigurationStub(error); + return !self.shouldFailConfiguration; + } + if (self.shouldFailConfiguration) { + if (error) { + *error = [NSError errorWithDomain:@"test" code:0 userInfo:nil]; + } + return NO; + } + return YES; +} + +- (void)unlockForConfiguration { + if (self.unlockForConfigurationStub) { + self.unlockForConfigurationStub(); + } +} + +- (void)setActiveVideoMinFrameDuration:(CMTime)duration { + _activeVideoMinFrameDuration = duration; + if (self.setActiveVideoMinFrameDurationStub) { + self.setActiveVideoMinFrameDurationStub(duration); + } +} + +- (void)setActiveVideoMaxFrameDuration:(CMTime)duration { + _activeVideoMaxFrameDuration = duration; + if (self.setActiveVideoMaxFrameDurationStub) { + self.setActiveVideoMaxFrameDurationStub(duration); + } +} + +- (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode { + return self.exposureModeSupported; +} + +- (AVCaptureInput *)createInput:(NSError *_Nullable *_Nullable)error { + if (self.createInputStub) { + self.createInputStub(error); + } + return self.inputToReturn; +} + + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.h new file mode 100644 index 000000000000..0421ec665293 --- /dev/null +++ 
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.h @@ -0,0 +1,17 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +#if __has_include() +@import camera_avfoundation.Test; +#endif +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@interface MockDeviceOrientationProvider : NSObject +@property(nonatomic, assign) UIDeviceOrientation orientation; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.m new file mode 100644 index 000000000000..963cb7e96ff9 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.m @@ -0,0 +1,14 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +#if __has_include() +@import camera_avfoundation.Test; +#endif +@import AVFoundation; + +#import "MockDeviceOrientationProvider.h" + +@implementation MockDeviceOrientationProvider +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceController.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceController.m new file mode 100644 index 000000000000..27cc985d7d2b --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceController.m @@ -0,0 +1,6 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + + +@interface FLTMockCaptureDeviceController : NSObject diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 0b065026f10e..2174f6998691 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -13,6 +13,8 @@ #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" #import "./include/camera_avfoundation/QueueUtils.h" #import "./include/camera_avfoundation/messages.g.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" +#import "./include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h" static FlutterError *FlutterErrorFromNSError(NSError *error) { return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code] @@ -103,7 +105,7 @@ @interface FLTCam () deviceOrientationProvider; /// Reports the given error message to the Dart side of the plugin. /// /// Can be called from any thread. 
@@ -144,8 +146,9 @@ - (instancetype)initWithCameraName:(NSString *)cameraName videoCaptureSession:videoCaptureSession audioCaptureSession:videoCaptureSession captureSessionQueue:captureSessionQueue - captureDeviceFactory:^AVCaptureDevice *(void) { - return [AVCaptureDevice deviceWithUniqueID:cameraName]; + captureDeviceFactory:^id (void) { + AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:cameraName]; + return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:device]; } videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { return CMVideoFormatDescriptionGetDimensions(format.formatDescription); @@ -262,6 +265,8 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings _motionManager = [[CMMotionManager alloc] init]; [_motionManager startAccelerometerUpdates]; + + _deviceOrientationProvider = [[FLTDefaultDeviceOrientationProvider alloc] init]; if (_mediaSettings.framesPerSecond) { // The frame rate can be changed only on a locked for configuration device. @@ -309,7 +314,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings - (AVCaptureConnection *)createConnection:(NSError **)error { // Setup video capture input. - _captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:error]; + _captureVideoInput = [_captureDevice createInput:error]; // Test the return value of the `deviceInputWithDevice` method to see whether an error occurred. // Don’t just test to see whether the error pointer was set to point to an error. @@ -344,8 +349,8 @@ - (void)reportInitializationState { height:self.previewSize.height] exposureMode:self.exposureMode focusMode:self.focusMode - exposurePointSupported:self.captureDevice.exposurePointOfInterestSupported - focusPointSupported:self.captureDevice.focusPointOfInterestSupported]; + exposurePointSupported:self.captureDevice.isExposurePointOfInterestSupported + focusPointSupported:self.captureDevice.isFocusPointOfInterestSupported]; __weak typeof(self) weakSelf = self; FLTEnsureToRunOnMainQueue(^{ @@ -1038,7 +1043,7 @@ - (void)applyFocusMode { [self applyFocusMode:_focusMode onDevice:_captureDevice]; } -- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice { +- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(id)captureDevice { [captureDevice lockForConfiguration:nil]; switch (focusMode) { case FCPPlatformFocusModeLocked: @@ -1175,7 +1180,7 @@ - (void)setFocusPoint:(FCPPlatformPoint *)point details:nil]); return; } - UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + UIDeviceOrientation orientation = [_deviceOrientationProvider orientation]; [_captureDevice lockForConfiguration:nil]; // A nil point resets to the center. [_captureDevice diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m index b975daa4b5c2..c712245a6ced 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m @@ -3,14 +3,15 @@ // found in the LICENSE file. 
#import "./include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" @implementation FLTCamMediaSettingsAVWrapper -- (BOOL)lockDevice:(AVCaptureDevice *)captureDevice error:(NSError *_Nullable *_Nullable)outError { +- (BOOL)lockDevice:(id)captureDevice error:(NSError *_Nullable *_Nullable)outError { return [captureDevice lockForConfiguration:outError]; } -- (void)unlockDevice:(AVCaptureDevice *)captureDevice { +- (void)unlockDevice:(id)captureDevice { return [captureDevice unlockForConfiguration]; } @@ -22,11 +23,11 @@ - (void)commitConfigurationForSession:(AVCaptureSession *)videoCaptureSession { [videoCaptureSession commitConfiguration]; } -- (void)setMinFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { +- (void)setMinFrameDuration:(CMTime)duration onDevice:(id)captureDevice { captureDevice.activeVideoMinFrameDuration = duration; } -- (void)setMaxFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { +- (void)setMaxFrameDuration:(CMTime)duration onDevice:(id)captureDevice { captureDevice.activeVideoMaxFrameDuration = duration; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m index c3e326e5fcbd..27a0550e82c5 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m @@ -4,7 +4,7 @@ @import AVFoundation; #import "./include/camera_avfoundation/FLTCameraPermissionManager.h" -#import "./include/camera_avfoundation/FLTPermissionService.h" +#import "./include/camera_avfoundation/Protocols/FLTPermissionService.h" @implementation FLTCameraPermissionManager diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m new file mode 100644 index 000000000000..f89e44920f48 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m @@ -0,0 +1,170 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "../include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" + +@interface FLTDefaultCaptureDeviceController () +@property(nonatomic, strong) AVCaptureDevice *device; +@end + +@implementation FLTDefaultCaptureDeviceController + +- (instancetype)initWithDevice:(AVCaptureDevice *)device { + self = [super init]; + if (self) { + _device = device; + } + return self; +} + +// Position/Orientation +- (AVCaptureDevicePosition)position { + return self.device.position; +} + +// Format/Configuration +- (AVCaptureDeviceFormat *)activeFormat { + return self.device.activeFormat; +} + +- (NSArray *)formats { + return self.device.formats; +} + +- (void)setActiveFormat:(AVCaptureDeviceFormat *)format { + self.device.activeFormat = format; +} + +// Flash/Torch +- (BOOL)hasFlash { + return self.device.hasFlash; +} + +- (BOOL)hasTorch { + return self.device.hasTorch; +} + +- (BOOL)isTorchAvailable { + return self.device.isTorchAvailable; +} + +- (AVCaptureTorchMode)torchMode { + return self.device.torchMode; +} + +- (void)setTorchMode:(AVCaptureTorchMode)torchMode { + self.device.torchMode = torchMode; +} + +- (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode { + return [self.device isFlashModeSupported:mode]; +} + +// Focus +- (BOOL)isFocusPointOfInterestSupported { + return self.device.isFocusPointOfInterestSupported; +} + +- (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode { + return [self.device isFocusModeSupported:mode]; +} + +- (void)setFocusMode:(AVCaptureFocusMode)focusMode { + self.device.focusMode = focusMode; +} + +- (void)setFocusPointOfInterest:(CGPoint)point { + self.device.focusPointOfInterest = point; +} + +// Exposure +- (BOOL)isExposurePointOfInterestSupported { + return self.device.isExposurePointOfInterestSupported; +} + +- (void)setExposureMode:(AVCaptureExposureMode)exposureMode { + self.device.exposureMode = exposureMode; +} + +- (void)setExposurePointOfInterest:(CGPoint)point { + self.device.exposurePointOfInterest = point; +} + +- (float)minExposureTargetBias { + return self.device.minExposureTargetBias; +} + +- (float)maxExposureTargetBias { + return self.device.maxExposureTargetBias; +} + +- (void)setExposureTargetBias:(float)bias completionHandler:(void (^)(CMTime))handler { + [self.device setExposureTargetBias:bias completionHandler:handler]; +} + +- (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode { + return [self.device isExposureModeSupported:mode]; +} + +// Zoom +- (float)maxAvailableVideoZoomFactor { + return self.device.maxAvailableVideoZoomFactor; +} + +- (float)minAvailableVideoZoomFactor { + return self.device.minAvailableVideoZoomFactor; +} + +- (float)videoZoomFactor { + return self.device.videoZoomFactor; +} + +- (void)setVideoZoomFactor:(float)factor { + self.device.videoZoomFactor = factor; +} + +// Camera Properties +- (float)lensAperture { + return self.device.lensAperture; +} + +- (CMTime)exposureDuration { + return self.device.exposureDuration; +} + +- (float)ISO { + return self.device.ISO; +} + +// Configuration Lock +- (BOOL)lockForConfiguration:(NSError **)error { + return [self.device lockForConfiguration:error]; +} + +- (void)unlockForConfiguration { + [self.device unlockForConfiguration]; +} + +- (CMTime)activeVideoMinFrameDuration { + return self.device.activeVideoMinFrameDuration; +} + +- (void)setActiveVideoMinFrameDuration:(CMTime)duration { + self.device.activeVideoMinFrameDuration = duration; +} + +- (CMTime)activeVideoMaxFrameDuration { + return self.device.activeVideoMaxFrameDuration; +} + +- 
(void)setActiveVideoMaxFrameDuration:(CMTime)duration { + self.device.activeVideoMaxFrameDuration = duration; +} + +- (AVCaptureInput *)createInput:(NSError * _Nullable * _Nullable)error { + return [AVCaptureDeviceInput deviceInputWithDevice:_device error:error]; +} + + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.m new file mode 100644 index 000000000000..7b10f09109d4 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.m @@ -0,0 +1,13 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "../include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h" + +@implementation FLTDefaultDeviceOrientationProvider + +- (UIDeviceOrientation)orientation { + return [[UIDevice currentDevice] orientation]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTPermissionService.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPermissionService.m similarity index 89% rename from packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTPermissionService.m rename to packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPermissionService.m index 151524e4ff11..afdac5d3da59 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTPermissionService.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPermissionService.m @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
-#import "./include/camera_avfoundation/FLTPermissionService.h" +#import "../include/camera_avfoundation/Protocols/FLTPermissionService.h" @implementation FLTDefaultPermissionService - (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap index c2487e24704f..12c1c17a141d 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap @@ -12,7 +12,9 @@ framework module camera_avfoundation { header "FLTSavePhotoDelegate_Test.h" header "FLTThreadSafeEventChannel.h" header "FLTPermissionService.h" + header "FLTCaptureDeviceControlling.h" header "FLTCameraPermissionManager.h" + header "FLTDeviceOrientationProviding.h" header "QueueUtils.h" } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h index d8f97926b770..9fee90390a2e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h @@ -8,6 +8,7 @@ #import "CameraProperties.h" #import "FLTCamMediaSettingsAVWrapper.h" +#import "FLTCaptureDeviceControlling.h" #import "messages.g.h" NS_ASSUME_NONNULL_BEGIN @@ -15,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN /// A class that manages camera's state and performs camera operations. @interface FLTCam : NSObject -@property(readonly, nonatomic) AVCaptureDevice *captureDevice; +@property(readonly, nonatomic) id captureDevice; @property(readonly, nonatomic) CGSize previewSize; @property(assign, nonatomic) BOOL isPreviewPaused; @property(nonatomic, copy) void (^onFrameAvailable)(void); @@ -92,7 +93,7 @@ NS_ASSUME_NONNULL_BEGIN /// /// @param focusMode The focus mode that should be applied to the @captureDevice instance. /// @param captureDevice The AVCaptureDevice to which the @focusMode will be applied. 
-- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice; +- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(id)captureDevice; - (void)pausePreview; - (void)resumePreview; - (void)setDescriptionWhileRecording:(NSString *)cameraName diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h index 144a84eac13f..4e8d9d15cd8f 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h @@ -5,6 +5,8 @@ @import AVFoundation; @import Foundation; +#import "FLTCaptureDeviceControlling.h" + NS_ASSUME_NONNULL_BEGIN /** @@ -25,14 +27,14 @@ NS_ASSUME_NONNULL_BEGIN * @param outError The optional error. * @result A BOOL indicating whether the device was successfully locked for configuration. */ -- (BOOL)lockDevice:(AVCaptureDevice *)captureDevice error:(NSError *_Nullable *_Nullable)outError; +- (BOOL)lockDevice:(id)captureDevice error:(NSError *_Nullable *_Nullable)outError; /** * @method unlockDevice: * @abstract Release exclusive control over device hardware properties. * @param captureDevice The capture device. */ -- (void)unlockDevice:(AVCaptureDevice *)captureDevice; +- (void)unlockDevice:(id)captureDevice; /** * @method beginConfigurationForSession: @@ -57,7 +59,7 @@ NS_ASSUME_NONNULL_BEGIN * @param duration The frame duration. * @param captureDevice The capture device */ -- (void)setMinFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice; +- (void)setMinFrameDuration:(CMTime)duration onDevice:(id)captureDevice; /** * @method setMaxFrameDuration:onDevice: @@ -66,7 +68,7 @@ NS_ASSUME_NONNULL_BEGIN * @param duration The frame duration. * @param captureDevice The capture device */ -- (void)setMaxFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice; +- (void)setMaxFrameDuration:(CMTime)duration onDevice:(id)captureDevice; /** * @method assetWriterAudioInputWithOutputSettings: diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h index d05838f49a70..65bf90a70b69 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h @@ -4,6 +4,7 @@ #import "FLTCam.h" #import "FLTSavePhotoDelegate.h" +#import "FLTCaptureDeviceControlling.h" /// Determines the video dimensions (width and height) for a given capture device format. /// Used in tests to mock CMVideoFormatDescriptionGetDimensions. @@ -11,7 +12,7 @@ typedef CMVideoDimensions (^VideoDimensionsForFormat)(AVCaptureDeviceFormat *); /// Factory block returning an AVCaptureDevice. /// Used in tests to inject a device into FLTCam. 
-typedef AVCaptureDevice * (^CaptureDeviceFactory)(void); +typedef id (^CaptureDeviceFactory)(void); @interface FLTImageStreamHandler : NSObject diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h new file mode 100644 index 000000000000..5603055865aa --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h @@ -0,0 +1,72 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import AVFoundation; +@import Foundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTCaptureDeviceControlling + +// Position/Orientation +- (AVCaptureDevicePosition)position; + +// Format/Configuration +- (AVCaptureDeviceFormat *)activeFormat; +- (NSArray *)formats; +- (void)setActiveFormat:(AVCaptureDeviceFormat *)format; + +// Flash/Torch +- (BOOL)hasFlash; +- (BOOL)hasTorch; +- (BOOL)isTorchAvailable; +- (AVCaptureTorchMode)torchMode; +- (void)setTorchMode:(AVCaptureTorchMode)torchMode; +- (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode; + +// Focus +- (BOOL)isFocusPointOfInterestSupported; +- (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode; +- (void)setFocusMode:(AVCaptureFocusMode)focusMode; +- (void)setFocusPointOfInterest:(CGPoint)point; + +// Exposure +- (BOOL)isExposurePointOfInterestSupported; +- (void)setExposureMode:(AVCaptureExposureMode)exposureMode; +- (void)setExposurePointOfInterest:(CGPoint)point; +- (float)minExposureTargetBias; +- (float)maxExposureTargetBias; +- (void)setExposureTargetBias:(float)bias completionHandler:(void (^ _Nullable)(CMTime))handler; +- (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode; + +// Zoom +- (float)maxAvailableVideoZoomFactor; +- (float)minAvailableVideoZoomFactor; +- (float)videoZoomFactor; +- (void)setVideoZoomFactor:(float)factor; + +// Camera Properties +- (float)lensAperture; +- (CMTime)exposureDuration; +- (float)ISO; + +// Configuration Lock +- (BOOL)lockForConfiguration:(NSError **)error; +- (void)unlockForConfiguration; + +// Frame Duration +- (CMTime)activeVideoMinFrameDuration; +- (void)setActiveVideoMinFrameDuration:(CMTime)duration; +- (CMTime)activeVideoMaxFrameDuration; +- (void)setActiveVideoMaxFrameDuration:(CMTime)duration; + +- (AVCaptureInput *)createInput:(NSError * _Nullable * _Nullable)error; + +@end + +@interface FLTDefaultCaptureDeviceController : NSObject +- (instancetype)initWithDevice:(AVCaptureDevice *)device; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h new file mode 100644 index 000000000000..e0b39f9b9e21 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h @@ -0,0 +1,17 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import AVFoundation; +@import Foundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTDeviceOrientationProviding +- (UIDeviceOrientation)orientation; +@end + +@interface FLTDefaultDeviceOrientationProvider : NSObject +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTPermissionService.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPermissionService.h similarity index 100% rename from packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTPermissionService.h rename to packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPermissionService.h From a78714636dbf1123d2b1fd7341ab9480ebfcb944 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Fri, 6 Dec 2024 17:14:26 +0100 Subject: [PATCH 03/16] Migrate CameraExposureTests --- .../ios/RunnerTests/CameraExposureTests.m | 78 ++++++++++++------- .../ios/RunnerTests/CameraFocusTests.m | 28 ++++++- .../ios/RunnerTests/CameraOrientationTests.m | 25 ++++-- .../RunnerTests/MockCaptureDeviceController.h | 7 +- .../RunnerTests/MockCaptureDeviceController.m | 3 +- .../Protocols/FLTCaptureDeviceControlling.m | 2 +- 6 files changed, 99 insertions(+), 44 deletions(-) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m index 7b641a5746c0..bbe3cf055a9a 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m @@ -5,51 +5,73 @@ @import camera_avfoundation; @import XCTest; @import AVFoundation; -#import -@interface FLTCam : NSObject - -- (void)setExposurePointWithResult:(FlutterResult)result x:(double)x y:(double)y; -@end +#import "MockCaptureDeviceController.h" +#import "MockDeviceOrientationProvider.h" @interface CameraExposureTests : XCTestCase @property(readonly, nonatomic) FLTCam *camera; -@property(readonly, nonatomic) id mockDevice; -@property(readonly, nonatomic) id mockUIDevice; +@property(readonly, nonatomic) MockCaptureDeviceController *mockDevice; +@property(readonly, nonatomic) MockDeviceOrientationProvider *mockDeviceOrientationProvider; @end @implementation CameraExposureTests - (void)setUp { _camera = [[FLTCam alloc] init]; - _mockDevice = OCMClassMock([AVCaptureDevice class]); - _mockUIDevice = OCMPartialMock([UIDevice currentDevice]); -} - -- (void)tearDown { - [_mockDevice stopMocking]; - [_mockUIDevice stopMocking]; + _mockDevice = [[MockCaptureDeviceController alloc] init]; + _mockDeviceOrientationProvider = [[MockDeviceOrientationProvider alloc] init]; + + [_camera setValue:_mockDevice forKey:@"captureDevice"]; + [_camera setValue:_mockDeviceOrientationProvider forKey:@"deviceOrientationProvider"]; } -- (void)testSetExpsourePointWithResult_SetsExposurePointOfInterest { +- (void)testSetExposurePointWithResult_SetsExposurePointOfInterest { // UI is currently in landscape left orientation - OCMStub([(UIDevice *)_mockUIDevice orientation]).andReturn(UIDeviceOrientationLandscapeLeft); + _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; 
// Exposure point of interest is supported - OCMStub([_mockDevice isExposurePointOfInterestSupported]).andReturn(true); - // Set mock device as the current capture device - [_camera setValue:_mockDevice forKey:@"captureDevice"]; + _mockDevice.isExposurePointOfInterestSupported = YES; + + // Verify the exposure point of interest has been set + __block CGPoint setPoint = CGPointZero; + _mockDevice.setExposurePointOfInterestStub = ^(CGPoint point) { + if (CGPointEqualToPoint(CGPointMake(1, 1), point)) { + setPoint = point; + } + }; // Run test - [_camera - setExposurePointWithResult:^void(id _Nullable result) { - } - x:1 - y:1]; + XCTestExpectation *completionExpectation = [self expectationWithDescription:@"Completion called"]; + [_camera setExposurePoint:[FCPPlatformPoint makeWithX:1 y:1] + withCompletion:^(FlutterError * _Nullable error) { + XCTAssertNil(error); + [completionExpectation fulfill]; + }]; - // Verify the focus point of interest has been set - OCMVerify([_mockDevice setExposurePointOfInterest:CGPointMake(1, 1)]); + [self waitForExpectationsWithTimeout:1 handler:nil]; + XCTAssertEqual(setPoint.x, 1.0); + XCTAssertEqual(setPoint.y, 1.0); +} + +- (void)testSetExposurePoint_WhenNotSupported_ReturnsError { + // UI is currently in landscape left orientation + _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; + // Exposure point of interest is not supported + _mockDevice.isExposurePointOfInterestSupported = NO; + + XCTestExpectation *expectation = [self expectationWithDescription:@"Completion with error"]; + + // Run + [_camera setExposurePoint:[FCPPlatformPoint makeWithX:1 y:1] + withCompletion:^(FlutterError *_Nullable error) { + XCTAssertNotNil(error); + XCTAssertEqualObjects(error.code, @"setExposurePointFailed"); + XCTAssertEqualObjects(error.message, @"Device does not have exposure point capabilities"); + [expectation fulfill]; + }]; + + // Verify + [self waitForExpectationsWithTimeout:1 handler:nil]; } @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m index edcd7c4795c7..27f5e21bdfab 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m @@ -24,6 +24,9 @@ - (void)setUp { _camera = [[FLTCam alloc] init]; _mockDevice = [[MockCaptureDeviceController alloc] init]; _mockDeviceOrientationProvider = [[MockDeviceOrientationProvider alloc] init]; + + [_camera setValue:_mockDevice forKey:@"captureDevice"]; + [_camera setValue:_mockDeviceOrientationProvider forKey:@"deviceOrientationProvider"]; } - (void)testAutoFocusWithContinuousModeSupported_ShouldSetContinuousAutoFocus { @@ -127,15 +130,11 @@ - (void)testLockedFocusWithModeNotSupported_ShouldSetNothing { [_camera applyFocusMode:FCPPlatformFocusModeLocked onDevice:_mockDevice]; } -// TODO(mchudy): replace setValue with proper DI - (void)testSetFocusPointWithResult_SetsFocusPointOfInterest { // UI is currently in landscape left orientation - [_camera setValue:_mockDeviceOrientationProvider forKey:@"deviceOrientationProvider"]; _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; // Focus point of interest is supported _mockDevice.isFocusPointOfInterestSupported = YES; - // Set mock device as the current capture device - [_camera setValue:_mockDevice forKey:@"captureDevice"]; __block BOOL setFocusPointOfInterestCalled = NO; 
_mockDevice.setFocusPointOfInterestStub = ^(CGPoint point) { @@ -153,4 +152,25 @@ - (void)testSetFocusPointWithResult_SetsFocusPointOfInterest { XCTAssertTrue(setFocusPointOfInterestCalled); } +- (void)testSetFocusPoint_WhenNotSupported_ReturnsError { + // UI is currently in landscape left orientation + _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; + // Focus point of interest is not supported + _mockDevice.isFocusPointOfInterestSupported = NO; + + XCTestExpectation *expectation = [self expectationWithDescription:@"Completion with error"]; + + // Run + [_camera setFocusPoint:[FCPPlatformPoint makeWithX:1 y:1] + withCompletion:^(FlutterError *_Nullable error) { + XCTAssertNotNil(error); + XCTAssertEqualObjects(error.code, @"setFocusPointFailed"); + XCTAssertEqualObjects(error.message, @"Device does not have focus point capabilities"); + [expectation fulfill]; + }]; + + // Verify + [self waitForExpectationsWithTimeout:1 handler:nil]; +} + @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m index e6ce8d48bc5b..7d205274aa3d 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m @@ -11,6 +11,8 @@ #import +#import "MockCaptureDeviceController.h" + @interface StubGlobalEventApi : FCPCameraGlobalEventApi @property(nonatomic) BOOL called; @property(nonatomic) FCPPlatformDeviceOrientation lastOrientation; @@ -35,10 +37,21 @@ - (FlutterBinaryMessengerConnection)setMessageHandlerOnChannel:(nonnull NSString #pragma mark - @interface CameraOrientationTests : XCTestCase +@property(readonly, nonatomic) FLTCam *camera; +@property(readonly, nonatomic) MockCaptureDeviceController *mockDevice; +@property(readonly, nonatomic) StubGlobalEventApi *eventAPI; @end @implementation CameraOrientationTests +- (void)setUp { + [super setUp]; + _mockDevice = [[MockCaptureDeviceController alloc] init]; + _camera = [[FLTCam alloc] init]; + + [_camera setValue:_mockDevice forKey:@"captureDevice"]; +} + // Ensure that the given queue and then the main queue have both cycled, to wait for any pending // async events that may have been bounced between them. 
- (void)waitForRoundTripWithQueue:(dispatch_queue_t)queue { @@ -98,20 +111,22 @@ - (void)testOrientationUpdateMustBeOnCaptureSessionQueue { XCTestExpectation *queueExpectation = [self expectationWithDescription:@"Orientation update must happen on the capture session queue"]; - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; + CameraPlugin *plugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; const char *captureSessionQueueSpecific = "capture_session_queue"; dispatch_queue_set_specific(camera.captureSessionQueue, captureSessionQueueSpecific, (void *)captureSessionQueueSpecific, NULL); - FLTCam *mockCam = OCMClassMock([FLTCam class]); - camera.camera = mockCam; - OCMStub([mockCam setDeviceOrientation:UIDeviceOrientationLandscapeLeft]) + plugin.camera = _camera; + + //_camera setDeviceOrientation:<#(UIDeviceOrientation)#> + + OCMStub([_camera setDeviceOrientation:UIDeviceOrientationLandscapeLeft]) .andDo(^(NSInvocation *invocation) { if (dispatch_get_specific(captureSessionQueueSpecific)) { [queueExpectation fulfill]; } }); - [camera orientationChanged: + [plugin orientationChanged: [self createMockNotificationForOrientation:UIDeviceOrientationLandscapeLeft]]; [self waitForExpectationsWithTimeout:1 handler:nil]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h index 8723b1910244..901bcc60c7b1 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h @@ -28,7 +28,7 @@ NS_ASSUME_NONNULL_BEGIN @property(nonatomic, assign) BOOL flashModeSupported; // Focus -@property(nonatomic, assign) BOOL focusPointOfInterestSupported; +@property(nonatomic, assign) BOOL isFocusPointOfInterestSupported; @property(nonatomic, copy) BOOL (^isFocusModeSupportedStub)(AVCaptureFocusMode mode); @property(nonatomic, assign) AVCaptureFocusMode focusMode; @property(nonatomic, copy) void (^setFocusModeStub)(AVCaptureFocusMode mode); @@ -36,7 +36,7 @@ NS_ASSUME_NONNULL_BEGIN @property(nonatomic, copy) void (^setFocusPointOfInterestStub)(CGPoint point); // Exposure -@property(nonatomic, assign) BOOL exposurePointOfInterestSupported; +@property(nonatomic, assign) BOOL isExposurePointOfInterestSupported; @property(nonatomic, assign) AVCaptureExposureMode exposureMode; @property(nonatomic, assign) BOOL exposureModeSupported; @property(nonatomic, copy) void (^setExposureModeStub)(AVCaptureExposureMode mode); @@ -72,9 +72,6 @@ NS_ASSUME_NONNULL_BEGIN @property(nonatomic, strong) AVCaptureInput *inputToReturn; @property(nonatomic, copy) void (^createInputStub)(NSError **error); -@property(nonatomic, assign) BOOL isExposurePointOfInterestSupported; -@property(nonatomic, assign) BOOL isFocusPointOfInterestSupported; - @end NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m index c1659d36f07d..548cea98bb9c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m @@ -68,6 +68,8 @@ - (void)setExposurePointOfInterest:(CGPoint)point { - (void)setExposureTargetBias:(float)bias completionHandler:(void 
(^)(CMTime))handler { if (self.setExposureTargetBiasStub) { self.setExposureTargetBiasStub(bias, handler); + } else if (handler) { + handler(kCMTimeZero); } } @@ -123,5 +125,4 @@ - (AVCaptureInput *)createInput:(NSError *_Nullable *_Nullable)error { return self.inputToReturn; } - @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m index f89e44920f48..1483620039b4 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m @@ -63,7 +63,7 @@ - (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode { // Focus - (BOOL)isFocusPointOfInterestSupported { - return self.device.isFocusPointOfInterestSupported; + return self.device.isFocusPointOfInterestSupported; } - (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode { From 881805a6f95cdc53c42818ff19747d4a0b738205 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Fri, 6 Dec 2024 17:39:36 +0100 Subject: [PATCH 04/16] Initial migration of CameraOrientationTests --- .../ios/RunnerTests/CameraOrientationTests.m | 116 +++++++++++------- .../camera_avfoundation/CameraPlugin.m | 2 + 2 files changed, 73 insertions(+), 45 deletions(-) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m index 7d205274aa3d..f52c30328fd4 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m @@ -12,6 +12,7 @@ #import #import "MockCaptureDeviceController.h" +#import "MockDeviceOrientationProvider.h" @interface StubGlobalEventApi : FCPCameraGlobalEventApi @property(nonatomic) BOOL called; @@ -34,12 +35,37 @@ - (FlutterBinaryMessengerConnection)setMessageHandlerOnChannel:(nonnull NSString @end +@interface MockCamera : FLTCam +@property(nonatomic, copy) void (^setDeviceOrientationStub)(UIDeviceOrientation orientation); +@end + +@implementation MockCamera +- (void)setDeviceOrientation:(UIDeviceOrientation)orientation { + if (self.setDeviceOrientationStub) { + self.setDeviceOrientationStub(orientation); + } +} + +@end + +@interface MockUIDevice : UIDevice +@property (nonatomic, assign) UIDeviceOrientation mockOrientation; +@end + +@implementation MockUIDevice +- (UIDeviceOrientation)orientation { + return self.mockOrientation; +} + +@end + #pragma mark - @interface CameraOrientationTests : XCTestCase -@property(readonly, nonatomic) FLTCam *camera; +@property(readonly, nonatomic) MockCamera *camera; @property(readonly, nonatomic) MockCaptureDeviceController *mockDevice; @property(readonly, nonatomic) StubGlobalEventApi *eventAPI; +@property(readonly, nonatomic) CameraPlugin *cameraPlugin; @end @implementation CameraOrientationTests @@ -47,9 +73,15 @@ @implementation CameraOrientationTests - (void)setUp { [super setUp]; _mockDevice = [[MockCaptureDeviceController alloc] init]; - _camera = [[FLTCam alloc] init]; + _camera = [[MockCamera alloc] init]; + _eventAPI = [[StubGlobalEventApi alloc] init]; [_camera setValue:_mockDevice forKey:@"captureDevice"]; + + _cameraPlugin = [[CameraPlugin alloc] 
initWithRegistry:nil + messenger:nil + globalAPI:_eventAPI]; + _cameraPlugin.camera = _camera; } // Ensure that the given queue and then the main queue have both cycled, to wait for any pending @@ -70,30 +102,20 @@ - (void)sendOrientation:(UIDeviceOrientation)orientation toCamera:(CameraPlugin } - (void)testOrientationNotifications { - StubGlobalEventApi *eventAPI = [[StubGlobalEventApi alloc] init]; - CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil - messenger:nil - globalAPI:eventAPI]; - - [self sendOrientation:UIDeviceOrientationPortraitUpsideDown toCamera:cameraPlugin]; - XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationPortraitDown); - [self sendOrientation:UIDeviceOrientationPortrait toCamera:cameraPlugin]; - XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationPortraitUp); - [self sendOrientation:UIDeviceOrientationLandscapeLeft toCamera:cameraPlugin]; - XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationLandscapeLeft); - [self sendOrientation:UIDeviceOrientationLandscapeRight toCamera:cameraPlugin]; - XCTAssertEqual(eventAPI.lastOrientation, FCPPlatformDeviceOrientationLandscapeRight); + [self sendOrientation:UIDeviceOrientationPortraitUpsideDown toCamera:_cameraPlugin]; + XCTAssertEqual(_eventAPI.lastOrientation, FCPPlatformDeviceOrientationPortraitDown); + [self sendOrientation:UIDeviceOrientationPortrait toCamera:_cameraPlugin]; + XCTAssertEqual(_eventAPI.lastOrientation, FCPPlatformDeviceOrientationPortraitUp); + [self sendOrientation:UIDeviceOrientationLandscapeLeft toCamera:_cameraPlugin]; + XCTAssertEqual(_eventAPI.lastOrientation, FCPPlatformDeviceOrientationLandscapeLeft); + [self sendOrientation:UIDeviceOrientationLandscapeRight toCamera:_cameraPlugin]; + XCTAssertEqual(_eventAPI.lastOrientation, FCPPlatformDeviceOrientationLandscapeRight); } - (void)testOrientationNotificationsNotCalledForFaceUp { - StubGlobalEventApi *eventAPI = [[StubGlobalEventApi alloc] init]; - CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil - messenger:nil - globalAPI:eventAPI]; - - [self sendOrientation:UIDeviceOrientationFaceUp toCamera:cameraPlugin]; + [self sendOrientation:UIDeviceOrientationFaceUp toCamera:_cameraPlugin]; - XCTAssertFalse(eventAPI.called); + XCTAssertFalse(_eventAPI.called); } - (void)testOrientationNotificationsNotCalledForFaceDown { @@ -113,18 +135,15 @@ - (void)testOrientationUpdateMustBeOnCaptureSessionQueue { CameraPlugin *plugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; const char *captureSessionQueueSpecific = "capture_session_queue"; - dispatch_queue_set_specific(camera.captureSessionQueue, captureSessionQueueSpecific, + dispatch_queue_set_specific(plugin.captureSessionQueue, captureSessionQueueSpecific, (void *)captureSessionQueueSpecific, NULL); plugin.camera = _camera; - //_camera setDeviceOrientation:<#(UIDeviceOrientation)#> - - OCMStub([_camera setDeviceOrientation:UIDeviceOrientationLandscapeLeft]) - .andDo(^(NSInvocation *invocation) { - if (dispatch_get_specific(captureSessionQueueSpecific)) { - [queueExpectation fulfill]; - } - }); + _camera.setDeviceOrientationStub = ^(UIDeviceOrientation orientation) { + if (dispatch_get_specific(captureSessionQueueSpecific)) { + [queueExpectation fulfill]; + } + }; [plugin orientationChanged: [self createMockNotificationForOrientation:UIDeviceOrientationLandscapeLeft]]; @@ -133,32 +152,39 @@ - (void)testOrientationUpdateMustBeOnCaptureSessionQueue { - (void)testOrientationChanged_noRetainCycle { 
dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); - FLTCam *mockCam = OCMClassMock([FLTCam class]); - StubGlobalEventApi *stubAPI = [[StubGlobalEventApi alloc] init]; - __weak CameraPlugin *weakCamera; + __weak CameraPlugin *weakPlugin; @autoreleasepool { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil + CameraPlugin *plugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil - globalAPI:stubAPI]; - weakCamera = camera; - camera.captureSessionQueue = captureSessionQueue; - camera.camera = mockCam; + globalAPI:_eventAPI]; + weakPlugin = plugin; + plugin.captureSessionQueue = captureSessionQueue; + plugin.camera = _camera; - [camera orientationChanged: + [plugin orientationChanged: [self createMockNotificationForOrientation:UIDeviceOrientationLandscapeLeft]]; } // Sanity check - XCTAssertNil(weakCamera, @"Camera must have been deallocated."); + XCTAssertNil(weakPlugin, @"Camera must have been deallocated."); + + __block BOOL setDeviceOrientationCalled = NO; + _camera.setDeviceOrientationStub = ^(UIDeviceOrientation orientation) { + if (orientation == UIDeviceOrientationLandscapeLeft) { + setDeviceOrientationCalled = YES; + } + }; + + __weak StubGlobalEventApi *weakEventAPI = _eventAPI; // Must check in captureSessionQueue since orientationChanged dispatches to this queue. XCTestExpectation *expectation = [self expectationWithDescription:@"Dispatched to capture session queue"]; dispatch_async(captureSessionQueue, ^{ - OCMVerify(never(), [mockCam setDeviceOrientation:UIDeviceOrientationLandscapeLeft]); - XCTAssertFalse(stubAPI.called); + XCTAssertFalse(setDeviceOrientationCalled); + XCTAssertFalse(weakEventAPI.called); [expectation fulfill]; }); @@ -166,8 +192,8 @@ - (void)testOrientationChanged_noRetainCycle { } - (NSNotification *)createMockNotificationForOrientation:(UIDeviceOrientation)deviceOrientation { - UIDevice *mockDevice = OCMClassMock([UIDevice class]); - OCMStub([mockDevice orientation]).andReturn(deviceOrientation); + MockUIDevice *mockDevice = [[MockUIDevice alloc] init]; + mockDevice.mockOrientation = deviceOrientation; return [NSNotification notificationWithName:@"orientation_test" object:mockDevice]; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m index ca004b97d605..f5f1cf94f2b5 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m @@ -60,6 +60,7 @@ - (instancetype)initWithRegistry:(NSObject *)registry dispatch_queue_set_specific(_captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); + // TODO: use device orientation protocol [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(orientationChanged:) @@ -73,6 +74,7 @@ - (void)detachFromEngineForRegistrar:(NSObject *)registr } - (void)orientationChanged:(NSNotification *)note { + // TODO: change to protocol UIDevice *device = note.object; UIDeviceOrientation orientation = device.orientation; From 8b0a57f150dab674714bc5ca5535f8f761051548 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Fri, 6 Dec 2024 18:06:47 +0100 Subject: [PATCH 05/16] Add method channel abstraction --- 
.../ios/RunnerTests/CameraOrientationTests.m | 2 -- .../RunnerTests/Mocks/MockDeviceController.m | 6 ---- .../Protocols/FLTMethodChannelProtocol.m | 34 +++++++++++++++++++ .../include/CameraPlugin.modulemap | 1 + .../Protocols/FLTMethodChannelProtocol.h | 27 +++++++++++++++ 5 files changed, 62 insertions(+), 8 deletions(-) delete mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceController.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTMethodChannelProtocol.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTMethodChannelProtocol.h diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m index f52c30328fd4..b17a3bf64560 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m @@ -9,8 +9,6 @@ @import XCTest; @import Flutter; -#import - #import "MockCaptureDeviceController.h" #import "MockDeviceOrientationProvider.h" diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceController.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceController.m deleted file mode 100644 index 27cc985d7d2b..000000000000 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceController.m +++ /dev/null @@ -1,6 +0,0 @@ -// Copyright 2013 The Flutter Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - - -@interface FLTMockCaptureDeviceController : NSObject diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTMethodChannelProtocol.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTMethodChannelProtocol.m new file mode 100644 index 000000000000..416617b59df0 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTMethodChannelProtocol.m @@ -0,0 +1,34 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
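// Illustrative usage sketch, not part of this patch: a caller might wrap a concrete
// FlutterMethodChannel and then talk to it only through the protocol, so tests can substitute a
// stub. The channel name and the helper function below are hypothetical.
static void ExampleInvokeOverWrappedChannel(NSObject<FlutterBinaryMessenger> *messenger) {
  FlutterMethodChannel *rawChannel =
      [FlutterMethodChannel methodChannelWithName:@"plugins.flutter.io/camera_example"
                                  binaryMessenger:messenger];
  DefaultMethodChannel *wrapper = [[DefaultMethodChannel alloc] initWithChannel:rawChannel];
  [wrapper invokeMethod:@"exampleMethod" arguments:@{@"key" : @"value"}];
}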
+ +@import Flutter; + +#import "FLTMethodChannelProtocol.h" + +@interface DefaultMethodChannel () +/// The wrapped FlutterMethodChannel +@property(nonatomic, strong) FlutterMethodChannel *channel; +@end + +@implementation DefaultMethodChannel + +- (instancetype)initWithChannel:(nonnull FlutterMethodChannel *)channel { + self = [super init]; + if (self) { + _channel = channel; + } + return self; +} + +- (void)invokeMethod:(nonnull NSString *)method arguments:(id _Nullable)arguments { + [self.channel invokeMethod:method arguments:arguments]; +} + +- (void)invokeMethod:(nonnull NSString *)method + arguments:(id _Nullable)arguments + result:(FlutterResult _Nullable)callback { + [self.channel invokeMethod:method arguments:arguments result:callback]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap index 12c1c17a141d..dd5366316aee 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap @@ -15,6 +15,7 @@ framework module camera_avfoundation { header "FLTCaptureDeviceControlling.h" header "FLTCameraPermissionManager.h" header "FLTDeviceOrientationProviding.h" + header "FLTMethodChannelProtocol.h" header "QueueUtils.h" } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTMethodChannelProtocol.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTMethodChannelProtocol.h new file mode 100644 index 000000000000..9795f0e4cd8b --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTMethodChannelProtocol.h @@ -0,0 +1,27 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +NS_ASSUME_NONNULL_BEGIN +/// A protocol that wraps FlutterMethodChannel. +@protocol FLTMethodChannelProtocol + +/// Invokes the specified Flutter method with the specified arguments, expecting +/// an asynchronous result. 
+- (void)invokeMethod:(NSString *)method arguments:(id _Nullable)arguments; + +/// Invokes the specified Flutter method with the specified arguments and specified callback +- (void)invokeMethod:(NSString *)method + arguments:(id _Nullable)arguments + result:(FlutterResult _Nullable)callback; + +@end + +/// The default method channel that wraps FlutterMethodChannel +@interface DefaultMethodChannel : NSObject + +/// Initialize this wrapper with a FlutterMethodChannel +- (instancetype)initWithChannel:(FlutterMethodChannel *)channel; +@end + +NS_ASSUME_NONNULL_END From 6110a3b009981243d88bbeda289e7ce497f7a00a Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Mon, 9 Dec 2024 09:33:24 +0100 Subject: [PATCH 06/16] Migrate ThreadSafeEventChannelTests --- .../ios/Runner.xcodeproj/project.pbxproj | 10 +++++ .../FLTCameraPermissionManagerTests.m | 1 + .../RunnerTests/MockCaptureDeviceController.h | 3 -- .../RunnerTests/MockCaptureDeviceController.m | 3 -- .../MockDeviceOrientationProvider.h | 3 -- .../MockDeviceOrientationProvider.m | 3 -- .../ios/RunnerTests/MockEventChannel.h | 14 ++++++ .../ios/RunnerTests/MockEventChannel.m | 15 +++++++ .../RunnerTests/ThreadSafeEventChannelTests.m | 44 +++++++++++-------- .../Sources/camera_avfoundation/FLTCam.m | 4 +- .../FLTThreadSafeEventChannel.m | 5 ++- .../Protocols/FLTEventChannelProtocol.m | 27 ++++++++++++ .../Protocols/FLTMethodChannelProtocol.m | 34 -------------- .../include/CameraPlugin.modulemap | 2 +- .../FLTThreadSafeEventChannel.h | 4 +- .../Protocols/FLTEventChannelProtocol.h | 18 ++++++++ .../Protocols/FLTMethodChannelProtocol.h | 27 ------------ 17 files changed, 121 insertions(+), 96 deletions(-) create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.h create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannelProtocol.m delete mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTMethodChannelProtocol.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h delete mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTMethodChannelProtocol.h diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index 6f24df26c737..39f19eb9ce8d 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -19,6 +19,8 @@ 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */; }; 7F87E8022D01FD6F00A3549C /* MockCaptureDeviceController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E8012D01FD5600A3549C /* MockCaptureDeviceController.m */; }; 7F87E80C2D0325D900A3549C /* MockDeviceOrientationProvider.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80B2D0325D700A3549C /* MockDeviceOrientationProvider.m */; }; + 7F87E80E2D03283E00A3549C /* CameraExposureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80D2D03283E00A3549C /* CameraExposureTests.m 
*/; }; + 7F87E81C2D06DE2400A3549C /* MockEventChannel.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E81B2D06DE2400A3549C /* MockEventChannel.m */; }; 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; @@ -83,6 +85,9 @@ 7F87E8032D02FF8C00A3549C /* MockCaptureDeviceController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureDeviceController.h; sourceTree = ""; }; 7F87E80A2D0325B200A3549C /* MockDeviceOrientationProvider.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockDeviceOrientationProvider.h; sourceTree = ""; }; 7F87E80B2D0325D700A3549C /* MockDeviceOrientationProvider.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockDeviceOrientationProvider.m; sourceTree = ""; }; + 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraExposureTests.m; sourceTree = ""; }; + 7F87E81A2D06DDD700A3549C /* MockEventChannel.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockEventChannel.h; sourceTree = ""; }; + 7F87E81B2D06DE2400A3549C /* MockEventChannel.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockEventChannel.m; sourceTree = ""; }; 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; }; 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -134,12 +139,15 @@ 03BB76692665316900CE5A93 /* RunnerTests */ = { isa = PBXGroup; children = ( + 7F87E81A2D06DDD700A3549C /* MockEventChannel.h */, + 7F87E81B2D06DE2400A3549C /* MockEventChannel.m */, 7F87E80B2D0325D700A3549C /* MockDeviceOrientationProvider.m */, 7F87E80A2D0325B200A3549C /* MockDeviceOrientationProvider.h */, 7F87E8032D02FF8C00A3549C /* MockCaptureDeviceController.h */, 7F87E8012D01FD5600A3549C /* MockCaptureDeviceController.m */, 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */, 03BB766A2665316900CE5A93 /* CameraFocusTests.m */, + 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */, 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */, 03BB766C2665316900CE5A93 /* Info.plist */, 033B94BD269C40A200B4DF97 /* CameraMethodChannelTests.m */, @@ -463,9 +471,11 @@ 7F87E8022D01FD6F00A3549C /* MockCaptureDeviceController.m in Sources */, 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */, E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */, + 7F87E80E2D03283E00A3549C /* CameraExposureTests.m in Sources */, 7F87E80C2D0325D900A3549C /* MockDeviceOrientationProvider.m in Sources */, E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */, E04F108627A87CA600573D0C /* FLTSavePhotoDelegateTests.m in Sources */, + 7F87E81C2D06DE2400A3549C /* 
MockEventChannel.m in Sources */, 43ED1537282570DE00EB00DE /* AvailableCamerasTest.m in Sources */, E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */, 334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */, diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m index 32c3fe0537bc..2b81df5eb03b 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m @@ -8,6 +8,7 @@ #endif @import AVFoundation; @import XCTest; + #import "CameraTestUtils.h" #import "FLTPermissionService.h" #import "FLTCameraPermissionManager.h" diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h index 901bcc60c7b1..69441e0e353b 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h @@ -3,9 +3,6 @@ // found in the LICENSE file. @import camera_avfoundation; -#if __has_include() -@import camera_avfoundation.Test; -#endif @import AVFoundation; NS_ASSUME_NONNULL_BEGIN diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m index 548cea98bb9c..d2e71a4afe32 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m @@ -3,9 +3,6 @@ // found in the LICENSE file. @import camera_avfoundation; -#if __has_include() -@import camera_avfoundation.Test; -#endif @import AVFoundation; #import "MockCaptureDeviceController.h" diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.h index 0421ec665293..855bebc998c7 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.h @@ -3,9 +3,6 @@ // found in the LICENSE file. @import camera_avfoundation; -#if __has_include() -@import camera_avfoundation.Test; -#endif @import AVFoundation; NS_ASSUME_NONNULL_BEGIN diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.m index 963cb7e96ff9..364b5b1fd60b 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.m @@ -3,9 +3,6 @@ // found in the LICENSE file. 
@import camera_avfoundation; -#if __has_include() -@import camera_avfoundation.Test; -#endif @import AVFoundation; #import "MockDeviceOrientationProvider.h" diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.h new file mode 100644 index 000000000000..098b75b6982e --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.h @@ -0,0 +1,14 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +@import Flutter; + +NS_ASSUME_NONNULL_BEGIN + +@interface MockEventChannel : NSObject +@property(nonatomic, copy) void (^setStreamHandlerStub)(NSObject *); +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.m new file mode 100644 index 000000000000..7190cfb17714 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.m @@ -0,0 +1,15 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "MockEventChannel.h" + +@implementation MockEventChannel + +- (void)setStreamHandler:(NSObject *)handler { + if (self.setStreamHandlerStub) { + self.setStreamHandlerStub(handler); + } +} + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m index 169b75ddfbb1..1f6562e2303e 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m @@ -7,30 +7,37 @@ @import camera_avfoundation.Test; #endif @import XCTest; -#import + +#import "MockEventChannel.h" @interface ThreadSafeEventChannelTests : XCTestCase +@property(readonly, nonatomic) MockEventChannel *mockEventChannel; +@property(readonly, nonatomic) FLTThreadSafeEventChannel *threadSafeEventChannel; @end @implementation ThreadSafeEventChannelTests -- (void)testSetStreamHandler_shouldStayOnMainThreadIfCalledFromMainThread { - FlutterEventChannel *mockEventChannel = OCMClassMock([FlutterEventChannel class]); - FLTThreadSafeEventChannel *threadSafeEventChannel = - [[FLTThreadSafeEventChannel alloc] initWithEventChannel:mockEventChannel]; +- (void)setUp { + [super setUp]; + _mockEventChannel = [[MockEventChannel alloc] init]; + _threadSafeEventChannel = + [[FLTThreadSafeEventChannel alloc] initWithEventChannel:_mockEventChannel]; +} +- (void)testSetStreamHandler_shouldStayOnMainThreadIfCalledFromMainThread { XCTestExpectation *mainThreadExpectation = [self expectationWithDescription:@"setStreamHandler must be called on the main thread"]; XCTestExpectation *mainThreadCompletionExpectation = [self expectationWithDescription: @"setStreamHandler's completion block must be called on the main thread"]; - OCMStub([mockEventChannel setStreamHandler:[OCMArg any]]).andDo(^(NSInvocation *invocation) { + + [_mockEventChannel setSetStreamHandlerStub:^(NSObject *handler) { if (NSThread.isMainThread) { [mainThreadExpectation fulfill]; } - }); - - [threadSafeEventChannel setStreamHandler:nil + }]; + + 
[_threadSafeEventChannel setStreamHandler:nil completion:^{ if (NSThread.isMainThread) { [mainThreadCompletionExpectation fulfill]; @@ -40,23 +47,22 @@ - (void)testSetStreamHandler_shouldStayOnMainThreadIfCalledFromMainThread { } - (void)testSetStreamHandler_shouldDispatchToMainThreadIfCalledFromBackgroundThread { - FlutterEventChannel *mockEventChannel = OCMClassMock([FlutterEventChannel class]); - FLTThreadSafeEventChannel *threadSafeEventChannel = - [[FLTThreadSafeEventChannel alloc] initWithEventChannel:mockEventChannel]; - XCTestExpectation *mainThreadExpectation = [self expectationWithDescription:@"setStreamHandler must be called on the main thread"]; XCTestExpectation *mainThreadCompletionExpectation = [self expectationWithDescription: @"setStreamHandler's completion block must be called on the main thread"]; - OCMStub([mockEventChannel setStreamHandler:[OCMArg any]]).andDo(^(NSInvocation *invocation) { + + + [_mockEventChannel setSetStreamHandlerStub:^(NSObject *handler) { if (NSThread.isMainThread) { [mainThreadExpectation fulfill]; } - }); + }]; + __weak typeof(self) weakSelf = self; dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ - [threadSafeEventChannel setStreamHandler:nil + [weakSelf.threadSafeEventChannel setStreamHandler:nil completion:^{ if (NSThread.isMainThread) { [mainThreadCompletionExpectation fulfill]; @@ -69,11 +75,13 @@ - (void)testSetStreamHandler_shouldDispatchToMainThreadIfCalledFromBackgroundThr - (void)testEventChannel_shouldBeKeptAliveWhenDispatchingBackToMainThread { XCTestExpectation *expectation = [self expectationWithDescription:@"Completion should be called."]; + + __weak typeof(self) weakSelf = self; dispatch_async(dispatch_queue_create("test", NULL), ^{ FLTThreadSafeEventChannel *channel = [[FLTThreadSafeEventChannel alloc] - initWithEventChannel:OCMClassMock([FlutterEventChannel class])]; + initWithEventChannel:weakSelf.mockEventChannel]; - [channel setStreamHandler:OCMOCK_ANY + [channel setStreamHandler:nil completion:^{ [expectation fulfill]; }]; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 2174f6998691..92937d9b77e4 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -15,6 +15,7 @@ #import "./include/camera_avfoundation/messages.g.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" #import "./include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h" +#import "./include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h" static FlutterError *FlutterErrorFromNSError(NSError *error) { return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code] @@ -1211,8 +1212,9 @@ - (void)startImageStreamWithMessenger:(NSObject *)messen FlutterEventChannel *eventChannel = [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera_avfoundation/imageStream" binaryMessenger:messenger]; + id eventChannelProtocol = [[FLTDefaultEventChannel alloc] initWithEventChannel:eventChannel]; FLTThreadSafeEventChannel *threadSafeEventChannel = - [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel]; + [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannelProtocol]; _imageStreamHandler = 
imageStreamHandler; __weak typeof(self) weakSelf = self; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m index 53c7273a5901..3addfac69d18 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m @@ -4,14 +4,15 @@ #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" #import "./include/camera_avfoundation/QueueUtils.h" +#import "./include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h" @interface FLTThreadSafeEventChannel () -@property(nonatomic, strong) FlutterEventChannel *channel; +@property(nonatomic, strong) id channel; @end @implementation FLTThreadSafeEventChannel -- (instancetype)initWithEventChannel:(FlutterEventChannel *)channel { +- (instancetype)initWithEventChannel:(id)channel { self = [super init]; if (self) { _channel = channel; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannelProtocol.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannelProtocol.m new file mode 100644 index 000000000000..7ddb68e07b05 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannelProtocol.m @@ -0,0 +1,27 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import Flutter; + +#import "FLTEventChannelProtocol.h" + +@interface FLTDefaultEventChannel () +@property(nonatomic, strong) FlutterEventChannel *channel; +@end + +@implementation FLTDefaultEventChannel + +- (instancetype)initWithEventChannel:(FlutterEventChannel *)channel { + self = [super init]; + if (self) { + _channel = channel; + } + return self; +} + +- (void)setStreamHandler:(NSObject *)handler { + [self.channel setStreamHandler:handler]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTMethodChannelProtocol.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTMethodChannelProtocol.m deleted file mode 100644 index 416617b59df0..000000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTMethodChannelProtocol.m +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2013 The Flutter Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -@import Flutter; - -#import "FLTMethodChannelProtocol.h" - -@interface DefaultMethodChannel () -/// The wrapped FlutterMethodChannel -@property(nonatomic, strong) FlutterMethodChannel *channel; -@end - -@implementation DefaultMethodChannel - -- (instancetype)initWithChannel:(nonnull FlutterMethodChannel *)channel { - self = [super init]; - if (self) { - _channel = channel; - } - return self; -} - -- (void)invokeMethod:(nonnull NSString *)method arguments:(id _Nullable)arguments { - [self.channel invokeMethod:method arguments:arguments]; -} - -- (void)invokeMethod:(nonnull NSString *)method - arguments:(id _Nullable)arguments - result:(FlutterResult _Nullable)callback { - [self.channel invokeMethod:method arguments:arguments result:callback]; -} - -@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap index dd5366316aee..3475bd05410c 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap @@ -15,7 +15,7 @@ framework module camera_avfoundation { header "FLTCaptureDeviceControlling.h" header "FLTCameraPermissionManager.h" header "FLTDeviceOrientationProviding.h" - header "FLTMethodChannelProtocol.h" + header "FLTEventChannelProtocol.h" header "QueueUtils.h" } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h index 20a1d4023a31..e7cf7f90c74b 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h @@ -4,6 +4,8 @@ #import +#import "FLTEventChannelProtocol.h" + NS_ASSUME_NONNULL_BEGIN /// A thread safe wrapper for FlutterEventChannel that can be called from any thread, by dispatching @@ -12,7 +14,7 @@ NS_ASSUME_NONNULL_BEGIN /// Creates a FLTThreadSafeEventChannel by wrapping a FlutterEventChannel object. /// @param channel The FlutterEventChannel object to be wrapped. -- (instancetype)initWithEventChannel:(FlutterEventChannel *)channel; +- (instancetype)initWithEventChannel:(id)channel; /// Registers a handler on the main thread for stream setup requests from the Flutter side. /// The completion block runs on the main thread. diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h new file mode 100644 index 000000000000..eefdd2a0a6fa --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h @@ -0,0 +1,18 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import Flutter; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTEventChannelProtocol +- (void)setStreamHandler:(nullable NSObject *)handler; +@end + +/// The default method channel that wraps FlutterMethodChannel +@interface FLTDefaultEventChannel : NSObject +- (instancetype)initWithEventChannel:(FlutterEventChannel *)channel; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTMethodChannelProtocol.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTMethodChannelProtocol.h deleted file mode 100644 index 9795f0e4cd8b..000000000000 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTMethodChannelProtocol.h +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2013 The Flutter Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -NS_ASSUME_NONNULL_BEGIN -/// A protocol that wraps FlutterMethodChannel. -@protocol FLTMethodChannelProtocol - -/// Invokes the specified Flutter method with the specified arguments, expecting -/// an asynchronous result. -- (void)invokeMethod:(NSString *)method arguments:(id _Nullable)arguments; - -/// Invokes the specified Flutter method with the specified arguments and specified callback -- (void)invokeMethod:(NSString *)method - arguments:(id _Nullable)arguments - result:(FlutterResult _Nullable)callback; - -@end - -/// The default method channel that wraps FlutterMethodChannel -@interface DefaultMethodChannel : NSObject - -/// Initialize this wrapper with a FlutterMethodChannel -- (instancetype)initWithChannel:(FlutterMethodChannel *)channel; -@end - -NS_ASSUME_NONNULL_END From 688f048621fe34227ff4b35199a64cdda88095de Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Mon, 9 Dec 2024 10:55:14 +0100 Subject: [PATCH 07/16] Migrate AvailableCamerasTests --- .../ios/RunnerTests/AvailableCamerasTest.m | 96 ++++++++++--------- .../ios/RunnerTests/CameraOrientationTests.m | 12 ++- .../example/ios/RunnerTests/CameraUtilTests.m | 1 - .../RunnerTests/MockCameraDeviceDiscovery.h | 15 +++ .../RunnerTests/MockCameraDeviceDiscovery.m | 18 ++++ .../RunnerTests/MockCaptureDeviceController.h | 2 + .../RunnerTests/MockCaptureDeviceController.m | 1 - .../camera_avfoundation/CameraPlugin.m | 19 ++-- .../Protocols/FLTCameraDeviceDiscovery.m | 30 ++++++ .../Protocols/FLTCaptureDeviceControlling.m | 3 + .../include/CameraPlugin.modulemap | 1 + .../camera_avfoundation/CameraPlugin_Test.h | 4 +- .../Protocols/FLTCameraDeviceDiscovery.h | 21 ++++ .../Protocols/FLTCaptureDeviceControlling.h | 2 + 14 files changed, 169 insertions(+), 56 deletions(-) create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.h create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h diff --git 
a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m index f26a8dc48f16..22a5d84d2ef5 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m @@ -8,33 +8,44 @@ #endif @import XCTest; @import AVFoundation; -#import + +#import "MockCameraDeviceDiscovery.h" +#import "MockCaptureDeviceController.h" @interface AvailableCamerasTest : XCTestCase +@property(nonatomic, strong) MockCameraDeviceDiscovery *mockDeviceDiscovery; +@property(nonatomic, strong) CameraPlugin *cameraPlugin; @end @implementation AvailableCamerasTest +- (void)setUp { + [super setUp]; + + self.mockDeviceDiscovery = [[MockCameraDeviceDiscovery alloc] init]; + self.cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil globalAPI:nil deviceDiscovery:_mockDeviceDiscovery]; +} + + - (void)testAvailableCamerasShouldReturnAllCamerasOnMultiCameraIPhone { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; // iPhone 13 Cameras: - AVCaptureDevice *wideAngleCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([wideAngleCamera uniqueID]).andReturn(@"0"); - OCMStub([wideAngleCamera position]).andReturn(AVCaptureDevicePositionBack); - - AVCaptureDevice *frontFacingCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([frontFacingCamera uniqueID]).andReturn(@"1"); - OCMStub([frontFacingCamera position]).andReturn(AVCaptureDevicePositionFront); - - AVCaptureDevice *ultraWideCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([ultraWideCamera uniqueID]).andReturn(@"2"); - OCMStub([ultraWideCamera position]).andReturn(AVCaptureDevicePositionBack); - - AVCaptureDevice *telephotoCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([telephotoCamera uniqueID]).andReturn(@"3"); - OCMStub([telephotoCamera position]).andReturn(AVCaptureDevicePositionBack); + MockCaptureDeviceController *wideAngleCamera = [[MockCaptureDeviceController alloc] init]; + wideAngleCamera.uniqueID = @"0"; + wideAngleCamera.position = AVCaptureDevicePositionBack; + + MockCaptureDeviceController *frontFacingCamera = [[MockCaptureDeviceController alloc] init]; + frontFacingCamera.uniqueID = @"1"; + frontFacingCamera.position = AVCaptureDevicePositionFront; + + MockCaptureDeviceController *ultraWideCamera = [[MockCaptureDeviceController alloc] init]; + ultraWideCamera.uniqueID = @"2"; + ultraWideCamera.position = AVCaptureDevicePositionBack; + + MockCaptureDeviceController *telephotoCamera = [[MockCaptureDeviceController alloc] init]; + telephotoCamera.uniqueID = @"3"; + telephotoCamera.position = AVCaptureDevicePositionBack; NSMutableArray *requiredTypes = [@[ AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera ] @@ -43,21 +54,21 @@ - (void)testAvailableCamerasShouldReturnAllCamerasOnMultiCameraIPhone { [requiredTypes addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera]; } - id discoverySessionMock = OCMClassMock([AVCaptureDeviceDiscoverySession class]); - OCMStub([discoverySessionMock discoverySessionWithDeviceTypes:requiredTypes - mediaType:AVMediaTypeVideo - position:AVCaptureDevicePositionUnspecified]) - .andReturn(discoverySessionMock); - NSMutableArray *cameras = [NSMutableArray array]; [cameras addObjectsFromArray:@[ wideAngleCamera, 
frontFacingCamera, telephotoCamera ]]; if (@available(iOS 13.0, *)) { [cameras addObject:ultraWideCamera]; } - OCMStub([discoverySessionMock devices]).andReturn([NSArray arrayWithArray:cameras]); - + + _mockDeviceDiscovery.discoverySessionStub = ^NSArray> * _Nullable(NSArray * _Nonnull deviceTypes, AVMediaType _Nonnull mediaType, AVCaptureDevicePosition position) { + XCTAssertEqualObjects(deviceTypes, requiredTypes); + XCTAssertEqual(mediaType, AVMediaTypeVideo); + XCTAssertEqual(position, AVCaptureDevicePositionUnspecified); + return cameras; + }; + __block NSArray *resultValue; - [camera + [_cameraPlugin availableCamerasWithCompletion:^(NSArray *_Nullable result, FlutterError *_Nullable error) { XCTAssertNil(error); @@ -74,17 +85,16 @@ - (void)testAvailableCamerasShouldReturnAllCamerasOnMultiCameraIPhone { } } - (void)testAvailableCamerasShouldReturnOneCameraOnSingleCameraIPhone { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; // iPhone 8 Cameras: - AVCaptureDevice *wideAngleCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([wideAngleCamera uniqueID]).andReturn(@"0"); - OCMStub([wideAngleCamera position]).andReturn(AVCaptureDevicePositionBack); - - AVCaptureDevice *frontFacingCamera = OCMClassMock([AVCaptureDevice class]); - OCMStub([frontFacingCamera uniqueID]).andReturn(@"1"); - OCMStub([frontFacingCamera position]).andReturn(AVCaptureDevicePositionFront); + MockCaptureDeviceController *wideAngleCamera = [[MockCaptureDeviceController alloc] init]; + wideAngleCamera.uniqueID = @"0"; + wideAngleCamera.position = AVCaptureDevicePositionBack; + + MockCaptureDeviceController *frontFacingCamera = [[MockCaptureDeviceController alloc] init]; + frontFacingCamera.uniqueID = @"1"; + frontFacingCamera.position = AVCaptureDevicePositionFront; NSMutableArray *requiredTypes = [@[ AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera ] @@ -92,19 +102,19 @@ - (void)testAvailableCamerasShouldReturnOneCameraOnSingleCameraIPhone { if (@available(iOS 13.0, *)) { [requiredTypes addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera]; } - - id discoverySessionMock = OCMClassMock([AVCaptureDeviceDiscoverySession class]); - OCMStub([discoverySessionMock discoverySessionWithDeviceTypes:requiredTypes - mediaType:AVMediaTypeVideo - position:AVCaptureDevicePositionUnspecified]) - .andReturn(discoverySessionMock); - + NSMutableArray *cameras = [NSMutableArray array]; [cameras addObjectsFromArray:@[ wideAngleCamera, frontFacingCamera ]]; - OCMStub([discoverySessionMock devices]).andReturn([NSArray arrayWithArray:cameras]); + + _mockDeviceDiscovery.discoverySessionStub = ^NSArray> * _Nullable(NSArray * _Nonnull deviceTypes, AVMediaType _Nonnull mediaType, AVCaptureDevicePosition position) { + XCTAssertEqualObjects(deviceTypes, requiredTypes); + XCTAssertEqual(mediaType, AVMediaTypeVideo); + XCTAssertEqual(position, AVCaptureDevicePositionUnspecified); + return cameras; + }; __block NSArray *resultValue; - [camera + [_cameraPlugin availableCamerasWithCompletion:^(NSArray *_Nullable result, FlutterError *_Nullable error) { XCTAssertNil(error); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m index b17a3bf64560..cbeab1a3008c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m +++ 
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m @@ -11,6 +11,7 @@ #import "MockCaptureDeviceController.h" #import "MockDeviceOrientationProvider.h" +#import "MockCameraDeviceDiscovery.h" @interface StubGlobalEventApi : FCPCameraGlobalEventApi @property(nonatomic) BOOL called; @@ -64,6 +65,7 @@ @interface CameraOrientationTests : XCTestCase @property(readonly, nonatomic) MockCaptureDeviceController *mockDevice; @property(readonly, nonatomic) StubGlobalEventApi *eventAPI; @property(readonly, nonatomic) CameraPlugin *cameraPlugin; +@property(readonly, nonatomic) MockCameraDeviceDiscovery *deviceDiscovery; @end @implementation CameraOrientationTests @@ -73,12 +75,14 @@ - (void)setUp { _mockDevice = [[MockCaptureDeviceController alloc] init]; _camera = [[MockCamera alloc] init]; _eventAPI = [[StubGlobalEventApi alloc] init]; + _deviceDiscovery = [[MockCameraDeviceDiscovery alloc] init]; [_camera setValue:_mockDevice forKey:@"captureDevice"]; _cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil - globalAPI:_eventAPI]; + globalAPI:_eventAPI + deviceDiscovery:_deviceDiscovery]; _cameraPlugin.camera = _camera; } @@ -120,7 +124,8 @@ - (void)testOrientationNotificationsNotCalledForFaceDown { StubGlobalEventApi *eventAPI = [[StubGlobalEventApi alloc] init]; CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil - globalAPI:eventAPI]; + globalAPI:eventAPI + deviceDiscovery:_deviceDiscovery]; [self sendOrientation:UIDeviceOrientationFaceDown toCamera:cameraPlugin]; @@ -156,7 +161,8 @@ - (void)testOrientationChanged_noRetainCycle { @autoreleasepool { CameraPlugin *plugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil - globalAPI:_eventAPI]; + globalAPI:_eventAPI + deviceDiscovery:_deviceDiscovery]; weakPlugin = plugin; plugin.captureSessionQueue = captureSessionQueue; plugin.camera = _camera; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraUtilTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraUtilTests.m index d1a835c36efe..d33d660812ed 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraUtilTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraUtilTests.m @@ -5,7 +5,6 @@ @import camera_avfoundation; @import XCTest; @import AVFoundation; -#import @interface FLTCam : NSObject +@property(nonatomic, copy) NSArray> *_Nullable (^discoverySessionStub) + (NSArray *deviceTypes, AVMediaType mediaType, AVCaptureDevicePosition position); +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.m new file mode 100644 index 000000000000..71e3ed0f06db --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.m @@ -0,0 +1,18 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
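// Illustrative sketch, not from this patch: with discoverySessionStub in place, AvailableCamerasTest
// could also model a device that reports no cameras. The test name is hypothetical, and the
// expectation of an empty (rather than nil) result is an assumption about CameraPlugin's behavior,
// not taken from these hunks.
- (void)testAvailableCamerasShouldReturnEmptyListWhenNoCameraIsDiscovered {
  XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"];

  _mockDeviceDiscovery.discoverySessionStub = ^NSArray<id<FLTCaptureDeviceControlling>> *_Nullable(
      NSArray<AVCaptureDeviceType> *deviceTypes, AVMediaType mediaType,
      AVCaptureDevicePosition position) {
    return @[];
  };

  [_cameraPlugin availableCamerasWithCompletion:^(
                     NSArray<FCPPlatformCameraDescription *> *_Nullable result,
                     FlutterError *_Nullable error) {
    XCTAssertNil(error);
    XCTAssertEqual(result.count, 0u);
    [expectation fulfill];
  }];

  [self waitForExpectationsWithTimeout:1 handler:nil];
}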
+ +#import "MockCameraDeviceDiscovery.h" + +@implementation MockCameraDeviceDiscovery + +- (NSArray> *)discoverySessionWithDeviceTypes:(NSArray *)deviceTypes + mediaType:(AVMediaType)mediaType + position:(AVCaptureDevicePosition)position { + if (self.discoverySessionStub) { + return self.discoverySessionStub(deviceTypes, mediaType, position); + } + return @[]; +} + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h index 69441e0e353b..954323638b81 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h @@ -8,6 +8,8 @@ NS_ASSUME_NONNULL_BEGIN @interface MockCaptureDeviceController : NSObject +@property(nonatomic, assign) NSString* uniqueID; + // Position/Orientation @property(nonatomic, assign) AVCaptureDevicePosition position; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m index d2e71a4afe32..7823ba87140e 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m @@ -8,7 +8,6 @@ #import "MockCaptureDeviceController.h" @implementation MockCaptureDeviceController - - (void)setActiveFormat:(AVCaptureDeviceFormat *)format { _activeFormat = format; if (self.setActiveFormatStub) { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m index f5f1cf94f2b5..9e0883f9b554 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m @@ -14,6 +14,7 @@ #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" #import "./include/camera_avfoundation/QueueUtils.h" #import "./include/camera_avfoundation/messages.g.h" +#import "./include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h" static FlutterError *FlutterErrorFromNSError(NSError *error) { return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code] @@ -26,6 +27,7 @@ @interface CameraPlugin () @property(readonly, nonatomic) NSObject *messenger; @property(nonatomic) FCPCameraGlobalEventApi *globalEventAPI; @property(readonly, nonatomic) FLTCameraPermissionManager *permissionManager; +@property(readonly, nonatomic) id deviceDiscovery; @end @implementation CameraPlugin @@ -41,18 +43,21 @@ - (instancetype)initWithRegistry:(NSObject *)registry return [self initWithRegistry:registry messenger:messenger - globalAPI:[[FCPCameraGlobalEventApi alloc] initWithBinaryMessenger:messenger]]; + globalAPI:[[FCPCameraGlobalEventApi alloc] initWithBinaryMessenger:messenger] + deviceDiscovery:[[FLTDefaultCameraDeviceDiscovery alloc] init]]; } - (instancetype)initWithRegistry:(NSObject *)registry messenger:(NSObject *)messenger - globalAPI:(FCPCameraGlobalEventApi *)globalAPI { + globalAPI:(FCPCameraGlobalEventApi *)globalAPI + deviceDiscovery:(id)deviceDiscovery { self = [super init]; NSAssert(self, @"super init cannot be 
nil"); _registry = registry; _messenger = messenger; _globalEventAPI = globalAPI; _captureSessionQueue = dispatch_queue_create("io.flutter.camera.captureSessionQueue", NULL); + _deviceDiscovery = deviceDiscovery; id permissionService = [[FLTDefaultPermissionService alloc] init]; _permissionManager = [[FLTCameraPermissionManager alloc] initWithPermissionService:permissionService]; @@ -118,11 +123,11 @@ - (void)availableCamerasWithCompletion: if (@available(iOS 13.0, *)) { [discoveryDevices addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera]; } - AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession - discoverySessionWithDeviceTypes:discoveryDevices - mediaType:AVMediaTypeVideo - position:AVCaptureDevicePositionUnspecified]; - NSArray *devices = discoverySession.devices; + + NSArray> *devices = [self.deviceDiscovery discoverySessionWithDeviceTypes:discoveryDevices + mediaType:AVMediaTypeVideo + position:AVCaptureDevicePositionUnspecified]; + NSMutableArray *reply = [[NSMutableArray alloc] initWithCapacity:devices.count]; for (AVCaptureDevice *device in devices) { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.m new file mode 100644 index 000000000000..3987ffaea585 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.m @@ -0,0 +1,30 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import Flutter; + +#import "FLTCameraDeviceDiscovery.h" + +@implementation FLTDefaultCameraDeviceDiscovery + +- (NSArray> *)discoverySessionWithDeviceTypes:(NSArray *)deviceTypes + mediaType:(AVMediaType)mediaType + position:(AVCaptureDevicePosition)position { + AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession + discoverySessionWithDeviceTypes:deviceTypes + mediaType:mediaType + position:position]; + + NSArray *devices = discoverySession.devices; + NSMutableArray> *deviceControllers = [NSMutableArray array]; + + for (AVCaptureDevice *device in devices) { + FLTDefaultCaptureDeviceController *controller = [[FLTDefaultCaptureDeviceController alloc] initWithDevice:device]; + [deviceControllers addObject:controller]; + } + + return deviceControllers; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m index 1483620039b4..e17ca4a4067c 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m @@ -166,5 +166,8 @@ - (AVCaptureInput *)createInput:(NSError * _Nullable * _Nullable)error { return [AVCaptureDeviceInput deviceInputWithDevice:_device error:error]; } +- (nonnull NSString *)uniqueID { + return self.device.uniqueID; +} @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap 
b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap index 3475bd05410c..170400bb80ff 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap @@ -16,6 +16,7 @@ framework module camera_avfoundation { header "FLTCameraPermissionManager.h" header "FLTDeviceOrientationProviding.h" header "FLTEventChannelProtocol.h" + header "FLTCameraDeviceDiscovery.h" header "QueueUtils.h" } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h index c29c2f306db8..28c7e75b99d2 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h @@ -7,6 +7,7 @@ #import "CameraPlugin.h" #import "FLTCam.h" #import "messages.g.h" +#import "FLTCameraDeviceDiscovery.h" /// APIs exposed for unit testing. @interface CameraPlugin () @@ -25,7 +26,8 @@ /// unit testing. - (instancetype)initWithRegistry:(NSObject *)registry messenger:(NSObject *)messenger - globalAPI:(FCPCameraGlobalEventApi *)globalAPI NS_DESIGNATED_INITIALIZER; + globalAPI:(FCPCameraGlobalEventApi *)globalAPI + deviceDiscovery:(id)deviceDiscovery NS_DESIGNATED_INITIALIZER; /// Hide the default public constructor. - (instancetype)init NS_UNAVAILABLE; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h new file mode 100644 index 000000000000..4f28bbe02315 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h @@ -0,0 +1,21 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import AVFoundation; + +#import "FLTCaptureDeviceControlling.h" +#import "messages.g.h" + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTCameraDeviceDiscovery +- (NSArray> *)discoverySessionWithDeviceTypes:(NSArray *)deviceTypes + mediaType:(AVMediaType)mediaType + position:(AVCaptureDevicePosition)position; +@end + +@interface FLTDefaultCameraDeviceDiscovery : NSObject +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h index 5603055865aa..434e639f3d07 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h @@ -9,6 +9,8 @@ NS_ASSUME_NONNULL_BEGIN @protocol FLTCaptureDeviceControlling +- (NSString *)uniqueID; + // Position/Orientation - (AVCaptureDevicePosition)position; From 22aff03df821d2b359d2fcb23558324767208ed5 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Wed, 11 Dec 2024 13:05:23 +0100 Subject: [PATCH 08/16] Migrate more tests and introduce new protocols --- .../ios/Runner.xcodeproj/project.pbxproj | 18 ++++ .../ios/RunnerTests/AvailableCamerasTest.m | 7 +- .../RunnerTests/CameraMethodChannelTests.m | 47 +++++---- .../ios/RunnerTests/CameraOrientationTests.m | 29 +++++- .../RunnerTests/CameraSessionPresetsTests.m | 67 ++++++++----- .../ios/RunnerTests/CameraSettingsTests.m | 33 ++++--- .../example/ios/RunnerTests/CameraTestUtils.h | 6 +- .../example/ios/RunnerTests/CameraTestUtils.m | 67 +++++-------- .../ios/RunnerTests/FLTCamPhotoCaptureTests.m | 2 +- .../RunnerTests/FLTSavePhotoDelegateTests.m | 58 ++++++----- .../ios/RunnerTests/MockCaptureSession.h | 24 +++++ .../ios/RunnerTests/MockCaptureSession.m | 98 +++++++++++++++++++ .../example/ios/RunnerTests/MockPhotoData.h | 13 +++ .../example/ios/RunnerTests/MockPhotoData.m | 18 ++++ .../camera_avfoundation/CameraPlugin.m | 42 ++++++-- .../Sources/camera_avfoundation/FLTCam.m | 20 ++-- .../FLTCamMediaSettingsAVWrapper.m | 5 +- .../FLTSavePhotoDelegate.m | 11 ++- .../Protocols/FLTCaptureDeviceControlling.m | 18 ++-- .../Protocols/FLTCaptureSessionProtocol.m | 97 ++++++++++++++++++ .../Protocols/FLTPhotoData.m | 22 +++++ .../include/CameraPlugin.modulemap | 2 + .../camera_avfoundation/CameraPlugin.h | 5 + .../camera_avfoundation/CameraPlugin_Test.h | 5 +- .../include/camera_avfoundation/FLTCam.h | 32 ++++++ .../FLTCamMediaSettingsAVWrapper.h | 5 +- .../include/camera_avfoundation/FLTCam_Test.h | 33 +------ .../FLTSavePhotoDelegate_Test.h | 3 +- .../Protocols/FLTCaptureSessionProtocol.h | 37 +++++++ .../Protocols/FLTPhotoData.h | 21 ++++ 30 files changed, 642 insertions(+), 203 deletions(-) create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.h create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.h create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.m create mode 100644 
packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPhotoData.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPhotoData.h diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index 39f19eb9ce8d..98e6cb7657bf 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -21,6 +21,9 @@ 7F87E80C2D0325D900A3549C /* MockDeviceOrientationProvider.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80B2D0325D700A3549C /* MockDeviceOrientationProvider.m */; }; 7F87E80E2D03283E00A3549C /* CameraExposureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */; }; 7F87E81C2D06DE2400A3549C /* MockEventChannel.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E81B2D06DE2400A3549C /* MockEventChannel.m */; }; + 7F87E8262D06EBCB00A3549C /* MockCameraDeviceDiscovery.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E8252D06EBCB00A3549C /* MockCameraDeviceDiscovery.m */; }; + 7F87E8342D072F9A00A3549C /* MockCaptureSession.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E8332D072F9A00A3549C /* MockCaptureSession.m */; }; + 7F87E83B2D09B4A300A3549C /* MockPhotoData.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E83A2D09B4A300A3549C /* MockPhotoData.m */; }; 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; @@ -88,6 +91,12 @@ 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraExposureTests.m; sourceTree = ""; }; 7F87E81A2D06DDD700A3549C /* MockEventChannel.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockEventChannel.h; sourceTree = ""; }; 7F87E81B2D06DE2400A3549C /* MockEventChannel.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockEventChannel.m; sourceTree = ""; }; + 7F87E8242D06EBB800A3549C /* MockCameraDeviceDiscovery.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCameraDeviceDiscovery.h; sourceTree = ""; }; + 7F87E8252D06EBCB00A3549C /* MockCameraDeviceDiscovery.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCameraDeviceDiscovery.m; sourceTree = ""; }; + 7F87E8322D072F8B00A3549C /* MockCaptureSession.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureSession.h; sourceTree = ""; }; + 7F87E8332D072F9A00A3549C /* MockCaptureSession.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureSession.m; sourceTree = ""; 
}; + 7F87E8392D09B45300A3549C /* MockPhotoData.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockPhotoData.h; sourceTree = ""; }; + 7F87E83A2D09B4A300A3549C /* MockPhotoData.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockPhotoData.m; sourceTree = ""; }; 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; }; 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -145,6 +154,12 @@ 7F87E80A2D0325B200A3549C /* MockDeviceOrientationProvider.h */, 7F87E8032D02FF8C00A3549C /* MockCaptureDeviceController.h */, 7F87E8012D01FD5600A3549C /* MockCaptureDeviceController.m */, + 7F87E8242D06EBB800A3549C /* MockCameraDeviceDiscovery.h */, + 7F87E8252D06EBCB00A3549C /* MockCameraDeviceDiscovery.m */, + 7F87E8322D072F8B00A3549C /* MockCaptureSession.h */, + 7F87E8332D072F9A00A3549C /* MockCaptureSession.m */, + 7F87E8392D09B45300A3549C /* MockPhotoData.h */, + 7F87E83A2D09B4A300A3549C /* MockPhotoData.m */, 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */, 03BB766A2665316900CE5A93 /* CameraFocusTests.m */, 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */, @@ -466,6 +481,7 @@ files = ( 033B94BE269C40A200B4DF97 /* CameraMethodChannelTests.m in Sources */, E071CF7227B3061B006EF3BA /* FLTCamPhotoCaptureTests.m in Sources */, + 7F87E83B2D09B4A300A3549C /* MockPhotoData.m in Sources */, E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */, 03BB766B2665316900CE5A93 /* CameraFocusTests.m in Sources */, 7F87E8022D01FD6F00A3549C /* MockCaptureDeviceController.m in Sources */, @@ -473,6 +489,7 @@ E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */, 7F87E80E2D03283E00A3549C /* CameraExposureTests.m in Sources */, 7F87E80C2D0325D900A3549C /* MockDeviceOrientationProvider.m in Sources */, + 7F87E8262D06EBCB00A3549C /* MockCameraDeviceDiscovery.m in Sources */, E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */, E04F108627A87CA600573D0C /* FLTSavePhotoDelegateTests.m in Sources */, 7F87E81C2D06DE2400A3549C /* MockEventChannel.m in Sources */, @@ -484,6 +501,7 @@ 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */, E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */, E0B0D2BB27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m in Sources */, + 7F87E8342D072F9A00A3549C /* MockCaptureSession.m in Sources */, E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m index 22a5d84d2ef5..c59c9fdd1391 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m @@ -23,7 +23,12 @@ - (void)setUp { [super setUp]; self.mockDeviceDiscovery = [[MockCameraDeviceDiscovery alloc] init]; - self.cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil 
globalAPI:nil deviceDiscovery:_mockDeviceDiscovery]; + self.cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil globalAPI:nil deviceDiscovery:_mockDeviceDiscovery + sessionFactory:^id{ + return nil; + } deviceFactory:^id(NSString *name) { + return nil; + }]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m index 4df1994699df..6bfc204d019a 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m @@ -8,7 +8,9 @@ #endif @import XCTest; @import AVFoundation; -#import + +#import "MockCaptureSession.h" +#import "MockCaptureDeviceController.h" @interface CameraMethodChannelTests : XCTestCase @end @@ -16,19 +18,19 @@ @interface CameraMethodChannelTests : XCTestCase @implementation CameraMethodChannelTests - (void)testCreate_ShouldCallResultOnMainThread { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; + MockCaptureSession *avCaptureSessionMock = [[MockCaptureSession alloc] init]; + avCaptureSessionMock.mockCanSetSessionPreset = YES; + + MockCaptureDeviceController *mockDeviceController = [[MockCaptureDeviceController alloc] init]; + + CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil globalAPI:nil deviceDiscovery:nil sessionFactory:^id{ + return avCaptureSessionMock; + } deviceFactory:^id(NSString *name) { + return mockDeviceController; + }]; XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; - // Set up mocks for initWithCameraName method - id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]]) - .andReturn([AVCaptureInput alloc]); - - id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock); - OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - // Set up method call __block NSNumber *resultValue; [camera createCameraOnSessionQueueWithName:@"acamera" @@ -51,15 +53,20 @@ - (void)testCreate_ShouldCallResultOnMainThread { } - (void)testDisposeShouldDeallocCamera { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; - - id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]]) - .andReturn([AVCaptureInput alloc]); - - id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock); - OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); + MockCaptureSession *avCaptureSessionMock = [[MockCaptureSession alloc] init]; + avCaptureSessionMock.mockCanSetSessionPreset = YES; + + MockCaptureDeviceController *mockDeviceController = [[MockCaptureDeviceController alloc] init]; + + CameraPlugin *camera = [[CameraPlugin alloc] + initWithRegistry:nil + messenger:nil + globalAPI:nil + deviceDiscovery:nil sessionFactory:^id{ + return avCaptureSessionMock; + } deviceFactory:^id(NSString *name) { + return mockDeviceController; + }]; XCTestExpectation *createExpectation = [self expectationWithDescription:@"create's result block must be called"]; diff 
--git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m index cbeab1a3008c..55012b0c4bcd 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m @@ -12,6 +12,7 @@ #import "MockCaptureDeviceController.h" #import "MockDeviceOrientationProvider.h" #import "MockCameraDeviceDiscovery.h" +#import "MockCaptureSession.h" @interface StubGlobalEventApi : FCPCameraGlobalEventApi @property(nonatomic) BOOL called; @@ -66,6 +67,7 @@ @interface CameraOrientationTests : XCTestCase @property(readonly, nonatomic) StubGlobalEventApi *eventAPI; @property(readonly, nonatomic) CameraPlugin *cameraPlugin; @property(readonly, nonatomic) MockCameraDeviceDiscovery *deviceDiscovery; +@property(readonly, nonatomic) MockCaptureSession *captureSession; @end @implementation CameraOrientationTests @@ -76,13 +78,21 @@ - (void)setUp { _camera = [[MockCamera alloc] init]; _eventAPI = [[StubGlobalEventApi alloc] init]; _deviceDiscovery = [[MockCameraDeviceDiscovery alloc] init]; + _captureSession = [[MockCaptureSession alloc] init]; [_camera setValue:_mockDevice forKey:@"captureDevice"]; + __weak typeof(self) weakSelf = self; + _cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil globalAPI:_eventAPI - deviceDiscovery:_deviceDiscovery]; + deviceDiscovery:_deviceDiscovery + sessionFactory:^id{ + return weakSelf.captureSession; + } deviceFactory:^id(NSString *name) { + return nil; + }]; _cameraPlugin.camera = _camera; } @@ -122,10 +132,17 @@ - (void)testOrientationNotificationsNotCalledForFaceUp { - (void)testOrientationNotificationsNotCalledForFaceDown { StubGlobalEventApi *eventAPI = [[StubGlobalEventApi alloc] init]; + + __weak typeof(self) weakSelf = self; CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil globalAPI:eventAPI - deviceDiscovery:_deviceDiscovery]; + deviceDiscovery:_deviceDiscovery + sessionFactory:^id{ + return weakSelf.captureSession; + } deviceFactory:^id(NSString *name) { + return nil; + }]; [self sendOrientation:UIDeviceOrientationFaceDown toCamera:cameraPlugin]; @@ -157,12 +174,18 @@ - (void)testOrientationChanged_noRetainCycle { dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); __weak CameraPlugin *weakPlugin; + __weak typeof(self) weakSelf = self; @autoreleasepool { CameraPlugin *plugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil globalAPI:_eventAPI - deviceDiscovery:_deviceDiscovery]; + deviceDiscovery:_deviceDiscovery + sessionFactory:^id{ + return weakSelf.captureSession; + } deviceFactory:^id(NSString *name) { + return nil; + }]; weakPlugin = plugin; plugin.captureSessionQueue = captureSessionQueue; plugin.camera = _camera; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m index 08cba70bf3a2..81e492d6e927 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m @@ -11,6 +11,8 @@ @import XCTest; #import #import "CameraTestUtils.h" +#import "MockCaptureSession.h" +#import "MockCaptureDeviceController.h" /// Includes test cases related to resolution presets setting 
operations for FLTCam class. @interface FLTCamSessionPresetsTest : XCTestCase @@ -20,17 +22,26 @@ @implementation FLTCamSessionPresetsTest - (void)testResolutionPresetWithBestFormat_mustUpdateCaptureSessionPreset { NSString *expectedPreset = AVCaptureSessionPresetInputPriority; + XCTestExpectation *presetExpectation = [self expectationWithDescription:@"Expected preset set"]; + XCTestExpectation *formatExpectation = [self expectationWithDescription:@"Expected format set"]; - id videoSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; id captureFormatMock = OCMClassMock([AVCaptureDeviceFormat class]); - id captureDeviceMock = OCMClassMock([AVCaptureDevice class]); - OCMStub([captureDeviceMock formats]).andReturn(@[ captureFormatMock ]); - - OCMExpect([captureDeviceMock activeFormat]).andReturn(captureFormatMock); - OCMExpect([captureDeviceMock lockForConfiguration:NULL]).andReturn(YES); - OCMExpect([videoSessionMock setSessionPreset:expectedPreset]); + + MockCaptureDeviceController *captureDeviceMock = [[MockCaptureDeviceController alloc] init]; + captureDeviceMock.formats = @[captureFormatMock]; + captureDeviceMock.setActiveFormatStub = ^(AVCaptureDeviceFormat * _Nonnull format) { + if (format == captureFormatMock) { + [formatExpectation fulfill]; + } + }; + + videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { + if (preset == expectedPreset) { + [presetExpectation fulfill]; + } + }; FLTCreateCamWithVideoDimensionsForFormat(videoSessionMock, FCPPlatformResolutionPresetMax, captureDeviceMock, @@ -41,41 +52,47 @@ - (void)testResolutionPresetWithBestFormat_mustUpdateCaptureSessionPreset { return videoDimensions; }); - OCMVerifyAll(captureDeviceMock); - OCMVerifyAll(videoSessionMock); + [self waitForExpectationsWithTimeout:1 handler:nil]; } - (void)testResolutionPresetWithCanSetSessionPresetMax_mustUpdateCaptureSessionPreset { NSString *expectedPreset = AVCaptureSessionPreset3840x2160; + XCTestExpectation *expectation = [self expectationWithDescription:@"Expected preset set"]; - id videoSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); - + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; // Make sure that setting resolution preset for session always succeeds. - OCMStub([videoSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - - OCMExpect([videoSessionMock setSessionPreset:expectedPreset]); - + videoSessionMock.mockCanSetSessionPreset = YES; + + videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { + if (preset == expectedPreset) { + [expectation fulfill]; + } + }; + FLTCreateCamWithVideoCaptureSession(videoSessionMock, FCPPlatformResolutionPresetMax); - OCMVerifyAll(videoSessionMock); + [self waitForExpectationsWithTimeout:1 handler:nil]; } - (void)testResolutionPresetWithCanSetSessionPresetUltraHigh_mustUpdateCaptureSessionPreset { NSString *expectedPreset = AVCaptureSessionPreset3840x2160; + XCTestExpectation *expectation = [self expectationWithDescription:@"Expected preset set"]; - id videoSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); - + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; + // Make sure that setting resolution preset for session always succeeds. 
- OCMStub([videoSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - + videoSessionMock.mockCanSetSessionPreset = YES; + // Expect that setting "ultraHigh" resolutionPreset correctly updates videoCaptureSession. - OCMExpect([videoSessionMock setSessionPreset:expectedPreset]); + videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { + if (preset == expectedPreset) { + [expectation fulfill]; + } + }; FLTCreateCamWithVideoCaptureSession(videoSessionMock, FCPPlatformResolutionPresetUltraHigh); - OCMVerifyAll(videoSessionMock); + [self waitForExpectationsWithTimeout:1 handler:nil]; } @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m index 9bed6bea4883..44748b53166c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m @@ -8,8 +8,9 @@ #endif @import XCTest; @import AVFoundation; -#import #import "CameraTestUtils.h" +#import "MockCaptureDeviceController.h" +#import "MockCaptureSession.h" static const FCPPlatformResolutionPreset gTestResolutionPreset = FCPPlatformResolutionPresetMedium; static const int gTestFramesPerSecond = 15; @@ -65,11 +66,11 @@ - (void)unlockDevice:(AVCaptureDevice *)captureDevice { [_unlockExpectation fulfill]; } -- (void)beginConfigurationForSession:(AVCaptureSession *)videoCaptureSession { +- (void)beginConfigurationForSession:(id)videoCaptureSession { [_beginConfigurationExpectation fulfill]; } -- (void)commitConfigurationForSession:(AVCaptureSession *)videoCaptureSession { +- (void)commitConfigurationForSession:(id)videoCaptureSession { [_commitConfigurationExpectation fulfill]; } @@ -142,10 +143,10 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter { enableAudio:gTestEnableAudio]; TestMediaSettingsAVWrapper *injectedWrapper = [[TestMediaSettingsAVWrapper alloc] initWithTestCase:self]; - + FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( dispatch_queue_create("test", NULL), settings, injectedWrapper, nil); - + // Expect FPS configuration is passed to camera device. 
[self waitForExpectations:@[ injectedWrapper.lockExpectation, injectedWrapper.beginConfigurationExpectation, @@ -167,18 +168,20 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter { } - (void)testSettings_ShouldBeSupportedByMethodCall { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; - - XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; + MockCaptureDeviceController *mockDeviceController = [[MockCaptureDeviceController alloc] init]; + MockCaptureSession *mockSession = [[MockCaptureSession alloc] init]; + mockSession.mockCanSetSessionPreset = YES; - // Set up mocks for initWithCameraName method - id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]]) - .andReturn([AVCaptureInput alloc]); + CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil + globalAPI:nil deviceDiscovery:nil sessionFactory:^id{ + return mockSession; + } + deviceFactory:^id(NSString *name) { + return mockDeviceController; + } + ]; - id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock); - OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); + XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; // Set up method call FCPPlatformMediaSettings *mediaSettings = diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h index 2bbb56c51a79..7273bda8a62f 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h @@ -28,7 +28,7 @@ extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessi /// @param captureSession AVCaptureSession for video /// @param resolutionPreset preset for camera's captureSession resolution /// @return an FLTCam object. -extern FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSession, +extern FLTCam *FLTCreateCamWithVideoCaptureSession(id captureSession, FCPPlatformResolutionPreset resolutionPreset); /// Creates an `FLTCam` with a given captureSession and resolutionPreset. @@ -39,8 +39,8 @@ extern FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSess /// @param videoDimensionsForFormat custom code to determine video dimensions /// @return an FLTCam object. extern FLTCam *FLTCreateCamWithVideoDimensionsForFormat( - AVCaptureSession *captureSession, FCPPlatformResolutionPreset resolutionPreset, - AVCaptureDevice *captureDevice, VideoDimensionsForFormat videoDimensionsForFormat); + id captureSession, FCPPlatformResolutionPreset resolutionPreset, + id captureDevice, VideoDimensionsForFormat videoDimensionsForFormat); /// Creates a test sample buffer. /// @return a test sample buffer. 
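Note: these helpers now take the protocol-based session and device types, so tests can drive them with plain Objective-C doubles instead of OCMock class mocks, mirroring the pattern used in CameraSessionPresetsTests. The sketch below is illustrative only: it assumes the MockCaptureSession double added later in this patch (its mockCanSetSessionPreset and setSessionPresetStub properties), and the test method name and expectation description are placeholders.

// Illustrative sketch: exercising FLTCreateCamWithVideoCaptureSession with a
// protocol-based mock session rather than an OCMock class mock.
- (void)testExample_sessionPresetForwardedToMockSession {
  XCTestExpectation *presetSet = [self expectationWithDescription:@"preset set"];

  MockCaptureSession *sessionMock = [[MockCaptureSession alloc] init];
  // Make canSetSessionPreset: always succeed so the preset is applied.
  sessionMock.mockCanSetSessionPreset = YES;
  sessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset preset) {
    if (preset == AVCaptureSessionPreset3840x2160) {
      [presetSet fulfill];
    }
  };

  // The test utility wires the mock session into an FLTCam instance.
  FLTCreateCamWithVideoCaptureSession(sessionMock, FCPPlatformResolutionPresetUltraHigh);

  [self waitForExpectationsWithTimeout:1 handler:nil];
}

Because the stub is an ordinary block property, the expectation is fulfilled only when production code forwards the exact preset, which takes over the role of OCMExpect/OCMVerifyAll in the migrated tests.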
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m index 501498d6382b..074a9d82917b 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m @@ -8,6 +8,9 @@ @import AVFoundation; @import camera_avfoundation; +#import "MockCaptureDeviceController.h" +#import "MockCaptureSession.h" + static FCPPlatformMediaSettings *FCPGetDefaultMediaSettings( FCPPlatformResolutionPreset resolutionPreset) { return [FCPPlatformMediaSettings makeWithResolutionPreset:resolutionPreset @@ -32,26 +35,14 @@ if (!mediaSettingsAVWrapper) { mediaSettingsAVWrapper = [[FLTCamMediaSettingsAVWrapper alloc] init]; } - - id inputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) - .andReturn(inputMock); - - id videoSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([videoSessionMock beginConfiguration]) - .andDo(^(NSInvocation *invocation){ - }); - OCMStub([videoSessionMock commitConfiguration]) - .andDo(^(NSInvocation *invocation){ - }); - - OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); - OCMStub([videoSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - - id audioSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]); - OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - + + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; + videoSessionMock.mockCanSetSessionPreset = YES; + + MockCaptureSession *audioSessionMock = [[MockCaptureSession alloc] init]; + audioSessionMock.mockCanSetSessionPreset = YES; + + __block MockCaptureDeviceController *mockDevice = [[MockCaptureDeviceController alloc] init]; id frameRateRangeMock1 = OCMClassMock([AVFrameRateRange class]); OCMStub([frameRateRangeMock1 minFrameRate]).andReturn(3); OCMStub([frameRateRangeMock1 maxFrameRate]).andReturn(30); @@ -67,21 +58,17 @@ OCMStub([captureDeviceFormatMock2 videoSupportedFrameRateRanges]).andReturn(@[ frameRateRangeMock2 ]); - - id captureDeviceMock = OCMClassMock([AVCaptureDevice class]); - OCMStub([captureDeviceMock lockForConfiguration:[OCMArg setTo:nil]]).andReturn(YES); - OCMStub([captureDeviceMock formats]).andReturn((@[ - captureDeviceFormatMock1, captureDeviceFormatMock2 - ])); - __block AVCaptureDeviceFormat *format = captureDeviceFormatMock1; - OCMStub([captureDeviceMock setActiveFormat:[OCMArg any]]).andDo(^(NSInvocation *invocation) { - [invocation retainArguments]; - [invocation getArgument:&format atIndex:2]; - }); - OCMStub([captureDeviceMock activeFormat]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&format]; - }); - + +// id inputMock = OCMClassMock([AVCaptureDeviceInput class]); +// OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) +// .andReturn(inputMock); + + mockDevice.formats = @[captureDeviceFormatMock1, captureDeviceFormatMock2]; + mockDevice.activeFormat = captureDeviceFormatMock1; + // mockDevice.inputToReturn = inputMock; + + // + id fltCam = [[FLTCam alloc] initWithMediaSettings:mediaSettings mediaSettingsAVWrapper:mediaSettingsAVWrapper orientation:UIDeviceOrientationPortrait @@ -89,7 +76,7 @@ audioCaptureSession:audioSessionMock captureSessionQueue:captureSessionQueue 
captureDeviceFactory:captureDeviceFactory ?: ^id(void) { - return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:captureDeviceMock]; + return mockDevice; } videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { return CMVideoFormatDescriptionGetDimensions(format.formatDescription); @@ -124,7 +111,7 @@ return fltCam; } -FLTCam *FLTCreateCamWithVideoCaptureSession(AVCaptureSession *captureSession, +FLTCam *FLTCreateCamWithVideoCaptureSession(id captureSession, FCPPlatformResolutionPreset resolutionPreset) { id inputMock = OCMClassMock([AVCaptureDeviceInput class]); OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) @@ -145,8 +132,8 @@ } FLTCam *FLTCreateCamWithVideoDimensionsForFormat( - AVCaptureSession *captureSession, FCPPlatformResolutionPreset resolutionPreset, - AVCaptureDevice *captureDevice, VideoDimensionsForFormat videoDimensionsForFormat) { + id captureSession, FCPPlatformResolutionPreset resolutionPreset, + id captureDevice, VideoDimensionsForFormat videoDimensionsForFormat) { id inputMock = OCMClassMock([AVCaptureDeviceInput class]); OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) .andReturn(inputMock); @@ -162,7 +149,7 @@ audioCaptureSession:audioSessionMock captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) captureDeviceFactory:^id(void) { - return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:captureDevice]; } + return captureDevice; } videoDimensionsForFormat:videoDimensionsForFormat error:nil]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m index 31711f065efd..49e05b0ec266 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m @@ -144,7 +144,7 @@ - (void)testCaptureToFile_mustReportFileExtensionWithJpgWhenHEVCNotAvailableAndF dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - [cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; + [cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m index c92d824e4696..ccd0cd6c7195 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m @@ -8,7 +8,8 @@ #endif @import AVFoundation; @import XCTest; -#import + +#import "MockPhotoData.h" @interface FLTSavePhotoDelegateTests : XCTestCase @@ -32,7 +33,7 @@ - (void)testHandlePhotoCaptureResult_mustCompleteWithErrorIfFailedToCapture { }]; [delegate handlePhotoCaptureResultWithError:captureError - photoDataProvider:^NSData * { + photoDataProvider:^id { return nil; }]; [self waitForExpectationsWithTimeout:1 handler:nil]; @@ -55,15 +56,16 @@ - (void)testHandlePhotoCaptureResult_mustCompleteWithErrorIfFailedToWrite { [completionExpectation fulfill]; }]; - // Do not use OCMClassMock for 
NSData because some XCTest APIs uses NSData (e.g. - // `XCTRunnerIDESession::logDebugMessage:`) on a private queue. - id mockData = OCMPartialMock([NSData data]); - OCMStub([mockData writeToFile:OCMOCK_ANY - options:NSDataWritingAtomic - error:[OCMArg setTo:ioError]]) - .andReturn(NO); + MockPhotoData *mockData = [[MockPhotoData alloc] init]; + mockData.writeToFileStub = ^BOOL(NSString *path, + NSDataWritingOptions options, + NSError **error) { + *error = ioError; + return NO; + }; + [delegate handlePhotoCaptureResultWithError:nil - photoDataProvider:^NSData * { + photoDataProvider:^id { return mockData; }]; [self waitForExpectationsWithTimeout:1 handler:nil]; @@ -84,14 +86,16 @@ - (void)testHandlePhotoCaptureResult_mustCompleteWithFilePathIfSuccessToWrite { [completionExpectation fulfill]; }]; - // Do not use OCMClassMock for NSData because some XCTest APIs uses NSData (e.g. - // `XCTRunnerIDESession::logDebugMessage:`) on a private queue. - id mockData = OCMPartialMock([NSData data]); - OCMStub([mockData writeToFile:filePath options:NSDataWritingAtomic error:[OCMArg setTo:nil]]) - .andReturn(YES); + + MockPhotoData *mockData = [[MockPhotoData alloc] init]; + mockData.writeToFileStub = ^BOOL(NSString *path, + NSDataWritingOptions options, + NSError **error) { + return YES; + }; [delegate handlePhotoCaptureResultWithError:nil - photoDataProvider:^NSData * { + photoDataProvider:^id { return mockData; }]; [self waitForExpectationsWithTimeout:1 handler:nil]; @@ -108,17 +112,17 @@ - (void)testHandlePhotoCaptureResult_bothProvideDataAndSaveFileMustRunOnIOQueue dispatch_queue_t ioQueue = dispatch_queue_create("test", NULL); const char *ioQueueSpecific = "io_queue_specific"; dispatch_queue_set_specific(ioQueue, ioQueueSpecific, (void *)ioQueueSpecific, NULL); + + MockPhotoData *mockData = [[MockPhotoData alloc] init]; + mockData.writeToFileStub = ^BOOL(NSString *path, + NSDataWritingOptions options, + NSError **error) { + if (dispatch_get_specific(ioQueueSpecific)) { + [writeFileQueueExpectation fulfill]; + } + return YES; + }; - // Do not use OCMClassMock for NSData because some XCTest APIs uses NSData (e.g. - // `XCTRunnerIDESession::logDebugMessage:`) on a private queue. - id mockData = OCMPartialMock([NSData data]); - OCMStub([mockData writeToFile:OCMOCK_ANY options:NSDataWritingAtomic error:[OCMArg setTo:nil]]) - .andDo(^(NSInvocation *invocation) { - if (dispatch_get_specific(ioQueueSpecific)) { - [writeFileQueueExpectation fulfill]; - } - }) - .andReturn(YES); NSString *filePath = @"test"; FLTSavePhotoDelegate *delegate = [[FLTSavePhotoDelegate alloc] @@ -129,7 +133,7 @@ - (void)testHandlePhotoCaptureResult_bothProvideDataAndSaveFileMustRunOnIOQueue }]; [delegate handlePhotoCaptureResultWithError:nil - photoDataProvider:^NSData * { + photoDataProvider:^id { if (dispatch_get_specific(ioQueueSpecific)) { [dataProviderQueueExpectation fulfill]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.h new file mode 100644 index 000000000000..fce650515b6c --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.h @@ -0,0 +1,24 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera_avfoundation; +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@interface MockCaptureSession : NSObject + @property(nonatomic, copy) void (^beginConfigurationStub)(void); + @property(nonatomic, copy) void (^commitConfigurationStub)(void); + @property(nonatomic, copy) void (^startRunningStub)(void); + @property(nonatomic, copy) void (^stopRunningStub)(void); + @property(nonatomic, copy) void (^setSessionPresetStub)(AVCaptureSessionPreset preset); + + @property(nonatomic, strong) NSMutableArray *inputs; + @property(nonatomic, strong) NSMutableArray *outputs; + @property(nonatomic, assign) BOOL mockCanSetSessionPreset; + @property(nonatomic, copy) AVCaptureSessionPreset sessionPreset; + +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m new file mode 100644 index 000000000000..3c09b1d07c50 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m @@ -0,0 +1,98 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "MockCaptureSession.h" + +@implementation MockCaptureSession + +- (instancetype)init { + self = [super init]; + if (self) { + _inputs = [NSMutableArray array]; + _outputs = [NSMutableArray array]; + } + return self; +} + +- (void)beginConfiguration { + if (self.beginConfigurationStub) { + self.beginConfigurationStub(); + } +} + +- (void)commitConfiguration { + if (self.commitConfigurationStub) { + self.commitConfigurationStub(); + } +} + +- (void)startRunning { + if (self.startRunningStub) { + self.startRunningStub(); + } +} + +- (void)stopRunning { + if (self.stopRunningStub) { + self.stopRunningStub(); + } +} + +- (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset { + return self.mockCanSetSessionPreset; +} + +- (void)addConnection:(nonnull AVCaptureConnection *)connection { + +} + + +- (void)addInput:(nonnull AVCaptureInput *)input { + +} + + +- (void)addInputWithNoConnections:(nonnull AVCaptureInput *)input { +} + + +- (void)addOutput:(nonnull AVCaptureOutput *)output { +} + + +- (void)addOutputWithNoConnections:(nonnull AVCaptureOutput *)output { +} + + +- (BOOL)canAddConnection:(nonnull AVCaptureConnection *)connection { + return YES; +} + + +- (BOOL)canAddInput:(nonnull AVCaptureInput *)input { + return YES; +} + + +- (BOOL)canAddOutput:(nonnull AVCaptureOutput *)output { + return YES; +} + + +- (void)removeInput:(nonnull AVCaptureInput *)input { + +} + + +- (void)removeOutput:(nonnull AVCaptureOutput *)output { + +} + +- (void)setSessionPreset:(AVCaptureSessionPreset)sessionPreset { + if (_setSessionPresetStub) { + _setSessionPresetStub(sessionPreset); + } +} + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.h new file mode 100644 index 000000000000..f810d218d5c9 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.h @@ -0,0 +1,13 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera_avfoundation; +@import AVFoundation; + +@interface MockPhotoData : NSObject +@property(nonatomic, copy) BOOL (^writeToFileStub)(NSString *path, + NSDataWritingOptions options, + NSError **error); +@end + diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.m new file mode 100644 index 000000000000..b51909d76755 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.m @@ -0,0 +1,18 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "MockPhotoData.h" + +@implementation MockPhotoData + +- (BOOL)writeToFile:(NSString *)path + options:(NSDataWritingOptions)writeOptionsMask + error:(NSError **)errorPtr { + if (self.writeToFileStub) { + return _writeToFileStub(path, writeOptionsMask, errorPtr); + } + return YES; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m index 9e0883f9b554..0891bf9948b3 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m @@ -28,6 +28,8 @@ @interface CameraPlugin () @property(nonatomic) FCPCameraGlobalEventApi *globalEventAPI; @property(readonly, nonatomic) FLTCameraPermissionManager *permissionManager; @property(readonly, nonatomic) id deviceDiscovery; +@property(nonatomic, copy) CaptureSessionFactory captureSessionFactory; +@property(nonatomic, copy) CaptureNamedDeviceFactory captureDeviceFactory; @end @implementation CameraPlugin @@ -44,13 +46,23 @@ - (instancetype)initWithRegistry:(NSObject *)registry [self initWithRegistry:registry messenger:messenger globalAPI:[[FCPCameraGlobalEventApi alloc] initWithBinaryMessenger:messenger] - deviceDiscovery:[[FLTDefaultCameraDeviceDiscovery alloc] init]]; + deviceDiscovery:[[FLTDefaultCameraDeviceDiscovery alloc] init] + sessionFactory:^id(void) { + return [[FLTDefaultCaptureSession alloc] init]; + } + deviceFactory:^id(NSString *name) { + AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:name]; + return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:device]; + } + ]; } - (instancetype)initWithRegistry:(NSObject *)registry messenger:(NSObject *)messenger globalAPI:(FCPCameraGlobalEventApi *)globalAPI - deviceDiscovery:(id)deviceDiscovery { + deviceDiscovery:(id)deviceDiscovery + sessionFactory:(CaptureSessionFactory)captureSessionFactory + deviceFactory:(CaptureNamedDeviceFactory)deviceFactory{ self = [super init]; NSAssert(self, @"super init cannot be nil"); _registry = registry; @@ -58,6 +70,8 @@ - (instancetype)initWithRegistry:(NSObject *)registry _globalEventAPI = globalAPI; _captureSessionQueue = dispatch_queue_create("io.flutter.camera.captureSessionQueue", NULL); _deviceDiscovery = deviceDiscovery; + _captureSessionFactory = captureSessionFactory; + _captureDeviceFactory = deviceFactory; id permissionService = [[FLTDefaultPermissionService alloc] init]; _permissionManager = [[FLTCameraPermissionManager alloc] initWithPermissionService:permissionService]; @@ -483,13 +497,23 @@ - (void)sessionQueueCreateCameraWithName:(NSString *)name 
[[FLTCamMediaSettingsAVWrapper alloc] init]; NSError *error; - FLTCam *cam = [[FLTCam alloc] initWithCameraName:name - mediaSettings:settings - mediaSettingsAVWrapper:mediaSettingsAVWrapper - orientation:[[UIDevice currentDevice] orientation] - captureSessionQueue:self.captureSessionQueue - error:&error]; - + + __weak typeof(self) weakSelf = self; + FLTCam *cam = [[FLTCam alloc] + initWithMediaSettings:settings + mediaSettingsAVWrapper:mediaSettingsAVWrapper + orientation:[[UIDevice currentDevice] orientation] + videoCaptureSession:_captureSessionFactory() + audioCaptureSession:_captureSessionFactory() + captureSessionQueue:self.captureSessionQueue + captureDeviceFactory:^id _Nonnull{ + return weakSelf.captureDeviceFactory(name); + } + videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat * _Nonnull format) { + return CMVideoFormatDescriptionGetDimensions(format.formatDescription); + } + error:&error]; + if (error) { completion(nil, FlutterErrorFromNSError(error)); } else { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 92937d9b77e4..5f60db563215 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -16,6 +16,7 @@ #import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" #import "./include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h" #import "./include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" static FlutterError *FlutterErrorFromNSError(NSError *error) { return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code] @@ -57,8 +58,8 @@ @interface FLTCam () videoCaptureSession; +@property(readonly, nonatomic) id audioCaptureSession; @property(readonly, nonatomic) AVCaptureInput *captureVideoInput; /// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback. 
@@ -123,12 +124,15 @@ - (instancetype)initWithCameraName:(NSString *)cameraName orientation:(UIDeviceOrientation)orientation captureSessionQueue:(dispatch_queue_t)captureSessionQueue error:(NSError **)error { + AVCaptureSession *videoSession = [[AVCaptureSession alloc] init]; + AVCaptureSession *audioSession = [[AVCaptureSession alloc] init]; + return [self initWithCameraName:cameraName mediaSettings:mediaSettings mediaSettingsAVWrapper:mediaSettingsAVWrapper orientation:orientation - videoCaptureSession:[[AVCaptureSession alloc] init] - audioCaptureSession:[[AVCaptureSession alloc] init] + videoCaptureSession:[[FLTDefaultCaptureSession alloc] initWithCaptureSession:videoSession] + audioCaptureSession:[[FLTDefaultCaptureSession alloc] initWithCaptureSession:audioSession] captureSessionQueue:captureSessionQueue error:error]; } @@ -137,8 +141,8 @@ - (instancetype)initWithCameraName:(NSString *)cameraName mediaSettings:(FCPPlatformMediaSettings *)mediaSettings mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(AVCaptureSession *)videoCaptureSession - audioCaptureSession:(AVCaptureSession *)audioCaptureSession + videoCaptureSession:(id)videoCaptureSession + audioCaptureSession:(id)audioCaptureSession captureSessionQueue:(dispatch_queue_t)captureSessionQueue error:(NSError **)error { return [self initWithMediaSettings:mediaSettings @@ -213,8 +217,8 @@ static void selectBestFormatForRequestedFrameRate( - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(AVCaptureSession *)videoCaptureSession - audioCaptureSession:(AVCaptureSession *)audioCaptureSession + videoCaptureSession:(id)videoCaptureSession + audioCaptureSession:(id)audioCaptureSession captureSessionQueue:(dispatch_queue_t)captureSessionQueue captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m index c712245a6ced..6dae176ebb5a 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m @@ -4,6 +4,7 @@ #import "./include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" @implementation FLTCamMediaSettingsAVWrapper @@ -15,11 +16,11 @@ - (void)unlockDevice:(id)captureDevice { return [captureDevice unlockForConfiguration]; } -- (void)beginConfigurationForSession:(AVCaptureSession *)videoCaptureSession { +- (void)beginConfigurationForSession:(id)videoCaptureSession { [videoCaptureSession beginConfiguration]; } -- (void)commitConfigurationForSession:(AVCaptureSession *)videoCaptureSession { +- (void)commitConfigurationForSession:(id)videoCaptureSession { [videoCaptureSession commitConfiguration]; } diff --git 
a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m index 5b45f93c221c..2efe90464954 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m @@ -4,6 +4,8 @@ #import "./include/camera_avfoundation/FLTSavePhotoDelegate.h" #import "./include/camera_avfoundation/FLTSavePhotoDelegate_Test.h" +#import "./include/camera_avfoundation/Protocols/FLTPhotoData.h" + @interface FLTSavePhotoDelegate () /// The file path for the captured photo. @@ -26,7 +28,7 @@ - (instancetype)initWithPath:(NSString *)path } - (void)handlePhotoCaptureResultWithError:(NSError *)error - photoDataProvider:(NSData * (^)(void))photoDataProvider { + photoDataProvider:(id (^)(void))photoDataProvider { if (error) { self.completionHandler(nil, error); return; @@ -36,7 +38,7 @@ - (void)handlePhotoCaptureResultWithError:(NSError *)error typeof(self) strongSelf = weakSelf; if (!strongSelf) return; - NSData *data = photoDataProvider(); + id data = photoDataProvider(); NSError *ioError; if ([data writeToFile:strongSelf.path options:NSDataWritingAtomic error:&ioError]) { strongSelf.completionHandler(self.path, nil); @@ -50,8 +52,9 @@ - (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error { [self handlePhotoCaptureResultWithError:error - photoDataProvider:^NSData * { - return [photo fileDataRepresentation]; + photoDataProvider:^id { + NSData *data = [photo fileDataRepresentation]; + return [[FLTDefaultPhotoData alloc] initWithData:data]; }]; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m index e17ca4a4067c..2044b87c7689 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m @@ -11,11 +11,15 @@ @interface FLTDefaultCaptureDeviceController () @implementation FLTDefaultCaptureDeviceController - (instancetype)initWithDevice:(AVCaptureDevice *)device { - self = [super init]; - if (self) { - _device = device; - } - return self; + self = [super init]; + if (self) { + _device = device; + } + return self; +} + +- (nonnull NSString *)uniqueID { + return self.device.uniqueID; } // Position/Orientation @@ -166,8 +170,4 @@ - (AVCaptureInput *)createInput:(NSError * _Nullable * _Nullable)error { return [AVCaptureDeviceInput deviceInputWithDevice:_device error:error]; } -- (nonnull NSString *)uniqueID { - return self.device.uniqueID; -} - @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m new file mode 100644 index 000000000000..d9be4a1a794e --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m @@
-0,0 +1,97 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "../include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" + +@interface FLTDefaultCaptureSession () +@property(nonatomic, strong) AVCaptureSession *captureSession; +@end + +@implementation FLTDefaultCaptureSession + +- (instancetype)initWithCaptureSession:(AVCaptureSession *)session { + self = [super init]; + if (self) { + _captureSession = session; + } + return self; +} + +- (void)beginConfiguration { + [_captureSession beginConfiguration]; +} + +- (void)commitConfiguration { + [_captureSession commitConfiguration]; +} + +- (void)startRunning { + [_captureSession startRunning]; +} + +- (void)stopRunning { + [_captureSession stopRunning]; +} + +- (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset { + return [_captureSession canSetSessionPreset:preset]; +} + +- (void)addInputWithNoConnections:(AVCaptureInput *)input { + [_captureSession addInputWithNoConnections:input]; +} + +- (void)addOutputWithNoConnections:(AVCaptureOutput *)output { + [_captureSession addOutputWithNoConnections:output]; +} + +- (void)addConnection:(AVCaptureConnection *)connection { + [_captureSession addConnection:connection]; +} + +- (void)addOutput:(AVCaptureOutput *)output { + [_captureSession addOutput:output]; +} + +- (void)removeInput:(AVCaptureInput *)input { + [_captureSession removeInput:input]; +} + +- (void)removeOutput:(AVCaptureOutput *)output { + [_captureSession removeOutput:output]; +} + +- (void)setSessionPreset:(AVCaptureSessionPreset)sessionPreset { + _captureSession.sessionPreset = sessionPreset; +} + +- (AVCaptureSessionPreset)sessionPreset { + return _captureSession.sessionPreset; +} + +- (NSArray *)inputs { + return _captureSession.inputs; +} + +- (NSArray *)outputs { + return _captureSession.outputs; +} + +- (BOOL)canAddInput:(AVCaptureInput *)input { + return [_captureSession canAddInput:input]; +} + +- (BOOL)canAddOutput:(AVCaptureOutput *)output { + return [_captureSession canAddOutput:output]; +} + +- (BOOL)canAddConnection:(AVCaptureConnection *)connection { + return [_captureSession canAddConnection:connection]; +} + +- (void)addInput:(AVCaptureInput *)input { + [_captureSession addInput:input]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPhotoData.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPhotoData.m new file mode 100644 index 000000000000..583c9f27e430 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPhotoData.m @@ -0,0 +1,22 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "../include/camera_avfoundation/Protocols/FLTPhotoData.h" + +@implementation FLTDefaultPhotoData + +- (instancetype)initWithData:(NSData *)data { + self = [super init]; + if (self) { + _data = data; + } + return self; +} + +- (BOOL)writeToFile:(NSString *)path + options:(NSDataWritingOptions)writeOptionsMask + error:(NSError **)errorPtr { + return [self.data writeToFile:path options:writeOptionsMask error:errorPtr]; +} +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap index 170400bb80ff..1ba442f7b107 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap @@ -17,6 +17,8 @@ framework module camera_avfoundation { header "FLTDeviceOrientationProviding.h" header "FLTEventChannelProtocol.h" header "FLTCameraDeviceDiscovery.h" + header "FLTCaptureSessionProtocol.h" + header "FLTPhotoData.h" header "QueueUtils.h" } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h index 586b2fc87085..c25e375342d6 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h @@ -5,6 +5,11 @@ #import #import "messages.g.h" +#import "FLTCaptureSessionProtocol.h" +#import "FLTCaptureDeviceControlling.h" + +typedef id (^CaptureSessionFactory)(void); +typedef id (^CaptureNamedDeviceFactory)(NSString* name); @interface CameraPlugin : NSObject @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h index 28c7e75b99d2..6b8170eb2dee 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h @@ -27,7 +27,10 @@ - (instancetype)initWithRegistry:(NSObject *)registry messenger:(NSObject *)messenger globalAPI:(FCPCameraGlobalEventApi *)globalAPI - deviceDiscovery:(id)deviceDiscovery NS_DESIGNATED_INITIALIZER; + deviceDiscovery:(id)deviceDiscovery + sessionFactory:(CaptureSessionFactory)sessionFactory + deviceFactory:(CaptureNamedDeviceFactory)deviceFactory +NS_DESIGNATED_INITIALIZER; /// Hide the default public constructor. 
- (instancetype)init NS_UNAVAILABLE; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h index 9fee90390a2e..fe9315ced9cd 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h @@ -13,6 +13,14 @@ NS_ASSUME_NONNULL_BEGIN +/// Factory block returning an AVCaptureDevice. +/// Used in tests to inject a device into FLTCam. +typedef id _Nonnull (^CaptureDeviceFactory)(void); + +/// Determines the video dimensions (width and height) for a given capture device format. +/// Used in tests to mock CMVideoFormatDescriptionGetDimensions. +typedef CMVideoDimensions (^VideoDimensionsForFormat)(AVCaptureDeviceFormat *); + /// A class that manages camera's state and performs camera operations. @interface FLTCam : NSObject @@ -47,6 +55,30 @@ NS_ASSUME_NONNULL_BEGIN captureSessionQueue:(dispatch_queue_t)captureSessionQueue error:(NSError **)error; +/// Initializes a camera instance. +/// Allows for injecting dependencies that are usually internal. +- (instancetype)initWithCameraName:(NSString *)cameraName + mediaSettings:(FCPPlatformMediaSettings *)mediaSettings + mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper + orientation:(UIDeviceOrientation)orientation + videoCaptureSession:(id)videoCaptureSession + audioCaptureSession:(id)audioCaptureSession + captureSessionQueue:(dispatch_queue_t)captureSessionQueue + error:(NSError **)error; + +/// Initializes a camera instance. +/// Allows for testing with specified resolution, audio preference, orientation, +/// and direct access to capture sessions and blocks. +- (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings + mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper + orientation:(UIDeviceOrientation)orientation + videoCaptureSession:(id)videoCaptureSession + audioCaptureSession:(id)audioCaptureSession + captureSessionQueue:(dispatch_queue_t)captureSessionQueue + captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory + videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat + error:(NSError **)error; + /// Informs the Dart side of the plugin of the current camera state and capabilities. - (void)reportInitializationState; - (void)start; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h index 4e8d9d15cd8f..da75d7e4e75b 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h @@ -6,6 +6,7 @@ @import Foundation; #import "FLTCaptureDeviceControlling.h" +#import "FLTCaptureSessionProtocol.h" NS_ASSUME_NONNULL_BEGIN @@ -42,7 +43,7 @@ NS_ASSUME_NONNULL_BEGIN * operations on a running session into atomic updates. 
* @param videoCaptureSession The video capture session. */ -- (void)beginConfigurationForSession:(AVCaptureSession *)videoCaptureSession; +- (void)beginConfigurationForSession:(id)videoCaptureSession; /** * @method commitConfigurationForSession: @@ -50,7 +51,7 @@ NS_ASSUME_NONNULL_BEGIN * operations on a running session into atomic updates. * @param videoCaptureSession The video capture session. */ -- (void)commitConfigurationForSession:(AVCaptureSession *)videoCaptureSession; +- (void)commitConfigurationForSession:(id)videoCaptureSession; /** * @method setMinFrameDuration:onDevice: diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h index 65bf90a70b69..cfe1b0772787 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h @@ -5,14 +5,7 @@ #import "FLTCam.h" #import "FLTSavePhotoDelegate.h" #import "FLTCaptureDeviceControlling.h" - -/// Determines the video dimensions (width and height) for a given capture device format. -/// Used in tests to mock CMVideoFormatDescriptionGetDimensions. -typedef CMVideoDimensions (^VideoDimensionsForFormat)(AVCaptureDeviceFormat *); - -/// Factory block returning an AVCaptureDevice. -/// Used in tests to inject a device into FLTCam. -typedef id (^CaptureDeviceFactory)(void); +#import "FLTCaptureSessionProtocol.h" @interface FLTImageStreamHandler : NSObject @@ -53,30 +46,6 @@ typedef id (^CaptureDeviceFactory)(void); didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection; -/// Initializes a camera instance. -/// Allows for injecting dependencies that are usually internal. -- (instancetype)initWithCameraName:(NSString *)cameraName - mediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(AVCaptureSession *)videoCaptureSession - audioCaptureSession:(AVCaptureSession *)audioCaptureSession - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - error:(NSError **)error; - -/// Initializes a camera instance. -/// Allows for testing with specified resolution, audio preference, orientation, -/// and direct access to capture sessions and blocks. -- (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(AVCaptureSession *)videoCaptureSession - audioCaptureSession:(AVCaptureSession *)audioCaptureSession - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory - videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat - error:(NSError **)error; - /// Start streaming images. 
- (void)startImageStreamWithMessenger:(NSObject *)messenger imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h index 79539e4bd40e..4e18ba71c465 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h @@ -3,6 +3,7 @@ // found in the LICENSE file. #import "FLTSavePhotoDelegate.h" +#import "FLTPhotoData.h" /// API exposed for unit tests. @interface FLTSavePhotoDelegate () @@ -20,5 +21,5 @@ /// @param error the capture error. /// @param photoDataProvider a closure that provides photo data. - (void)handlePhotoCaptureResultWithError:(NSError *)error - photoDataProvider:(NSData * (^)(void))photoDataProvider; + photoDataProvider:(id (^)(void))photoDataProvider; @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h new file mode 100644 index 000000000000..b6c2501f1703 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h @@ -0,0 +1,37 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTCaptureSessionProtocol + +- (void)beginConfiguration; +- (void)commitConfiguration; +- (void)startRunning; +- (void)stopRunning; +- (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset; +- (void)addInputWithNoConnections:(AVCaptureInput *)input; +- (void)addOutputWithNoConnections:(AVCaptureOutput *)output; +- (void)addConnection:(AVCaptureConnection *)connection; +- (void)addOutput:(AVCaptureOutput *)output; +- (void)removeInput:(AVCaptureInput *)input; +- (void)removeOutput:(AVCaptureOutput *)output; +- (BOOL)canAddInput:(AVCaptureInput *)input; +- (BOOL)canAddOutput:(AVCaptureOutput *)output; +- (BOOL)canAddConnection:(AVCaptureConnection *)connection; +- (void)addInput:(AVCaptureInput *)input; +@property(nonatomic, copy) AVCaptureSessionPreset sessionPreset; +@property(nonatomic, readonly) NSArray *inputs; +@property(nonatomic, readonly) NSArray *outputs; + +@end + + +@interface FLTDefaultCaptureSession : NSObject +- (instancetype)initWithCaptureSession:(AVCaptureSession *)session; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPhotoData.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPhotoData.h new file mode 100644 index 000000000000..7afd7b8bc88a --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPhotoData.h @@ -0,0 +1,21 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import Foundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTPhotoData +- (BOOL)writeToFile:(NSString *)path + options:(NSDataWritingOptions)writeOptionsMask + error:(NSError **)errorPtr; +@end + + +@interface FLTDefaultPhotoData : NSObject +@property(nonatomic, strong, readonly) NSData *data; +- (instancetype)initWithData:(NSData *)data; +@end + +NS_ASSUME_NONNULL_END From 4bdb8a942cabb33993a9b12c5054a4b851d91fa4 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Tue, 17 Dec 2024 12:03:49 +0100 Subject: [PATCH 09/16] Add new mocks and migrate more tests --- .../ios/Runner.xcodeproj/project.pbxproj | 140 ++++---- .../ios/RunnerTests/AvailableCamerasTest.m | 41 ++- .../ios/RunnerTests/CameraExposureTests.m | 31 +- .../ios/RunnerTests/CameraFocusTests.m | 38 +-- .../RunnerTests/CameraMethodChannelTests.m | 44 +-- .../ios/RunnerTests/CameraOrientationTests.m | 77 ++--- .../RunnerTests/CameraSessionPresetsTests.m | 31 +- .../ios/RunnerTests/CameraSettingsTests.m | 27 +- .../example/ios/RunnerTests/CameraTestUtils.h | 9 +- .../example/ios/RunnerTests/CameraTestUtils.m | 131 ++++---- .../ios/RunnerTests/FLTCamPhotoCaptureTests.m | 116 +++---- .../ios/RunnerTests/FLTCamSampleBufferTests.m | 305 +++++++----------- .../FLTCameraPermissionManagerTests.m | 85 +++-- .../RunnerTests/FLTSavePhotoDelegateTests.m | 16 +- .../example/ios/RunnerTests/MockAssetWriter.h | 24 ++ .../example/ios/RunnerTests/MockAssetWriter.m | 50 +++ .../RunnerTests/MockCameraDeviceDiscovery.h | 6 +- .../RunnerTests/MockCameraDeviceDiscovery.m | 15 +- .../ios/RunnerTests/MockCaptureConnection.h | 18 ++ .../ios/RunnerTests/MockCaptureConnection.m | 19 ++ .../RunnerTests/MockCaptureDeviceController.h | 18 +- .../RunnerTests/MockCaptureDeviceController.m | 142 ++++---- .../ios/RunnerTests/MockCapturePhotoOutput.h | 12 + .../ios/RunnerTests/MockCapturePhotoOutput.m | 14 + .../RunnerTests/MockCapturePhotoSettings.h | 13 + .../RunnerTests/MockCapturePhotoSettings.m | 9 + .../ios/RunnerTests/MockCaptureSession.h | 18 +- .../ios/RunnerTests/MockCaptureSession.m | 47 +-- .../example/ios/RunnerTests/MockPhotoData.h | 6 +- .../example/ios/RunnerTests/MockPhotoData.m | 4 +- .../example/ios/RunnerTests/StreamingTest.m | 65 +++- .../RunnerTests/ThreadSafeEventChannelTests.m | 35 +- .../camera_avfoundation/CameraPlugin.m | 110 ++++--- .../Sources/camera_avfoundation/FLTCam.m | 160 ++++----- .../FLTCamMediaSettingsAVWrapper.m | 12 +- .../FLTCameraPermissionManager.m | 46 +-- .../FLTSavePhotoDelegate.m | 6 +- .../FLTThreadSafeEventChannel.m | 2 +- .../Protocols/FLTAssetWriter.m | 93 ++++++ .../Protocols/FLTCameraDeviceDiscovery.m | 26 +- .../Protocols/FLTCaptureConnection.m | 49 +++ .../Protocols/FLTCaptureDeviceControlling.m | 102 +++--- .../Protocols/FLTCapturePhotoOutput.m | 50 +++ .../Protocols/FLTCapturePhotoSettings.m | 34 ++ .../Protocols/FLTCaptureSessionProtocol.m | 40 +-- .../Protocols/FLTDeviceOrientationProviding.m | 2 +- .../Protocols/FLTPermissionService.m | 7 +- .../Protocols/FLTPhotoData.m | 4 +- .../include/CameraPlugin.modulemap | 4 + .../camera_avfoundation/CameraPlugin.h | 5 +- .../camera_avfoundation/CameraPlugin_Test.h | 5 +- .../include/camera_avfoundation/FLTCam.h | 39 ++- .../FLTCamMediaSettingsAVWrapper.h | 9 +- .../include/camera_avfoundation/FLTCam_Test.h | 11 +- .../FLTCameraPermissionManager.h | 10 +- .../FLTSavePhotoDelegate_Test.h | 2 +- .../Protocols/FLTAssetWriter.h | 42 +++ .../Protocols/FLTCameraDeviceDiscovery.h | 7 +- .../Protocols/FLTCaptureConnection.h | 21 ++ 
.../Protocols/FLTCaptureDeviceControlling.h | 20 +- .../Protocols/FLTCapturePhotoOutput.h | 30 ++ .../Protocols/FLTCapturePhotoSettings.h | 24 ++ .../Protocols/FLTCaptureSessionProtocol.h | 1 - .../Protocols/FLTPermissionService.h | 2 +- .../Protocols/FLTPhotoData.h | 5 +- .../include/camera_avfoundation/QueueUtils.h | 2 +- 66 files changed, 1599 insertions(+), 989 deletions(-) create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.h create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.m create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.h create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.m create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.h create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.m create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.h create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTAssetWriter.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureConnection.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoOutput.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTAssetWriter.h create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureConnection.h create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index 98e6cb7657bf..c59c7e652ae4 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -17,6 +17,7 @@ 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; }; 78A318202AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage in Frameworks */ = {isa = PBXBuildFile; productRef = 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */; }; 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */; }; + 7F29385D2D10A728009D2F67 /* MockCaptureConnection.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F29385C2D10A728009D2F67 /* MockCaptureConnection.m */; }; 7F87E8022D01FD6F00A3549C /* MockCaptureDeviceController.m in Sources */ = 
{isa = PBXBuildFile; fileRef = 7F87E8012D01FD5600A3549C /* MockCaptureDeviceController.m */; }; 7F87E80C2D0325D900A3549C /* MockDeviceOrientationProvider.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80B2D0325D700A3549C /* MockDeviceOrientationProvider.m */; }; 7F87E80E2D03283E00A3549C /* CameraExposureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */; }; @@ -24,13 +25,15 @@ 7F87E8262D06EBCB00A3549C /* MockCameraDeviceDiscovery.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E8252D06EBCB00A3549C /* MockCameraDeviceDiscovery.m */; }; 7F87E8342D072F9A00A3549C /* MockCaptureSession.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E8332D072F9A00A3549C /* MockCaptureSession.m */; }; 7F87E83B2D09B4A300A3549C /* MockPhotoData.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E83A2D09B4A300A3549C /* MockPhotoData.m */; }; + 7F87E8422D0AF98D00A3549C /* MockAssetWriter.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E8412D0AF98D00A3549C /* MockAssetWriter.m */; }; + 7F87E84D2D0B248A00A3549C /* MockCapturePhotoSettings.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E84C2D0B248A00A3549C /* MockCapturePhotoSettings.m */; }; + 7F87E8502D0B30DD00A3549C /* MockCapturePhotoOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E84F2D0B30DD00A3549C /* MockCapturePhotoOutput.m */; }; 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; - A513EFD72CD1B8802B44FD82 /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = C5BBE9E0BEF3DC97699CB764 /* libPods-Runner.a */; }; - B283936252DB6663B9EC9A05 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 5E3D63425720360F63D4921B /* libPods-RunnerTests.a */; }; + ABA022F748E0C5AECBCD8F5F /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 8FE1D7C55B3760E3991BE41E /* libPods-Runner.a */; }; CEF6611A2B5E36A500D33FD4 /* CameraSessionPresetsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = CEF661192B5E36A500D33FD4 /* CameraSessionPresetsTests.m */; }; E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */; }; E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */; }; @@ -42,6 +45,7 @@ E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */; }; E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */; }; E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */; }; + 
F392940CDE88632C06D6CB59 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 6982009932DF1932663D04D5 /* libPods-RunnerTests.a */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -73,17 +77,21 @@ 03BB766A2665316900CE5A93 /* CameraFocusTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraFocusTests.m; sourceTree = ""; }; 03BB766C2665316900CE5A93 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraOrientationTests.m; sourceTree = ""; }; + 10C4CE57A7EA31FA7C113654 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; }; 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; }; 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; 43ED1536282570DE00EB00DE /* AvailableCamerasTest.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AvailableCamerasTest.m; sourceTree = ""; }; - 5E3D63425720360F63D4921B /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; }; - 60F3C27E53F2AF8B81A89EA9 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; + 5A32C345E4881D9C7CE9479C /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = ""; }; + 6982009932DF1932663D04D5 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + 73BD4FD74789D3EB46FB5774 /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = ""; }; 788A065927B0E02900533D74 /* StreamingTest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamingTest.m; sourceTree = ""; }; 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = 
AppDelegate.h; sourceTree = ""; }; 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraSettingsTests.m; sourceTree = ""; }; + 7F29385A2D10A653009D2F67 /* MockCaptureConnection.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureConnection.h; sourceTree = ""; }; + 7F29385C2D10A728009D2F67 /* MockCaptureConnection.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureConnection.m; sourceTree = ""; }; 7F87E8012D01FD5600A3549C /* MockCaptureDeviceController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureDeviceController.m; sourceTree = ""; }; 7F87E8032D02FF8C00A3549C /* MockCaptureDeviceController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureDeviceController.h; sourceTree = ""; }; 7F87E80A2D0325B200A3549C /* MockDeviceOrientationProvider.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockDeviceOrientationProvider.h; sourceTree = ""; }; @@ -97,6 +105,14 @@ 7F87E8332D072F9A00A3549C /* MockCaptureSession.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureSession.m; sourceTree = ""; }; 7F87E8392D09B45300A3549C /* MockPhotoData.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockPhotoData.h; sourceTree = ""; }; 7F87E83A2D09B4A300A3549C /* MockPhotoData.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockPhotoData.m; sourceTree = ""; }; + 7F87E8402D0AF96400A3549C /* MockAssetWriter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockAssetWriter.h; sourceTree = ""; }; + 7F87E8412D0AF98D00A3549C /* MockAssetWriter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockAssetWriter.m; sourceTree = ""; }; + 7F87E84B2D0B245E00A3549C /* MockCapturePhotoSettings.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCapturePhotoSettings.h; sourceTree = ""; }; + 7F87E84C2D0B248A00A3549C /* MockCapturePhotoSettings.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCapturePhotoSettings.m; sourceTree = ""; }; + 7F87E84E2D0B30CD00A3549C /* MockCapturePhotoOutput.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCapturePhotoOutput.h; sourceTree = ""; }; + 7F87E84F2D0B30DD00A3549C /* MockCapturePhotoOutput.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCapturePhotoOutput.m; sourceTree = ""; }; + 8FE1D7C55B3760E3991BE41E /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + 93DE3DA611CB15AE1AF7956C /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; }; 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -105,8 +121,6 @@ 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; - C5BBE9E0BEF3DC97699CB764 /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; }; - CB65379B3085E03D11D2786A /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = ""; }; CEF661192B5E36A500D33FD4 /* CameraSessionPresetsTests.m */ = {isa = PBXFileReference; indentWidth = 2; lastKnownFileType = sourcecode.c.objc; path = CameraSessionPresetsTests.m; sourceTree = ""; }; E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = QueueUtilsTests.m; sourceTree = ""; }; E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraCaptureSessionQueueRaceConditionTests.m; sourceTree = ""; }; @@ -118,8 +132,6 @@ E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CameraTestUtils.h; sourceTree = ""; }; E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraTestUtils.m; sourceTree = ""; }; E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPropertiesTests.m; sourceTree = ""; }; - E27055DF15226B1DFE032420 /* Pods-RunnerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.debug.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.debug.xcconfig"; sourceTree = ""; }; - E28469137832D102541045F6 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPreviewPauseTests.m; sourceTree = ""; }; /* End PBXFileReference section */ @@ -129,7 +141,7 @@ buildActionMask = 2147483647; files = ( 408D7A792C3C9CD000B71F9A /* OCMock in Frameworks */, - B283936252DB6663B9EC9A05 /* libPods-RunnerTests.a in Frameworks */, + F392940CDE88632C06D6CB59 /* libPods-RunnerTests.a in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -160,6 +172,14 @@ 7F87E8332D072F9A00A3549C /* 
MockCaptureSession.m */, 7F87E8392D09B45300A3549C /* MockPhotoData.h */, 7F87E83A2D09B4A300A3549C /* MockPhotoData.m */, + 7F87E8402D0AF96400A3549C /* MockAssetWriter.h */, + 7F87E8412D0AF98D00A3549C /* MockAssetWriter.m */, + 7F87E84B2D0B245E00A3549C /* MockCapturePhotoSettings.h */, + 7F87E84C2D0B248A00A3549C /* MockCapturePhotoSettings.m */, + 7F87E84E2D0B30CD00A3549C /* MockCapturePhotoOutput.h */, + 7F87E84F2D0B30DD00A3549C /* MockCapturePhotoOutput.m */, + 7F29385A2D10A653009D2F67 /* MockCaptureConnection.h */, + 7F29385C2D10A728009D2F67 /* MockCaptureConnection.m */, 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */, 03BB766A2665316900CE5A93 /* CameraFocusTests.m */, 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */, @@ -184,6 +204,15 @@ path = RunnerTests; sourceTree = ""; }; + 483773B8AC5ACAFFC7939408 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 8FE1D7C55B3760E3991BE41E /* libPods-Runner.a */, + 6982009932DF1932663D04D5 /* libPods-RunnerTests.a */, + ); + name = Frameworks; + sourceTree = ""; + }; 9740EEB11CF90186004384FC /* Flutter */ = { isa = PBXGroup; children = ( @@ -203,7 +232,7 @@ 03BB76692665316900CE5A93 /* RunnerTests */, 97C146EF1CF9000F007C117D /* Products */, FD386F00E98D73419C929072 /* Pods */, - C004519075D29BB6F75ED9E6 /* Frameworks */, + 483773B8AC5ACAFFC7939408 /* Frameworks */, ); sourceTree = ""; }; @@ -240,22 +269,13 @@ name = "Supporting Files"; sourceTree = ""; }; - C004519075D29BB6F75ED9E6 /* Frameworks */ = { - isa = PBXGroup; - children = ( - C5BBE9E0BEF3DC97699CB764 /* libPods-Runner.a */, - 5E3D63425720360F63D4921B /* libPods-RunnerTests.a */, - ); - name = Frameworks; - sourceTree = ""; - }; FD386F00E98D73419C929072 /* Pods */ = { isa = PBXGroup; children = ( - 60F3C27E53F2AF8B81A89EA9 /* Pods-Runner.debug.xcconfig */, - E28469137832D102541045F6 /* Pods-Runner.release.xcconfig */, - E27055DF15226B1DFE032420 /* Pods-RunnerTests.debug.xcconfig */, - CB65379B3085E03D11D2786A /* Pods-RunnerTests.release.xcconfig */, + 10C4CE57A7EA31FA7C113654 /* Pods-Runner.debug.xcconfig */, + 93DE3DA611CB15AE1AF7956C /* Pods-Runner.release.xcconfig */, + 73BD4FD74789D3EB46FB5774 /* Pods-RunnerTests.debug.xcconfig */, + 5A32C345E4881D9C7CE9479C /* Pods-RunnerTests.release.xcconfig */, ); path = Pods; sourceTree = ""; @@ -267,7 +287,7 @@ isa = PBXNativeTarget; buildConfigurationList = 03BB76712665316900CE5A93 /* Build configuration list for PBXNativeTarget "RunnerTests" */; buildPhases = ( - D4954E02020248A842E1E7B3 /* [CP] Check Pods Manifest.lock */, + E15D6BA2F8A105D236FE8B62 /* [CP] Check Pods Manifest.lock */, 03BB76642665316900CE5A93 /* Sources */, 03BB76652665316900CE5A93 /* Frameworks */, 03BB76662665316900CE5A93 /* Resources */, @@ -289,14 +309,14 @@ isa = PBXNativeTarget; buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */; buildPhases = ( - 3770D57B1AB91BF2FCC5DE94 /* [CP] Check Pods Manifest.lock */, + 8DA8F843F355F2F3627CE806 /* [CP] Check Pods Manifest.lock */, 9740EEB61CF901F6004384FC /* Run Script */, 97C146EA1CF9000F007C117D /* Sources */, 97C146EB1CF9000F007C117D /* Frameworks */, 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, - BC4B4B1DFD7B8E3FA1EB91B9 /* [CP] Copy Pods Resources */, + 4A0695CE70E797FA66DE2DC0 /* [CP] Copy Pods Resources */, ); buildRules = ( ); @@ -375,82 +395,82 @@ /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ - 3770D57B1AB91BF2FCC5DE94 
/* [CP] Check Pods Manifest.lock */ = { + 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); - inputFileListPaths = ( - ); inputPaths = ( - "${PODS_PODFILE_DIR_PATH}/Podfile.lock", - "${PODS_ROOT}/Manifest.lock", - ); - name = "[CP] Check Pods Manifest.lock"; - outputFileListPaths = ( + "${TARGET_BUILD_DIR}/${INFOPLIST_PATH}", ); + name = "Thin Binary"; outputPaths = ( - "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; - showEnvVarsInLog = 0; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; }; - 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { + 4A0695CE70E797FA66DE2DC0 /* [CP] Copy Pods Resources */ = { isa = PBXShellScriptBuildPhase; - alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); inputPaths = ( - "${TARGET_BUILD_DIR}/${INFOPLIST_PATH}", + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh", + "${PODS_CONFIGURATION_BUILD_DIR}/camera_avfoundation/camera_avfoundation_privacy.bundle", + "${PODS_CONFIGURATION_BUILD_DIR}/path_provider_foundation/path_provider_foundation_privacy.bundle", + "${PODS_CONFIGURATION_BUILD_DIR}/video_player_avfoundation/video_player_avfoundation_privacy.bundle", ); - name = "Thin Binary"; + name = "[CP] Copy Pods Resources"; outputPaths = ( + "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/camera_avfoundation_privacy.bundle", + "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/path_provider_foundation_privacy.bundle", + "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/video_player_avfoundation_privacy.bundle", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n"; + showEnvVarsInLog = 0; }; - 9740EEB61CF901F6004384FC /* Run Script */ = { + 8DA8F843F355F2F3627CE806 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; - alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); + inputFileListPaths = ( + ); inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( ); - name = "Run Script"; outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; }; - BC4B4B1DFD7B8E3FA1EB91B9 /* [CP] Copy Pods Resources */ = { + 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); inputPaths = ( - "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh", - "${PODS_CONFIGURATION_BUILD_DIR}/camera_avfoundation/camera_avfoundation_privacy.bundle", - "${PODS_CONFIGURATION_BUILD_DIR}/path_provider_foundation/path_provider_foundation_privacy.bundle", - "${PODS_CONFIGURATION_BUILD_DIR}/video_player_avfoundation/video_player_avfoundation_privacy.bundle", ); - name = "[CP] Copy Pods Resources"; + name = "Run Script"; outputPaths = ( - "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/camera_avfoundation_privacy.bundle", - "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/path_provider_foundation_privacy.bundle", - "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/video_player_avfoundation_privacy.bundle", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n"; - showEnvVarsInLog = 0; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; }; - D4954E02020248A842E1E7B3 /* [CP] Check Pods Manifest.lock */ = { + E15D6BA2F8A105D236FE8B62 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( @@ -480,10 +500,14 @@ buildActionMask = 2147483647; files = ( 033B94BE269C40A200B4DF97 /* CameraMethodChannelTests.m in Sources */, + 7F87E8502D0B30DD00A3549C /* MockCapturePhotoOutput.m in Sources */, E071CF7227B3061B006EF3BA /* FLTCamPhotoCaptureTests.m in Sources */, 7F87E83B2D09B4A300A3549C /* MockPhotoData.m in Sources */, + 7F29385D2D10A728009D2F67 /* MockCaptureConnection.m in Sources */, E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */, + 7F87E8422D0AF98D00A3549C /* MockAssetWriter.m in Sources */, 03BB766B2665316900CE5A93 /* CameraFocusTests.m in Sources */, + 7F87E84D2D0B248A00A3549C /* MockCapturePhotoSettings.m in Sources */, 7F87E8022D01FD6F00A3549C /* MockCaptureDeviceController.m in Sources */, 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */, E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */, diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m index c59c9fdd1391..82a51ba3a346 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m @@ -21,17 +21,20 @@ @implementation AvailableCamerasTest - (void)setUp { [super setUp]; - + self.mockDeviceDiscovery = [[MockCameraDeviceDiscovery alloc] init]; - self.cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil globalAPI:nil deviceDiscovery:_mockDeviceDiscovery - sessionFactory:^id{ - return nil; - } deviceFactory:^id(NSString *name) { - return nil; - }]; + self.cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:nil + deviceDiscovery:_mockDeviceDiscovery + sessionFactory:^id { + return nil; + } + 
deviceFactory:^id(NSString *name) { + return nil; + }]; } - - (void)testAvailableCamerasShouldReturnAllCamerasOnMultiCameraIPhone { XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; @@ -39,7 +42,7 @@ - (void)testAvailableCamerasShouldReturnAllCamerasOnMultiCameraIPhone { MockCaptureDeviceController *wideAngleCamera = [[MockCaptureDeviceController alloc] init]; wideAngleCamera.uniqueID = @"0"; wideAngleCamera.position = AVCaptureDevicePositionBack; - + MockCaptureDeviceController *frontFacingCamera = [[MockCaptureDeviceController alloc] init]; frontFacingCamera.uniqueID = @"1"; frontFacingCamera.position = AVCaptureDevicePositionFront; @@ -47,7 +50,7 @@ - (void)testAvailableCamerasShouldReturnAllCamerasOnMultiCameraIPhone { MockCaptureDeviceController *ultraWideCamera = [[MockCaptureDeviceController alloc] init]; ultraWideCamera.uniqueID = @"2"; ultraWideCamera.position = AVCaptureDevicePositionBack; - + MockCaptureDeviceController *telephotoCamera = [[MockCaptureDeviceController alloc] init]; telephotoCamera.uniqueID = @"3"; telephotoCamera.position = AVCaptureDevicePositionBack; @@ -64,14 +67,16 @@ - (void)testAvailableCamerasShouldReturnAllCamerasOnMultiCameraIPhone { if (@available(iOS 13.0, *)) { [cameras addObject:ultraWideCamera]; } - - _mockDeviceDiscovery.discoverySessionStub = ^NSArray> * _Nullable(NSArray * _Nonnull deviceTypes, AVMediaType _Nonnull mediaType, AVCaptureDevicePosition position) { + + _mockDeviceDiscovery.discoverySessionStub = ^NSArray> *_Nullable( + NSArray *_Nonnull deviceTypes, AVMediaType _Nonnull mediaType, + AVCaptureDevicePosition position) { XCTAssertEqualObjects(deviceTypes, requiredTypes); XCTAssertEqual(mediaType, AVMediaTypeVideo); XCTAssertEqual(position, AVCaptureDevicePositionUnspecified); return cameras; }; - + __block NSArray *resultValue; [_cameraPlugin availableCamerasWithCompletion:^(NSArray *_Nullable result, @@ -96,7 +101,7 @@ - (void)testAvailableCamerasShouldReturnOneCameraOnSingleCameraIPhone { MockCaptureDeviceController *wideAngleCamera = [[MockCaptureDeviceController alloc] init]; wideAngleCamera.uniqueID = @"0"; wideAngleCamera.position = AVCaptureDevicePositionBack; - + MockCaptureDeviceController *frontFacingCamera = [[MockCaptureDeviceController alloc] init]; frontFacingCamera.uniqueID = @"1"; frontFacingCamera.position = AVCaptureDevicePositionFront; @@ -107,11 +112,13 @@ - (void)testAvailableCamerasShouldReturnOneCameraOnSingleCameraIPhone { if (@available(iOS 13.0, *)) { [requiredTypes addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera]; } - + NSMutableArray *cameras = [NSMutableArray array]; [cameras addObjectsFromArray:@[ wideAngleCamera, frontFacingCamera ]]; - - _mockDeviceDiscovery.discoverySessionStub = ^NSArray> * _Nullable(NSArray * _Nonnull deviceTypes, AVMediaType _Nonnull mediaType, AVCaptureDevicePosition position) { + + _mockDeviceDiscovery.discoverySessionStub = ^NSArray> *_Nullable( + NSArray *_Nonnull deviceTypes, AVMediaType _Nonnull mediaType, + AVCaptureDevicePosition position) { XCTAssertEqualObjects(deviceTypes, requiredTypes); XCTAssertEqual(mediaType, AVMediaTypeVideo); XCTAssertEqual(position, AVCaptureDevicePositionUnspecified); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m index bbe3cf055a9a..28a5f7e1fcce 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m +++ 
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraExposureTests.m @@ -21,7 +21,7 @@ - (void)setUp { _camera = [[FLTCam alloc] init]; _mockDevice = [[MockCaptureDeviceController alloc] init]; _mockDeviceOrientationProvider = [[MockDeviceOrientationProvider alloc] init]; - + [_camera setValue:_mockDevice forKey:@"captureDevice"]; [_camera setValue:_mockDeviceOrientationProvider forKey:@"deviceOrientationProvider"]; } @@ -31,7 +31,7 @@ - (void)testSetExposurePointWithResult_SetsExposurePointOfInterest { _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; // Exposure point of interest is supported _mockDevice.isExposurePointOfInterestSupported = YES; - + // Verify the focus point of interest has been set __block CGPoint setPoint = CGPointZero; _mockDevice.setExposurePointOfInterestStub = ^(CGPoint point) { @@ -43,9 +43,9 @@ - (void)testSetExposurePointWithResult_SetsExposurePointOfInterest { // Run test XCTestExpectation *completionExpectation = [self expectationWithDescription:@"Completion called"]; [_camera setExposurePoint:[FCPPlatformPoint makeWithX:1 y:1] - withCompletion:^(FlutterError * _Nullable error) { - XCTAssertNil(error); - [completionExpectation fulfill]; + withCompletion:^(FlutterError *_Nullable error) { + XCTAssertNil(error); + [completionExpectation fulfill]; }]; [self waitForExpectationsWithTimeout:1 handler:nil]; @@ -58,18 +58,19 @@ - (void)testSetExposurePoint_WhenNotSupported_ReturnsError { _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; // Exposure point of interest is not supported _mockDevice.isExposurePointOfInterestSupported = NO; - + XCTestExpectation *expectation = [self expectationWithDescription:@"Completion with error"]; - + // Run - [_camera setExposurePoint:[FCPPlatformPoint makeWithX:1 y:1] - withCompletion:^(FlutterError *_Nullable error) { - XCTAssertNotNil(error); - XCTAssertEqualObjects(error.code, @"setExposurePointFailed"); - XCTAssertEqualObjects(error.message, @"Device does not have exposure point capabilities"); - [expectation fulfill]; - }]; - + [_camera + setExposurePoint:[FCPPlatformPoint makeWithX:1 y:1] + withCompletion:^(FlutterError *_Nullable error) { + XCTAssertNotNil(error); + XCTAssertEqualObjects(error.code, @"setExposurePointFailed"); + XCTAssertEqualObjects(error.message, @"Device does not have exposure point capabilities"); + [expectation fulfill]; + }]; + // Verify [self waitForExpectationsWithTimeout:1 handler:nil]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m index 27f5e21bdfab..f7d96c3aa75c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraFocusTests.m @@ -24,7 +24,7 @@ - (void)setUp { _camera = [[FLTCam alloc] init]; _mockDevice = [[MockCaptureDeviceController alloc] init]; _mockDeviceOrientationProvider = [[MockDeviceOrientationProvider alloc] init]; - + [_camera setValue:_mockDevice forKey:@"captureDevice"]; [_camera setValue:_mockDeviceOrientationProvider forKey:@"deviceOrientationProvider"]; } @@ -34,7 +34,7 @@ - (void)testAutoFocusWithContinuousModeSupported_ShouldSetContinuousAutoFocus { _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { return mode == AVCaptureFocusModeContinuousAutoFocus || mode == AVCaptureFocusModeAutoFocus; }; - + __block BOOL setFocusModeContinuousAutoFocusCalled = 
NO; _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { @@ -59,9 +59,9 @@ - (void)testAutoFocusWithContinuousModeNotSupported_ShouldSetAutoFocus { _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { return mode == AVCaptureFocusModeAutoFocus; }; - + __block BOOL setFocusModeAutoFocusCalled = NO; - + // Don't expect setFocusMode:AVCaptureFocusModeContinuousAutoFocus _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { if (mode == AVCaptureFocusModeContinuousAutoFocus) { @@ -70,10 +70,10 @@ - (void)testAutoFocusWithContinuousModeNotSupported_ShouldSetAutoFocus { setFocusModeAutoFocusCalled = YES; } }; - + // Run test [_camera applyFocusMode:FCPPlatformFocusModeAuto onDevice:_mockDevice]; - + // Expect setFocusMode:AVCaptureFocusModeAutoFocus XCTAssertTrue(setFocusModeAutoFocusCalled); } @@ -86,7 +86,7 @@ - (void)testAutoFocusWithNoModeSupported_ShouldSetNothing { // Don't expect any setFocus _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { - XCTFail(@"Unexpected call to setFocusMode"); + XCTFail(@"Unexpected call to setFocusMode"); }; // Run test @@ -98,7 +98,7 @@ - (void)testLockedFocusWithModeSupported_ShouldSetModeAutoFocus { _mockDevice.isFocusModeSupportedStub = ^BOOL(AVCaptureFocusMode mode) { return mode == AVCaptureFocusModeContinuousAutoFocus || mode == AVCaptureFocusModeAutoFocus; }; - + __block BOOL setFocusModeAutoFocusCalled = NO; // Expect only setFocusMode:AVCaptureFocusModeAutoFocus @@ -109,7 +109,7 @@ - (void)testLockedFocusWithModeSupported_ShouldSetModeAutoFocus { setFocusModeAutoFocusCalled = YES; } }; - + // Run test [_camera applyFocusMode:FCPPlatformFocusModeLocked onDevice:_mockDevice]; @@ -123,7 +123,7 @@ - (void)testLockedFocusWithModeNotSupported_ShouldSetNothing { // Don't expect any setFocus _mockDevice.setFocusModeStub = ^(AVCaptureFocusMode mode) { - XCTFail(@"Unexpected call to setFocusMode"); + XCTFail(@"Unexpected call to setFocusMode"); }; // Run test @@ -157,18 +157,18 @@ - (void)testSetFocusPoint_WhenNotSupported_ReturnsError { _mockDeviceOrientationProvider.orientation = UIDeviceOrientationLandscapeLeft; // Exposure point of interest is not supported _mockDevice.isFocusPointOfInterestSupported = NO; - + XCTestExpectation *expectation = [self expectationWithDescription:@"Completion with error"]; - + // Run [_camera setFocusPoint:[FCPPlatformPoint makeWithX:1 y:1] - withCompletion:^(FlutterError *_Nullable error) { - XCTAssertNotNil(error); - XCTAssertEqualObjects(error.code, @"setFocusPointFailed"); - XCTAssertEqualObjects(error.message, @"Device does not have focus point capabilities"); - [expectation fulfill]; - }]; - + withCompletion:^(FlutterError *_Nullable error) { + XCTAssertNotNil(error); + XCTAssertEqualObjects(error.code, @"setFocusPointFailed"); + XCTAssertEqualObjects(error.message, @"Device does not have focus point capabilities"); + [expectation fulfill]; + }]; + // Verify [self waitForExpectationsWithTimeout:1 handler:nil]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m index 6bfc204d019a..c86346a2794c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m @@ -9,8 +9,8 @@ @import XCTest; @import AVFoundation; -#import "MockCaptureSession.h" #import "MockCaptureDeviceController.h" +#import "MockCaptureSession.h" 
@interface CameraMethodChannelTests : XCTestCase @end @@ -20,14 +20,19 @@ @implementation CameraMethodChannelTests - (void)testCreate_ShouldCallResultOnMainThread { MockCaptureSession *avCaptureSessionMock = [[MockCaptureSession alloc] init]; avCaptureSessionMock.mockCanSetSessionPreset = YES; - + MockCaptureDeviceController *mockDeviceController = [[MockCaptureDeviceController alloc] init]; - - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil globalAPI:nil deviceDiscovery:nil sessionFactory:^id{ - return avCaptureSessionMock; - } deviceFactory:^id(NSString *name) { - return mockDeviceController; - }]; + + CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:nil + deviceDiscovery:nil + sessionFactory:^id { + return avCaptureSessionMock; + } + deviceFactory:^id(NSString *name) { + return mockDeviceController; + }]; XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; @@ -55,18 +60,19 @@ - (void)testCreate_ShouldCallResultOnMainThread { - (void)testDisposeShouldDeallocCamera { MockCaptureSession *avCaptureSessionMock = [[MockCaptureSession alloc] init]; avCaptureSessionMock.mockCanSetSessionPreset = YES; - + MockCaptureDeviceController *mockDeviceController = [[MockCaptureDeviceController alloc] init]; - - CameraPlugin *camera = [[CameraPlugin alloc] - initWithRegistry:nil - messenger:nil - globalAPI:nil - deviceDiscovery:nil sessionFactory:^id{ - return avCaptureSessionMock; - } deviceFactory:^id(NSString *name) { - return mockDeviceController; - }]; + + CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:nil + deviceDiscovery:nil + sessionFactory:^id { + return avCaptureSessionMock; + } + deviceFactory:^id(NSString *name) { + return mockDeviceController; + }]; XCTestExpectation *createExpectation = [self expectationWithDescription:@"create's result block must be called"]; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m index 55012b0c4bcd..720648a83890 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m @@ -9,10 +9,10 @@ @import XCTest; @import Flutter; -#import "MockCaptureDeviceController.h" -#import "MockDeviceOrientationProvider.h" #import "MockCameraDeviceDiscovery.h" +#import "MockCaptureDeviceController.h" #import "MockCaptureSession.h" +#import "MockDeviceOrientationProvider.h" @interface StubGlobalEventApi : FCPCameraGlobalEventApi @property(nonatomic) BOOL called; @@ -41,20 +41,20 @@ @interface MockCamera : FLTCam @implementation MockCamera - (void)setDeviceOrientation:(UIDeviceOrientation)orientation { - if (self.setDeviceOrientationStub) { - self.setDeviceOrientationStub(orientation); - } + if (self.setDeviceOrientationStub) { + self.setDeviceOrientationStub(orientation); + } } @end @interface MockUIDevice : UIDevice -@property (nonatomic, assign) UIDeviceOrientation mockOrientation; +@property(nonatomic, assign) UIDeviceOrientation mockOrientation; @end @implementation MockUIDevice - (UIDeviceOrientation)orientation { - return self.mockOrientation; + return self.mockOrientation; } @end @@ -81,18 +81,19 @@ - (void)setUp { _captureSession = [[MockCaptureSession alloc] init]; [_camera setValue:_mockDevice forKey:@"captureDevice"]; - + __weak typeof(self) weakSelf = 
self; _cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil - messenger:nil - globalAPI:_eventAPI - deviceDiscovery:_deviceDiscovery - sessionFactory:^id{ - return weakSelf.captureSession; - } deviceFactory:^id(NSString *name) { - return nil; - }]; + messenger:nil + globalAPI:_eventAPI + deviceDiscovery:_deviceDiscovery + sessionFactory:^id { + return weakSelf.captureSession; + } + deviceFactory:^id(NSString *name) { + return nil; + }]; _cameraPlugin.camera = _camera; } @@ -132,17 +133,18 @@ - (void)testOrientationNotificationsNotCalledForFaceUp { - (void)testOrientationNotificationsNotCalledForFaceDown { StubGlobalEventApi *eventAPI = [[StubGlobalEventApi alloc] init]; - + __weak typeof(self) weakSelf = self; CameraPlugin *cameraPlugin = [[CameraPlugin alloc] initWithRegistry:nil - messenger:nil - globalAPI:eventAPI - deviceDiscovery:_deviceDiscovery - sessionFactory:^id{ - return weakSelf.captureSession; - } deviceFactory:^id(NSString *name) { - return nil; - }]; + messenger:nil + globalAPI:eventAPI + deviceDiscovery:_deviceDiscovery + sessionFactory:^id { + return weakSelf.captureSession; + } + deviceFactory:^id(NSString *name) { + return nil; + }]; [self sendOrientation:UIDeviceOrientationFaceDown toCamera:cameraPlugin]; @@ -158,8 +160,8 @@ - (void)testOrientationUpdateMustBeOnCaptureSessionQueue { dispatch_queue_set_specific(plugin.captureSessionQueue, captureSessionQueueSpecific, (void *)captureSessionQueueSpecific, NULL); plugin.camera = _camera; - - _camera.setDeviceOrientationStub = ^(UIDeviceOrientation orientation) { + + _camera.setDeviceOrientationStub = ^(UIDeviceOrientation orientation) { if (dispatch_get_specific(captureSessionQueueSpecific)) { [queueExpectation fulfill]; } @@ -178,14 +180,15 @@ - (void)testOrientationChanged_noRetainCycle { @autoreleasepool { CameraPlugin *plugin = [[CameraPlugin alloc] initWithRegistry:nil - messenger:nil - globalAPI:_eventAPI - deviceDiscovery:_deviceDiscovery - sessionFactory:^id{ - return weakSelf.captureSession; - } deviceFactory:^id(NSString *name) { - return nil; - }]; + messenger:nil + globalAPI:_eventAPI + deviceDiscovery:_deviceDiscovery + sessionFactory:^id { + return weakSelf.captureSession; + } + deviceFactory:^id(NSString *name) { + return nil; + }]; weakPlugin = plugin; plugin.captureSessionQueue = captureSessionQueue; plugin.camera = _camera; @@ -196,14 +199,14 @@ - (void)testOrientationChanged_noRetainCycle { // Sanity check XCTAssertNil(weakPlugin, @"Camera must have been deallocated."); - + __block BOOL setDeviceOrientationCalled = NO; _camera.setDeviceOrientationStub = ^(UIDeviceOrientation orientation) { if (orientation == UIDeviceOrientationLandscapeLeft) { setDeviceOrientationCalled = YES; } }; - + __weak StubGlobalEventApi *weakEventAPI = _eventAPI; // Must check in captureSessionQueue since orientationChanged dispatches to this queue. 
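The hand-written mocks used throughout this series all follow the same block-stub pattern: a plain NSObject subclass that conforms to one of the plugin's capture protocols and forwards each call to an optional block assigned by the test. A minimal sketch of that pattern follows; the type name, selector, and assertion below are illustrative only and are not part of this patch.

@import Foundation;

// Illustrative sketch of the block-stub mock style used by MockCaptureSession,
// MockCaptureDeviceController, MockAssetWriter, etc. (protocol conformance omitted).
@interface MockPresetSetter : NSObject
// The test injects the desired behavior or assertion through this block.
@property(nonatomic, copy) void (^setSessionPresetStub)(NSString *preset);
@end

@implementation MockPresetSetter
- (void)setSessionPreset:(NSString *)preset {
  // Forward to the test-provided block, if any; otherwise do nothing.
  if (self.setSessionPresetStub) {
    self.setSessionPresetStub(preset);
  }
}
@end

// Typical use in a test, replacing OCMStub/OCMVerify:
//   MockPresetSetter *mock = [[MockPresetSetter alloc] init];
//   mock.setSessionPresetStub = ^(NSString *preset) {
//     XCTAssertEqualObjects(preset, AVCaptureSessionPresetHigh);
//   };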
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m index 81e492d6e927..936395c7c5f9 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m @@ -9,10 +9,9 @@ @import AVFoundation; @import XCTest; -#import #import "CameraTestUtils.h" -#import "MockCaptureSession.h" #import "MockCaptureDeviceController.h" +#import "MockCaptureSession.h" /// Includes test cases related to resolution presets setting operations for FLTCam class. @interface FLTCamSessionPresetsTest : XCTestCase @@ -26,18 +25,18 @@ - (void)testResolutionPresetWithBestFormat_mustUpdateCaptureSessionPreset { XCTestExpectation *formatExpectation = [self expectationWithDescription:@"Expected format set"]; MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; - - id captureFormatMock = OCMClassMock([AVCaptureDeviceFormat class]); - MockCaptureDeviceController *captureDeviceMock = [[MockCaptureDeviceController alloc] init]; - captureDeviceMock.formats = @[captureFormatMock]; - captureDeviceMock.setActiveFormatStub = ^(AVCaptureDeviceFormat * _Nonnull format) { - if (format == captureFormatMock) { + MockCaptureDeviceFormat *fakeFormat = [[MockCaptureDeviceFormat alloc] init]; + captureDeviceMock.formats = @[ fakeFormat ]; + captureDeviceMock.activeFormat = fakeFormat; + + captureDeviceMock.setActiveFormatStub = ^(id format) { + if (format == fakeFormat) { [formatExpectation fulfill]; } }; - - videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { + + videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { if (preset == expectedPreset) { [presetExpectation fulfill]; } @@ -62,13 +61,13 @@ - (void)testResolutionPresetWithCanSetSessionPresetMax_mustUpdateCaptureSessionP MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; // Make sure that setting resolution preset for session always succeeds. videoSessionMock.mockCanSetSessionPreset = YES; - - videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { + + videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { if (preset == expectedPreset) { [expectation fulfill]; } }; - + FLTCreateCamWithVideoCaptureSession(videoSessionMock, FCPPlatformResolutionPresetMax); [self waitForExpectationsWithTimeout:1 handler:nil]; @@ -79,12 +78,12 @@ - (void)testResolutionPresetWithCanSetSessionPresetUltraHigh_mustUpdateCaptureSe XCTestExpectation *expectation = [self expectationWithDescription:@"Expected preset set"]; MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; - + // Make sure that setting resolution preset for session always succeeds. videoSessionMock.mockCanSetSessionPreset = YES; - + // Expect that setting "ultraHigh" resolutionPreset correctly updates videoCaptureSession. 
- videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { + videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { if (preset == expectedPreset) { [expectation fulfill]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m index 44748b53166c..9d72a24d9f0f 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m @@ -11,6 +11,7 @@ #import "CameraTestUtils.h" #import "MockCaptureDeviceController.h" #import "MockCaptureSession.h" +#import "MockAssetWriter.h" static const FCPPlatformResolutionPreset gTestResolutionPreset = FCPPlatformResolutionPresetMedium; static const int gTestFramesPerSecond = 15; @@ -143,10 +144,10 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter { enableAudio:gTestEnableAudio]; TestMediaSettingsAVWrapper *injectedWrapper = [[TestMediaSettingsAVWrapper alloc] initWithTestCase:self]; - + FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_create("test", NULL), settings, injectedWrapper, nil); - + dispatch_queue_create("test", NULL), settings, injectedWrapper, nil, nil, nil); + // Expect FPS configuration is passed to camera device. [self waitForExpectations:@[ injectedWrapper.lockExpectation, injectedWrapper.beginConfigurationExpectation, @@ -172,14 +173,16 @@ - (void)testSettings_ShouldBeSupportedByMethodCall { MockCaptureSession *mockSession = [[MockCaptureSession alloc] init]; mockSession.mockCanSetSessionPreset = YES; - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil - globalAPI:nil deviceDiscovery:nil sessionFactory:^id{ - return mockSession; - } - deviceFactory:^id(NSString *name) { - return mockDeviceController; - } - ]; + CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil + messenger:nil + globalAPI:nil + deviceDiscovery:nil + sessionFactory:^id { + return mockSession; + } + deviceFactory:^id(NSString *name) { + return mockDeviceController; + }]; XCTestExpectation *expectation = [self expectationWithDescription:@"Result finished"]; @@ -214,7 +217,7 @@ - (void)testSettings_ShouldSelectFormatWhichSupports60FPS { enableAudio:gTestEnableAudio]; FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_create("test", NULL), settings, nil, nil); + dispatch_queue_create("test", NULL), settings, nil, nil, nil, nil); AVFrameRateRange *range = camera.captureDevice.activeFormat.videoSupportedFrameRateRanges[0]; XCTAssertLessThanOrEqual(range.minFrameRate, 60); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h index 7273bda8a62f..b62205fc2cb1 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h @@ -20,7 +20,9 @@ extern FLTCam *_Nullable FLTCreateCamWithCaptureSessionQueueAndMediaSettings( dispatch_queue_t _Nullable captureSessionQueue, FCPPlatformMediaSettings *_Nullable mediaSettings, FLTCamMediaSettingsAVWrapper *_Nullable mediaSettingsAVWrapper, - CaptureDeviceFactory _Nullable captureDeviceFactory); + CaptureDeviceFactory _Nullable captureDeviceFactory, + id _Nullable capturePhotoOutput, + id 
_Nullable assetWriter); extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue); @@ -39,8 +41,9 @@ extern FLTCam *FLTCreateCamWithVideoCaptureSession(id /// @param videoDimensionsForFormat custom code to determine video dimensions /// @return an FLTCam object. extern FLTCam *FLTCreateCamWithVideoDimensionsForFormat( - id captureSession, FCPPlatformResolutionPreset resolutionPreset, - id captureDevice, VideoDimensionsForFormat videoDimensionsForFormat); + id captureSession, FCPPlatformResolutionPreset resolutionPreset, + id captureDevice, + VideoDimensionsForFormat videoDimensionsForFormat); /// Creates a test sample buffer. /// @return a test sample buffer. diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m index 074a9d82917b..80e9be78fa9f 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m @@ -10,6 +10,7 @@ #import "MockCaptureDeviceController.h" #import "MockCaptureSession.h" +#import "MockAssetWriter.h" static FCPPlatformMediaSettings *FCPGetDefaultMediaSettings( FCPPlatformResolutionPreset resolutionPreset) { @@ -21,13 +22,15 @@ } FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue) { - return FLTCreateCamWithCaptureSessionQueueAndMediaSettings(captureSessionQueue, nil, nil, nil); + return FLTCreateCamWithCaptureSessionQueueAndMediaSettings(captureSessionQueue, nil, nil, nil, + nil, nil); } FLTCam *FLTCreateCamWithCaptureSessionQueueAndMediaSettings( dispatch_queue_t captureSessionQueue, FCPPlatformMediaSettings *mediaSettings, - FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper, - CaptureDeviceFactory captureDeviceFactory) { + FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper, CaptureDeviceFactory captureDeviceFactory, + id capturePhotoOutput, + id assetWriter) { if (!mediaSettings) { mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMedium); } @@ -36,39 +39,38 @@ mediaSettingsAVWrapper = [[FLTCamMediaSettingsAVWrapper alloc] init]; } + if (!assetWriter) { + assetWriter = [[MockAssetWriter alloc] init]; + } + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; videoSessionMock.mockCanSetSessionPreset = YES; - + MockCaptureSession *audioSessionMock = [[MockCaptureSession alloc] init]; audioSessionMock.mockCanSetSessionPreset = YES; __block MockCaptureDeviceController *mockDevice = [[MockCaptureDeviceController alloc] init]; + id frameRateRangeMock1 = OCMClassMock([AVFrameRateRange class]); OCMStub([frameRateRangeMock1 minFrameRate]).andReturn(3); OCMStub([frameRateRangeMock1 maxFrameRate]).andReturn(30); - id captureDeviceFormatMock1 = OCMClassMock([AVCaptureDeviceFormat class]); - OCMStub([captureDeviceFormatMock1 videoSupportedFrameRateRanges]).andReturn(@[ - frameRateRangeMock1 - ]); + MockCaptureDeviceFormat *captureDeviceFormatMock1 = [[MockCaptureDeviceFormat alloc] init]; + captureDeviceFormatMock1.videoSupportedFrameRateRanges = @[ frameRateRangeMock1 ]; id frameRateRangeMock2 = OCMClassMock([AVFrameRateRange class]); OCMStub([frameRateRangeMock2 minFrameRate]).andReturn(3); OCMStub([frameRateRangeMock2 maxFrameRate]).andReturn(60); - id captureDeviceFormatMock2 = OCMClassMock([AVCaptureDeviceFormat class]); - OCMStub([captureDeviceFormatMock2 videoSupportedFrameRateRanges]).andReturn(@[ - frameRateRangeMock2 - ]); - -// id 
inputMock = OCMClassMock([AVCaptureDeviceInput class]); -// OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) -// .andReturn(inputMock); - - mockDevice.formats = @[captureDeviceFormatMock1, captureDeviceFormatMock2]; + MockCaptureDeviceFormat *captureDeviceFormatMock2 = [[MockCaptureDeviceFormat alloc] init]; + captureDeviceFormatMock2.videoSupportedFrameRateRanges = @[ frameRateRangeMock2 ]; + + id inputMock = OCMClassMock([AVCaptureDeviceInput class]); + OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) + .andReturn(inputMock); + + mockDevice.formats = @[ captureDeviceFormatMock1, captureDeviceFormatMock2 ]; mockDevice.activeFormat = captureDeviceFormatMock1; - // mockDevice.inputToReturn = inputMock; + mockDevice.inputToReturn = inputMock; - // - id fltCam = [[FLTCam alloc] initWithMediaSettings:mediaSettings mediaSettingsAVWrapper:mediaSettingsAVWrapper orientation:UIDeviceOrientationPortrait @@ -81,7 +83,11 @@ videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { return CMVideoFormatDescriptionGetDimensions(format.formatDescription); } - error:nil]; + capturePhotoOutput:capturePhotoOutput + assetWriterFactory:^id _Nonnull(NSURL *url, AVFileType fileType, NSError * _Nullable __autoreleasing * _Nullable error) { + return assetWriter; + } + error:nil]; id captureVideoDataOutputMock = [OCMockObject niceMockForClass:[AVCaptureVideoDataOutput class]]; @@ -93,54 +99,49 @@ OCMStub([captureVideoDataOutputMock sampleBufferCallbackQueue]).andReturn(captureSessionQueue); - id videoMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]); - OCMStub([videoMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY - sourcePixelBufferAttributes:OCMOCK_ANY]) - .andReturn(videoMock); - - id writerInputMock = [OCMockObject niceMockForClass:[AVAssetWriterInput class]]; - - OCMStub([writerInputMock assetWriterInputWithMediaType:AVMediaTypeAudio - outputSettings:[OCMArg any]]) - .andReturn(writerInputMock); - - OCMStub([writerInputMock assetWriterInputWithMediaType:AVMediaTypeVideo - outputSettings:[OCMArg any]]) - .andReturn(writerInputMock); + + MockPixelBufferAdaptor *videoMock = [[MockPixelBufferAdaptor alloc] init]; + MockAssetWriterInput *writerInputMock = [[MockAssetWriterInput alloc] init]; return fltCam; } FLTCam *FLTCreateCamWithVideoCaptureSession(id captureSession, FCPPlatformResolutionPreset resolutionPreset) { - id inputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) - .andReturn(inputMock); - - id audioSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]); - OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - - return [[FLTCam alloc] initWithCameraName:@"camera" - mediaSettings:FCPGetDefaultMediaSettings(resolutionPreset) - mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init] - orientation:UIDeviceOrientationPortrait - videoCaptureSession:captureSession - audioCaptureSession:audioSessionMock - captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) - error:nil]; +// id inputMock = OCMClassMock([AVCaptureDeviceInput class]); +// OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) +// .andReturn(inputMock); + + MockCaptureSession *audioSessionMock = [[MockCaptureSession alloc] init]; + audioSessionMock.mockCanSetSessionPreset = YES; + + return [[FLTCam
alloc] initWithMediaSettings:FCPGetDefaultMediaSettings(resolutionPreset) + mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init] + orientation:UIDeviceOrientationPortrait + videoCaptureSession:captureSession + audioCaptureSession:audioSessionMock + captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) + captureDeviceFactory: ^id(void) { + return [[MockCaptureDeviceController alloc] init]; + } + videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { + return CMVideoFormatDescriptionGetDimensions(format.formatDescription); + } + capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] initWithPhotoOutput:[AVCapturePhotoOutput new]] assetWriterFactory:^id _Nonnull(NSURL *url, AVFileType fileType, NSError * _Nullable __autoreleasing * _Nullable error) { + return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; + } error:nil]; } FLTCam *FLTCreateCamWithVideoDimensionsForFormat( - id captureSession, FCPPlatformResolutionPreset resolutionPreset, - id captureDevice, VideoDimensionsForFormat videoDimensionsForFormat) { - id inputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) - .andReturn(inputMock); - - id audioSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]); - OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); + id captureSession, FCPPlatformResolutionPreset resolutionPreset, + id captureDevice, + VideoDimensionsForFormat videoDimensionsForFormat) { + // id inputMock = OCMClassMock([AVCaptureDeviceInput class]); + // OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) + // .andReturn(inputMock); + // + MockCaptureSession *audioSessionMock = [[MockCaptureSession alloc] init]; + audioSessionMock.mockCanSetSessionPreset = YES; return [[FLTCam alloc] initWithMediaSettings:FCPGetDefaultMediaSettings(resolutionPreset) mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init] @@ -149,8 +150,14 @@ audioCaptureSession:audioSessionMock captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) captureDeviceFactory:^id(void) { - return captureDevice; } + return captureDevice; + } videoDimensionsForFormat:videoDimensionsForFormat + capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] + initWithPhotoOutput:[AVCapturePhotoOutput new]] + assetWriterFactory:^id _Nonnull(NSURL *url, AVFileType fileType, NSError * _Nullable __autoreleasing * _Nullable error) { + return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; + } error:nil]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m index 49e05b0ec266..d2c96092c914 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m @@ -8,8 +8,9 @@ #endif @import AVFoundation; @import XCTest; -#import #import "CameraTestUtils.h" +#import "MockCaptureDeviceController.h" +#import "MockCapturePhotoOutput.h" /// Includes test cases related to photo capture operations for FLTCam class. 
@interface FLTCamPhotoCaptureTests : XCTestCase @@ -27,22 +28,24 @@ - (void)testCaptureToFile_mustReportErrorToResultIfSavePhotoDelegateCompletionsW dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - OCMStub([mockSettings photoSettings]).andReturn(settings); + + // AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; + // id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); + // OCMStub([mockSettings photoSettings]).andReturn(settings); NSError *error = [NSError errorWithDomain:@"test" code:0 userInfo:nil]; - id mockOutput = OCMClassMock([AVCapturePhotoOutput class]); - OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY]) - .andDo(^(NSInvocation *invocation) { + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; + mockOutput.capturePhotoWithSettingsStub = + ^(id settings, id captureDelegate) { FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(nil, error); }); - }); + }; + cam.capturePhotoOutput = mockOutput; // `FLTCam::captureToFile` runs on capture session queue. @@ -67,22 +70,22 @@ - (void)testCaptureToFile_mustReportPathToResultIfSavePhotoDelegateCompletionsWi (void *)FLTCaptureSessionQueueSpecific, NULL); FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - OCMStub([mockSettings photoSettings]).andReturn(settings); + // AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; + // id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); + // OCMStub([mockSettings photoSettings]).andReturn(settings); NSString *filePath = @"test"; - id mockOutput = OCMClassMock([AVCapturePhotoOutput class]); - OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY]) - .andDo(^(NSInvocation *invocation) { + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; + mockOutput.capturePhotoWithSettingsStub = + ^(id settings, id captureDelegate) { FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(filePath, nil); }); - }); + }; cam.capturePhotoOutput = mockOutput; // `FLTCam::captureToFile` runs on capture session queue. 
@@ -105,27 +108,27 @@ - (void)testCaptureToFile_mustReportFileExtensionWithHeifWhenHEVCIsAvailableAndF FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); [cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; - AVCapturePhotoSettings *settings = - [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}]; - - id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - OCMStub([mockSettings photoSettingsWithFormat:OCMOCK_ANY]).andReturn(settings); + // AVCapturePhotoSettings *settings = + // [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : + // AVVideoCodecTypeHEVC}]; + // + // id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); + // OCMStub([mockSettings photoSettingsWithFormat:OCMOCK_ANY]).andReturn(settings); - id mockOutput = OCMClassMock([AVCapturePhotoOutput class]); + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; // Set availablePhotoCodecTypes to HEVC - NSArray *codecTypes = @[ AVVideoCodecTypeHEVC ]; - OCMStub([mockOutput availablePhotoCodecTypes]).andReturn(codecTypes); - - OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY]) - .andDo(^(NSInvocation *invocation) { + mockOutput.availablePhotoCodecTypes = @[ AVVideoCodecTypeHEVC ]; + mockOutput.capturePhotoWithSettingsStub = + ^(id settings, id photoDelegate) { FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(delegate.filePath, nil); }); - }); + }; cam.capturePhotoOutput = mockOutput; + // `FLTCam::captureToFile` runs on capture session queue. dispatch_async(captureSessionQueue, ^{ [cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) { @@ -144,24 +147,25 @@ - (void)testCaptureToFile_mustReportFileExtensionWithJpgWhenHEVCNotAvailableAndF dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - [cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; - - AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - OCMStub([mockSettings photoSettings]).andReturn(settings); + [cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; - id mockOutput = OCMClassMock([AVCapturePhotoOutput class]); + // AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; + // id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); + // OCMStub([mockSettings photoSettings]).andReturn(settings); - OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY]) - .andDo(^(NSInvocation *invocation) { + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; + mockOutput.capturePhotoWithSettingsStub = + ^(id settings, id photoDelegate) { FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(delegate.filePath, nil); }); - }); + }; + cam.capturePhotoOutput = mockOutput; + // `FLTCam::captureToFile` runs on capture session queue. 
dispatch_async(captureSessionQueue, ^{ [cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) { @@ -176,39 +180,44 @@ - (void)testCaptureToFile_handlesTorchMode { XCTestExpectation *pathExpectation = [self expectationWithDescription: @"Must send file path to result if save photo delegate completes with file path."]; + XCTestExpectation *setTorchExpectation = + [self expectationWithDescription:@"Should set torch mode to AVCaptureTorchModeOn."]; - id captureDeviceMock = OCMClassMock([AVCaptureDevice class]); - OCMStub([captureDeviceMock hasTorch]).andReturn(YES); - OCMStub([captureDeviceMock isTorchAvailable]).andReturn(YES); - OCMStub([captureDeviceMock torchMode]).andReturn(AVCaptureTorchModeAuto); - OCMExpect([captureDeviceMock setTorchMode:AVCaptureTorchModeOn]); + MockCaptureDeviceController *captureDeviceMock = [[MockCaptureDeviceController alloc] init]; + captureDeviceMock.hasTorch = YES; + captureDeviceMock.isTorchAvailable = YES; + captureDeviceMock.torchMode = AVCaptureTorchModeAuto; + captureDeviceMock.setTorchModeStub = ^(AVCaptureTorchMode mode) { + [setTorchExpectation fulfill]; + }; dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueueAndMediaSettings(captureSessionQueue, nil, nil, - ^id(void) { - return - [[FLTDefaultCaptureDeviceController alloc] initWithDevice:captureDeviceMock]; - }); + FLTCam *cam = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( + captureSessionQueue, nil, nil, + ^id(void) { + return captureDeviceMock; + }, + nil, nil); - AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - OCMStub([mockSettings photoSettings]).andReturn(settings); + // AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; + // id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); + // OCMStub([mockSettings photoSettings]).andReturn(settings); NSString *filePath = @"test"; - id mockOutput = OCMClassMock([AVCapturePhotoOutput class]); - OCMStub([mockOutput capturePhotoWithSettings:OCMOCK_ANY delegate:OCMOCK_ANY]) - .andDo(^(NSInvocation *invocation) { + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; + mockOutput.capturePhotoWithSettingsStub = + ^(id settings, id photoDelegate) { FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(filePath, nil); }); - }); + }; cam.capturePhotoOutput = mockOutput; // `FLTCam::captureToFile` runs on capture session queue. 
@@ -222,6 +231,5 @@ - (void)testCaptureToFile_handlesTorchMode { }]; }); [self waitForExpectationsWithTimeout:1 handler:nil]; - OCMVerifyAll(captureDeviceMock); } @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m index b6b78f2dab28..95a7347bce62 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m @@ -8,32 +8,48 @@ #endif @import AVFoundation; @import XCTest; -#import #import "CameraTestUtils.h" +#import "MockAssetWriter.h" +#import "MockCaptureConnection.h" /// Includes test cases related to sample buffer handling for FLTCam class. @interface FLTCamSampleBufferTests : XCTestCase - +@property(readonly, nonatomic) dispatch_queue_t captureSessionQueue; +@property(readonly, nonatomic) FLTCam *camera; +@property(readonly, nonatomic) MockAssetWriter *writerMock; +@property(readonly, nonatomic) MockCaptureConnection *connectionMock; @end @implementation FLTCamSampleBufferTests +- (void)setUp { + _captureSessionQueue = dispatch_queue_create("testing", NULL); + _writerMock = [[MockAssetWriter alloc] init]; + _connectionMock = [[MockCaptureConnection alloc] init]; + + _camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( + _captureSessionQueue, + [FCPPlatformMediaSettings makeWithResolutionPreset:FCPPlatformResolutionPresetMedium + framesPerSecond:nil + videoBitrate:nil + audioBitrate:nil + enableAudio:YES], + nil, nil,nil, _writerMock); +} + - (void)testSampleBufferCallbackQueueMustBeCaptureSessionQueue { - dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - XCTAssertEqual(captureSessionQueue, cam.captureVideoOutput.sampleBufferCallbackQueue, + XCTAssertEqual(_captureSessionQueue, _camera.captureVideoOutput.sampleBufferCallbackQueue, @"Sample buffer callback queue must be the capture session queue."); } - (void)testCopyPixelBuffer { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("test", NULL)); CMSampleBufferRef capturedSampleBuffer = FLTCreateTestSampleBuffer(); CVPixelBufferRef capturedPixelBuffer = CMSampleBufferGetImageBuffer(capturedSampleBuffer); // Mimic sample buffer callback when captured a new video sample - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:capturedSampleBuffer - fromConnection:OCMClassMock([AVCaptureConnection class])]; - CVPixelBufferRef deliveriedPixelBuffer = [cam copyPixelBuffer]; + fromConnection:_connectionMock]; + CVPixelBufferRef deliveriedPixelBuffer = [_camera copyPixelBuffer]; XCTAssertEqual(deliveriedPixelBuffer, capturedPixelBuffer, @"FLTCam must deliver the latest captured pixel buffer to copyPixelBuffer API."); CFRelease(capturedSampleBuffer); @@ -41,32 +57,19 @@ - (void)testCopyPixelBuffer { } - (void)testDidOutputSampleBuffer_mustNotChangeSampleBufferRetainCountAfterPauseResumeRecording { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("test", NULL)); CMSampleBufferRef sampleBuffer = FLTCreateTestSampleBuffer(); - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); - __block 
AVAssetWriterStatus status = AVAssetWriterStatusUnknown; - OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) { - status = AVAssetWriterStatusWriting; - }); - OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&status]; - }); - // Pause then resume the recording. - [cam + [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; - [cam pauseVideoRecording]; - [cam resumeVideoRecording]; + [_camera pauseVideoRecording]; + [_camera resumeVideoRecording]; - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:sampleBuffer - fromConnection:OCMClassMock([AVCaptureConnection class])]; + fromConnection:_connectionMock]; XCTAssertEqual(CFGetRetainCount(sampleBuffer), 1, @"didOutputSampleBuffer must not change the sample buffer retain count after " @"pause resume recording."); @@ -74,55 +77,35 @@ - (void)testDidOutputSampleBuffer_mustNotChangeSampleBufferRetainCountAfterPause } - (void)testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL)); CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer(); CMSampleBufferRef audioSample = FLTCreateTestAudioSampleBuffer(); - id connectionMock = OCMClassMock([AVCaptureConnection class]); - - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); - __block AVAssetWriterStatus status = AVAssetWriterStatusUnknown; - OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) { - status = AVAssetWriterStatusWriting; - }); - OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&status]; - }); - __block NSArray *writtenSamples = @[]; - - id adaptorMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]); - OCMStub([adaptorMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY - sourcePixelBufferAttributes:OCMOCK_ANY]) - .andReturn(adaptorMock); - OCMStub([adaptorMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero]) - .ignoringNonObjectArgs() - .andDo(^(NSInvocation *invocation) { - writtenSamples = [writtenSamples arrayByAddingObject:@"video"]; - }); - - id inputMock = OCMClassMock([AVAssetWriterInput class]); - OCMStub([inputMock assetWriterInputWithMediaType:OCMOCK_ANY outputSettings:OCMOCK_ANY]) - .andReturn(inputMock); - OCMStub([inputMock isReadyForMoreMediaData]).andReturn(YES); - OCMStub([inputMock appendSampleBuffer:[OCMArg anyPointer]]).andDo(^(NSInvocation *invocation) { + + MockPixelBufferAdaptor *adaptorMock = [[MockPixelBufferAdaptor alloc] init]; + adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { + writtenSamples = [writtenSamples arrayByAddingObject:@"video"]; + return YES; + }; + + MockAssetWriterInput *inputMock = [[MockAssetWriterInput alloc] init]; + inputMock.isReadyForMoreMediaData = YES; + inputMock.appendSampleBufferStub = ^BOOL(CMSampleBufferRef buffer) { writtenSamples = [writtenSamples arrayByAddingObject:@"audio"]; - }); + return YES; + }; - [cam + [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; - [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock]; - [cam captureOutput:nil 
didOutputSampleBuffer:audioSample fromConnection:connectionMock]; - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:_connectionMock]; + [_camera captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:_connectionMock]; + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; - [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock]; + fromConnection:_connectionMock]; + [_camera captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:_connectionMock]; NSArray *expectedSamples = @[ @"video", @"audio" ]; XCTAssertEqualObjects(writtenSamples, expectedSamples, @"First appended sample must be video."); @@ -132,67 +115,42 @@ - (void)testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples { } - (void)testDidOutputSampleBufferSampleTimesMustBeNumericAfterPauseResume { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL)); CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer(); CMSampleBufferRef audioSample = FLTCreateTestAudioSampleBuffer(); - id connectionMock = OCMClassMock([AVCaptureConnection class]); - - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); - __block AVAssetWriterStatus status = AVAssetWriterStatusUnknown; - OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) { - status = AVAssetWriterStatusWriting; - }); - OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&status]; - }); - __block BOOL videoAppended = NO; - id adaptorMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]); - OCMStub([adaptorMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY - sourcePixelBufferAttributes:OCMOCK_ANY]) - .andReturn(adaptorMock); - OCMStub([adaptorMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero]) - .ignoringNonObjectArgs() - .andDo(^(NSInvocation *invocation) { - CMTime presentationTime; - [invocation getArgument:&presentationTime atIndex:3]; - XCTAssert(CMTIME_IS_NUMERIC(presentationTime)); - videoAppended = YES; - }); + MockPixelBufferAdaptor *adaptorMock = [[MockPixelBufferAdaptor alloc] init]; + adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { + XCTAssert(CMTIME_IS_NUMERIC(time)); + videoAppended = YES; + return YES; + }; __block BOOL audioAppended = NO; - id inputMock = OCMClassMock([AVAssetWriterInput class]); - OCMStub([inputMock assetWriterInputWithMediaType:OCMOCK_ANY outputSettings:OCMOCK_ANY]) - .andReturn(inputMock); - OCMStub([inputMock isReadyForMoreMediaData]).andReturn(YES); - OCMStub([inputMock appendSampleBuffer:[OCMArg anyPointer]]).andDo(^(NSInvocation *invocation) { - CMSampleBufferRef sampleBuffer; - [invocation getArgument:&sampleBuffer atIndex:2]; - CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); + MockAssetWriterInput *inputMock = [[MockAssetWriterInput alloc] init]; + inputMock.isReadyForMoreMediaData = YES; + inputMock.appendSampleBufferStub = ^BOOL(CMSampleBufferRef buffer) { + CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(buffer); XCTAssert(CMTIME_IS_NUMERIC(sampleTime)); audioAppended = YES; - }); + return YES; + }; - [cam - startVideoRecordingWithCompletion:^(FlutterError 
*_Nullable error) { - } + [_camera + startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {} messengerForStreaming:nil]; - [cam pauseVideoRecording]; - [cam resumeVideoRecording]; + [_camera pauseVideoRecording]; + [_camera resumeVideoRecording]; - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; - [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock]; - [cam captureOutput:cam.captureVideoOutput + fromConnection:_connectionMock]; + [_camera captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:_connectionMock]; + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; - [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock]; + fromConnection:_connectionMock]; + [_camera captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:_connectionMock]; XCTAssert(videoAppended && audioAppended, @"Video or audio was not appended."); CFRelease(videoSample); @@ -200,139 +158,96 @@ - (void)testDidOutputSampleBufferSampleTimesMustBeNumericAfterPauseResume { } - (void)testDidOutputSampleBufferMustNotAppendSampleWhenReadyForMoreMediaDataIsNo { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL)); CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer(); - id connectionMock = OCMClassMock([AVCaptureConnection class]); - - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); - __block BOOL sampleAppended = NO; - id adaptorMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]); - OCMStub([adaptorMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY - sourcePixelBufferAttributes:OCMOCK_ANY]) - .andReturn(adaptorMock); - OCMStub([adaptorMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero]) - .ignoringNonObjectArgs() - .andDo(^(NSInvocation *invocation) { - sampleAppended = YES; - }); + MockPixelBufferAdaptor *adaptorMock = [[MockPixelBufferAdaptor alloc] init]; + adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { + sampleAppended = YES; + return YES; + }; __block BOOL readyForMoreMediaData = NO; - id inputMock = OCMClassMock([AVAssetWriterInput class]); - OCMStub([inputMock assetWriterInputWithMediaType:OCMOCK_ANY outputSettings:OCMOCK_ANY]) - .andReturn(inputMock); - OCMStub([inputMock isReadyForMoreMediaData]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&readyForMoreMediaData]; - }); - - [cam + MockAssetWriterInput *inputMock = [[MockAssetWriterInput alloc] init]; + inputMock.isReadyForMoreMediaData = readyForMoreMediaData; + + [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; readyForMoreMediaData = YES; sampleAppended = NO; - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; + fromConnection:_connectionMock]; XCTAssertTrue(sampleAppended, @"Sample was not appended."); readyForMoreMediaData = NO; sampleAppended = NO; - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - 
fromConnection:connectionMock]; + fromConnection:_connectionMock]; XCTAssertFalse(sampleAppended, @"Sample cannot be appended when readyForMoreMediaData is NO."); CFRelease(videoSample); } - (void)testStopVideoRecordingWithCompletionMustCallCompletion { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL)); - - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); - __block AVAssetWriterStatus status = AVAssetWriterStatusUnknown; - OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) { - status = AVAssetWriterStatusWriting; - }); - OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) { - [invocation setReturnValue:&status]; - }); - - OCMStub([writerMock finishWritingWithCompletionHandler:[OCMArg checkWithBlock:^(id param) { - XCTAssert(status == AVAssetWriterStatusWriting, - @"Cannot call finishWritingWithCompletionHandler when status is " - @"not AVAssetWriterStatusWriting."); - void (^handler)(void) = param; - handler(); - return YES; - }]]); - - [cam + __weak MockAssetWriter *weakWriter = _writerMock; + _writerMock.finishWritingStub = ^(void (^param)(void)) { + XCTAssert(weakWriter.status == AVAssetWriterStatusWriting, + @"Cannot call finishWritingWithCompletionHandler when status is " + @"not AVAssetWriterStatusWriting."); + void (^handler)(void) = param; + handler(); + }; + + [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; __block BOOL completionCalled = NO; - [cam stopVideoRecordingWithCompletion:^(NSString *_Nullable path, FlutterError *_Nullable error) { + [_camera stopVideoRecordingWithCompletion:^(NSString *_Nullable path, FlutterError *_Nullable error) { completionCalled = YES; }]; XCTAssert(completionCalled, @"Completion was not called."); } - (void)testStartWritingShouldNotBeCalledBetweenSampleCreationAndAppending { - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL)); CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer(); - id connectionMock = OCMClassMock([AVCaptureConnection class]); - - id writerMock = OCMClassMock([AVAssetWriter class]); - OCMStub([writerMock alloc]).andReturn(writerMock); - OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]]) - .andReturn(writerMock); __block BOOL startWritingCalled = NO; - OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) { + _writerMock.startWritingStub = ^{ startWritingCalled = YES; - }); + }; __block BOOL videoAppended = NO; - id adaptorMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]); - OCMStub([adaptorMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY - sourcePixelBufferAttributes:OCMOCK_ANY]) - .andReturn(adaptorMock); - OCMStub([adaptorMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero]) - .ignoringNonObjectArgs() - .andDo(^(NSInvocation *invocation) { - videoAppended = YES; - }); - - id inputMock = OCMClassMock([AVAssetWriterInput class]); - OCMStub([inputMock assetWriterInputWithMediaType:OCMOCK_ANY outputSettings:OCMOCK_ANY]) - .andReturn(inputMock); - OCMStub([inputMock isReadyForMoreMediaData]).andReturn(YES); - - [cam + MockPixelBufferAdaptor *adaptorMock = [[MockPixelBufferAdaptor alloc] init]; + adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) 
{ + videoAppended = YES; + return YES; + }; + + MockAssetWriterInput *inputMock = [[MockAssetWriterInput alloc] init]; + inputMock.isReadyForMoreMediaData = YES; + + [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; BOOL startWritingCalledBefore = startWritingCalled; - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; + fromConnection:_connectionMock]; XCTAssert((startWritingCalledBefore && videoAppended) || (startWritingCalled && !videoAppended), @"The startWriting was called between sample creation and appending."); - [cam captureOutput:cam.captureVideoOutput + [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample - fromConnection:connectionMock]; + fromConnection:_connectionMock]; XCTAssert(videoAppended, @"Video was not appended."); CFRelease(videoSample); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m index 2b81df5eb03b..a2490ba0d774 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCameraPermissionManagerTests.m @@ -10,8 +10,8 @@ @import XCTest; #import "CameraTestUtils.h" -#import "FLTPermissionService.h" #import "FLTCameraPermissionManager.h" +#import "FLTPermissionService.h" @interface MockPermissionService : NSObject @property(nonatomic, copy) AVAuthorizationStatus (^authorizationStatusStub)(AVMediaType mediaType); @@ -20,29 +20,29 @@ @interface MockPermissionService : NSObject @implementation MockPermissionService - (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType { - return self.authorizationStatusStub ? self.authorizationStatusStub(mediaType) : AVAuthorizationStatusNotDetermined; + return self.authorizationStatusStub ? 
self.authorizationStatusStub(mediaType) + : AVAuthorizationStatusNotDetermined; } -- (void)requestAccessForMediaType:(AVMediaType)mediaType - completionHandler:(void (^)(BOOL))handler { - if (self.requestAccessStub) { - self.requestAccessStub(mediaType, handler); - } +- (void)requestAccessForMediaType:(AVMediaType)mediaType completionHandler:(void (^)(BOOL))handler { + if (self.requestAccessStub) { + self.requestAccessStub(mediaType, handler); + } } @end @interface FLTCameraPermissionManagerTests : XCTestCase -@property (nonatomic, strong) FLTCameraPermissionManager *permissionManager; -@property (nonatomic, strong) MockPermissionService *mockService; +@property(nonatomic, strong) FLTCameraPermissionManager *permissionManager; +@property(nonatomic, strong) MockPermissionService *mockService; @end @implementation FLTCameraPermissionManagerTests - (void)setUp { - [super setUp]; - self.mockService = [[MockPermissionService alloc] init]; - self.permissionManager = [[FLTCameraPermissionManager alloc] - initWithPermissionService:self.mockService]; + [super setUp]; + self.mockService = [[MockPermissionService alloc] init]; + self.permissionManager = + [[FLTCameraPermissionManager alloc] initWithPermissionService:self.mockService]; } #pragma mark - camera permissions @@ -53,10 +53,10 @@ - (void)testRequestCameraPermission_completeWithoutErrorIfPreviouslyAuthorized { @"Must copmlete without error if camera access was previously authorized."]; self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { - XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); - return AVAuthorizationStatusAuthorized; + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusAuthorized; }; - + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { [expectation fulfill]; @@ -75,10 +75,10 @@ - (void)testRequestCameraPermission_completeWithErrorIfPreviouslyDenied { details:nil]; self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { - XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); - return AVAuthorizationStatusDenied; + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusDenied; }; - + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; @@ -98,7 +98,7 @@ - (void)testRequestCameraPermission_completeWithErrorIfRestricted { XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); return AVAuthorizationStatusRestricted; }; - + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; @@ -111,23 +111,22 @@ - (void)testRequestCameraPermission_completeWithoutErrorIfUserGrantAccess { XCTestExpectation *grantedExpectation = [self expectationWithDescription:@"Must complete without error if user choose to grant access"]; - self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { - XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); - return AVAuthorizationStatusNotDetermined; + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusNotDetermined; }; - + // Mimic user choosing "allow" in permission dialog. 
self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { - XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); - handler(YES); + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + handler(YES); }; [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { - if (error == nil) { - [grantedExpectation fulfill]; - } - }]; + if (error == nil) { + [grantedExpectation fulfill]; + } + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -140,21 +139,21 @@ - (void)testRequestCameraPermission_completeWithErrorIfUserDenyAccess { details:nil]; self.mockService.authorizationStatusStub = ^AVAuthorizationStatus(AVMediaType mediaType) { - XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); - return AVAuthorizationStatusNotDetermined; + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + return AVAuthorizationStatusNotDetermined; }; - + // Mimic user choosing "allow" in permission dialog. self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { - XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); - handler(NO); + XCTAssertEqualObjects(mediaType, AVMediaTypeVideo); + handler(NO); }; - + [self.permissionManager requestCameraPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; } - }]; + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -170,7 +169,7 @@ - (void)testRequestAudioPermission_completeWithoutErrorIfPrevoiuslyAuthorized { XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); return AVAuthorizationStatusAuthorized; }; - + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if (error == nil) { [expectation fulfill]; @@ -193,7 +192,7 @@ - (void)testRequestAudioPermission_completeWithErrorIfPreviouslyDenied { XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); return AVAuthorizationStatusDenied; }; - + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; @@ -213,7 +212,7 @@ - (void)testRequestAudioPermission_completeWithErrorIfRestricted { XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); return AVAuthorizationStatusRestricted; }; - + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; @@ -230,7 +229,7 @@ - (void)testRequestAudioPermission_completeWithoutErrorIfUserGrantAccess { XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); return AVAuthorizationStatusNotDetermined; }; - + // Mimic user choosing "allow" in permission dialog. self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); @@ -256,13 +255,13 @@ - (void)testRequestAudioPermission_completeWithErrorIfUserDenyAccess { XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); return AVAuthorizationStatusNotDetermined; }; - + // Mimic user choosing "deny" in permission dialog. 
self.mockService.requestAccessStub = ^(AVMediaType mediaType, void (^handler)(BOOL)) { XCTAssertEqualObjects(mediaType, AVMediaTypeAudio); handler(NO); }; - + [self.permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { if ([error isEqual:expectedError]) { [expectation fulfill]; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m index ccd0cd6c7195..413f31b69bfe 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTSavePhotoDelegateTests.m @@ -57,9 +57,7 @@ - (void)testHandlePhotoCaptureResult_mustCompleteWithErrorIfFailedToWrite { }]; MockPhotoData *mockData = [[MockPhotoData alloc] init]; - mockData.writeToFileStub = ^BOOL(NSString *path, - NSDataWritingOptions options, - NSError **error) { + mockData.writeToFileStub = ^BOOL(NSString *path, NSDataWritingOptions options, NSError **error) { *error = ioError; return NO; }; @@ -86,11 +84,8 @@ - (void)testHandlePhotoCaptureResult_mustCompleteWithFilePathIfSuccessToWrite { [completionExpectation fulfill]; }]; - MockPhotoData *mockData = [[MockPhotoData alloc] init]; - mockData.writeToFileStub = ^BOOL(NSString *path, - NSDataWritingOptions options, - NSError **error) { + mockData.writeToFileStub = ^BOOL(NSString *path, NSDataWritingOptions options, NSError **error) { return YES; }; @@ -112,18 +107,15 @@ - (void)testHandlePhotoCaptureResult_bothProvideDataAndSaveFileMustRunOnIOQueue dispatch_queue_t ioQueue = dispatch_queue_create("test", NULL); const char *ioQueueSpecific = "io_queue_specific"; dispatch_queue_set_specific(ioQueue, ioQueueSpecific, (void *)ioQueueSpecific, NULL); - + MockPhotoData *mockData = [[MockPhotoData alloc] init]; - mockData.writeToFileStub = ^BOOL(NSString *path, - NSDataWritingOptions options, - NSError **error) { + mockData.writeToFileStub = ^BOOL(NSString *path, NSDataWritingOptions options, NSError **error) { if (dispatch_get_specific(ioQueueSpecific)) { [writeFileQueueExpectation fulfill]; } return YES; }; - NSString *filePath = @"test"; FLTSavePhotoDelegate *delegate = [[FLTSavePhotoDelegate alloc] initWithPath:filePath diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.h new file mode 100644 index 000000000000..1184ad805583 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.h @@ -0,0 +1,24 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera_avfoundation; +@import AVFoundation; + +@interface MockAssetWriter : NSObject +@property(nonatomic, assign) AVAssetWriterStatus status; +@property(nonatomic, copy) void (^getStatusStub)(void); +@property(nonatomic, copy) void (^startWritingStub)(void); +@property(nonatomic, copy) void (^finishWritingStub)(void (^)(void)); +@property(nonatomic, strong) NSError *error; +@end + +@interface MockAssetWriterInput : NSObject +@property(nonatomic, assign) BOOL isReadyForMoreMediaData; +@property(nonatomic, assign) BOOL expectsMediaDataInRealTime; +@property(nonatomic, copy) BOOL (^appendSampleBufferStub)(CMSampleBufferRef); +@end + +@interface MockPixelBufferAdaptor : NSObject +@property(nonatomic, copy) BOOL (^appendPixelBufferStub)(CVPixelBufferRef, CMTime); +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.m new file mode 100644 index 000000000000..c12b8631b920 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.m @@ -0,0 +1,50 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "MockAssetWriter.h" + +@implementation MockAssetWriter +- (BOOL)startWriting { + if (self.startWritingStub) { + self.startWritingStub(); + } + self.status = AVAssetWriterStatusWriting; + return YES; +} + +- (void)finishWritingWithCompletionHandler:(void (^)(void))handler { + if (self.finishWritingStub) { + self.finishWritingStub(handler); + } else if (handler) { + handler(); + } +} + +- (void)startSessionAtSourceTime:(CMTime)startTime { +} + +- (void)addInput:(nonnull AVAssetWriterInput *)input { + +} + + +@end + +@implementation MockAssetWriterInput +- (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer { + if (self.appendSampleBufferStub) { + return self.appendSampleBufferStub(sampleBuffer); + } + return YES; +} +@end + +@implementation MockPixelBufferAdaptor +- (BOOL)appendPixelBuffer:(nonnull CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime { + if (self.appendPixelBufferStub) { + return self.appendPixelBufferStub(pixelBuffer, presentationTime); + } + return YES; +} +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.h index 928018c35d18..88dfaf914b93 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.h @@ -8,8 +8,10 @@ NS_ASSUME_NONNULL_BEGIN @interface MockCameraDeviceDiscovery : NSObject -@property(nonatomic, copy) NSArray> *_Nullable (^discoverySessionStub) - (NSArray *deviceTypes, AVMediaType mediaType, AVCaptureDevicePosition position); +@property(nonatomic, copy) + NSArray> *_Nullable (^discoverySessionStub) + (NSArray *deviceTypes, AVMediaType mediaType, + AVCaptureDevicePosition position); @end NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.m index 71e3ed0f06db..19322e3e0f1a 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.m +++ 
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.m @@ -6,13 +6,14 @@ @implementation MockCameraDeviceDiscovery -- (NSArray> *)discoverySessionWithDeviceTypes:(NSArray *)deviceTypes - mediaType:(AVMediaType)mediaType - position:(AVCaptureDevicePosition)position { - if (self.discoverySessionStub) { - return self.discoverySessionStub(deviceTypes, mediaType, position); - } - return @[]; +- (NSArray> *) + discoverySessionWithDeviceTypes:(NSArray *)deviceTypes + mediaType:(AVMediaType)mediaType + position:(AVCaptureDevicePosition)position { + if (self.discoverySessionStub) { + return self.discoverySessionStub(deviceTypes, mediaType, position); + } + return @[]; } @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.h new file mode 100644 index 000000000000..7349093533da --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.h @@ -0,0 +1,18 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@interface MockCaptureConnection : NSObject +@property(nonatomic, assign) BOOL videoMirrored; +@property(nonatomic, assign) AVCaptureVideoOrientation videoOrientation; +@property(nonatomic, strong) NSArray *inputPorts; +@property(nonatomic, assign) BOOL isVideoMirroringSupported; +@property(nonatomic, assign) BOOL isVideoOrientationSupported; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.m new file mode 100644 index 000000000000..dc8d423c8e21 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.m @@ -0,0 +1,19 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "MockCaptureConnection.h" + +@implementation MockCaptureConnection { + NSArray *_inputPorts; +} + +- (NSArray *)inputPorts { + return _inputPorts; +} + +- (void)setInputPorts:(NSArray *)inputPorts { + _inputPorts = [inputPorts copy]; +} + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h index 954323638b81..9d6ada2acecb 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h @@ -8,15 +8,15 @@ NS_ASSUME_NONNULL_BEGIN @interface MockCaptureDeviceController : NSObject -@property(nonatomic, assign) NSString* uniqueID; +@property(nonatomic, assign) NSString *uniqueID; // Position/Orientation @property(nonatomic, assign) AVCaptureDevicePosition position; // Format/Configuration -@property(nonatomic, strong) AVCaptureDeviceFormat *activeFormat; -@property(nonatomic, strong) NSArray *formats; -@property(nonatomic, copy) void (^setActiveFormatStub)(AVCaptureDeviceFormat *format); +@property(nonatomic, strong) id activeFormat; +@property(nonatomic, strong) NSArray> *formats; +@property(nonatomic, copy) void (^setActiveFormatStub)(id format); // Flash/Torch @property(nonatomic, assign) BOOL hasFlash; @@ -43,7 +43,8 @@ NS_ASSUME_NONNULL_BEGIN @property(nonatomic, copy) void (^setExposurePointOfInterestStub)(CGPoint point); @property(nonatomic, assign) float minExposureTargetBias; @property(nonatomic, assign) float maxExposureTargetBias; -@property(nonatomic, copy) void (^setExposureTargetBiasStub)(float bias, void (^_Nullable handler)(CMTime)); +@property(nonatomic, copy) void (^setExposureTargetBiasStub) + (float bias, void (^_Nullable handler)(CMTime)); // Zoom @property(nonatomic, assign) float maxAvailableVideoZoomFactor; @@ -73,4 +74,11 @@ NS_ASSUME_NONNULL_BEGIN @end +@interface MockCaptureDeviceFormat : NSObject +@property(nonatomic, strong) NSArray *videoSupportedFrameRateRanges; +@property(nonatomic, assign) CMFormatDescriptionRef formatDescription; + +- (instancetype)initWithDimensions:(CMVideoDimensions)dimensions; +@end + NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m index 7823ba87140e..ca855a2c9e68 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m @@ -8,22 +8,22 @@ #import "MockCaptureDeviceController.h" @implementation MockCaptureDeviceController -- (void)setActiveFormat:(AVCaptureDeviceFormat *)format { - _activeFormat = format; - if (self.setActiveFormatStub) { - self.setActiveFormatStub(format); - } +- (void)setActiveFormat:(id)format { + _activeFormat = format; + if (self.setActiveFormatStub) { + self.setActiveFormatStub(format); + } } - (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode { - return self.flashModeSupported; + return self.flashModeSupported; } - (void)setTorchMode:(AVCaptureTorchMode)mode { - _torchMode = mode; - if (self.setTorchModeStub) { - self.setTorchModeStub(mode); - } + _torchMode = mode; + if (self.setTorchModeStub) { + self.setTorchModeStub(mode); + } } - (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode { @@ -34,91 +34,111 @@ - 
(BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode { } - (void)setFocusMode:(AVCaptureFocusMode)mode { - _focusMode = mode; - if (self.setFocusModeStub) { - self.setFocusModeStub(mode); - } + _focusMode = mode; + if (self.setFocusModeStub) { + self.setFocusModeStub(mode); + } } - (void)setFocusPointOfInterest:(CGPoint)point { - _focusPointOfInterest = point; - if (self.setFocusPointOfInterestStub) { - self.setFocusPointOfInterestStub(point); - } + _focusPointOfInterest = point; + if (self.setFocusPointOfInterestStub) { + self.setFocusPointOfInterestStub(point); + } } - (void)setExposureMode:(AVCaptureExposureMode)mode { - _exposureMode = mode; - if (self.setExposureModeStub) { - self.setExposureModeStub(mode); - } + _exposureMode = mode; + if (self.setExposureModeStub) { + self.setExposureModeStub(mode); + } } - (void)setExposurePointOfInterest:(CGPoint)point { - _exposurePointOfInterest = point; - if (self.setExposurePointOfInterestStub) { - self.setExposurePointOfInterestStub(point); - } + _exposurePointOfInterest = point; + if (self.setExposurePointOfInterestStub) { + self.setExposurePointOfInterestStub(point); + } } - (void)setExposureTargetBias:(float)bias completionHandler:(void (^)(CMTime))handler { - if (self.setExposureTargetBiasStub) { - self.setExposureTargetBiasStub(bias, handler); - } else if (handler) { - handler(kCMTimeZero); - } + if (self.setExposureTargetBiasStub) { + self.setExposureTargetBiasStub(bias, handler); + } else if (handler) { + handler(kCMTimeZero); + } } - (void)setVideoZoomFactor:(float)factor { - _videoZoomFactor = factor; - if (self.setVideoZoomFactorStub) { - self.setVideoZoomFactorStub(factor); - } + _videoZoomFactor = factor; + if (self.setVideoZoomFactorStub) { + self.setVideoZoomFactorStub(factor); + } } - (BOOL)lockForConfiguration:(NSError **)error { - if (self.lockForConfigurationStub) { - self.lockForConfigurationStub(error); - return !self.shouldFailConfiguration; - } - if (self.shouldFailConfiguration) { - if (error) { - *error = [NSError errorWithDomain:@"test" code:0 userInfo:nil]; - } - return NO; + if (self.lockForConfigurationStub) { + self.lockForConfigurationStub(error); + return !self.shouldFailConfiguration; + } + if (self.shouldFailConfiguration) { + if (error) { + *error = [NSError errorWithDomain:@"test" code:0 userInfo:nil]; } - return YES; + return NO; + } + return YES; } - (void)unlockForConfiguration { - if (self.unlockForConfigurationStub) { - self.unlockForConfigurationStub(); - } + if (self.unlockForConfigurationStub) { + self.unlockForConfigurationStub(); + } } - (void)setActiveVideoMinFrameDuration:(CMTime)duration { - _activeVideoMinFrameDuration = duration; - if (self.setActiveVideoMinFrameDurationStub) { - self.setActiveVideoMinFrameDurationStub(duration); - } + _activeVideoMinFrameDuration = duration; + if (self.setActiveVideoMinFrameDurationStub) { + self.setActiveVideoMinFrameDurationStub(duration); + } } - (void)setActiveVideoMaxFrameDuration:(CMTime)duration { - _activeVideoMaxFrameDuration = duration; - if (self.setActiveVideoMaxFrameDurationStub) { - self.setActiveVideoMaxFrameDurationStub(duration); - } + _activeVideoMaxFrameDuration = duration; + if (self.setActiveVideoMaxFrameDurationStub) { + self.setActiveVideoMaxFrameDurationStub(duration); + } } - (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode { - return self.exposureModeSupported; + return self.exposureModeSupported; } - (AVCaptureInput *)createInput:(NSError *_Nullable *_Nullable)error { - if (self.createInputStub) { - 
self.createInputStub(error); - } - return self.inputToReturn; + if (self.createInputStub) { + self.createInputStub(error); + } + return self.inputToReturn; } @end + +@implementation MockCaptureDeviceFormat +- (void)dealloc { + if (_formatDescription) { + CFRelease(_formatDescription); + } +} + +- (instancetype)initWithDimensions:(CMVideoDimensions)dimensions { + self = [super init]; + if (self) { + CMVideoFormatDescriptionCreate(kCFAllocatorDefault, kCVPixelFormatType_32BGRA, dimensions.width, + dimensions.height, NULL, &_formatDescription); + } + return self; +} + +@synthesize format; + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.h new file mode 100644 index 000000000000..fda1904097d4 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.h @@ -0,0 +1,12 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +@import AVFoundation; + +@interface MockCapturePhotoOutput : NSObject +@property(nonatomic, copy) void (^capturePhotoWithSettingsStub) + (id, id); +@property(nonatomic, strong) NSArray *availablePhotoCodecTypes; +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.m new file mode 100644 index 000000000000..617e262a1688 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.m @@ -0,0 +1,14 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "MockCapturePhotoOutput.h" + +@implementation MockCapturePhotoOutput +- (void)capturePhotoWithSettings:(id)settings + delegate:(id)delegate { + if (self.capturePhotoWithSettingsStub) { + self.capturePhotoWithSettingsStub(settings, delegate); + } +} +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.h new file mode 100644 index 000000000000..5a13da24edb2 --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.h @@ -0,0 +1,13 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +@import AVFoundation; + +@interface MockCapturePhotoSettings : NSObject +@property(nonatomic, assign) int64_t uniqueID; +@property(nonatomic, copy) NSDictionary *format; +@property(nonatomic, assign) AVCaptureFlashMode flashMode; +@property(nonatomic, assign) BOOL highResolutionPhotoEnabled; +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.m new file mode 100644 index 000000000000..10d2daf45a7b --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.m @@ -0,0 +1,9 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "MockCapturePhotoSettings.h" + +@implementation MockCapturePhotoSettings + +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.h index fce650515b6c..fc2af24098cd 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.h @@ -8,16 +8,16 @@ NS_ASSUME_NONNULL_BEGIN @interface MockCaptureSession : NSObject - @property(nonatomic, copy) void (^beginConfigurationStub)(void); - @property(nonatomic, copy) void (^commitConfigurationStub)(void); - @property(nonatomic, copy) void (^startRunningStub)(void); - @property(nonatomic, copy) void (^stopRunningStub)(void); - @property(nonatomic, copy) void (^setSessionPresetStub)(AVCaptureSessionPreset preset); +@property(nonatomic, copy) void (^beginConfigurationStub)(void); +@property(nonatomic, copy) void (^commitConfigurationStub)(void); +@property(nonatomic, copy) void (^startRunningStub)(void); +@property(nonatomic, copy) void (^stopRunningStub)(void); +@property(nonatomic, copy) void (^setSessionPresetStub)(AVCaptureSessionPreset preset); - @property(nonatomic, strong) NSMutableArray *inputs; - @property(nonatomic, strong) NSMutableArray *outputs; - @property(nonatomic, assign) BOOL mockCanSetSessionPreset; - @property(nonatomic, copy) AVCaptureSessionPreset sessionPreset; +@property(nonatomic, strong) NSMutableArray *inputs; +@property(nonatomic, strong) NSMutableArray *outputs; +@property(nonatomic, assign) BOOL mockCanSetSessionPreset; +@property(nonatomic, copy) AVCaptureSessionPreset sessionPreset; @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m index 3c09b1d07c50..9adb54e90d6c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m @@ -7,18 +7,18 @@ @implementation MockCaptureSession - (instancetype)init { - self = [super init]; - if (self) { - _inputs = [NSMutableArray array]; - _outputs = [NSMutableArray array]; - } - return self; + self = [super init]; + if (self) { + _inputs = [NSMutableArray array]; + _outputs = [NSMutableArray array]; + } + return self; } - (void)beginConfiguration { if (self.beginConfigurationStub) { self.beginConfigurationStub(); - } + } } - (void)commitConfiguration { @@ -30,7 +30,7 @@ - (void)commitConfiguration { - (void)startRunning { if (self.startRunningStub) { self.startRunningStub(); - } + } } - (void)stopRunning { @@ -43,56 +43,43 @@ - (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset { return self.mockCanSetSessionPreset; } -- (void)addConnection:(nonnull AVCaptureConnection *)connection { - +- (void)addConnection:(nonnull AVCaptureConnection *)connection { } - -- (void)addInput:(nonnull AVCaptureInput *)input { - +- (void)addInput:(nonnull AVCaptureInput *)input { } - - (void)addInputWithNoConnections:(nonnull AVCaptureInput *)input { } - - (void)addOutput:(nonnull AVCaptureOutput *)output { } - -- (void)addOutputWithNoConnections:(nonnull AVCaptureOutput *)output { +- (void)addOutputWithNoConnections:(nonnull AVCaptureOutput *)output { } - -- (BOOL)canAddConnection:(nonnull 
AVCaptureConnection *)connection { +- (BOOL)canAddConnection:(nonnull AVCaptureConnection *)connection { return YES; } - -- (BOOL)canAddInput:(nonnull AVCaptureInput *)input { +- (BOOL)canAddInput:(nonnull AVCaptureInput *)input { return YES; } - -- (BOOL)canAddOutput:(nonnull AVCaptureOutput *)output { +- (BOOL)canAddOutput:(nonnull AVCaptureOutput *)output { return YES; } - -- (void)removeInput:(nonnull AVCaptureInput *)input { - +- (void)removeInput:(nonnull AVCaptureInput *)input { } - -- (void)removeOutput:(nonnull AVCaptureOutput *)output { - +- (void)removeOutput:(nonnull AVCaptureOutput *)output { } - (void)setSessionPreset:(AVCaptureSessionPreset)sessionPreset { if (_setSessionPresetStub) { _setSessionPresetStub(sessionPreset); - } + } } @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.h index f810d218d5c9..f698b81bfca1 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.h @@ -6,8 +6,6 @@ @import AVFoundation; @interface MockPhotoData : NSObject -@property(nonatomic, copy) BOOL (^writeToFileStub)(NSString *path, - NSDataWritingOptions options, - NSError **error); +@property(nonatomic, copy) BOOL (^writeToFileStub) + (NSString *path, NSDataWritingOptions options, NSError **error); @end - diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.m index b51909d76755..f15512c6f2ea 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.m @@ -7,8 +7,8 @@ @implementation MockPhotoData - (BOOL)writeToFile:(NSString *)path - options:(NSDataWritingOptions)writeOptionsMask - error:(NSError **)errorPtr { + options:(NSDataWritingOptions)writeOptionsMask + error:(NSError **)errorPtr { if (self.writeToFileStub) { return _writeToFileStub(path, writeOptionsMask, errorPtr); } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m index 53c7c1da2c70..b0c504181d69 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m @@ -8,20 +8,67 @@ #endif @import XCTest; @import AVFoundation; -#import #import "CameraTestUtils.h" +@interface MockImageStreamHandler : FLTImageStreamHandler +@property(nonatomic, copy) void (^onEventSinkCalled)(id event); +@end + +@implementation MockImageStreamHandler + +- (FlutterEventSink)eventSink { + if (self.onEventSinkCalled) { + return ^(id event) { + self.onEventSinkCalled(event); + }; + } + return nil; +} + +@end + +@interface MockFlutterBinaryMessenger : NSObject +@end + +@implementation MockFlutterBinaryMessenger +- (void)sendOnChannel:(NSString *)channel message:(NSData *)message { +} + +- (void)sendOnChannel:(NSString *)channel + message:(NSData *)message + binaryReply:(FlutterBinaryReply)callback { +} + +- (FlutterBinaryMessengerConnection)setMessageHandlerOnChannel:(NSString *)channel + binaryMessageHandler: + (FlutterBinaryMessageHandler)handler { + return 0; +} + +- (void)cleanUpConnection:(FlutterBinaryMessengerConnection)connection { +} + +- 
(void)cleanupConnection:(FlutterBinaryMessengerConnection)connection { +} +@end + @interface StreamingTests : XCTestCase @property(readonly, nonatomic) FLTCam *camera; @property(readonly, nonatomic) CMSampleBufferRef sampleBuffer; +@property(readonly, nonatomic) MockImageStreamHandler *mockStreamHandler; +@property(readonly, nonatomic) MockFlutterBinaryMessenger *messengerMock; + @end @implementation StreamingTests - (void)setUp { dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL); + _mockStreamHandler = + [[MockImageStreamHandler alloc] initWithCaptureSessionQueue:captureSessionQueue]; _camera = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); _sampleBuffer = FLTCreateTestSampleBuffer(); + _messengerMock = [[MockFlutterBinaryMessenger alloc] init]; } - (void)tearDown { @@ -32,13 +79,11 @@ - (void)testExceedMaxStreamingPendingFramesCount { XCTestExpectation *streamingExpectation = [self expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"]; - id handlerMock = OCMClassMock([FLTImageStreamHandler class]); - OCMStub([handlerMock eventSink]).andReturn(^(id event) { + _mockStreamHandler.onEventSinkCalled = ^(id eventSink) { [streamingExpectation fulfill]; - }); + }; - id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); - [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock]; + [_camera startImageStreamWithMessenger:_messengerMock imageStreamHandler:_mockStreamHandler]; XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages" object:_camera @@ -59,13 +104,11 @@ - (void)testReceivedImageStreamData { [self expectationWithDescription: @"Must be able to call the handler again when receivedImageStreamData is called"]; - id handlerMock = OCMClassMock([FLTImageStreamHandler class]); - OCMStub([handlerMock eventSink]).andReturn(^(id event) { + _mockStreamHandler.onEventSinkCalled = ^(id eventSink) { [streamingExpectation fulfill]; - }); + }; - id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); - [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock]; + [_camera startImageStreamWithMessenger:_messengerMock imageStreamHandler:_mockStreamHandler]; XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages" object:_camera diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m index 1f6562e2303e..dfd539ecfd3d 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/ThreadSafeEventChannelTests.m @@ -21,7 +21,7 @@ - (void)setUp { [super setUp]; _mockEventChannel = [[MockEventChannel alloc] init]; _threadSafeEventChannel = - [[FLTThreadSafeEventChannel alloc] initWithEventChannel:_mockEventChannel]; + [[FLTThreadSafeEventChannel alloc] initWithEventChannel:_mockEventChannel]; } - (void)testSetStreamHandler_shouldStayOnMainThreadIfCalledFromMainThread { @@ -30,19 +30,19 @@ - (void)testSetStreamHandler_shouldStayOnMainThreadIfCalledFromMainThread { XCTestExpectation *mainThreadCompletionExpectation = [self expectationWithDescription: @"setStreamHandler's completion block must be called on the main thread"]; - + [_mockEventChannel setSetStreamHandlerStub:^(NSObject *handler) { if (NSThread.isMainThread) { [mainThreadExpectation fulfill]; } 
}]; - + [_threadSafeEventChannel setStreamHandler:nil - completion:^{ - if (NSThread.isMainThread) { - [mainThreadCompletionExpectation fulfill]; - } - }]; + completion:^{ + if (NSThread.isMainThread) { + [mainThreadCompletionExpectation fulfill]; + } + }]; [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -52,8 +52,7 @@ - (void)testSetStreamHandler_shouldDispatchToMainThreadIfCalledFromBackgroundThr XCTestExpectation *mainThreadCompletionExpectation = [self expectationWithDescription: @"setStreamHandler's completion block must be called on the main thread"]; - - + [_mockEventChannel setSetStreamHandlerStub:^(NSObject *handler) { if (NSThread.isMainThread) { [mainThreadExpectation fulfill]; @@ -63,11 +62,11 @@ - (void)testSetStreamHandler_shouldDispatchToMainThreadIfCalledFromBackgroundThr __weak typeof(self) weakSelf = self; dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ [weakSelf.threadSafeEventChannel setStreamHandler:nil - completion:^{ - if (NSThread.isMainThread) { - [mainThreadCompletionExpectation fulfill]; - } - }]; + completion:^{ + if (NSThread.isMainThread) { + [mainThreadCompletionExpectation fulfill]; + } + }]; }); [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -75,11 +74,11 @@ - (void)testSetStreamHandler_shouldDispatchToMainThreadIfCalledFromBackgroundThr - (void)testEventChannel_shouldBeKeptAliveWhenDispatchingBackToMainThread { XCTestExpectation *expectation = [self expectationWithDescription:@"Completion should be called."]; - + __weak typeof(self) weakSelf = self; dispatch_async(dispatch_queue_create("test", NULL), ^{ - FLTThreadSafeEventChannel *channel = [[FLTThreadSafeEventChannel alloc] - initWithEventChannel:weakSelf.mockEventChannel]; + FLTThreadSafeEventChannel *channel = + [[FLTThreadSafeEventChannel alloc] initWithEventChannel:weakSelf.mockEventChannel]; [channel setStreamHandler:nil completion:^{ diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m index 0891bf9948b3..0d94c92f3814 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m @@ -8,13 +8,14 @@ @import AVFoundation; @import Flutter; -#import "./include/camera_avfoundation/FLTCameraPermissionManager.h" #import "./include/camera_avfoundation/CameraProperties.h" #import "./include/camera_avfoundation/FLTCam.h" +#import "./include/camera_avfoundation/FLTCameraPermissionManager.h" #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" +#import "./include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h" +#import "./include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h" #import "./include/camera_avfoundation/QueueUtils.h" #import "./include/camera_avfoundation/messages.g.h" -#import "./include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h" static FlutterError *FlutterErrorFromNSError(NSError *error) { return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code] @@ -42,27 +43,25 @@ + (void)registerWithRegistrar:(NSObject *)registrar { - (instancetype)initWithRegistry:(NSObject *)registry messenger:(NSObject *)messenger { - return - [self initWithRegistry:registry - messenger:messenger - globalAPI:[[FCPCameraGlobalEventApi alloc] 
initWithBinaryMessenger:messenger] - deviceDiscovery:[[FLTDefaultCameraDeviceDiscovery alloc] init] - sessionFactory:^id(void) { - return [[FLTDefaultCaptureSession alloc] init]; - } - deviceFactory:^id(NSString *name) { + return [self initWithRegistry:registry + messenger:messenger + globalAPI:[[FCPCameraGlobalEventApi alloc] initWithBinaryMessenger:messenger] + deviceDiscovery:[[FLTDefaultCameraDeviceDiscovery alloc] init] + sessionFactory:^id(void) { + return [[FLTDefaultCaptureSession alloc] init]; + } + deviceFactory:^id(NSString *name) { AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:name]; return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:device]; - } - ]; + }]; } - (instancetype)initWithRegistry:(NSObject *)registry messenger:(NSObject *)messenger globalAPI:(FCPCameraGlobalEventApi *)globalAPI deviceDiscovery:(id)deviceDiscovery - sessionFactory:(CaptureSessionFactory)captureSessionFactory - deviceFactory:(CaptureNamedDeviceFactory)deviceFactory{ + sessionFactory:(CaptureSessionFactory)captureSessionFactory + deviceFactory:(CaptureNamedDeviceFactory)deviceFactory { self = [super init]; NSAssert(self, @"super init cannot be nil"); _registry = registry; @@ -72,10 +71,11 @@ - (instancetype)initWithRegistry:(NSObject *)registry _deviceDiscovery = deviceDiscovery; _captureSessionFactory = captureSessionFactory; _captureDeviceFactory = deviceFactory; - + id permissionService = [[FLTDefaultPermissionService alloc] init]; - _permissionManager = [[FLTCameraPermissionManager alloc] initWithPermissionService:permissionService]; - + _permissionManager = + [[FLTCameraPermissionManager alloc] initWithPermissionService:permissionService]; + dispatch_queue_set_specific(_captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); @@ -137,11 +137,12 @@ - (void)availableCamerasWithCompletion: if (@available(iOS 13.0, *)) { [discoveryDevices addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera]; } - - NSArray> *devices = [self.deviceDiscovery discoverySessionWithDeviceTypes:discoveryDevices - mediaType:AVMediaTypeVideo - position:AVCaptureDevicePositionUnspecified]; - + + NSArray> *devices = + [self.deviceDiscovery discoverySessionWithDeviceTypes:discoveryDevices + mediaType:AVMediaTypeVideo + position:AVCaptureDevicePositionUnspecified]; + NSMutableArray *reply = [[NSMutableArray alloc] initWithCapacity:devices.count]; for (AVCaptureDevice *device in devices) { @@ -183,18 +184,19 @@ - (void)createCameraWithName:(nonnull NSString *)cameraName // optional, and used as a workaround to fix a missing frame issue on iOS. if (settings.enableAudio) { // Setup audio capture session only if granted audio access. 
- [self->_permissionManager requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { - // cannot use the outter `strongSelf` - typeof(self) strongSelf = weakSelf; - if (!strongSelf) return; - if (error) { - completion(nil, error); - } else { - [strongSelf createCameraOnSessionQueueWithName:cameraName - settings:settings - completion:completion]; - } - }]; + [self->_permissionManager + requestAudioPermissionWithCompletionHandler:^(FlutterError *error) { + // cannot use the outer `strongSelf` + typeof(self) strongSelf = weakSelf; + if (!strongSelf) return; + if (error) { + completion(nil, error); + } else { + [strongSelf createCameraOnSessionQueueWithName:cameraName + settings:settings + completion:completion]; + } + }]; } else { [strongSelf createCameraOnSessionQueueWithName:cameraName settings:settings @@ -497,23 +499,27 @@ - (void)sessionQueueCreateCameraWithName:(NSString *)name [[FLTCamMediaSettingsAVWrapper alloc] init]; NSError *error; - - __weak typeof(self) weakSelf = self; - FLTCam *cam = [[FLTCam alloc] - initWithMediaSettings:settings - mediaSettingsAVWrapper:mediaSettingsAVWrapper - orientation:[[UIDevice currentDevice] orientation] - videoCaptureSession:_captureSessionFactory() - audioCaptureSession:_captureSessionFactory() - captureSessionQueue:self.captureSessionQueue - captureDeviceFactory:^id _Nonnull{ - return weakSelf.captureDeviceFactory(name); - } - videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat * _Nonnull format) { - return CMVideoFormatDescriptionGetDimensions(format.formatDescription); - } - error:&error]; - + + __weak typeof(self) weakSelf = self; + FLTCam *cam = [[FLTCam alloc] initWithMediaSettings:settings + mediaSettingsAVWrapper:mediaSettingsAVWrapper + orientation:[[UIDevice currentDevice] orientation] + videoCaptureSession:_captureSessionFactory() + audioCaptureSession:_captureSessionFactory() + captureSessionQueue:self.captureSessionQueue + captureDeviceFactory:^id _Nonnull { + return weakSelf.captureDeviceFactory(name); + } + videoDimensionsForFormat:^CMVideoDimensions(id _Nonnull format) { + return CMVideoFormatDescriptionGetDimensions(format.format.formatDescription); + } + capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] + initWithPhotoOutput:[AVCapturePhotoOutput new]] + assetWriterFactory:^id _Nonnull(NSURL * _Nonnull url, AVFileType _Nonnull fileType, NSError * _Nullable __autoreleasing * _Nullable error) { + return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; + } + error:&error]; + if (error) { completion(nil, FlutterErrorFromNSError(error)); } else { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 5f60db563215..0ac9389b9e4e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -11,12 +11,14 @@ #import "./include/camera_avfoundation/FLTSavePhotoDelegate.h" #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" -#import "./include/camera_avfoundation/QueueUtils.h" -#import "./include/camera_avfoundation/messages.g.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" +#import "./include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h" +#import 
"./include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" #import "./include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h" #import "./include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h" -#import "./include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureConnection.h" +#import "./include/camera_avfoundation/QueueUtils.h" +#import "./include/camera_avfoundation/messages.g.h" static FlutterError *FlutterErrorFromNSError(NSError *error) { return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)error.code] @@ -66,7 +68,7 @@ @interface FLTCam () videoWriter; @property(strong, nonatomic) AVAssetWriterInput *videoWriterInput; @property(strong, nonatomic) AVAssetWriterInput *audioWriterInput; @property(strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferAdaptor; @@ -107,6 +109,7 @@ @interface FLTCam () deviceOrientationProvider; /// Reports the given error message to the Dart side of the plugin. /// @@ -118,51 +121,58 @@ @implementation FLTCam NSString *const errorMethod = @"error"; -- (instancetype)initWithCameraName:(NSString *)cameraName - mediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - error:(NSError **)error { - AVCaptureSession *videoSession = [[AVCaptureSession alloc] init]; - AVCaptureSession *audioSession = [[AVCaptureSession alloc] init]; - - return [self initWithCameraName:cameraName - mediaSettings:mediaSettings - mediaSettingsAVWrapper:mediaSettingsAVWrapper - orientation:orientation - videoCaptureSession:[[FLTDefaultCaptureSession alloc] initWithCaptureSession:videoSession] - audioCaptureSession:[[FLTDefaultCaptureSession alloc] initWithCaptureSession:audioSession] - captureSessionQueue:captureSessionQueue - error:error]; -} - -- (instancetype)initWithCameraName:(NSString *)cameraName - mediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(id)videoCaptureSession - audioCaptureSession:(id)audioCaptureSession - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - error:(NSError **)error { - return [self initWithMediaSettings:mediaSettings - mediaSettingsAVWrapper:mediaSettingsAVWrapper - orientation:orientation - videoCaptureSession:videoCaptureSession - audioCaptureSession:videoCaptureSession - captureSessionQueue:captureSessionQueue - captureDeviceFactory:^id (void) { - AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:cameraName]; - return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:device]; - } - videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { - return CMVideoFormatDescriptionGetDimensions(format.formatDescription); - } - error:error]; -} +//- (instancetype)initWithCameraName:(NSString *)cameraName +// mediaSettings:(FCPPlatformMediaSettings *)mediaSettings +// mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper +// orientation:(UIDeviceOrientation)orientation +// captureSessionQueue:(dispatch_queue_t)captureSessionQueue +// error:(NSError **)error { +// AVCaptureSession *videoSession = [[AVCaptureSession alloc] init]; +// AVCaptureSession *audioSession = [[AVCaptureSession alloc] init]; +// 
+// return [self +// initWithCameraName:cameraName +// mediaSettings:mediaSettings +// mediaSettingsAVWrapper:mediaSettingsAVWrapper +// orientation:orientation +// videoCaptureSession:[[FLTDefaultCaptureSession alloc] initWithCaptureSession:videoSession] +// audioCaptureSession:[[FLTDefaultCaptureSession alloc] initWithCaptureSession:audioSession] +// captureSessionQueue:captureSessionQueue +// error:error]; +//} + +//- (instancetype)initWithCameraName:(NSString *)cameraName +// mediaSettings:(FCPPlatformMediaSettings *)mediaSettings +// mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper +// orientation:(UIDeviceOrientation)orientation +// videoCaptureSession:(id)videoCaptureSession +// audioCaptureSession:(id)audioCaptureSession +// captureSessionQueue:(dispatch_queue_t)captureSessionQueue +// error:(NSError **)error { +// return [self initWithMediaSettings:mediaSettings +// mediaSettingsAVWrapper:mediaSettingsAVWrapper +// orientation:orientation +// videoCaptureSession:videoCaptureSession +// audioCaptureSession:videoCaptureSession +// captureSessionQueue:captureSessionQueue +// captureDeviceFactory:^id(void) { +// AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:cameraName]; +// return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:device]; +// } +// videoDimensionsForFormat:^CMVideoDimensions(id format) { +// return CMVideoFormatDescriptionGetDimensions(format.formatDescription); +// } +// capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] +// initWithPhotoOutput:[AVCapturePhotoOutput new]] +// assetWriterFactory:^id _Nonnull(NSURL * _Nonnull url, AVFileType _Nonnull fileType, NSError * _Nullable __autoreleasing * _Nullable error) { +// return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; +// +// } error:error +// ]; +//} // Returns frame rate supported by format closest to targetFrameRate. -static double bestFrameRateForFormat(AVCaptureDeviceFormat *format, double targetFrameRate) { +static double bestFrameRateForFormat(id format, double targetFrameRate) { double bestFrameRate = 0; double minDistance = DBL_MAX; for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { @@ -182,17 +192,17 @@ static double bestFrameRateForFormat(AVCaptureDeviceFormat *format, double targe // as activeFormat and also updates mediaSettings.framesPerSecond to value which // bestFrameRateForFormat returned for that format. 
static void selectBestFormatForRequestedFrameRate( - AVCaptureDevice *captureDevice, FCPPlatformMediaSettings *mediaSettings, + id captureDevice, FCPPlatformMediaSettings *mediaSettings, VideoDimensionsForFormat videoDimensionsForFormat) { CMVideoDimensions targetResolution = videoDimensionsForFormat(captureDevice.activeFormat); double targetFrameRate = mediaSettings.framesPerSecond.doubleValue; FourCharCode preferredSubType = CMFormatDescriptionGetMediaSubType(captureDevice.activeFormat.formatDescription); - AVCaptureDeviceFormat *bestFormat = captureDevice.activeFormat; + id bestFormat = captureDevice.activeFormat; double bestFrameRate = bestFrameRateForFormat(bestFormat, targetFrameRate); double minDistance = fabs(bestFrameRate - targetFrameRate); BOOL isBestSubTypePreferred = YES; - for (AVCaptureDeviceFormat *format in captureDevice.formats) { + for (id format in captureDevice.formats) { CMVideoDimensions resolution = videoDimensionsForFormat(format); if (resolution.width != targetResolution.width || resolution.height != targetResolution.height) { @@ -222,6 +232,8 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings captureSessionQueue:(dispatch_queue_t)captureSessionQueue captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat + capturePhotoOutput:(id)capturePhotoOutput + assetWriterFactory:(AssetWriterFactory)assetWriterFactory error:(NSError **)error { self = [super init]; NSAssert(self, @"super init cannot be nil"); @@ -245,6 +257,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings _videoFormat = kCVPixelFormatType_32BGRA; _inProgressSavePhotoDelegates = [NSMutableDictionary dictionary]; _fileFormat = FCPPlatformImageFileFormatJpeg; + _assetWriterFactory = assetWriterFactory; // To limit memory consumption, limit the number of frames pending processing. // After some testing, 4 was determined to be the best maximum value. @@ -252,7 +265,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings _maxStreamingPendingFramesCount = 4; NSError *localError = nil; - AVCaptureConnection *connection = [self createConnection:&localError]; + id connection = [self createConnection:&localError]; if (localError) { if (error != nil) { *error = localError; @@ -264,13 +277,13 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings [_videoCaptureSession addOutputWithNoConnections:_captureVideoOutput]; [_videoCaptureSession addConnection:connection]; - _capturePhotoOutput = [AVCapturePhotoOutput new]; + _capturePhotoOutput = capturePhotoOutput; [_capturePhotoOutput setHighResolutionCaptureEnabled:YES]; [_videoCaptureSession addOutput:_capturePhotoOutput]; _motionManager = [[CMMotionManager alloc] init]; [_motionManager startAccelerometerUpdates]; - + _deviceOrientationProvider = [[FLTDefaultDeviceOrientationProvider alloc] init]; if (_mediaSettings.framesPerSecond) { @@ -317,7 +330,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings return self; } -- (AVCaptureConnection *)createConnection:(NSError **)error { +- (id)createConnection:(NSError **)error { // Setup video capture input. 
_captureVideoInput = [_captureDevice createInput:error]; @@ -344,7 +357,7 @@ - (AVCaptureConnection *)createConnection:(NSError **)error { connection.videoMirrored = YES; } - return connection; + return [[FLTDefaultCaptureConnection alloc] initWithConnection:connection]; } - (void)reportInitializationState { @@ -354,8 +367,8 @@ - (void)reportInitializationState { height:self.previewSize.height] exposureMode:self.exposureMode focusMode:self.focusMode - exposurePointSupported:self.captureDevice.isExposurePointOfInterestSupported - focusPointSupported:self.captureDevice.isFocusPointOfInterestSupported]; + exposurePointSupported:self.captureDevice.isExposurePointOfInterestSupported + focusPointSupported:self.captureDevice.isFocusPointOfInterestSupported]; __weak typeof(self) weakSelf = self; FLTEnsureToRunOnMainQueue(^{ @@ -422,7 +435,8 @@ - (void)updateOrientation:(UIDeviceOrientation)orientation - (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable, FlutterError *_Nullable))completion { - AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; + id settings = [[FLTDefaultCapturePhotoSettings alloc] + initWithSettings:[AVCapturePhotoSettings photoSettings]]; if (self.mediaSettings.resolutionPreset == FCPPlatformResolutionPresetMax) { [settings setHighResolutionPhotoEnabled:YES]; @@ -434,8 +448,9 @@ - (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable, [self.capturePhotoOutput.availablePhotoCodecTypes containsObject:AVVideoCodecTypeHEVC]; if (_fileFormat == FCPPlatformImageFileFormatHeif && isHEVCCodecAvailable) { - settings = - [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}]; + settings = [[FLTDefaultCapturePhotoSettings alloc] + initWithSettings:[AVCapturePhotoSettings + photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}]]; extension = @"heif"; } else { extension = @"jpg"; @@ -532,7 +547,7 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset withError:(NSError **)error { switch (resolutionPreset) { case FCPPlatformResolutionPresetMax: { - AVCaptureDeviceFormat *bestFormat = + id bestFormat = [self highestResolutionFormatForCaptureDevice:_captureDevice]; if (bestFormat) { _videoCaptureSession.sessionPreset = AVCaptureSessionPresetInputPriority; @@ -597,14 +612,14 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset /// Finds the highest available resolution in terms of pixel count for the given device. /// Preferred are formats with the same subtype as current activeFormat. 
-- (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice: - (AVCaptureDevice *)captureDevice { +- (id)highestResolutionFormatForCaptureDevice: + (id)captureDevice { FourCharCode preferredSubType = CMFormatDescriptionGetMediaSubType(_captureDevice.activeFormat.formatDescription); - AVCaptureDeviceFormat *bestFormat = nil; + id bestFormat = nil; NSUInteger maxPixelCount = 0; BOOL isBestSubTypePreferred = NO; - for (AVCaptureDeviceFormat *format in _captureDevice.formats) { + for (id format in _captureDevice.formats) { CMVideoDimensions res = self.videoDimensionsForFormat(format); NSUInteger height = res.height; NSUInteger width = res.width; @@ -623,7 +638,7 @@ - (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice: - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer - fromConnection:(AVCaptureConnection *)connection { + fromConnection:(id)connection { if (output == _captureVideoOutput) { CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CFRetain(newBuffer); @@ -1048,7 +1063,8 @@ - (void)applyFocusMode { [self applyFocusMode:_focusMode onDevice:_captureDevice]; } -- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(id)captureDevice { +- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode + onDevice:(id)captureDevice { [captureDevice lockForConfiguration:nil]; switch (focusMode) { case FCPPlatformFocusModeLocked: @@ -1098,7 +1114,7 @@ - (void)setDescriptionWhileRecording:(NSString *)cameraName [_videoCaptureSession removeOutput:_captureVideoOutput]; NSError *error = nil; - AVCaptureConnection *newConnection = [self createConnection:&error]; + id newConnection = [self createConnection:&error]; if (error) { completion(FlutterErrorFromNSError(error)); return; @@ -1216,7 +1232,8 @@ - (void)startImageStreamWithMessenger:(NSObject *)messen FlutterEventChannel *eventChannel = [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera_avfoundation/imageStream" binaryMessenger:messenger]; - id eventChannelProtocol = [[FLTDefaultEventChannel alloc] initWithEventChannel:eventChannel]; + id eventChannelProtocol = + [[FLTDefaultEventChannel alloc] initWithEventChannel:eventChannel]; FLTThreadSafeEventChannel *threadSafeEventChannel = [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannelProtocol]; @@ -1298,9 +1315,8 @@ - (BOOL)setupWriterForPath:(NSString *)path { [self setUpCaptureSessionForAudio]; } - _videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL - fileType:AVFileTypeMPEG4 - error:&error]; + _videoWriter =_assetWriterFactory(outputURL, AVFileTypeMPEG4, &error); + NSParameterAssert(_videoWriter); if (error) { [self reportErrorMessage:error.description]; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m index 6dae176ebb5a..c4f91387b4a1 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m @@ -5,10 +5,12 @@ #import "./include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" +#import 
"./include/camera_avfoundation/Protocols/FLTAssetWriter.h" @implementation FLTCamMediaSettingsAVWrapper -- (BOOL)lockDevice:(id)captureDevice error:(NSError *_Nullable *_Nullable)outError { +- (BOOL)lockDevice:(id)captureDevice + error:(NSError *_Nullable *_Nullable)outError { return [captureDevice lockForConfiguration:outError]; } @@ -24,11 +26,13 @@ - (void)commitConfigurationForSession:(id)videoCaptur [videoCaptureSession commitConfiguration]; } -- (void)setMinFrameDuration:(CMTime)duration onDevice:(id)captureDevice { +- (void)setMinFrameDuration:(CMTime)duration + onDevice:(id)captureDevice { captureDevice.activeVideoMinFrameDuration = duration; } -- (void)setMaxFrameDuration:(CMTime)duration onDevice:(id)captureDevice { +- (void)setMaxFrameDuration:(CMTime)duration + onDevice:(id)captureDevice { captureDevice.activeVideoMaxFrameDuration = duration; } @@ -44,7 +48,7 @@ - (AVAssetWriterInput *)assetWriterVideoInputWithOutputSettings: outputSettings:outputSettings]; } -- (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(AVAssetWriter *)writer { +- (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(id)writer { [writer addInput:writerInput]; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m index 27a0550e82c5..748338096380 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCameraPermissionManager.m @@ -16,11 +16,13 @@ - (instancetype)initWithPermissionService:(id)service { return self; } -- (void)requestAudioPermissionWithCompletionHandler:(__strong FLTCameraPermissionRequestCompletionHandler)handler { +- (void)requestAudioPermissionWithCompletionHandler: + (__strong FLTCameraPermissionRequestCompletionHandler)handler { [self requestPermissionForAudio:YES handler:handler]; } -- (void)requestCameraPermissionWithCompletionHandler:(__strong FLTCameraPermissionRequestCompletionHandler)handler { +- (void)requestCameraPermissionWithCompletionHandler: + (__strong FLTCameraPermissionRequestCompletionHandler)handler { [self requestPermissionForAudio:NO handler:handler]; } @@ -71,26 +73,26 @@ - (void)requestPermissionForAudio:(BOOL)forAudio } case AVAuthorizationStatusNotDetermined: { [_permissionService requestAccessForMediaType:mediaType - completionHandler:^(BOOL granted) { - // handler can be invoked on an arbitrary dispatch queue. - if (granted) { - handler(nil); - } else { - FlutterError *flutterError; - if (forAudio) { - flutterError = [FlutterError - errorWithCode:@"AudioAccessDenied" - message:@"User denied the audio access request." - details:nil]; - } else { - flutterError = [FlutterError - errorWithCode:@"CameraAccessDenied" - message:@"User denied the camera access request." - details:nil]; - } - handler(flutterError); - } - }]; + completionHandler:^(BOOL granted) { + // handler can be invoked on an arbitrary dispatch queue. + if (granted) { + handler(nil); + } else { + FlutterError *flutterError; + if (forAudio) { + flutterError = [FlutterError + errorWithCode:@"AudioAccessDenied" + message:@"User denied the audio access request." + details:nil]; + } else { + flutterError = [FlutterError + errorWithCode:@"CameraAccessDenied" + message:@"User denied the camera access request." 
+ details:nil]; + } + handler(flutterError); + } + }]; break; } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m index 2efe90464954..c574e1abef66 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTSavePhotoDelegate.m @@ -4,9 +4,9 @@ #import "./include/camera_avfoundation/FLTSavePhotoDelegate.h" #import "./include/camera_avfoundation/FLTSavePhotoDelegate_Test.h" +#import "./include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h" #import "./include/camera_avfoundation/Protocols/FLTPhotoData.h" - @interface FLTSavePhotoDelegate () /// The file path for the captured photo. @property(readonly, nonatomic) NSString *path; @@ -48,13 +48,13 @@ - (void)handlePhotoCaptureResultWithError:(NSError *)error }); } -- (void)captureOutput:(AVCapturePhotoOutput *)output +- (void)captureOutput:(id)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error { [self handlePhotoCaptureResultWithError:error photoDataProvider:^id { NSData *data = [photo fileDataRepresentation]; - return [[FLTDefaultPhotoData alloc] init]; + return [[FLTDefaultPhotoData alloc] initWithData:data]; }]; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m index 3addfac69d18..e96ebba411be 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m @@ -3,8 +3,8 @@ // found in the LICENSE file. #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" -#import "./include/camera_avfoundation/QueueUtils.h" #import "./include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h" +#import "./include/camera_avfoundation/QueueUtils.h" @interface FLTThreadSafeEventChannel () @property(nonatomic, strong) id channel; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTAssetWriter.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTAssetWriter.m new file mode 100644 index 000000000000..fbbbdc84953c --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTAssetWriter.m @@ -0,0 +1,93 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "../include/camera_avfoundation/Protocols/FLTAssetWriter.h" + +@interface FLTDefaultAssetWriter () +@property(nonatomic, strong) AVAssetWriter *writer; +@end + +@implementation FLTDefaultAssetWriter + +- (instancetype)initWithURL:(NSURL *)url fileType:(AVFileType)fileType error:(NSError **)error { + self = [super init]; + if (self) { + _writer = [[AVAssetWriter alloc] initWithURL:url fileType:fileType error:error]; + } + return self; +} + +- (BOOL)startWriting { + return [self.writer startWriting]; +} + +- (void)finishWritingWithCompletionHandler:(void (^)(void))handler { + [self.writer finishWritingWithCompletionHandler:handler]; +} + +- (AVAssetWriterStatus)status { + return _writer.status; +} + +- (NSError *)error { + return _writer.error; +} + +- (void)startSessionAtSourceTime:(CMTime)startTime { + return [_writer startSessionAtSourceTime:startTime]; +} + +- (void)addInput:(AVAssetWriterInput *)input { + return [_writer addInput:input]; +} + +@end + +@interface FLTDefaultAssetWriterInput () +@property(nonatomic, strong) AVAssetWriterInput *input; +@end + +@implementation FLTDefaultAssetWriterInput + +- (instancetype)initWithInput:(AVAssetWriterInput *)input { + self = [super init]; + if (self) { + _input = input; + } + return self; +} + +- (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer { + return [self.input appendSampleBuffer:sampleBuffer]; +} + +- (BOOL)expectsMediaDataInRealTime { + return [self.input expectsMediaDataInRealTime]; +} + +- (BOOL)isReadyForMoreMediaData { + return [self.input isReadyForMoreMediaData]; +} + +@end + +@interface FLTDefaultPixelBufferAdaptor () +@property(nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *adaptor; +@end + +@implementation FLTDefaultPixelBufferAdaptor + +- (nonnull instancetype)initWithAdaptor:(nonnull AVAssetWriterInputPixelBufferAdaptor *)adaptor { + self = [super init]; + if (self) { + _adaptor = adaptor; + } + return self; +} + +- (BOOL)appendPixelBuffer:(nonnull CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime { + return [_adaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.m index 3987ffaea585..a691c40bf052 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.m @@ -8,22 +8,24 @@ @implementation FLTDefaultCameraDeviceDiscovery -- (NSArray> *)discoverySessionWithDeviceTypes:(NSArray *)deviceTypes - mediaType:(AVMediaType)mediaType - position:(AVCaptureDevicePosition)position { - AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession - discoverySessionWithDeviceTypes:deviceTypes - mediaType:mediaType - position:position]; - +- (NSArray> *) + discoverySessionWithDeviceTypes:(NSArray *)deviceTypes + mediaType:(AVMediaType)mediaType + position:(AVCaptureDevicePosition)position { + AVCaptureDeviceDiscoverySession *discoverySession = + [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes + mediaType:mediaType + position:position]; + NSArray *devices = discoverySession.devices; NSMutableArray> *deviceControllers = [NSMutableArray 
array]; - + for (AVCaptureDevice *device in devices) { - FLTDefaultCaptureDeviceController *controller = [[FLTDefaultCaptureDeviceController alloc] initWithDevice:device]; - [deviceControllers addObject:controller]; + FLTDefaultCaptureDeviceController *controller = + [[FLTDefaultCaptureDeviceController alloc] initWithDevice:device]; + [deviceControllers addObject:controller]; } - + return deviceControllers; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureConnection.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureConnection.m new file mode 100644 index 000000000000..2eaffd65d13e --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureConnection.m @@ -0,0 +1,49 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#import "../include/camera_avfoundation/Protocols/FLTCaptureConnection.h" + +@interface FLTDefaultCaptureConnection () +@property(nonatomic, strong) AVCaptureConnection *connection; +@end + +@implementation FLTDefaultCaptureConnection + +- (instancetype)initWithConnection:(AVCaptureConnection *)connection { + self = [super init]; + if (self) { + _connection = connection; + } + return self; +} + +- (BOOL)isVideoMirroringSupported { + return self.connection.isVideoMirroringSupported; +} + +- (BOOL)isVideoOrientationSupported { + return self.connection.isVideoOrientationSupported; +} + +- (void)setVideoMirrored:(BOOL)videoMirrored { + self.connection.videoMirrored = videoMirrored; +} + +- (BOOL)videoMirrored { + return self.connection.videoMirrored; +} + +- (void)setVideoOrientation:(AVCaptureVideoOrientation)videoOrientation { + self.connection.videoOrientation = videoOrientation; +} + +- (AVCaptureVideoOrientation)videoOrientation { + return self.connection.videoOrientation; +} + +- (NSArray *)inputPorts { + return self.connection.inputPorts; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m index 2044b87c7689..548a0d93a8d6 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m @@ -13,7 +13,7 @@ @implementation FLTDefaultCaptureDeviceController - (instancetype)initWithDevice:(AVCaptureDevice *)device { self = [super init]; if (self) { - _device = device; + _device = device; } return self; } @@ -24,45 +24,49 @@ - (nonnull NSString *)uniqueID { // Position/Orientation - (AVCaptureDevicePosition)position { - return self.device.position; + return self.device.position; } // Format/Configuration -- (AVCaptureDeviceFormat *)activeFormat { - return self.device.activeFormat; +- (id)activeFormat { + return [[FLTDefaultCaptureDeviceFormat alloc] initWithFormat:self.device.activeFormat]; } -- (NSArray *)formats { - return self.device.formats; +- (NSArray> *)formats { + NSMutableArray> *wrappedFormats = [NSMutableArray array]; + for (AVCaptureDeviceFormat *format in self.device.formats) { + [wrappedFormats 
addObject:[[FLTDefaultCaptureDeviceFormat alloc] initWithFormat:format]]; + } + return wrappedFormats; } -- (void)setActiveFormat:(AVCaptureDeviceFormat *)format { - self.device.activeFormat = format; +- (void)setActiveFormat:(id)format { + self.device.activeFormat = format.format; } // Flash/Torch - (BOOL)hasFlash { - return self.device.hasFlash; + return self.device.hasFlash; } - (BOOL)hasTorch { - return self.device.hasTorch; + return self.device.hasTorch; } - (BOOL)isTorchAvailable { - return self.device.isTorchAvailable; + return self.device.isTorchAvailable; } - (AVCaptureTorchMode)torchMode { - return self.device.torchMode; + return self.device.torchMode; } - (void)setTorchMode:(AVCaptureTorchMode)torchMode { - self.device.torchMode = torchMode; + self.device.torchMode = torchMode; } - (BOOL)isFlashModeSupported:(AVCaptureFlashMode)mode { - return [self.device isFlashModeSupported:mode]; + return [self.device isFlashModeSupported:mode]; } // Focus @@ -71,40 +75,40 @@ - (BOOL)isFocusPointOfInterestSupported { } - (BOOL)isFocusModeSupported:(AVCaptureFocusMode)mode { - return [self.device isFocusModeSupported:mode]; + return [self.device isFocusModeSupported:mode]; } - (void)setFocusMode:(AVCaptureFocusMode)focusMode { - self.device.focusMode = focusMode; + self.device.focusMode = focusMode; } - (void)setFocusPointOfInterest:(CGPoint)point { - self.device.focusPointOfInterest = point; + self.device.focusPointOfInterest = point; } // Exposure - (BOOL)isExposurePointOfInterestSupported { - return self.device.isExposurePointOfInterestSupported; + return self.device.isExposurePointOfInterestSupported; } - (void)setExposureMode:(AVCaptureExposureMode)exposureMode { - self.device.exposureMode = exposureMode; + self.device.exposureMode = exposureMode; } - (void)setExposurePointOfInterest:(CGPoint)point { - self.device.exposurePointOfInterest = point; + self.device.exposurePointOfInterest = point; } - (float)minExposureTargetBias { - return self.device.minExposureTargetBias; + return self.device.minExposureTargetBias; } - (float)maxExposureTargetBias { - return self.device.maxExposureTargetBias; + return self.device.maxExposureTargetBias; } - (void)setExposureTargetBias:(float)bias completionHandler:(void (^)(CMTime))handler { - [self.device setExposureTargetBias:bias completionHandler:handler]; + [self.device setExposureTargetBias:bias completionHandler:handler]; } - (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode { @@ -113,61 +117,85 @@ - (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode { // Zoom - (float)maxAvailableVideoZoomFactor { - return self.device.maxAvailableVideoZoomFactor; + return self.device.maxAvailableVideoZoomFactor; } - (float)minAvailableVideoZoomFactor { - return self.device.minAvailableVideoZoomFactor; + return self.device.minAvailableVideoZoomFactor; } - (float)videoZoomFactor { - return self.device.videoZoomFactor; + return self.device.videoZoomFactor; } - (void)setVideoZoomFactor:(float)factor { - self.device.videoZoomFactor = factor; + self.device.videoZoomFactor = factor; } // Camera Properties - (float)lensAperture { - return self.device.lensAperture; + return self.device.lensAperture; } - (CMTime)exposureDuration { - return self.device.exposureDuration; + return self.device.exposureDuration; } - (float)ISO { - return self.device.ISO; + return self.device.ISO; } // Configuration Lock - (BOOL)lockForConfiguration:(NSError **)error { - return [self.device lockForConfiguration:error]; + return [self.device lockForConfiguration:error]; } - 
(void)unlockForConfiguration {
-  [self.device unlockForConfiguration];
+  [self.device unlockForConfiguration];
 }

 - (CMTime)activeVideoMinFrameDuration {
-  return self.device.activeVideoMinFrameDuration;
+  return self.device.activeVideoMinFrameDuration;
 }

 - (void)setActiveVideoMinFrameDuration:(CMTime)duration {
-  self.device.activeVideoMinFrameDuration = duration;
+  self.device.activeVideoMinFrameDuration = duration;
 }

 - (CMTime)activeVideoMaxFrameDuration {
-  return self.device.activeVideoMaxFrameDuration;
+  return self.device.activeVideoMaxFrameDuration;
 }

 - (void)setActiveVideoMaxFrameDuration:(CMTime)duration {
-  self.device.activeVideoMaxFrameDuration = duration;
+  self.device.activeVideoMaxFrameDuration = duration;
 }

-- (AVCaptureInput *)createInput:(NSError * _Nullable * _Nullable)error {
+- (AVCaptureInput *)createInput:(NSError *_Nullable *_Nullable)error {
   return [AVCaptureDeviceInput deviceInputWithDevice:_device error:error];
 }

 @end
+
+@implementation FLTDefaultCaptureDeviceFormat {
+  AVCaptureDeviceFormat *_format;
+}
+
+- (instancetype)initWithFormat:(AVCaptureDeviceFormat *)format {
+  self = [super init];
+  if (self) {
+    _format = format;
+  }
+  return self;
+}
+
+- (CMFormatDescriptionRef)formatDescription {
+  return _format.formatDescription;
+}
+
+- (NSArray *)videoSupportedFrameRateRanges {
+  return _format.videoSupportedFrameRateRanges;
+}
+
+@synthesize format = _format;
+
+@end
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoOutput.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoOutput.m
new file mode 100644
index 000000000000..c9734e3c18cd
--- /dev/null
+++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoOutput.m
@@ -0,0 +1,50 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
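As a usage sketch, the FLTCaptureDeviceFormat wrapper above lets frame-rate selection run against the protocol instead of AVCaptureDeviceFormat directly. The helper name and the wrapped device are illustrative, not part of the patch:

// Illustrative helper: pick the first format whose frame-rate range covers 60 FPS.
static void FLTSelectFormatSupporting60FPS(id<FLTCaptureDeviceControlling> device) {
  for (id<FLTCaptureDeviceFormat> format in [device formats]) {
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
      if (range.minFrameRate <= 60 && range.maxFrameRate >= 60) {
        NSError *error = nil;
        if ([device lockForConfiguration:&error]) {
          [device setActiveFormat:format];
          [device unlockForConfiguration];
        }
        return;
      }
    }
  }
}

// Wrapping a real device (avDevice is an existing AVCaptureDevice):
// FLTSelectFormatSupporting60FPS(
//     [[FLTDefaultCaptureDeviceController alloc] initWithDevice:avDevice]);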
+ +#import "../include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h" +#import "../include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h" + +@implementation FLTDefaultCapturePhotoOutput { + AVCapturePhotoOutput *_photoOutput; +} + +- (instancetype)initWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput { + self = [super init]; + if (self) { + _photoOutput = photoOutput; + } + return self; +} + +- (AVCapturePhotoOutput *)photoOutput { + return _photoOutput; +} + +- (NSArray *)availablePhotoCodecTypes { + return _photoOutput.availablePhotoCodecTypes; +} + +- (void)setHighResolutionCaptureEnabled:(BOOL)enabled { + [_photoOutput setHighResolutionCaptureEnabled:enabled]; +} + +- (BOOL)isHighResolutionCaptureEnabled { + return _photoOutput.isHighResolutionCaptureEnabled; +} + +- (void)capturePhotoWithSettings:(id)settings + delegate:(id)delegate { + [_photoOutput capturePhotoWithSettings:settings.settings delegate:delegate]; +} + +- (nullable AVCaptureConnection *)connectionWithMediaType:(nonnull AVMediaType)mediaType { + return [_photoOutput connectionWithMediaType:mediaType]; +} + + +- (NSArray *)supportedFlashModes { + return _photoOutput.supportedFlashModes; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m new file mode 100644 index 000000000000..11fadd94f952 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m @@ -0,0 +1,34 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "../include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h" + +@implementation FLTDefaultCapturePhotoSettings +- (instancetype)initWithSettings:(AVCapturePhotoSettings *)settings { + self = [super init]; + if (self) { + settings = settings; + } + return self; +} + +- (int64_t)uniqueID { + return settings.uniqueID; +} + +- (NSDictionary *)format { + return settings.format; +} + +- (void)setFlashMode:(AVCaptureFlashMode)flashMode { + [settings setFlashMode:flashMode]; +} + +- (void)setHighResolutionPhotoEnabled:(BOOL)enabled { + [settings setHighResolutionPhotoEnabled:enabled]; +} + +@synthesize settings; + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m index d9be4a1a794e..101330f260de 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m @@ -9,7 +9,7 @@ @interface FLTDefaultCaptureSession () @end @implementation FLTDefaultCaptureSession - + - (instancetype)initWithCaptureSession:(AVCaptureSession *)session { self = [super init]; if (self) { @@ -19,79 +19,79 @@ - (instancetype)initWithCaptureSession:(AVCaptureSession *)session { } - (void)beginConfiguration { - [_captureSession beginConfiguration]; + [_captureSession beginConfiguration]; } - (void)commitConfiguration { - [_captureSession commitConfiguration]; + [_captureSession commitConfiguration]; } - (void)startRunning { - [_captureSession startRunning]; + [_captureSession startRunning]; } - (void)stopRunning { - [_captureSession stopRunning]; + [_captureSession stopRunning]; } - (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset { - return [_captureSession canSetSessionPreset:preset]; + return [_captureSession canSetSessionPreset:preset]; } - (void)addInputWithNoConnections:(AVCaptureInput *)input { - [_captureSession addInputWithNoConnections:input]; + [_captureSession addInputWithNoConnections:input]; } - (void)addOutputWithNoConnections:(AVCaptureOutput *)output { - [_captureSession addOutputWithNoConnections:output]; + [_captureSession addOutputWithNoConnections:output]; } - (void)addConnection:(AVCaptureConnection *)connection { - [_captureSession addConnection:connection]; + [_captureSession addConnection:connection]; } - (void)addOutput:(AVCaptureOutput *)output { - [_captureSession addOutput:output]; + [_captureSession addOutput:output]; } - (void)removeInput:(AVCaptureInput *)input { - [_captureSession removeInput:input]; + [_captureSession removeInput:input]; } - (void)removeOutput:(AVCaptureOutput *)output { - [_captureSession removeOutput:output]; + [_captureSession removeOutput:output]; } - (void)setSessionPreset:(AVCaptureSessionPreset)sessionPreset { - _captureSession.sessionPreset = sessionPreset; + _captureSession.sessionPreset = sessionPreset; } - (AVCaptureSessionPreset)sessionPreset { - return _captureSession.sessionPreset; + return _captureSession.sessionPreset; } - (NSArray *)inputs { - return _captureSession.inputs; + return _captureSession.inputs; } - (NSArray *)outputs { - return _captureSession.outputs; + return _captureSession.outputs; } - (BOOL)canAddInput:(AVCaptureInput *)input { - return [_captureSession canAddInput:input]; + return 
[_captureSession canAddInput:input]; } - (BOOL)canAddOutput:(AVCaptureOutput *)output { - return [_captureSession canAddOutput:output]; + return [_captureSession canAddOutput:output]; } - (BOOL)canAddConnection:(AVCaptureConnection *)connection { - return [_captureSession canAddConnection:connection]; + return [_captureSession canAddConnection:connection]; } - (void)addInput:(AVCaptureInput *)input { - [_captureSession addInput:input]; + [_captureSession addInput:input]; } @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.m index 7b10f09109d4..0f909b2a9fa6 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.m @@ -6,7 +6,7 @@ @implementation FLTDefaultDeviceOrientationProvider -- (UIDeviceOrientation)orientation { +- (UIDeviceOrientation)orientation { return [[UIDevice currentDevice] orientation]; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPermissionService.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPermissionService.m index afdac5d3da59..458540c2b9fe 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPermissionService.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPermissionService.m @@ -6,12 +6,11 @@ @implementation FLTDefaultPermissionService - (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType { - return [AVCaptureDevice authorizationStatusForMediaType:mediaType]; + return [AVCaptureDevice authorizationStatusForMediaType:mediaType]; } - (void)requestAccessForMediaType:(AVMediaType)mediaType - completionHandler:(void (^)(BOOL granted))handler { - [AVCaptureDevice requestAccessForMediaType:mediaType - completionHandler:handler]; + completionHandler:(void (^)(BOOL granted))handler { + [AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:handler]; } @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPhotoData.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPhotoData.m index 583c9f27e430..b2b9dae0ffb6 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPhotoData.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTPhotoData.m @@ -15,8 +15,8 @@ - (instancetype)initWithData:(NSData *)data { } - (BOOL)writeToFile:(NSString *)path - options:(NSDataWritingOptions)writeOptionsMask - error:(NSError **)errorPtr { + options:(NSDataWritingOptions)writeOptionsMask + error:(NSError **)errorPtr { return [self.data writeToFile:path options:writeOptionsMask error:errorPtr]; } @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap 
b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap index 1ba442f7b107..b49a1b0a0039 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap @@ -18,7 +18,11 @@ framework module camera_avfoundation { header "FLTEventChannelProtocol.h" header "FLTCameraDeviceDiscovery.h" header "FLTCaptureSessionProtocol.h" + header "FLTCapturePhotoSettings.h" + header "FLTCapturePhotoOutput.h" header "FLTPhotoData.h" + header "FLTAssetWriter.h" + header "FLTCaptureConnection.h" header "QueueUtils.h" } } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h index c25e375342d6..cff761883573 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h @@ -5,11 +5,12 @@ #import #import "messages.g.h" -#import "FLTCaptureSessionProtocol.h" + #import "FLTCaptureDeviceControlling.h" +#import "FLTCaptureSessionProtocol.h" typedef id (^CaptureSessionFactory)(void); -typedef id (^CaptureNamedDeviceFactory)(NSString* name); +typedef id (^CaptureNamedDeviceFactory)(NSString *name); @interface CameraPlugin : NSObject @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h index 6b8170eb2dee..68029369dc7b 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin_Test.h @@ -6,8 +6,8 @@ #import "CameraPlugin.h" #import "FLTCam.h" -#import "messages.g.h" #import "FLTCameraDeviceDiscovery.h" +#import "messages.g.h" /// APIs exposed for unit testing. @interface CameraPlugin () @@ -29,8 +29,7 @@ globalAPI:(FCPCameraGlobalEventApi *)globalAPI deviceDiscovery:(id)deviceDiscovery sessionFactory:(CaptureSessionFactory)sessionFactory - deviceFactory:(CaptureNamedDeviceFactory)deviceFactory -NS_DESIGNATED_INITIALIZER; + deviceFactory:(CaptureNamedDeviceFactory)deviceFactory NS_DESIGNATED_INITIALIZER; /// Hide the default public constructor. 
- (instancetype)init NS_UNAVAILABLE; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h index fe9315ced9cd..04008d9e2134 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h @@ -9,6 +9,8 @@ #import "CameraProperties.h" #import "FLTCamMediaSettingsAVWrapper.h" #import "FLTCaptureDeviceControlling.h" +#import "FLTCapturePhotoOutput.h" +#import "FLTAssetWriter.h" #import "messages.g.h" NS_ASSUME_NONNULL_BEGIN @@ -17,9 +19,11 @@ NS_ASSUME_NONNULL_BEGIN /// Used in tests to inject a device into FLTCam. typedef id _Nonnull (^CaptureDeviceFactory)(void); +typedef id _Nonnull (^AssetWriterFactory)(NSURL*, AVFileType, NSError * _Nullable * _Nullable); + /// Determines the video dimensions (width and height) for a given capture device format. /// Used in tests to mock CMVideoFormatDescriptionGetDimensions. -typedef CMVideoDimensions (^VideoDimensionsForFormat)(AVCaptureDeviceFormat *); +typedef CMVideoDimensions (^VideoDimensionsForFormat)(id); /// A class that manages camera's state and performs camera operations. @interface FLTCam : NSObject @@ -48,23 +52,23 @@ typedef CMVideoDimensions (^VideoDimensionsForFormat)(AVCaptureDeviceFormat *); /// @param orientation the orientation of camera /// @param captureSessionQueue the queue on which camera's capture session operations happen. /// @param error report to the caller if any error happened creating the camera. -- (instancetype)initWithCameraName:(NSString *)cameraName - mediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - error:(NSError **)error; +//- (instancetype)initWithCameraName:(NSString *)cameraName +// mediaSettings:(FCPPlatformMediaSettings *)mediaSettings +// mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper +// orientation:(UIDeviceOrientation)orientation +// captureSessionQueue:(dispatch_queue_t)captureSessionQueue +// error:(NSError **)error; /// Initializes a camera instance. /// Allows for injecting dependencies that are usually internal. -- (instancetype)initWithCameraName:(NSString *)cameraName - mediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(id)videoCaptureSession - audioCaptureSession:(id)audioCaptureSession - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - error:(NSError **)error; +//- (instancetype)initWithCameraName:(NSString *)cameraName +// mediaSettings:(FCPPlatformMediaSettings *)mediaSettings +// mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper +// orientation:(UIDeviceOrientation)orientation +// videoCaptureSession:(id)videoCaptureSession +// audioCaptureSession:(id)audioCaptureSession +// captureSessionQueue:(dispatch_queue_t)captureSessionQueue +// error:(NSError **)error; /// Initializes a camera instance. 
/// Allows for testing with specified resolution, audio preference, orientation, @@ -77,6 +81,8 @@ typedef CMVideoDimensions (^VideoDimensionsForFormat)(AVCaptureDeviceFormat *); captureSessionQueue:(dispatch_queue_t)captureSessionQueue captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat + capturePhotoOutput:(id)capturePhotoOutput + assetWriterFactory:(AssetWriterFactory)assetWriterFactory error:(NSError **)error; /// Informs the Dart side of the plugin of the current camera state and capabilities. @@ -125,7 +131,8 @@ typedef CMVideoDimensions (^VideoDimensionsForFormat)(AVCaptureDeviceFormat *); /// /// @param focusMode The focus mode that should be applied to the @captureDevice instance. /// @param captureDevice The AVCaptureDevice to which the @focusMode will be applied. -- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode onDevice:(id)captureDevice; +- (void)applyFocusMode:(FCPPlatformFocusMode)focusMode + onDevice:(id)captureDevice; - (void)pausePreview; - (void)resumePreview; - (void)setDescriptionWhileRecording:(NSString *)cameraName diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h index da75d7e4e75b..844143335e98 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h @@ -28,7 +28,8 @@ NS_ASSUME_NONNULL_BEGIN * @param outError The optional error. * @result A BOOL indicating whether the device was successfully locked for configuration. */ -- (BOOL)lockDevice:(id)captureDevice error:(NSError *_Nullable *_Nullable)outError; +- (BOOL)lockDevice:(id)captureDevice + error:(NSError *_Nullable *_Nullable)outError; /** * @method unlockDevice: @@ -60,7 +61,8 @@ NS_ASSUME_NONNULL_BEGIN * @param duration The frame duration. * @param captureDevice The capture device */ -- (void)setMinFrameDuration:(CMTime)duration onDevice:(id)captureDevice; +- (void)setMinFrameDuration:(CMTime)duration + onDevice:(id)captureDevice; /** * @method setMaxFrameDuration:onDevice: @@ -69,7 +71,8 @@ NS_ASSUME_NONNULL_BEGIN * @param duration The frame duration. * @param captureDevice The capture device */ -- (void)setMaxFrameDuration:(CMTime)duration onDevice:(id)captureDevice; +- (void)setMaxFrameDuration:(CMTime)duration + onDevice:(id)captureDevice; /** * @method assetWriterAudioInputWithOutputSettings: diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h index cfe1b0772787..6a63daacc8ab 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h @@ -3,11 +3,14 @@ // found in the LICENSE file. 
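For reference, blocks satisfying the AssetWriterFactory and VideoDimensionsForFormat typedefs above can simply forward to the default wrappers. This is an illustrative sketch rather than code from the plugin; the two local variable names are arbitrary:

// An AssetWriterFactory that produces the real AVAssetWriter-backed wrapper.
AssetWriterFactory assetWriterFactory =
    ^id<FLTAssetWriter>(NSURL *url, AVFileType fileType, NSError **error) {
      return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error];
    };

// A VideoDimensionsForFormat block that reads dimensions off the wrapped format.
VideoDimensionsForFormat videoDimensionsForFormat =
    ^CMVideoDimensions(id<FLTCaptureDeviceFormat> format) {
      return CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    };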
#import "FLTCam.h" -#import "FLTSavePhotoDelegate.h" #import "FLTCaptureDeviceControlling.h" +#import "FLTCapturePhotoOutput.h" #import "FLTCaptureSessionProtocol.h" +#import "FLTSavePhotoDelegate.h" +#import "FLTCaptureConnection.h" @interface FLTImageStreamHandler : NSObject +- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue; /// The queue on which `eventSink` property should be accessed. @property(nonatomic, strong) dispatch_queue_t captureSessionQueue; @@ -27,7 +30,7 @@ @property(readonly, nonatomic) AVCaptureVideoDataOutput *captureVideoOutput; /// The output for photo capturing. Exposed setter for unit tests. -@property(strong, nonatomic) AVCapturePhotoOutput *capturePhotoOutput; +@property(strong, nonatomic) id capturePhotoOutput; /// True when images from the camera are being streamed. @property(assign, nonatomic) BOOL isStreamingImages; @@ -42,9 +45,9 @@ /// Delegate callback when receiving a new video or audio sample. /// Exposed for unit tests. -- (void)captureOutput:(AVCaptureOutput *)output +- (void)captureOutput:(AVCaptureVideoDataOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer - fromConnection:(AVCaptureConnection *)connection; + fromConnection:(id)connection; /// Start streaming images. - (void)startImageStreamWithMessenger:(NSObject *)messenger diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCameraPermissionManager.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCameraPermissionManager.h index a7e859aebdde..6bd24bae12fe 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCameraPermissionManager.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCameraPermissionManager.h @@ -12,7 +12,7 @@ NS_ASSUME_NONNULL_BEGIN typedef void (^FLTCameraPermissionRequestCompletionHandler)(FlutterError *_Nullable); @interface FLTCameraPermissionManager : NSObject -@property (nonatomic, strong) id permissionService; +@property(nonatomic, strong) id permissionService; - (instancetype)initWithPermissionService:(id)service; @@ -25,8 +25,8 @@ typedef void (^FLTCameraPermissionRequestCompletionHandler)(FlutterError *_Nulla /// @param handler if access permission is (or was previously) granted, completion handler will be /// called without error; Otherwise completion handler will be called with error. Handler can be /// called on an arbitrary dispatch queue. -- (void)requestCameraPermissionWithCompletionHandler:( - FLTCameraPermissionRequestCompletionHandler)handler; +- (void)requestCameraPermissionWithCompletionHandler: + (FLTCameraPermissionRequestCompletionHandler)handler; /// Requests audio access permission. /// @@ -37,8 +37,8 @@ typedef void (^FLTCameraPermissionRequestCompletionHandler)(FlutterError *_Nulla /// @param handler if access permission is (or was previously) granted, completion handler will be /// called without error; Otherwise completion handler will be called with error. Handler can be /// called on an arbitrary dispatch queue. 
-- (void)requestAudioPermissionWithCompletionHandler:( - FLTCameraPermissionRequestCompletionHandler)handler; +- (void)requestAudioPermissionWithCompletionHandler: + (FLTCameraPermissionRequestCompletionHandler)handler; @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h index 4e18ba71c465..ac72be4c2c4b 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTSavePhotoDelegate_Test.h @@ -2,8 +2,8 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#import "FLTSavePhotoDelegate.h" #import "FLTPhotoData.h" +#import "FLTSavePhotoDelegate.h" /// API exposed for unit tests. @interface FLTSavePhotoDelegate () diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTAssetWriter.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTAssetWriter.h new file mode 100644 index 000000000000..c39f935e2c0d --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTAssetWriter.h @@ -0,0 +1,42 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
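Elsewhere in this patch the save-photo delegate wraps the captured bytes in FLTDefaultPhotoData before writing them. A standalone sketch of that write path; the helper name is hypothetical, and the photo and path arguments are placeholders supplied by the caller:

// Illustrative only: `photo` is the AVCapturePhoto delivered to the delegate,
// `path` the destination file chosen by the plugin.
static BOOL FLTWritePhoto(AVCapturePhoto *photo, NSString *path, NSError **error) {
  NSData *bytes = [photo fileDataRepresentation];
  id<FLTPhotoData> photoData = [[FLTDefaultPhotoData alloc] initWithData:bytes];
  return [photoData writeToFile:path options:NSDataWritingAtomic error:error];
}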
+ +@import Foundation; +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTAssetWriter +@property(nonatomic, readonly) AVAssetWriterStatus status; +@property (readonly, nullable) NSError *error; +- (BOOL)startWriting; +- (void)finishWritingWithCompletionHandler:(void (^)(void))handler; +- (void)startSessionAtSourceTime:(CMTime)startTime; +- (void)addInput:(AVAssetWriterInput *)input; +@end + +@protocol FLTAssetWriterInput +@property(nonatomic, readonly) BOOL expectsMediaDataInRealTime; +@property(nonatomic, readonly) BOOL isReadyForMoreMediaData; +- (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer; +@end + +@protocol FLTPixelBufferAdaptor +- (BOOL)appendPixelBuffer:(CVPixelBufferRef)pixelBuffer + withPresentationTime:(CMTime)presentationTime; +@end + +@interface FLTDefaultAssetWriter : NSObject +- (instancetype)initWithURL:(NSURL *)url fileType:(AVFileType)fileType error:(NSError **)error; +@end + +@interface FLTDefaultAssetWriterInput : NSObject +- (instancetype)initWithInput:(AVAssetWriterInput *)input; +@end + +@interface FLTDefaultPixelBufferAdaptor : NSObject +- (instancetype)initWithAdaptor:(AVAssetWriterInputPixelBufferAdaptor *)adaptor; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h index 4f28bbe02315..cd4e8e92b096 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCameraDeviceDiscovery.h @@ -10,9 +10,10 @@ NS_ASSUME_NONNULL_BEGIN @protocol FLTCameraDeviceDiscovery -- (NSArray> *)discoverySessionWithDeviceTypes:(NSArray *)deviceTypes - mediaType:(AVMediaType)mediaType - position:(AVCaptureDevicePosition)position; +- (NSArray> *) + discoverySessionWithDeviceTypes:(NSArray *)deviceTypes + mediaType:(AVMediaType)mediaType + position:(AVCaptureDevicePosition)position; @end @interface FLTDefaultCameraDeviceDiscovery : NSObject diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureConnection.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureConnection.h new file mode 100644 index 000000000000..95d3082c015e --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureConnection.h @@ -0,0 +1,21 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
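The FLTAssetWriter protocol above mirrors only the slice of AVAssetWriter the plugin touches. A minimal end-to-end sketch against it, assuming the usual AVFoundation imports; the helper name, output URL, and pass-through input settings are placeholders, and error handling is elided:

static void FLTRecordingSketch(void) {
  NSError *error = nil;
  id<FLTAssetWriter> writer = [[FLTDefaultAssetWriter alloc]
      initWithURL:[NSURL fileURLWithPath:@"/tmp/recording.mov"]
         fileType:AVFileTypeQuickTimeMovie
            error:&error];

  AVAssetWriterInput *videoInput =
      [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:nil];
  videoInput.expectsMediaDataInRealTime = YES;
  [writer addInput:videoInput];

  if ([writer startWriting]) {
    [writer startSessionAtSourceTime:kCMTimeZero];
    // ... append media through FLTDefaultAssetWriterInput / FLTDefaultPixelBufferAdaptor ...
    [writer finishWritingWithCompletionHandler:^{
      NSLog(@"Asset writer finished with status %ld", (long)writer.status);
    }];
  }
}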
+ +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTCaptureConnection +@property(nonatomic) BOOL videoMirrored; +@property(nonatomic) AVCaptureVideoOrientation videoOrientation; +@property(nonatomic, readonly) NSArray *inputPorts; +@property(nonatomic, readonly) BOOL isVideoMirroringSupported; +@property(nonatomic, readonly) BOOL isVideoOrientationSupported; +@end + +@interface FLTDefaultCaptureConnection : NSObject +- (instancetype)initWithConnection:(AVCaptureConnection *)connection; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h index 434e639f3d07..b61f678b726e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h @@ -7,6 +7,12 @@ NS_ASSUME_NONNULL_BEGIN +@protocol FLTCaptureDeviceFormat +@property(nonatomic, readonly) CMFormatDescriptionRef formatDescription; +@property(nonatomic, readonly) NSArray *videoSupportedFrameRateRanges; +@property(nonatomic, readonly) AVCaptureDeviceFormat *format; +@end + @protocol FLTCaptureDeviceControlling - (NSString *)uniqueID; @@ -15,9 +21,9 @@ NS_ASSUME_NONNULL_BEGIN - (AVCaptureDevicePosition)position; // Format/Configuration -- (AVCaptureDeviceFormat *)activeFormat; -- (NSArray *)formats; -- (void)setActiveFormat:(AVCaptureDeviceFormat *)format; +- (id)activeFormat; +- (NSArray> *)formats; +- (void)setActiveFormat:(id)format; // Flash/Torch - (BOOL)hasFlash; @@ -39,7 +45,7 @@ NS_ASSUME_NONNULL_BEGIN - (void)setExposurePointOfInterest:(CGPoint)point; - (float)minExposureTargetBias; - (float)maxExposureTargetBias; -- (void)setExposureTargetBias:(float)bias completionHandler:(void (^ _Nullable)(CMTime))handler; +- (void)setExposureTargetBias:(float)bias completionHandler:(void (^_Nullable)(CMTime))handler; - (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode; // Zoom @@ -63,7 +69,7 @@ NS_ASSUME_NONNULL_BEGIN - (CMTime)activeVideoMaxFrameDuration; - (void)setActiveVideoMaxFrameDuration:(CMTime)duration; -- (AVCaptureInput *)createInput:(NSError * _Nullable * _Nullable)error; +- (AVCaptureInput *)createInput:(NSError *_Nullable *_Nullable)error; @end @@ -71,4 +77,8 @@ NS_ASSUME_NONNULL_BEGIN - (instancetype)initWithDevice:(AVCaptureDevice *)device; @end +@interface FLTDefaultCaptureDeviceFormat : NSObject +- (instancetype)initWithFormat:(AVCaptureDeviceFormat *)format; +@end + NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h new file mode 100644 index 000000000000..96b8db5b2089 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h @@ -0,0 +1,30 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import Foundation; +@import AVFoundation; + +#import "FLTCapturePhotoOutput.h" +#import "FLTCapturePhotoSettings.h" + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTCapturePhotoOutput +@property(nonatomic, readonly) NSArray *availablePhotoCodecTypes; +@property(nonatomic, assign, getter=isHighResolutionCaptureEnabled) + BOOL highResolutionCaptureEnabled; +@property(nonatomic, readonly) NSArray *supportedFlashModes; + +- (void)capturePhotoWithSettings:(id)settings + delegate:(id)delegate; + +- (nullable AVCaptureConnection *)connectionWithMediaType:(AVMediaType)mediaType; +@end + +@interface FLTDefaultCapturePhotoOutput : NSObject +- (instancetype)initWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput; +@property(nonatomic, readonly) AVCapturePhotoOutput *photoOutput; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h new file mode 100644 index 000000000000..9cf3f9acb724 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h @@ -0,0 +1,24 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import Foundation; +@import AVFoundation; + +NS_ASSUME_NONNULL_BEGIN + +@protocol FLTCapturePhotoSettings +@property(nonatomic, readonly) AVCapturePhotoSettings *settings; + +@property(readonly, nonatomic) int64_t uniqueID; +@property(nonatomic, copy, readonly) NSDictionary *format; + +- (void)setFlashMode:(AVCaptureFlashMode)flashMode; +- (void)setHighResolutionPhotoEnabled:(BOOL)enabled; +@end + +@interface FLTDefaultCapturePhotoSettings : NSObject +- (instancetype)initWithSettings:(AVCapturePhotoSettings *)settings; +@end + +NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h index b6c2501f1703..88824959e24c 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h @@ -29,7 +29,6 @@ NS_ASSUME_NONNULL_BEGIN @end - @interface FLTDefaultCaptureSession : NSObject - (instancetype)initWithCaptureSession:(AVCaptureSession *)session; @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPermissionService.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPermissionService.h index af31e415e995..7ab7cdfbbd8c 100644 --- 
a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPermissionService.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPermissionService.h @@ -10,7 +10,7 @@ NS_ASSUME_NONNULL_BEGIN @protocol FLTPermissionService - (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType; - (void)requestAccessForMediaType:(AVMediaType)mediaType - completionHandler:(void (^)(BOOL granted))handler; + completionHandler:(void (^)(BOOL granted))handler; @end @interface FLTDefaultPermissionService : NSObject diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPhotoData.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPhotoData.h index 7afd7b8bc88a..099b7b5c6067 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPhotoData.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTPhotoData.h @@ -8,11 +8,10 @@ NS_ASSUME_NONNULL_BEGIN @protocol FLTPhotoData - (BOOL)writeToFile:(NSString *)path - options:(NSDataWritingOptions)writeOptionsMask - error:(NSError **)errorPtr; + options:(NSDataWritingOptions)writeOptionsMask + error:(NSError **)errorPtr; @end - @interface FLTDefaultPhotoData : NSObject @property(nonatomic, strong, readonly) NSData *data; - (instancetype)initWithData:(NSData *)data; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/QueueUtils.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/QueueUtils.h index a7e22da716d0..e230a53508fa 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/QueueUtils.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/QueueUtils.h @@ -7,7 +7,7 @@ NS_ASSUME_NONNULL_BEGIN /// Queue-specific context data to be associated with the capture session queue. -extern const char* FLTCaptureSessionQueueSpecific; +extern const char *FLTCaptureSessionQueueSpecific; /// Ensures the given block to be run on the main queue. 
/// If caller site is already on the main queue, the block will be run From 4be16a2b3761ab1806361572f6aa7e17cb99cfb2 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Thu, 19 Dec 2024 09:09:38 +0100 Subject: [PATCH 10/16] Remove OCMock dependency --- .../ios/Runner.xcodeproj/project.pbxproj | 39 +---- .../example/ios/Runner/Info.plist | 6 +- .../RunnerTests/CameraSessionPresetsTests.m | 9 +- .../ios/RunnerTests/CameraSettingsTests.m | 18 +-- .../example/ios/RunnerTests/CameraTestUtils.h | 3 +- .../example/ios/RunnerTests/CameraTestUtils.m | 144 +++++++++--------- .../ios/RunnerTests/FLTCamPhotoCaptureTests.m | 2 +- .../ios/RunnerTests/FLTCamSampleBufferTests.m | 125 ++++++++++----- .../example/ios/RunnerTests/MockAssetWriter.h | 1 + .../example/ios/RunnerTests/MockAssetWriter.m | 7 +- .../ios/RunnerTests/MockCaptureConnection.h | 1 + .../RunnerTests/MockCaptureDeviceController.h | 15 +- .../RunnerTests/MockCaptureDeviceController.m | 21 ++- .../ios/RunnerTests/MockCapturePhotoOutput.h | 4 + .../ios/RunnerTests/MockCapturePhotoOutput.m | 5 + .../RunnerTests/MockCapturePhotoSettings.h | 1 + .../ios/RunnerTests/MockCaptureSession.m | 8 +- .../RunnerTests/TestMediaSettingsAVWrapper.h | 114 ++++++++++++++ .../camera_avfoundation/CameraPlugin.m | 23 ++- .../Sources/camera_avfoundation/FLTCam.m | 66 ++++---- .../FLTCamMediaSettingsAVWrapper.m | 20 +-- .../Protocols/FLTAssetWriter.m | 7 +- .../Protocols/FLTCaptureDeviceControlling.m | 70 +++++++-- .../Protocols/FLTCapturePhotoOutput.m | 17 ++- .../Protocols/FLTCapturePhotoSettings.m | 16 +- .../Protocols/FLTCaptureSessionProtocol.m | 25 +-- .../include/camera_avfoundation/FLTCam.h | 12 +- .../FLTCamMediaSettingsAVWrapper.h | 7 +- .../include/camera_avfoundation/FLTCam_Test.h | 2 +- .../Protocols/FLTAssetWriter.h | 7 +- .../Protocols/FLTCaptureConnection.h | 1 + .../Protocols/FLTCaptureDeviceControlling.h | 24 ++- .../Protocols/FLTCapturePhotoOutput.h | 3 +- .../Protocols/FLTCapturePhotoSettings.h | 1 - .../Protocols/FLTCaptureSessionProtocol.h | 8 +- 35 files changed, 556 insertions(+), 276 deletions(-) create mode 100644 packages/camera/camera_avfoundation/example/ios/RunnerTests/TestMediaSettingsAVWrapper.h diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index c59c7e652ae4..2c369f6a9397 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -12,7 +12,6 @@ 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; 334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */; }; 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; - 408D7A792C3C9CD000B71F9A /* OCMock in Frameworks */ = {isa = PBXBuildFile; productRef = 408D7A782C3C9CD000B71F9A /* OCMock */; }; 43ED1537282570DE00EB00DE /* AvailableCamerasTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 43ED1536282570DE00EB00DE /* AvailableCamerasTest.m */; }; 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; }; 78A318202AECB46A00862997 /* 
FlutterGeneratedPluginSwiftPackage in Frameworks */ = {isa = PBXBuildFile; productRef = 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */; }; @@ -140,7 +139,6 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - 408D7A792C3C9CD000B71F9A /* OCMock in Frameworks */, F392940CDE88632C06D6CB59 /* libPods-RunnerTests.a in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; @@ -299,7 +297,6 @@ ); name = RunnerTests; packageProductDependencies = ( - 408D7A782C3C9CD000B71F9A /* OCMock */, ); productName = camera_exampleTests; productReference = 03BB76682665316900CE5A93 /* RunnerTests.xctest */; @@ -359,10 +356,7 @@ Base, ); mainGroup = 97C146E51CF9000F007C117D; - packageReferences = ( - 781AD8BC2B33823900A9FFBB /* XCLocalSwiftPackageReference "FlutterGeneratedPluginSwiftPackage" */, - 408D7A772C3C9CD000B71F9A /* XCRemoteSwiftPackageReference "ocmock" */, - ); + packageReferences = (); productRefGroup = 97C146EF1CF9000F007C117D /* Products */; projectDirPath = ""; projectRoot = ""; @@ -582,6 +576,7 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO; CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = ""; GCC_C_LANGUAGE_STANDARD = gnu11; INFOPLIST_FILE = RunnerTests/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 12.0; @@ -812,36 +807,6 @@ defaultConfigurationName = Release; }; /* End XCConfigurationList section */ - -/* Begin XCLocalSwiftPackageReference section */ - 781AD8BC2B33823900A9FFBB /* XCLocalSwiftPackageReference "FlutterGeneratedPluginSwiftPackage" */ = { - isa = XCLocalSwiftPackageReference; - relativePath = Flutter/ephemeral/Packages/FlutterGeneratedPluginSwiftPackage; - }; -/* End XCLocalSwiftPackageReference section */ - -/* Begin XCRemoteSwiftPackageReference section */ - 408D7A772C3C9CD000B71F9A /* XCRemoteSwiftPackageReference "ocmock" */ = { - isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/erikdoe/ocmock"; - requirement = { - kind = revision; - revision = fe1661a3efed11831a6452f4b1a0c5e6ddc08c3d; - }; - }; -/* End XCRemoteSwiftPackageReference section */ - -/* Begin XCSwiftPackageProductDependency section */ - 408D7A782C3C9CD000B71F9A /* OCMock */ = { - isa = XCSwiftPackageProductDependency; - package = 408D7A772C3C9CD000B71F9A /* XCRemoteSwiftPackageReference "ocmock" */; - productName = OCMock; - }; - 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */ = { - isa = XCSwiftPackageProductDependency; - productName = FlutterGeneratedPluginSwiftPackage; - }; -/* End XCSwiftPackageProductDependency section */ }; rootObject = 97C146E61CF9000F007C117D /* Project object */; } diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist index adb62fb7803d..b263ffe1e5a9 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist +++ b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist @@ -30,6 +30,8 @@ Can I use the camera please? 
Only for demo purpose of the app NSMicrophoneUsageDescription Only for demo purpose of the app + UIApplicationSupportsIndirectInputEvents + UILaunchStoryboardName LaunchScreen UIMainStoryboardFile @@ -52,9 +54,5 @@ UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight - CADisableMinimumFrameDurationOnPhone - - UIApplicationSupportsIndirectInputEvents - diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m index 936395c7c5f9..766d287dc8eb 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m @@ -22,7 +22,6 @@ @implementation FLTCamSessionPresetsTest - (void)testResolutionPresetWithBestFormat_mustUpdateCaptureSessionPreset { NSString *expectedPreset = AVCaptureSessionPresetInputPriority; XCTestExpectation *presetExpectation = [self expectationWithDescription:@"Expected preset set"]; - XCTestExpectation *formatExpectation = [self expectationWithDescription:@"Expected format set"]; MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; MockCaptureDeviceController *captureDeviceMock = [[MockCaptureDeviceController alloc] init]; @@ -30,12 +29,6 @@ - (void)testResolutionPresetWithBestFormat_mustUpdateCaptureSessionPreset { captureDeviceMock.formats = @[ fakeFormat ]; captureDeviceMock.activeFormat = fakeFormat; - captureDeviceMock.setActiveFormatStub = ^(id format) { - if (format == fakeFormat) { - [formatExpectation fulfill]; - } - }; - videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { if (preset == expectedPreset) { [presetExpectation fulfill]; @@ -44,7 +37,7 @@ - (void)testResolutionPresetWithBestFormat_mustUpdateCaptureSessionPreset { FLTCreateCamWithVideoDimensionsForFormat(videoSessionMock, FCPPlatformResolutionPresetMax, captureDeviceMock, - ^CMVideoDimensions(AVCaptureDeviceFormat *format) { + ^CMVideoDimensions(id format) { CMVideoDimensions videoDimensions; videoDimensions.width = 1; videoDimensions.height = 1; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m index 9d72a24d9f0f..58e8cb33a237 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m @@ -9,9 +9,9 @@ @import XCTest; @import AVFoundation; #import "CameraTestUtils.h" +#import "MockAssetWriter.h" #import "MockCaptureDeviceController.h" #import "MockCaptureSession.h" -#import "MockAssetWriter.h" static const FCPPlatformResolutionPreset gTestResolutionPreset = FCPPlatformResolutionPresetMedium; static const int gTestFramesPerSecond = 15; @@ -95,17 +95,16 @@ - (void)setMaxFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)capture } } -- (AVAssetWriterInput *)assetWriterAudioInputWithOutputSettings: +- (id)assetWriterAudioInputWithOutputSettings: (nullable NSDictionary *)outputSettings { if ([outputSettings[AVEncoderBitRateKey] isEqual:@(gTestAudioBitrate)]) { [_audioSettingsExpectation fulfill]; } - return [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio - outputSettings:outputSettings]; + return [[MockAssetWriterInput alloc] init]; } -- (AVAssetWriterInput *)assetWriterVideoInputWithOutputSettings: +- 
(id)assetWriterVideoInputWithOutputSettings: (nullable NSDictionary *)outputSettings { if ([outputSettings[AVVideoCompressionPropertiesKey] isKindOfClass:[NSMutableDictionary class]]) { NSDictionary *compressionProperties = outputSettings[AVVideoCompressionPropertiesKey]; @@ -117,8 +116,7 @@ - (AVAssetWriterInput *)assetWriterVideoInputWithOutputSettings: } } - return [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo - outputSettings:outputSettings]; + return [[MockAssetWriterInput alloc] init]; } - (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(AVAssetWriter *)writer { @@ -146,7 +144,7 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter { [[TestMediaSettingsAVWrapper alloc] initWithTestCase:self]; FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_create("test", NULL), settings, injectedWrapper, nil, nil, nil); + dispatch_queue_create("test", NULL), settings, injectedWrapper, nil, nil, nil, nil); // Expect FPS configuration is passed to camera device. [self waitForExpectations:@[ @@ -217,9 +215,9 @@ - (void)testSettings_ShouldSelectFormatWhichSupports60FPS { enableAudio:gTestEnableAudio]; FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_create("test", NULL), settings, nil, nil, nil, nil); + dispatch_queue_create("test", NULL), settings, nil, nil, nil, nil, nil); - AVFrameRateRange *range = camera.captureDevice.activeFormat.videoSupportedFrameRateRanges[0]; + id range = camera.captureDevice.activeFormat.videoSupportedFrameRateRanges[0]; XCTAssertLessThanOrEqual(range.minFrameRate, 60); XCTAssertGreaterThanOrEqual(range.maxFrameRate, 60); } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h index b62205fc2cb1..47a94effe9e3 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h @@ -22,7 +22,8 @@ extern FLTCam *_Nullable FLTCreateCamWithCaptureSessionQueueAndMediaSettings( FLTCamMediaSettingsAVWrapper *_Nullable mediaSettingsAVWrapper, CaptureDeviceFactory _Nullable captureDeviceFactory, id _Nullable capturePhotoOutput, - id _Nullable assetWriter); + id _Nullable assetWriter, + id _Nullable pixelBufferAdaptor); extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m index 80e9be78fa9f..1c6f8c3a4ee3 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m @@ -4,13 +4,12 @@ #import "CameraTestUtils.h" -#import @import AVFoundation; @import camera_avfoundation; +#import "MockAssetWriter.h" #import "MockCaptureDeviceController.h" #import "MockCaptureSession.h" -#import "MockAssetWriter.h" static FCPPlatformMediaSettings *FCPGetDefaultMediaSettings( FCPPlatformResolutionPreset resolutionPreset) { @@ -23,14 +22,14 @@ FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue) { return FLTCreateCamWithCaptureSessionQueueAndMediaSettings(captureSessionQueue, nil, nil, nil, - nil, nil); + nil, nil, nil); } FLTCam *FLTCreateCamWithCaptureSessionQueueAndMediaSettings( dispatch_queue_t captureSessionQueue, 
FCPPlatformMediaSettings *mediaSettings, FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper, CaptureDeviceFactory captureDeviceFactory, - id capturePhotoOutput, - id assetWriter) { + id capturePhotoOutput, id assetWriter, + id pixelBufferAdaptor) { if (!mediaSettings) { mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMedium); } @@ -38,11 +37,15 @@ if (!mediaSettingsAVWrapper) { mediaSettingsAVWrapper = [[FLTCamMediaSettingsAVWrapper alloc] init]; } - + if (!assetWriter) { assetWriter = [[MockAssetWriter alloc] init]; } + if (!pixelBufferAdaptor) { + pixelBufferAdaptor = [[MockPixelBufferAdaptor alloc] init]; + } + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; videoSessionMock.mockCanSetSessionPreset = YES; @@ -51,21 +54,17 @@ __block MockCaptureDeviceController *mockDevice = [[MockCaptureDeviceController alloc] init]; - id frameRateRangeMock1 = OCMClassMock([AVFrameRateRange class]); - OCMStub([frameRateRangeMock1 minFrameRate]).andReturn(3); - OCMStub([frameRateRangeMock1 maxFrameRate]).andReturn(30); + MockFrameRateRange *frameRateRange1 = [[MockFrameRateRange alloc] initWithMinFrameRate:3 + maxFrameRate:30]; MockCaptureDeviceFormat *captureDeviceFormatMock1 = [[MockCaptureDeviceFormat alloc] init]; - captureDeviceFormatMock1.videoSupportedFrameRateRanges = @[ frameRateRangeMock1 ]; + captureDeviceFormatMock1.videoSupportedFrameRateRanges = @[ frameRateRange1 ]; - id frameRateRangeMock2 = OCMClassMock([AVFrameRateRange class]); - OCMStub([frameRateRangeMock2 minFrameRate]).andReturn(3); - OCMStub([frameRateRangeMock2 maxFrameRate]).andReturn(60); + MockFrameRateRange *frameRateRange2 = [[MockFrameRateRange alloc] initWithMinFrameRate:3 + maxFrameRate:60]; MockCaptureDeviceFormat *captureDeviceFormatMock2 = [[MockCaptureDeviceFormat alloc] init]; - captureDeviceFormatMock1.videoSupportedFrameRateRanges = @[ frameRateRangeMock2 ]; + captureDeviceFormatMock2.videoSupportedFrameRateRanges = @[ frameRateRange2 ]; - id inputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) - .andReturn(inputMock); + MockCaptureInput *inputMock = [[MockCaptureInput alloc] init]; mockDevice.formats = @[ captureDeviceFormatMock1, captureDeviceFormatMock2 ]; mockDevice.activeFormat = captureDeviceFormatMock1; @@ -77,7 +76,10 @@ videoCaptureSession:videoSessionMock audioCaptureSession:audioSessionMock captureSessionQueue:captureSessionQueue - captureDeviceFactory:captureDeviceFactory ?: ^id(void) { + captureDeviceFactory:captureDeviceFactory ?: ^id(void) { + return mockDevice; + } + audioCaptureDeviceFactory:captureDeviceFactory ?: ^id(void) { return mockDevice; } videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { @@ -87,78 +89,80 @@ assetWriterFactory:^id _Nonnull(NSURL *url, AVFileType fileType, NSError * _Nullable __autoreleasing * _Nullable error) { return assetWriter; } + pixelBufferAdaptorFactory:^id _Nonnull(id _Nonnull writerInput, NSDictionary * _Nullable source) { + return pixelBufferAdaptor; + } error:nil]; - id captureVideoDataOutputMock = [OCMockObject niceMockForClass:[AVCaptureVideoDataOutput class]]; - - OCMStub([captureVideoDataOutputMock new]).andReturn(captureVideoDataOutputMock); - - OCMStub([captureVideoDataOutputMock - recommendedVideoSettingsForAssetWriterWithOutputFileType:AVFileTypeMPEG4]) - .andReturn(@{}); - - OCMStub([captureVideoDataOutputMock sampleBufferCallbackQueue]).andReturn(captureSessionQueue); - - - 
MockPixelBufferAdaptor *videoMock = [[MockPixelBufferAdaptor alloc] init]; - MockAssetWriterInput *writerInputMock = [[MockAssetWriterInput alloc] init]; - return fltCam; } FLTCam *FLTCreateCamWithVideoCaptureSession(id captureSession, FCPPlatformResolutionPreset resolutionPreset) { -// id inputMock = OCMClassMock([AVCaptureDeviceInput class]); -// OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) -// .andReturn(inputMock); - MockCaptureSession *audioSessionMock = [[MockCaptureSession alloc] init]; audioSessionMock.mockCanSetSessionPreset = YES; - + return [[FLTCam alloc] initWithMediaSettings:FCPGetDefaultMediaSettings(resolutionPreset) - mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init] - orientation:UIDeviceOrientationPortrait - videoCaptureSession:captureSession - audioCaptureSession:audioSessionMock - captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) - captureDeviceFactory: ^id(void) { - return [[MockCaptureDeviceController alloc] init]; - } - videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { - return CMVideoFormatDescriptionGetDimensions(format.formatDescription); - } - capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] initWithPhotoOutput:[AVCapturePhotoOutput new]] assetWriterFactory:^id _Nonnull(NSURL *url, AVFileType fileType, NSError * _Nullable __autoreleasing * _Nullable error) { - return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; - } error:nil]; + mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init] + orientation:UIDeviceOrientationPortrait + videoCaptureSession:captureSession + audioCaptureSession:audioSessionMock + captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) + captureDeviceFactory:^id(void) { + return [[MockCaptureDeviceController alloc] init]; + } + audioCaptureDeviceFactory:^id(void) { + return [[MockCaptureDeviceController alloc] init]; + } + videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { + return CMVideoFormatDescriptionGetDimensions(format.formatDescription); + } + capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] + initWithPhotoOutput:[AVCapturePhotoOutput new]] + assetWriterFactory:^id _Nonnull( + NSURL *url, AVFileType fileType, NSError *_Nullable __autoreleasing *_Nullable error) { + return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; + } + pixelBufferAdaptorFactory:^id _Nonnull( + id _Nonnull writerInput, + NSDictionary *_Nullable source) { + return [[MockPixelBufferAdaptor alloc] init]; + } + error:nil]; } FLTCam *FLTCreateCamWithVideoDimensionsForFormat( id captureSession, FCPPlatformResolutionPreset resolutionPreset, id captureDevice, VideoDimensionsForFormat videoDimensionsForFormat) { - // id inputMock = OCMClassMock([AVCaptureDeviceInput class]); - // OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) - // .andReturn(inputMock); - // MockCaptureSession *audioSessionMock = [[MockCaptureSession alloc] init]; audioSessionMock.mockCanSetSessionPreset = YES; return [[FLTCam alloc] initWithMediaSettings:FCPGetDefaultMediaSettings(resolutionPreset) - mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init] - orientation:UIDeviceOrientationPortrait - videoCaptureSession:captureSession - audioCaptureSession:audioSessionMock - captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) - captureDeviceFactory:^id(void) { - return captureDevice; - } - 
videoDimensionsForFormat:videoDimensionsForFormat - capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] - initWithPhotoOutput:[AVCapturePhotoOutput new]] - assetWriterFactory:^id _Nonnull(NSURL *url, AVFileType fileType, NSError * _Nullable __autoreleasing * _Nullable error) { - return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; - } - error:nil]; + mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init] + orientation:UIDeviceOrientationPortrait + videoCaptureSession:captureSession + audioCaptureSession:audioSessionMock + captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) + captureDeviceFactory:^id(void) { + return captureDevice; + } + audioCaptureDeviceFactory:^id(void) { + return [[MockCaptureDeviceController alloc] init]; + } + videoDimensionsForFormat:videoDimensionsForFormat + capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] + initWithPhotoOutput:[AVCapturePhotoOutput new]] + assetWriterFactory:^id _Nonnull( + NSURL *url, AVFileType fileType, NSError *_Nullable __autoreleasing *_Nullable error) { + return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; + } + pixelBufferAdaptorFactory:^id _Nonnull( + id _Nonnull writerInput, + NSDictionary *_Nullable source) { + return [[MockPixelBufferAdaptor alloc] init]; + } + error:nil]; } CMSampleBufferRef FLTCreateTestSampleBuffer(void) { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m index d2c96092c914..e4e99e9f6a63 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m @@ -200,7 +200,7 @@ - (void)testCaptureToFile_handlesTorchMode { ^id(void) { return captureDeviceMock; }, - nil, nil); + nil, nil, nil); // AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; // id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m index 95a7347bce62..c375b0f9413d 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m @@ -12,12 +12,67 @@ #import "MockAssetWriter.h" #import "MockCaptureConnection.h" +@import camera_avfoundation; +@import AVFoundation; + +@interface FakeMediaSettingsAVWrapper : FLTCamMediaSettingsAVWrapper +@property(readonly, nonatomic) MockAssetWriterInput *inputMock; +@end + +@implementation FakeMediaSettingsAVWrapper +- (instancetype)initWithInputMock:(MockAssetWriterInput *)inputMock { + _inputMock = inputMock; + return self; +} + +- (BOOL)lockDevice:(AVCaptureDevice *)captureDevice error:(NSError **)outError { + return YES; +} + +- (void)unlockDevice:(AVCaptureDevice *)captureDevice { +} + +- (void)beginConfigurationForSession:(id)videoCaptureSession { +} + +- (void)commitConfigurationForSession:(id)videoCaptureSession { +} + +- (void)setMinFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { +} + +- (void)setMaxFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { +} + +- (id)assetWriterAudioInputWithOutputSettings: + (nullable NSDictionary *)outputSettings { + return 
_inputMock; +} + +- (id)assetWriterVideoInputWithOutputSettings: + (nullable NSDictionary *)outputSettings { + return _inputMock; +} + +- (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(AVAssetWriter *)writer { +} + +- (NSDictionary *) + recommendedVideoSettingsForAssetWriterWithFileType:(AVFileType)fileType + forOutput:(AVCaptureVideoDataOutput *)output { + return @{}; +} +@end + /// Includes test cases related to sample buffer handling for FLTCam class. @interface FLTCamSampleBufferTests : XCTestCase @property(readonly, nonatomic) dispatch_queue_t captureSessionQueue; @property(readonly, nonatomic) FLTCam *camera; @property(readonly, nonatomic) MockAssetWriter *writerMock; @property(readonly, nonatomic) MockCaptureConnection *connectionMock; +@property(readonly, nonatomic) MockAssetWriterInput *inputMock; +@property(readonly, nonatomic) MockPixelBufferAdaptor *adaptorMock; +@property(readonly, nonatomic) FakeMediaSettingsAVWrapper *mediaSettingsWrapper; @end @implementation FLTCamSampleBufferTests @@ -26,15 +81,18 @@ - (void)setUp { _captureSessionQueue = dispatch_queue_create("testing", NULL); _writerMock = [[MockAssetWriter alloc] init]; _connectionMock = [[MockCaptureConnection alloc] init]; + _inputMock = [[MockAssetWriterInput alloc] init]; + _adaptorMock = [[MockPixelBufferAdaptor alloc] init]; + _mediaSettingsWrapper = [[FakeMediaSettingsAVWrapper alloc] initWithInputMock:_inputMock]; _camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - _captureSessionQueue, - [FCPPlatformMediaSettings makeWithResolutionPreset:FCPPlatformResolutionPresetMedium - framesPerSecond:nil - videoBitrate:nil - audioBitrate:nil - enableAudio:YES], - nil, nil,nil, _writerMock); + _captureSessionQueue, + [FCPPlatformMediaSettings makeWithResolutionPreset:FCPPlatformResolutionPresetMedium + framesPerSecond:nil + videoBitrate:nil + audioBitrate:nil + enableAudio:YES], + _mediaSettingsWrapper, nil, nil, _writerMock, _adaptorMock); } - (void)testSampleBufferCallbackQueueMustBeCaptureSessionQueue { @@ -48,7 +106,7 @@ - (void)testCopyPixelBuffer { // Mimic sample buffer callback when captured a new video sample [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:capturedSampleBuffer - fromConnection:_connectionMock]; + fromConnection:_connectionMock]; CVPixelBufferRef deliveriedPixelBuffer = [_camera copyPixelBuffer]; XCTAssertEqual(deliveriedPixelBuffer, capturedPixelBuffer, @"FLTCam must deliver the latest captured pixel buffer to copyPixelBuffer API."); @@ -69,7 +127,7 @@ - (void)testDidOutputSampleBuffer_mustNotChangeSampleBufferRetainCountAfterPause [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:sampleBuffer - fromConnection:_connectionMock]; + fromConnection:_connectionMock]; XCTAssertEqual(CFGetRetainCount(sampleBuffer), 1, @"didOutputSampleBuffer must not change the sample buffer retain count after " @"pause resume recording."); @@ -81,16 +139,14 @@ - (void)testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples { CMSampleBufferRef audioSample = FLTCreateTestAudioSampleBuffer(); __block NSArray *writtenSamples = @[]; - - MockPixelBufferAdaptor *adaptorMock = [[MockPixelBufferAdaptor alloc] init]; - adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { + + _adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { writtenSamples = [writtenSamples arrayByAddingObject:@"video"]; return YES; }; - - MockAssetWriterInput *inputMock = [[MockAssetWriterInput alloc] init]; - 
inputMock.isReadyForMoreMediaData = YES; - inputMock.appendSampleBufferStub = ^BOOL(CMSampleBufferRef buffer) { + + _inputMock.isReadyForMoreMediaData = YES; + _inputMock.appendSampleBufferStub = ^BOOL(CMSampleBufferRef buffer) { writtenSamples = [writtenSamples arrayByAddingObject:@"audio"]; return YES; }; @@ -119,17 +175,15 @@ - (void)testDidOutputSampleBufferSampleTimesMustBeNumericAfterPauseResume { CMSampleBufferRef audioSample = FLTCreateTestAudioSampleBuffer(); __block BOOL videoAppended = NO; - MockPixelBufferAdaptor *adaptorMock = [[MockPixelBufferAdaptor alloc] init]; - adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { + _adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { XCTAssert(CMTIME_IS_NUMERIC(time)); videoAppended = YES; return YES; }; __block BOOL audioAppended = NO; - MockAssetWriterInput *inputMock = [[MockAssetWriterInput alloc] init]; - inputMock.isReadyForMoreMediaData = YES; - inputMock.appendSampleBufferStub = ^BOOL(CMSampleBufferRef buffer) { + _inputMock.isReadyForMoreMediaData = YES; + _inputMock.appendSampleBufferStub = ^BOOL(CMSampleBufferRef buffer) { CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(buffer); XCTAssert(CMTIME_IS_NUMERIC(sampleTime)); audioAppended = YES; @@ -137,7 +191,8 @@ - (void)testDidOutputSampleBufferSampleTimesMustBeNumericAfterPauseResume { }; [_camera - startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) {} + startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { + } messengerForStreaming:nil]; [_camera pauseVideoRecording]; @@ -161,29 +216,24 @@ - (void)testDidOutputSampleBufferMustNotAppendSampleWhenReadyForMoreMediaDataIsN CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer(); __block BOOL sampleAppended = NO; - MockPixelBufferAdaptor *adaptorMock = [[MockPixelBufferAdaptor alloc] init]; - adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { + _adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { sampleAppended = YES; return YES; }; - __block BOOL readyForMoreMediaData = NO; - MockAssetWriterInput *inputMock = [[MockAssetWriterInput alloc] init]; - inputMock.isReadyForMoreMediaData = readyForMoreMediaData; - [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { } messengerForStreaming:nil]; - readyForMoreMediaData = YES; + _inputMock.isReadyForMoreMediaData = YES; sampleAppended = NO; [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample fromConnection:_connectionMock]; XCTAssertTrue(sampleAppended, @"Sample was not appended."); - readyForMoreMediaData = NO; + _inputMock.isReadyForMoreMediaData = NO; sampleAppended = NO; [_camera captureOutput:_camera.captureVideoOutput didOutputSampleBuffer:videoSample @@ -209,9 +259,10 @@ - (void)testStopVideoRecordingWithCompletionMustCallCompletion { messengerForStreaming:nil]; __block BOOL completionCalled = NO; - [_camera stopVideoRecordingWithCompletion:^(NSString *_Nullable path, FlutterError *_Nullable error) { - completionCalled = YES; - }]; + [_camera + stopVideoRecordingWithCompletion:^(NSString *_Nullable path, FlutterError *_Nullable error) { + completionCalled = YES; + }]; XCTAssert(completionCalled, @"Completion was not called."); } @@ -224,14 +275,12 @@ - (void)testStartWritingShouldNotBeCalledBetweenSampleCreationAndAppending { }; __block BOOL videoAppended = NO; - MockPixelBufferAdaptor *adaptorMock = [[MockPixelBufferAdaptor alloc] init]; - 
adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { + _adaptorMock.appendPixelBufferStub = ^BOOL(CVPixelBufferRef buffer, CMTime time) { videoAppended = YES; return YES; }; - - MockAssetWriterInput *inputMock = [[MockAssetWriterInput alloc] init]; - inputMock.isReadyForMoreMediaData = YES; + + _inputMock.isReadyForMoreMediaData = YES; [_camera startVideoRecordingWithCompletion:^(FlutterError *_Nullable error) { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.h index 1184ad805583..1f9e8478a966 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.h @@ -14,6 +14,7 @@ @end @interface MockAssetWriterInput : NSObject +@property(nonatomic, strong) AVAssetWriterInput *input; @property(nonatomic, assign) BOOL isReadyForMoreMediaData; @property(nonatomic, assign) BOOL expectsMediaDataInRealTime; @property(nonatomic, copy) BOOL (^appendSampleBufferStub)(CMSampleBufferRef); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.m index c12b8631b920..0d537c00f57a 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.m @@ -24,11 +24,9 @@ - (void)finishWritingWithCompletionHandler:(void (^)(void))handler { - (void)startSessionAtSourceTime:(CMTime)startTime { } -- (void)addInput:(nonnull AVAssetWriterInput *)input { - +- (void)addInput:(nonnull AVAssetWriterInput *)input { } - @end @implementation MockAssetWriterInput @@ -41,7 +39,8 @@ - (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer { @end @implementation MockPixelBufferAdaptor -- (BOOL)appendPixelBuffer:(nonnull CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime { +- (BOOL)appendPixelBuffer:(nonnull CVPixelBufferRef)pixelBuffer + withPresentationTime:(CMTime)presentationTime { if (self.appendPixelBufferStub) { return self.appendPixelBufferStub(pixelBuffer, presentationTime); } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.h index 7349093533da..2791cc2db1a9 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.h @@ -8,6 +8,7 @@ NS_ASSUME_NONNULL_BEGIN @interface MockCaptureConnection : NSObject +@property(nonatomic, strong) AVCaptureConnection *connection; @property(nonatomic, assign) BOOL videoMirrored; @property(nonatomic, assign) AVCaptureVideoOrientation videoOrientation; @property(nonatomic, strong) NSArray *inputPorts; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h index 9d6ada2acecb..8e196bb2501c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h @@ -69,16 +69,27 @@ NS_ASSUME_NONNULL_BEGIN @property(nonatomic, copy) void (^setActiveVideoMaxFrameDurationStub)(CMTime 
duration); // Input Creation -@property(nonatomic, strong) AVCaptureInput *inputToReturn; +@property(nonatomic, strong) id inputToReturn; @property(nonatomic, copy) void (^createInputStub)(NSError **error); @end @interface MockCaptureDeviceFormat : NSObject -@property(nonatomic, strong) NSArray *videoSupportedFrameRateRanges; +@property(nonatomic, strong) NSArray> *videoSupportedFrameRateRanges; @property(nonatomic, assign) CMFormatDescriptionRef formatDescription; +@property(nonatomic, strong) AVCaptureDeviceFormat *format; - (instancetype)initWithDimensions:(CMVideoDimensions)dimensions; @end +@interface MockFrameRateRange : NSObject +- (instancetype)initWithMinFrameRate:(float)minFrameRate maxFrameRate:(float)maxFrameRate; +@property(nonatomic, readwrite) float minFrameRate; +@property(nonatomic, readwrite) float maxFrameRate; +@end + +@interface MockCaptureInput : NSObject +@property(nonatomic, strong) NSArray *ports; +@end + NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m index ca855a2c9e68..92a6036b15eb 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m @@ -114,7 +114,7 @@ - (BOOL)isExposureModeSupported:(AVCaptureExposureMode)mode { return self.exposureModeSupported; } -- (AVCaptureInput *)createInput:(NSError *_Nullable *_Nullable)error { +- (id)createInput:(NSError *_Nullable *_Nullable)error { if (self.createInputStub) { self.createInputStub(error); } @@ -124,6 +124,7 @@ - (AVCaptureInput *)createInput:(NSError *_Nullable *_Nullable)error { @end @implementation MockCaptureDeviceFormat + - (void)dealloc { if (_formatDescription) { CFRelease(_formatDescription); @@ -139,6 +140,22 @@ - (instancetype)initWithDimensions:(CMVideoDimensions)dimensions { return self; } -@synthesize format; +@end + +@implementation MockFrameRateRange + +- (instancetype)initWithMinFrameRate:(float)minFrameRate maxFrameRate:(float)maxFrameRate { + self = [super init]; + if (self) { + _minFrameRate = minFrameRate; + _maxFrameRate = maxFrameRate; + } + return self; +} + +@end +@implementation MockCaptureInput +@synthesize ports; +@synthesize input; @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.h index fda1904097d4..1f886bb3a489 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.h @@ -9,4 +9,8 @@ @property(nonatomic, copy) void (^capturePhotoWithSettingsStub) (id, id); @property(nonatomic, strong) NSArray *availablePhotoCodecTypes; +@property(nonatomic, strong) AVCapturePhotoOutput *photoOutput; +@property(nonatomic, assign, getter=isHighResolutionCaptureEnabled) + BOOL highResolutionCaptureEnabled; +@property(nonatomic, strong) NSArray *supportedFlashModes; @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.m index 617e262a1688..2a1721bee0a8 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.m +++ 
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.m @@ -11,4 +11,9 @@ - (void)capturePhotoWithSettings:(id)settings self.capturePhotoWithSettingsStub(settings, delegate); } } + +- (nullable AVCaptureConnection *)connectionWithMediaType:(nonnull AVMediaType)mediaType { + return nil; +} + @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.h index 5a13da24edb2..522ec63bfa0c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.h @@ -6,6 +6,7 @@ @import AVFoundation; @interface MockCapturePhotoSettings : NSObject +@property(nonatomic, strong) AVCapturePhotoSettings *settings; @property(nonatomic, assign) int64_t uniqueID; @property(nonatomic, copy) NSDictionary *format; @property(nonatomic, assign) AVCaptureFlashMode flashMode; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m index 9adb54e90d6c..0653dd17a118 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m @@ -46,10 +46,10 @@ - (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset { - (void)addConnection:(nonnull AVCaptureConnection *)connection { } -- (void)addInput:(nonnull AVCaptureInput *)input { +- (void)addInput:(nonnull id)input { } -- (void)addInputWithNoConnections:(nonnull AVCaptureInput *)input { +- (void)addInputWithNoConnections:(nonnull id)input { } - (void)addOutput:(nonnull AVCaptureOutput *)output { @@ -62,7 +62,7 @@ - (BOOL)canAddConnection:(nonnull AVCaptureConnection *)connection { return YES; } -- (BOOL)canAddInput:(nonnull AVCaptureInput *)input { +- (BOOL)canAddInput:(nonnull id)input { return YES; } @@ -70,7 +70,7 @@ - (BOOL)canAddOutput:(nonnull AVCaptureOutput *)output { return YES; } -- (void)removeInput:(nonnull AVCaptureInput *)input { +- (void)removeInput:(nonnull id)input { } - (void)removeOutput:(nonnull AVCaptureOutput *)output { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/TestMediaSettingsAVWrapper.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/TestMediaSettingsAVWrapper.h new file mode 100644 index 000000000000..983e00c036af --- /dev/null +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/TestMediaSettingsAVWrapper.h @@ -0,0 +1,114 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import camera_avfoundation; +@import AVFoundation; + +/** + * A test implementation of `FLTCamMediaSettingsAVWrapper`. + * + * This xctest-expectation-checking implementation of `FLTCamMediaSettingsAVWrapper` is injected + * into the `camera_avfoundation` plugin in place of the real AVFoundation-based implementation. + * This kind of dependency injection (DI) allows media-settings tests to run without + * any additional mocking of AVFoundation classes.
+ */ +@interface TestMediaSettingsAVWrapper : FLTCamMediaSettingsAVWrapper +@property(nonatomic, readonly) XCTestExpectation *lockExpectation; +@property(nonatomic, readonly) XCTestExpectation *unlockExpectation; +@property(nonatomic, readonly) XCTestExpectation *minFrameDurationExpectation; +@property(nonatomic, readonly) XCTestExpectation *maxFrameDurationExpectation; +@property(nonatomic, readonly) XCTestExpectation *beginConfigurationExpectation; +@property(nonatomic, readonly) XCTestExpectation *commitConfigurationExpectation; +@property(nonatomic, readonly) XCTestExpectation *audioSettingsExpectation; +@property(nonatomic, readonly) XCTestExpectation *videoSettingsExpectation; +@end + +@implementation TestMediaSettingsAVWrapper + +- (instancetype)initWithTestCase:(XCTestCase *)test { + _lockExpectation = [test expectationWithDescription:@"lockExpectation"]; + _unlockExpectation = [test expectationWithDescription:@"unlockExpectation"]; + _minFrameDurationExpectation = [test expectationWithDescription:@"minFrameDurationExpectation"]; + _maxFrameDurationExpectation = [test expectationWithDescription:@"maxFrameDurationExpectation"]; + _beginConfigurationExpectation = + [test expectationWithDescription:@"beginConfigurationExpectation"]; + _commitConfigurationExpectation = + [test expectationWithDescription:@"commitConfigurationExpectation"]; + _audioSettingsExpectation = [test expectationWithDescription:@"audioSettingsExpectation"]; + _videoSettingsExpectation = [test expectationWithDescription:@"videoSettingsExpectation"]; + + return self; +} + +- (BOOL)lockDevice:(AVCaptureDevice *)captureDevice error:(NSError **)outError { + [_lockExpectation fulfill]; + return YES; +} + +- (void)unlockDevice:(AVCaptureDevice *)captureDevice { + [_unlockExpectation fulfill]; +} + +- (void)beginConfigurationForSession:(id)videoCaptureSession { + [_beginConfigurationExpectation fulfill]; +} + +- (void)commitConfigurationForSession:(id)videoCaptureSession { + [_commitConfigurationExpectation fulfill]; +} + +- (void)setMinFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { + // FLTCam allows to set frame rate with 1/10 precision. + CMTime expectedDuration = CMTimeMake(10, gTestFramesPerSecond * 10); + + if (duration.value == expectedDuration.value && + duration.timescale == expectedDuration.timescale) { + [_minFrameDurationExpectation fulfill]; + } +} + +- (void)setMaxFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { + // FLTCam allows to set frame rate with 1/10 precision. 
+ CMTime expectedDuration = CMTimeMake(10, gTestFramesPerSecond * 10); + + if (duration.value == expectedDuration.value && + duration.timescale == expectedDuration.timescale) { + [_maxFrameDurationExpectation fulfill]; + } +} + +- (id)assetWriterAudioInputWithOutputSettings: + (nullable NSDictionary *)outputSettings { + if ([outputSettings[AVEncoderBitRateKey] isEqual:@(gTestAudioBitrate)]) { + [_audioSettingsExpectation fulfill]; + } + + return [[MockAssetWriterInput alloc] init]; +} + +- (id)assetWriterVideoInputWithOutputSettings: + (nullable NSDictionary *)outputSettings { + if ([outputSettings[AVVideoCompressionPropertiesKey] isKindOfClass:[NSMutableDictionary class]]) { + NSDictionary *compressionProperties = outputSettings[AVVideoCompressionPropertiesKey]; + + if ([compressionProperties[AVVideoAverageBitRateKey] isEqual:@(gTestVideoBitrate)] && + [compressionProperties[AVVideoExpectedSourceFrameRateKey] + isEqual:@(gTestFramesPerSecond)]) { + [_videoSettingsExpectation fulfill]; + } + } + + return [[MockAssetWriterInput alloc] init]; +} + +- (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(AVAssetWriter *)writer { +} + +- (NSDictionary *) + recommendedVideoSettingsForAssetWriterWithFileType:(AVFileType)fileType + forOutput:(AVCaptureVideoDataOutput *)output { + return @{}; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m index 0d94c92f3814..4f4afa9bd123 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m @@ -48,7 +48,8 @@ - (instancetype)initWithRegistry:(NSObject *)registry globalAPI:[[FCPCameraGlobalEventApi alloc] initWithBinaryMessenger:messenger] deviceDiscovery:[[FLTDefaultCameraDeviceDiscovery alloc] init] sessionFactory:^id(void) { - return [[FLTDefaultCaptureSession alloc] init]; + return [[FLTDefaultCaptureSession alloc] + initWithCaptureSession:[[AVCaptureSession alloc] init]]; } deviceFactory:^id(NSString *name) { AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:name]; @@ -510,14 +511,28 @@ - (void)sessionQueueCreateCameraWithName:(NSString *)name captureDeviceFactory:^id _Nonnull { return weakSelf.captureDeviceFactory(name); } + audioCaptureDeviceFactory:^id _Nonnull { + return [[FLTDefaultCaptureDeviceController alloc] + initWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]]; + } videoDimensionsForFormat:^CMVideoDimensions(id _Nonnull format) { - return CMVideoFormatDescriptionGetDimensions(format.format.formatDescription); + return CMVideoFormatDescriptionGetDimensions(format.formatDescription); } capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] initWithPhotoOutput:[AVCapturePhotoOutput new]] - assetWriterFactory:^id _Nonnull(NSURL * _Nonnull url, AVFileType _Nonnull fileType, NSError * _Nullable __autoreleasing * _Nullable error) { + assetWriterFactory:^id _Nonnull( + NSURL *_Nonnull url, AVFileType _Nonnull fileType, + NSError *_Nullable __autoreleasing *_Nullable error) { return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; - } + } + pixelBufferAdaptorFactory:^id _Nonnull( + id _Nonnull assetWriterInput, + NSDictionary *_Nullable sourcePixelBufferAttributes) { + return [[FLTDefaultPixelBufferAdaptor alloc] + 
initWithAdaptor:[[AVAssetWriterInputPixelBufferAdaptor alloc] + initWithAssetWriterInput:assetWriterInput.input + sourcePixelBufferAttributes:sourcePixelBufferAttributes]]; + } error:&error]; if (error) { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 0ac9389b9e4e..6c7973d9c12c 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -11,12 +11,12 @@ #import "./include/camera_avfoundation/FLTSavePhotoDelegate.h" #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureConnection.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" #import "./include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" #import "./include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h" #import "./include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h" -#import "./include/camera_avfoundation/Protocols/FLTCaptureConnection.h" #import "./include/camera_avfoundation/QueueUtils.h" #import "./include/camera_avfoundation/messages.g.h" @@ -63,15 +63,15 @@ @interface FLTCam () videoCaptureSession; @property(readonly, nonatomic) id audioCaptureSession; -@property(readonly, nonatomic) AVCaptureInput *captureVideoInput; +@property(readonly, nonatomic) id captureVideoInput; /// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback. /// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API. @property(readwrite, nonatomic) CVPixelBufferRef latestPixelBuffer; @property(readonly, nonatomic) CGSize captureSize; @property(strong, nonatomic) id videoWriter; -@property(strong, nonatomic) AVAssetWriterInput *videoWriterInput; -@property(strong, nonatomic) AVAssetWriterInput *audioWriterInput; -@property(strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferAdaptor; +@property(strong, nonatomic) id videoWriterInput; +@property(strong, nonatomic) id audioWriterInput; +@property(strong, nonatomic) id assetWriterPixelBufferAdaptor; @property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput; @property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput; @property(strong, nonatomic) NSString *videoRecordingPath; @@ -94,7 +94,7 @@ @interface FLTCam () videoAdaptor; /// All FLTCam's state access and capture session related operations should be on run on this queue. @property(strong, nonatomic) dispatch_queue_t captureSessionQueue; /// The queue on which `latestPixelBuffer` property is accessed. @@ -109,7 +109,9 @@ @interface FLTCam () deviceOrientationProvider; /// Reports the given error message to the Dart side of the plugin. 
/// @@ -135,9 +137,9 @@ @implementation FLTCam // mediaSettings:mediaSettings // mediaSettingsAVWrapper:mediaSettingsAVWrapper // orientation:orientation -// videoCaptureSession:[[FLTDefaultCaptureSession alloc] initWithCaptureSession:videoSession] -// audioCaptureSession:[[FLTDefaultCaptureSession alloc] initWithCaptureSession:audioSession] -// captureSessionQueue:captureSessionQueue +// videoCaptureSession:[[FLTDefaultCaptureSession alloc] +// initWithCaptureSession:videoSession] audioCaptureSession:[[FLTDefaultCaptureSession +// alloc] initWithCaptureSession:audioSession] captureSessionQueue:captureSessionQueue // error:error]; //} @@ -164,9 +166,10 @@ @implementation FLTCam // } // capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] // initWithPhotoOutput:[AVCapturePhotoOutput new]] -// assetWriterFactory:^id _Nonnull(NSURL * _Nonnull url, AVFileType _Nonnull fileType, NSError * _Nullable __autoreleasing * _Nullable error) { +// assetWriterFactory:^id _Nonnull(NSURL * _Nonnull url, AVFileType +// _Nonnull fileType, NSError * _Nullable __autoreleasing * _Nullable error) { // return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; -// +// // } error:error // ]; //} @@ -175,7 +178,7 @@ @implementation FLTCam static double bestFrameRateForFormat(id format, double targetFrameRate) { double bestFrameRate = 0; double minDistance = DBL_MAX; - for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) { + for (id range in format.videoSupportedFrameRateRanges) { double frameRate = MIN(MAX(targetFrameRate, range.minFrameRate), range.maxFrameRate); double distance = fabs(frameRate - targetFrameRate); if (distance < minDistance) { @@ -231,9 +234,11 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings audioCaptureSession:(id)audioCaptureSession captureSessionQueue:(dispatch_queue_t)captureSessionQueue captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory + audioCaptureDeviceFactory:(CaptureDeviceFactory)audioCaptureDeviceFactory videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat capturePhotoOutput:(id)capturePhotoOutput assetWriterFactory:(AssetWriterFactory)assetWriterFactory + pixelBufferAdaptorFactory:(PixelBufferAdaptorFactory)pixelBufferAdaptorFactory error:(NSError **)error { self = [super init]; NSAssert(self, @"super init cannot be nil"); @@ -248,6 +253,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings _audioCaptureSession = audioCaptureSession; _captureDeviceFactory = captureDeviceFactory; _captureDevice = captureDeviceFactory(); + _audioCaptureDeviceFactory = audioCaptureDeviceFactory; _videoDimensionsForFormat = videoDimensionsForFormat; _flashMode = _captureDevice.hasFlash ? FCPPlatformFlashModeAuto : FCPPlatformFlashModeOff; _exposureMode = FCPPlatformExposureModeAuto; @@ -258,6 +264,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings _inProgressSavePhotoDelegates = [NSMutableDictionary dictionary]; _fileFormat = FCPPlatformImageFileFormatJpeg; _assetWriterFactory = assetWriterFactory; + _pixelBufferAdaptorFactory = pixelBufferAdaptorFactory; // To limit memory consumption, limit the number of frames pending processing. // After some testing, 4 was determined to be the best maximum value. 
@@ -273,13 +280,13 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings return nil; } - [_videoCaptureSession addInputWithNoConnections:_captureVideoInput]; + [_videoCaptureSession addInputWithNoConnections:_captureVideoInput.input]; [_videoCaptureSession addOutputWithNoConnections:_captureVideoOutput]; [_videoCaptureSession addConnection:connection]; _capturePhotoOutput = capturePhotoOutput; [_capturePhotoOutput setHighResolutionCaptureEnabled:YES]; - [_videoCaptureSession addOutput:_capturePhotoOutput]; + [_videoCaptureSession addOutput:_capturePhotoOutput.photoOutput]; _motionManager = [[CMMotionManager alloc] init]; [_motionManager startAccelerometerUpdates]; @@ -417,7 +424,7 @@ - (void)updateOrientation { ? _lockedCaptureOrientation : _deviceOrientation; - [self updateOrientation:orientation forCaptureOutput:_capturePhotoOutput]; + [self updateOrientation:orientation forCaptureOutput:_capturePhotoOutput.photoOutput]; [self updateOrientation:orientation forCaptureOutput:_captureVideoOutput]; } @@ -552,8 +559,7 @@ - (BOOL)setCaptureSessionPreset:(FCPPlatformResolutionPreset)resolutionPreset if (bestFormat) { _videoCaptureSession.sessionPreset = AVCaptureSessionPresetInputPriority; if ([_captureDevice lockForConfiguration:NULL]) { - // Set the best device format found and finish the device configuration. - _captureDevice.activeFormat = bestFormat; + // Set the best device format found and finish the device configuration. + _captureDevice.activeFormat = bestFormat; [_captureDevice unlockForConfiguration]; break; } @@ -776,7 +782,7 @@ - (void)captureOutput:(AVCaptureOutput *)output CMTime nextSampleTime = CMTimeSubtract(_lastVideoSampleTime, _videoTimeOffset); // do not append sample buffer when readyForMoreMediaData is NO to avoid crash // https://github.com/flutter/flutter/issues/132073 - if (_videoWriterInput.readyForMoreMediaData) { + if (_videoWriterInput.isReadyForMoreMediaData) { [_videoAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextSampleTime]; } } else { @@ -835,7 +841,7 @@ - (void)newVideoSample:(CMSampleBufferRef)sampleBuffer { } return; } - if (_videoWriterInput.readyForMoreMediaData) { + if (_videoWriterInput.isReadyForMoreMediaData) { if (![_videoWriterInput appendSampleBuffer:sampleBuffer]) { [self reportErrorMessage:@"Unable to write to video input"]; } @@ -849,7 +855,7 @@ - (void)newAudioSample:(CMSampleBufferRef)sampleBuffer { } return; } - if (_audioWriterInput.readyForMoreMediaData) { + if (_audioWriterInput.isReadyForMoreMediaData) { if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) { [self reportErrorMessage:@"Unable to write to audio input"]; } @@ -1315,8 +1321,8 @@ - (BOOL)setupWriterForPath:(NSString *)path { [self setUpCaptureSessionForAudio]; } - _videoWriter =_assetWriterFactory(outputURL, AVFileTypeMPEG4, &error); - + _videoWriter = _assetWriterFactory(outputURL, AVFileTypeMPEG4, &error); + NSParameterAssert(_videoWriter); if (error) { [self reportErrorMessage:error.description]; @@ -1344,11 +1350,8 @@ - (BOOL)setupWriterForPath:(NSString *)path { _videoWriterInput = [_mediaSettingsAVWrapper assetWriterVideoInputWithOutputSettings:videoSettings]; - _videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor - assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput - sourcePixelBufferAttributes:@{ - (NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat) - }]; + _videoAdaptor = _pixelBufferAdaptorFactory( + _videoWriterInput, @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)}); NSParameterAssert(_videoWriterInput); @@ -1401,17 +1404,16 @@ -
(void)setUpCaptureSessionForAudio { NSError *error = nil; // Create a device input with the device and add it to the session. // Setup the audio input. - AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; - AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice - error:&error]; + id audioDevice = _audioCaptureDeviceFactory(); + id audioInput = [audioDevice createInput:&error]; if (error) { [self reportErrorMessage:error.description]; } // Setup the audio output. _audioOutput = [[AVCaptureAudioDataOutput alloc] init]; - if ([_audioCaptureSession canAddInput:audioInput]) { - [_audioCaptureSession addInput:audioInput]; + if ([_audioCaptureSession canAddInput:audioInput.input]) { + [_audioCaptureSession addInput:audioInput.input]; if ([_audioCaptureSession canAddOutput:_audioOutput]) { [_audioCaptureSession addOutput:_audioOutput]; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m index c4f91387b4a1..4d19027b9db9 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m @@ -3,9 +3,9 @@ // found in the LICENSE file. #import "./include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h" +#import "./include/camera_avfoundation/Protocols/FLTAssetWriter.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" -#import "./include/camera_avfoundation/Protocols/FLTAssetWriter.h" @implementation FLTCamMediaSettingsAVWrapper @@ -36,20 +36,22 @@ - (void)setMaxFrameDuration:(CMTime)duration captureDevice.activeVideoMaxFrameDuration = duration; } -- (AVAssetWriterInput *)assetWriterAudioInputWithOutputSettings: +- (id)assetWriterAudioInputWithOutputSettings: (nullable NSDictionary *)outputSettings { - return [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio - outputSettings:outputSettings]; + return [[FLTDefaultAssetWriterInput alloc] + initWithInput:[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio + outputSettings:outputSettings]]; } -- (AVAssetWriterInput *)assetWriterVideoInputWithOutputSettings: +- (id)assetWriterVideoInputWithOutputSettings: (nullable NSDictionary *)outputSettings { - return [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo - outputSettings:outputSettings]; + return [[FLTDefaultAssetWriterInput alloc] + initWithInput:[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo + outputSettings:outputSettings]]; } -- (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(id)writer { - [writer addInput:writerInput]; +- (void)addInput:(id)writerInput toAssetWriter:(id)writer { + [writer addInput:writerInput.input]; } - (nullable NSDictionary *) diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTAssetWriter.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTAssetWriter.m index fbbbdc84953c..cf16f3f5e71d 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTAssetWriter.m +++ 
b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTAssetWriter.m @@ -66,6 +66,10 @@ - (BOOL)expectsMediaDataInRealTime { return [self.input expectsMediaDataInRealTime]; } +- (void)setExpectsMediaDataInRealTime:(BOOL)expectsMediaDataInRealTime { + self.input.expectsMediaDataInRealTime = expectsMediaDataInRealTime; +} + - (BOOL)isReadyForMoreMediaData { return [self.input isReadyForMoreMediaData]; } @@ -86,7 +90,8 @@ - (nonnull instancetype)initWithAdaptor:(nonnull AVAssetWriterInputPixelBufferAd return self; } -- (BOOL)appendPixelBuffer:(nonnull CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime { +- (BOOL)appendPixelBuffer:(nonnull CVPixelBufferRef)pixelBuffer + withPresentationTime:(CMTime)presentationTime { return [_adaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime]; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m index 548a0d93a8d6..0b932c9fd4a7 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.m @@ -170,20 +170,23 @@ - (void)setActiveVideoMaxFrameDuration:(CMTime)duration { self.device.activeVideoMaxFrameDuration = duration; } -- (AVCaptureInput *)createInput:(NSError *_Nullable *_Nullable)error { - return [AVCaptureDeviceInput deviceInputWithDevice:_device error:error]; +- (id)createInput:(NSError *_Nullable *_Nullable)error { + return [[FLTDefaultCaptureInput alloc] + initWithInput:[AVCaptureDeviceInput deviceInputWithDevice:_device error:error]]; } @end -@implementation FLTDefaultCaptureDeviceFormat { - id _format; -} +@interface FLTDefaultCaptureDeviceFormat () +@property(nonatomic, strong) AVCaptureDeviceFormat *format; +@end -- (instancetype)initWithFormat:(id)format { +@implementation FLTDefaultCaptureDeviceFormat + +- (instancetype)initWithFormat:(AVCaptureDeviceFormat *)format { self = [super init]; if (self) { - format = format; + _format = format; } return self; } @@ -192,10 +195,57 @@ - (CMFormatDescriptionRef)formatDescription { return _format.formatDescription; } -- (NSArray *)videoSupportedFrameRateRanges { - return _format.videoSupportedFrameRateRanges; +- (NSArray> *)videoSupportedFrameRateRanges { + NSMutableArray> *ranges = [NSMutableArray array]; + for (AVFrameRateRange *range in _format.videoSupportedFrameRateRanges) { + FLTDefaultFrameRateRange *wrapper = [[FLTDefaultFrameRateRange alloc] initWithRange:range]; + [ranges addObject:wrapper]; + } + return ranges; +} + +@end + +@interface FLTDefaultFrameRateRange () +@property(nonatomic, strong) AVFrameRateRange *range; +@end + +@implementation FLTDefaultFrameRateRange + +- (instancetype)initWithRange:(AVFrameRateRange *)range { + self = [super init]; + if (self) { + _range = range; + } + return self; } -@synthesize format; +- (float)minFrameRate { + return self.range.minFrameRate; +} + +- (float)maxFrameRate { + return self.range.maxFrameRate; +} + +@end + +@interface FLTDefaultCaptureInput () +@property(nonatomic, strong) AVCaptureInput *input; +@end + +@implementation FLTDefaultCaptureInput + +- (instancetype)initWithInput:(AVCaptureInput *)input { + self = [super init]; + if 
(self) { + _input = input; + } + return self; +} + +- (NSArray *)ports { + return self.input.ports; +} @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoOutput.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoOutput.m index c9734e3c18cd..b26e9d4c9515 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoOutput.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoOutput.m @@ -38,13 +38,26 @@ - (void)capturePhotoWithSettings:(id)settings [_photoOutput capturePhotoWithSettings:settings.settings delegate:delegate]; } -- (nullable AVCaptureConnection *)connectionWithMediaType:(nonnull AVMediaType)mediaType { +- (nullable AVCaptureConnection *)connectionWithMediaType:(nonnull AVMediaType)mediaType { return [_photoOutput connectionWithMediaType:mediaType]; } - - (NSArray *)supportedFlashModes { return _photoOutput.supportedFlashModes; } +- (void)forwardInvocation:(NSInvocation *)invocation { + NSLog(@"Selector being called: %@", NSStringFromSelector([invocation selector])); + if ([_photoOutput respondsToSelector:[invocation selector]]) { + [invocation invokeWithTarget:_photoOutput]; + } else { + [super forwardInvocation:invocation]; + } +} + +- (BOOL)respondsToSelector:(SEL)aSelector { + NSLog(@"Checking selector: %@", NSStringFromSelector(aSelector)); + return [super respondsToSelector:aSelector] || [_photoOutput respondsToSelector:aSelector]; +} + @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m index 11fadd94f952..9ca4d1350c41 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m @@ -4,31 +4,33 @@ #import "../include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h" +@interface FLTDefaultCapturePhotoSettings () +@property(nonatomic, strong) AVCapturePhotoSettings *settings; +@end + @implementation FLTDefaultCapturePhotoSettings - (instancetype)initWithSettings:(AVCapturePhotoSettings *)settings { self = [super init]; if (self) { - settings = settings; + _settings = settings; } return self; } - (int64_t)uniqueID { - return settings.uniqueID; + return _settings.uniqueID; } - (NSDictionary *)format { - return settings.format; + return _settings.format; } - (void)setFlashMode:(AVCaptureFlashMode)flashMode { - [settings setFlashMode:flashMode]; + [_settings setFlashMode:flashMode]; } - (void)setHighResolutionPhotoEnabled:(BOOL)enabled { - [settings setHighResolutionPhotoEnabled:enabled]; + [_settings setHighResolutionPhotoEnabled:enabled]; } -@synthesize settings; - @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m index 101330f260de..260bcd574165 100644 --- 
a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m @@ -3,6 +3,7 @@ // found in the LICENSE file. #import "../include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" +#import "../include/camera_avfoundation/Protocols/FLTCaptureConnection.h" @interface FLTDefaultCaptureSession () @property(nonatomic, strong) AVCaptureSession *captureSession; @@ -38,24 +39,24 @@ - (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset { return [_captureSession canSetSessionPreset:preset]; } -- (void)addInputWithNoConnections:(AVCaptureInput *)input { - [_captureSession addInputWithNoConnections:input]; +- (void)addInputWithNoConnections:(id)input { + [_captureSession addInputWithNoConnections:input.input]; } - (void)addOutputWithNoConnections:(AVCaptureOutput *)output { [_captureSession addOutputWithNoConnections:output]; } -- (void)addConnection:(AVCaptureConnection *)connection { - [_captureSession addConnection:connection]; +- (void)addConnection:(id)connection { + [_captureSession addConnection:connection.connection]; } - (void)addOutput:(AVCaptureOutput *)output { [_captureSession addOutput:output]; } -- (void)removeInput:(AVCaptureInput *)input { - [_captureSession removeInput:input]; +- (void)removeInput:(id)input { + [_captureSession removeInput:input.input]; } - (void)removeOutput:(AVCaptureOutput *)output { @@ -78,20 +79,20 @@ - (AVCaptureSessionPreset)sessionPreset { return _captureSession.outputs; } -- (BOOL)canAddInput:(AVCaptureInput *)input { - return [_captureSession canAddInput:input]; +- (BOOL)canAddInput:(id)input { + return [_captureSession canAddInput:input.input]; } - (BOOL)canAddOutput:(AVCaptureOutput *)output { return [_captureSession canAddOutput:output]; } -- (BOOL)canAddConnection:(AVCaptureConnection *)connection { - return [_captureSession canAddConnection:connection]; +- (BOOL)canAddConnection:(id)connection { + return [_captureSession canAddConnection:connection.connection]; } -- (void)addInput:(AVCaptureInput *)input { - [_captureSession addInput:input]; +- (void)addInput:(id)input { + [_captureSession addInput:input.input]; } @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h index 04008d9e2134..f30e614db3ee 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h @@ -7,10 +7,10 @@ @import Flutter; #import "CameraProperties.h" +#import "FLTAssetWriter.h" #import "FLTCamMediaSettingsAVWrapper.h" #import "FLTCaptureDeviceControlling.h" #import "FLTCapturePhotoOutput.h" -#import "FLTAssetWriter.h" #import "messages.g.h" NS_ASSUME_NONNULL_BEGIN @@ -19,7 +19,13 @@ NS_ASSUME_NONNULL_BEGIN /// Used in tests to inject a device into FLTCam. 
typedef id _Nonnull (^CaptureDeviceFactory)(void); -typedef id _Nonnull (^AssetWriterFactory)(NSURL*, AVFileType, NSError * _Nullable * _Nullable); +typedef id _Nonnull (^AudioCaptureDeviceFactory)(void); + +typedef id _Nonnull (^AssetWriterFactory)(NSURL *, AVFileType, + NSError *_Nullable *_Nullable); + +typedef id _Nonnull (^PixelBufferAdaptorFactory)( + id, NSDictionary *_Nullable); /// Determines the video dimensions (width and height) for a given capture device format. /// Used in tests to mock CMVideoFormatDescriptionGetDimensions. @@ -80,9 +86,11 @@ typedef CMVideoDimensions (^VideoDimensionsForFormat)(id audioCaptureSession:(id)audioCaptureSession captureSessionQueue:(dispatch_queue_t)captureSessionQueue captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory + audioCaptureDeviceFactory:(CaptureDeviceFactory)audioCaptureDeviceFactory videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat capturePhotoOutput:(id)capturePhotoOutput assetWriterFactory:(AssetWriterFactory)assetWriterFactory + pixelBufferAdaptorFactory:(PixelBufferAdaptorFactory)pixelBufferAdaptorFactory error:(NSError **)error; /// Informs the Dart side of the plugin of the current camera state and capabilities. diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h index 844143335e98..d926d0cc97ab 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h @@ -5,6 +5,7 @@ @import AVFoundation; @import Foundation; +#import "FLTAssetWriter.h" #import "FLTCaptureDeviceControlling.h" #import "FLTCaptureSessionProtocol.h" @@ -81,7 +82,7 @@ NS_ASSUME_NONNULL_BEGIN * @param outputSettings The settings used for encoding the audio appended to the output. * @result An instance of `AVAssetWriterInput`. */ -- (AVAssetWriterInput *)assetWriterAudioInputWithOutputSettings: +- (id)assetWriterAudioInputWithOutputSettings: (nullable NSDictionary *)outputSettings; /** @@ -91,7 +92,7 @@ NS_ASSUME_NONNULL_BEGIN * @param outputSettings The settings used for encoding the video appended to the output. * @result An instance of `AVAssetWriterInput`. */ -- (AVAssetWriterInput *)assetWriterVideoInputWithOutputSettings: +- (id)assetWriterVideoInputWithOutputSettings: (nullable NSDictionary *)outputSettings; /** @@ -100,7 +101,7 @@ NS_ASSUME_NONNULL_BEGIN * @param writerInput The `AVAssetWriterInput` object to be added. * @param writer The `AVAssetWriter` object. 
*/ -- (void)addInput:(AVAssetWriterInput *)writerInput toAssetWriter:(AVAssetWriter *)writer; +- (void)addInput:(id)writerInput toAssetWriter:(AVAssetWriter *)writer; /** * @method recommendedVideoSettingsForAssetWriterWithFileType:forOutput: diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h index 6a63daacc8ab..dac11d4ed44d 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h @@ -3,11 +3,11 @@ // found in the LICENSE file. #import "FLTCam.h" +#import "FLTCaptureConnection.h" #import "FLTCaptureDeviceControlling.h" #import "FLTCapturePhotoOutput.h" #import "FLTCaptureSessionProtocol.h" #import "FLTSavePhotoDelegate.h" -#import "FLTCaptureConnection.h" @interface FLTImageStreamHandler : NSObject - (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTAssetWriter.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTAssetWriter.h index c39f935e2c0d..1a9dffb117ab 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTAssetWriter.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTAssetWriter.h @@ -9,7 +9,7 @@ NS_ASSUME_NONNULL_BEGIN @protocol FLTAssetWriter @property(nonatomic, readonly) AVAssetWriterStatus status; -@property (readonly, nullable) NSError *error; +@property(readonly, nullable) NSError *error; - (BOOL)startWriting; - (void)finishWritingWithCompletionHandler:(void (^)(void))handler; - (void)startSessionAtSourceTime:(CMTime)startTime; @@ -17,14 +17,15 @@ NS_ASSUME_NONNULL_BEGIN @end @protocol FLTAssetWriterInput -@property(nonatomic, readonly) BOOL expectsMediaDataInRealTime; +@property(nonatomic, readonly) AVAssetWriterInput *input; +@property(nonatomic, assign) BOOL expectsMediaDataInRealTime; @property(nonatomic, readonly) BOOL isReadyForMoreMediaData; - (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer; @end @protocol FLTPixelBufferAdaptor - (BOOL)appendPixelBuffer:(CVPixelBufferRef)pixelBuffer - withPresentationTime:(CMTime)presentationTime; + withPresentationTime:(CMTime)presentationTime; @end @interface FLTDefaultAssetWriter : NSObject diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureConnection.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureConnection.h index 95d3082c015e..e814745eb57a 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureConnection.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureConnection.h @@ -7,6 +7,7 @@ 
NS_ASSUME_NONNULL_BEGIN @protocol FLTCaptureConnection +@property(nonatomic, readonly) AVCaptureConnection *connection; @property(nonatomic) BOOL videoMirrored; @property(nonatomic) AVCaptureVideoOrientation videoOrientation; @property(nonatomic, readonly) NSArray *inputPorts; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h index b61f678b726e..c73ec017eb33 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h @@ -7,10 +7,20 @@ NS_ASSUME_NONNULL_BEGIN +@protocol FLTFrameRateRange +@property(readonly, nonatomic) float minFrameRate; +@property(readonly, nonatomic) float maxFrameRate; +@end + @protocol FLTCaptureDeviceFormat -@property(nonatomic, readonly) CMFormatDescriptionRef formatDescription; -@property(nonatomic, readonly) NSArray *videoSupportedFrameRateRanges; @property(nonatomic, readonly) AVCaptureDeviceFormat *format; +@property(nonatomic, readonly) CMFormatDescriptionRef formatDescription; +@property(nonatomic, readonly) NSArray<id<FLTFrameRateRange>> *videoSupportedFrameRateRanges; +@end + +@protocol FLTCaptureInput +@property(nonatomic, readonly) AVCaptureInput *input; +@property(nonatomic, readonly) NSArray<AVCaptureInputPort *> *ports; @end @protocol FLTCaptureDeviceControlling @@ -69,7 +79,7 @@ NS_ASSUME_NONNULL_BEGIN - (CMTime)activeVideoMaxFrameDuration; - (void)setActiveVideoMaxFrameDuration:(CMTime)duration; -- (AVCaptureInput *)createInput:(NSError *_Nullable *_Nullable)error; +- (id<FLTCaptureInput>)createInput:(NSError *_Nullable *_Nullable)error; @end @@ -81,4 +91,12 @@ NS_ASSUME_NONNULL_BEGIN - (instancetype)initWithFormat:(AVCaptureDeviceFormat *)format; @end +@interface FLTDefaultFrameRateRange : NSObject <FLTFrameRateRange> +- (instancetype)initWithRange:(AVFrameRateRange *)range; +@end + +@interface FLTDefaultCaptureInput : NSObject <FLTCaptureInput> +- (instancetype)initWithInput:(AVCaptureInput *)input; +@end + NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h index 96b8db5b2089..1a4beb19c726 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoOutput.h @@ -11,6 +11,7 @@ NS_ASSUME_NONNULL_BEGIN @protocol FLTCapturePhotoOutput +@property(nonatomic, readonly) AVCapturePhotoOutput *photoOutput; @property(nonatomic, readonly) NSArray *availablePhotoCodecTypes; @property(nonatomic, assign, getter=isHighResolutionCaptureEnabled) BOOL highResolutionCaptureEnabled; @@ -18,13 +19,11 @@ NS_ASSUME_NONNULL_BEGIN - (void)capturePhotoWithSettings:(id)settings delegate:(id)delegate; - - (nullable AVCaptureConnection *)connectionWithMediaType:(AVMediaType)mediaType; @end @interface 
FLTDefaultCapturePhotoOutput : NSObject - (instancetype)initWithPhotoOutput:(AVCapturePhotoOutput *)photoOutput; -@property(nonatomic, readonly) AVCapturePhotoOutput *photoOutput; @end NS_ASSUME_NONNULL_END diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h index 9cf3f9acb724..1a103e4f351d 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h @@ -9,7 +9,6 @@ NS_ASSUME_NONNULL_BEGIN @protocol FLTCapturePhotoSettings @property(nonatomic, readonly) AVCapturePhotoSettings *settings; - @property(readonly, nonatomic) int64_t uniqueID; @property(nonatomic, copy, readonly) NSDictionary *format; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h index 88824959e24c..44e8baacb7c6 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h @@ -4,6 +4,8 @@ @import AVFoundation; +#import "FLTCaptureDeviceControlling.h" + NS_ASSUME_NONNULL_BEGIN @protocol FLTCaptureSessionProtocol @@ -17,12 +19,12 @@ NS_ASSUME_NONNULL_BEGIN - (void)addOutputWithNoConnections:(AVCaptureOutput *)output; - (void)addConnection:(AVCaptureConnection *)connection; - (void)addOutput:(AVCaptureOutput *)output; -- (void)removeInput:(AVCaptureInput *)input; +- (void)removeInput:(id)input; - (void)removeOutput:(AVCaptureOutput *)output; -- (BOOL)canAddInput:(AVCaptureInput *)input; +- (BOOL)canAddInput:(id)input; - (BOOL)canAddOutput:(AVCaptureOutput *)output; - (BOOL)canAddConnection:(AVCaptureConnection *)connection; -- (void)addInput:(AVCaptureInput *)input; +- (void)addInput:(id)input; @property(nonatomic, copy) AVCaptureSessionPreset sessionPreset; @property(nonatomic, readonly) NSArray *inputs; @property(nonatomic, readonly) NSArray *outputs; From 124ae95dead5dda611bbcfd7cebee63d50b48901 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Thu, 19 Dec 2024 09:47:53 +0100 Subject: [PATCH 11/16] Reorder files --- .../ios/Runner.xcodeproj/project.pbxproj | 70 +++--------------- .../ios/RunnerTests/AvailableCamerasTest.m | 2 +- .../RunnerTests/CameraMethodChannelTests.m | 2 +- .../ios/RunnerTests/CameraOrientationTests.m | 6 +- .../ios/RunnerTests/CameraSettingsTests.m | 6 +- .../example/ios/RunnerTests/CameraTestUtils.h | 4 +- .../example/ios/RunnerTests/CameraTestUtils.m | 18 ++--- .../ios/RunnerTests/FLTCamSampleBufferTests.m | 4 +- .../RunnerTests/{ => Mocks}/MockAssetWriter.h | 0 .../RunnerTests/{ => Mocks}/MockAssetWriter.m | 0 .../{ => Mocks}/MockCameraDeviceDiscovery.h | 0 .../{ => Mocks}/MockCameraDeviceDiscovery.m | 0 .../{ => Mocks}/MockCaptureConnection.h | 0 .../{ 
=> Mocks}/MockCaptureConnection.m | 0 .../{ => Mocks}/MockCaptureDeviceController.h | 0 .../{ => Mocks}/MockCaptureDeviceController.m | 0 .../{ => Mocks}/MockCapturePhotoOutput.h | 0 .../{ => Mocks}/MockCapturePhotoOutput.m | 0 .../{ => Mocks}/MockCapturePhotoSettings.h | 0 .../{ => Mocks}/MockCapturePhotoSettings.m | 0 .../{ => Mocks}/MockCaptureSession.h | 2 +- .../{ => Mocks}/MockCaptureSession.m | 8 +-- .../MockDeviceOrientationProvider.h | 0 .../MockDeviceOrientationProvider.m | 0 .../{ => Mocks}/MockEventChannel.h | 2 +- .../{ => Mocks}/MockEventChannel.m | 0 .../RunnerTests/{ => Mocks}/MockPhotoData.h | 0 .../RunnerTests/{ => Mocks}/MockPhotoData.m | 0 .../camera_avfoundation/CameraPlugin.m | 4 +- .../Sources/camera_avfoundation/FLTCam.m | 71 +++---------------- .../FLTCamMediaSettingsAVWrapper.m | 6 +- .../FLTThreadSafeEventChannel.m | 6 +- ...eSessionProtocol.m => FLTCaptureSession.m} | 18 ++--- ...entChannelProtocol.m => FLTEventChannel.m} | 2 +- .../include/CameraPlugin.modulemap | 4 +- .../camera_avfoundation/CameraPlugin.h | 4 +- .../include/camera_avfoundation/FLTCam.h | 28 ++------ .../FLTCamMediaSettingsAVWrapper.h | 6 +- .../include/camera_avfoundation/FLTCam_Test.h | 2 +- .../FLTThreadSafeEventChannel.h | 4 +- ...eSessionProtocol.h => FLTCaptureSession.h} | 17 ++--- ...entChannelProtocol.h => FLTEventChannel.h} | 4 +- 42 files changed, 88 insertions(+), 212 deletions(-) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockAssetWriter.h (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockAssetWriter.m (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCameraDeviceDiscovery.h (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCameraDeviceDiscovery.m (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCaptureConnection.h (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCaptureConnection.m (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCaptureDeviceController.h (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCaptureDeviceController.m (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCapturePhotoOutput.h (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCapturePhotoOutput.m (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCapturePhotoSettings.h (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCapturePhotoSettings.m (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCaptureSession.h (92%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockCaptureSession.m (86%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockDeviceOrientationProvider.h (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockDeviceOrientationProvider.m (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockEventChannel.h (84%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockEventChannel.m (100%) rename packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockPhotoData.h (100%) rename 
packages/camera/camera_avfoundation/example/ios/RunnerTests/{ => Mocks}/MockPhotoData.m (100%) rename packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/{FLTCaptureSessionProtocol.m => FLTCaptureSession.m} (82%) rename packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/{FLTEventChannelProtocol.m => FLTEventChannel.m} (94%) rename packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/{FLTCaptureSessionProtocol.h => FLTCaptureSession.h} (85%) rename packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/{FLTEventChannelProtocol.h => FLTEventChannel.h} (79%) diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index 2c369f6a9397..257a897fa208 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 54; + objectVersion = 70; objects = { /* Begin PBXBuildFile section */ @@ -16,17 +16,7 @@ 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; }; 78A318202AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage in Frameworks */ = {isa = PBXBuildFile; productRef = 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */; }; 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */; }; - 7F29385D2D10A728009D2F67 /* MockCaptureConnection.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F29385C2D10A728009D2F67 /* MockCaptureConnection.m */; }; - 7F87E8022D01FD6F00A3549C /* MockCaptureDeviceController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E8012D01FD5600A3549C /* MockCaptureDeviceController.m */; }; - 7F87E80C2D0325D900A3549C /* MockDeviceOrientationProvider.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80B2D0325D700A3549C /* MockDeviceOrientationProvider.m */; }; 7F87E80E2D03283E00A3549C /* CameraExposureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */; }; - 7F87E81C2D06DE2400A3549C /* MockEventChannel.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E81B2D06DE2400A3549C /* MockEventChannel.m */; }; - 7F87E8262D06EBCB00A3549C /* MockCameraDeviceDiscovery.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E8252D06EBCB00A3549C /* MockCameraDeviceDiscovery.m */; }; - 7F87E8342D072F9A00A3549C /* MockCaptureSession.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E8332D072F9A00A3549C /* MockCaptureSession.m */; }; - 7F87E83B2D09B4A300A3549C /* MockPhotoData.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E83A2D09B4A300A3549C /* MockPhotoData.m */; }; - 7F87E8422D0AF98D00A3549C /* MockAssetWriter.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E8412D0AF98D00A3549C /* MockAssetWriter.m */; }; - 7F87E84D2D0B248A00A3549C /* MockCapturePhotoSettings.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E84C2D0B248A00A3549C /* MockCapturePhotoSettings.m */; }; - 7F87E8502D0B30DD00A3549C /* MockCapturePhotoOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E84F2D0B30DD00A3549C /* 
MockCapturePhotoOutput.m */; }; 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; @@ -89,27 +79,7 @@ 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraSettingsTests.m; sourceTree = ""; }; - 7F29385A2D10A653009D2F67 /* MockCaptureConnection.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureConnection.h; sourceTree = ""; }; - 7F29385C2D10A728009D2F67 /* MockCaptureConnection.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureConnection.m; sourceTree = ""; }; - 7F87E8012D01FD5600A3549C /* MockCaptureDeviceController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureDeviceController.m; sourceTree = ""; }; - 7F87E8032D02FF8C00A3549C /* MockCaptureDeviceController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureDeviceController.h; sourceTree = ""; }; - 7F87E80A2D0325B200A3549C /* MockDeviceOrientationProvider.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockDeviceOrientationProvider.h; sourceTree = ""; }; - 7F87E80B2D0325D700A3549C /* MockDeviceOrientationProvider.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockDeviceOrientationProvider.m; sourceTree = ""; }; 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraExposureTests.m; sourceTree = ""; }; - 7F87E81A2D06DDD700A3549C /* MockEventChannel.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockEventChannel.h; sourceTree = ""; }; - 7F87E81B2D06DE2400A3549C /* MockEventChannel.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockEventChannel.m; sourceTree = ""; }; - 7F87E8242D06EBB800A3549C /* MockCameraDeviceDiscovery.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCameraDeviceDiscovery.h; sourceTree = ""; }; - 7F87E8252D06EBCB00A3549C /* MockCameraDeviceDiscovery.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCameraDeviceDiscovery.m; sourceTree = ""; }; - 7F87E8322D072F8B00A3549C /* MockCaptureSession.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureSession.h; sourceTree = ""; }; - 7F87E8332D072F9A00A3549C /* MockCaptureSession.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureSession.m; sourceTree = ""; }; - 7F87E8392D09B45300A3549C /* MockPhotoData.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockPhotoData.h; sourceTree = ""; }; - 7F87E83A2D09B4A300A3549C /* MockPhotoData.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = 
MockPhotoData.m; sourceTree = ""; }; - 7F87E8402D0AF96400A3549C /* MockAssetWriter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockAssetWriter.h; sourceTree = ""; }; - 7F87E8412D0AF98D00A3549C /* MockAssetWriter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockAssetWriter.m; sourceTree = ""; }; - 7F87E84B2D0B245E00A3549C /* MockCapturePhotoSettings.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCapturePhotoSettings.h; sourceTree = ""; }; - 7F87E84C2D0B248A00A3549C /* MockCapturePhotoSettings.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCapturePhotoSettings.m; sourceTree = ""; }; - 7F87E84E2D0B30CD00A3549C /* MockCapturePhotoOutput.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCapturePhotoOutput.h; sourceTree = ""; }; - 7F87E84F2D0B30DD00A3549C /* MockCapturePhotoOutput.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCapturePhotoOutput.m; sourceTree = ""; }; 8FE1D7C55B3760E3991BE41E /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; }; 93DE3DA611CB15AE1AF7956C /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; @@ -134,6 +104,10 @@ E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPreviewPauseTests.m; sourceTree = ""; }; /* End PBXFileReference section */ +/* Begin PBXFileSystemSynchronizedRootGroup section */ + 7FA5D71B2D140E5B0041B9E2 /* Mocks */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = Mocks; sourceTree = ""; }; +/* End PBXFileSystemSynchronizedRootGroup section */ + /* Begin PBXFrameworksBuildPhase section */ 03BB76652665316900CE5A93 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; @@ -158,26 +132,7 @@ 03BB76692665316900CE5A93 /* RunnerTests */ = { isa = PBXGroup; children = ( - 7F87E81A2D06DDD700A3549C /* MockEventChannel.h */, - 7F87E81B2D06DE2400A3549C /* MockEventChannel.m */, - 7F87E80B2D0325D700A3549C /* MockDeviceOrientationProvider.m */, - 7F87E80A2D0325B200A3549C /* MockDeviceOrientationProvider.h */, - 7F87E8032D02FF8C00A3549C /* MockCaptureDeviceController.h */, - 7F87E8012D01FD5600A3549C /* MockCaptureDeviceController.m */, - 7F87E8242D06EBB800A3549C /* MockCameraDeviceDiscovery.h */, - 7F87E8252D06EBCB00A3549C /* MockCameraDeviceDiscovery.m */, - 7F87E8322D072F8B00A3549C /* MockCaptureSession.h */, - 7F87E8332D072F9A00A3549C /* MockCaptureSession.m */, - 7F87E8392D09B45300A3549C /* MockPhotoData.h */, - 7F87E83A2D09B4A300A3549C /* MockPhotoData.m */, - 7F87E8402D0AF96400A3549C /* MockAssetWriter.h */, - 7F87E8412D0AF98D00A3549C /* MockAssetWriter.m */, - 7F87E84B2D0B245E00A3549C /* MockCapturePhotoSettings.h */, - 7F87E84C2D0B248A00A3549C /* MockCapturePhotoSettings.m */, - 7F87E84E2D0B30CD00A3549C /* MockCapturePhotoOutput.h */, - 7F87E84F2D0B30DD00A3549C /* MockCapturePhotoOutput.m */, - 7F29385A2D10A653009D2F67 /* 
MockCaptureConnection.h */, - 7F29385C2D10A728009D2F67 /* MockCaptureConnection.m */, + 7FA5D71B2D140E5B0041B9E2 /* Mocks */, 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */, 03BB766A2665316900CE5A93 /* CameraFocusTests.m */, 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */, @@ -295,6 +250,9 @@ dependencies = ( 03BB766E2665316900CE5A93 /* PBXTargetDependency */, ); + fileSystemSynchronizedGroups = ( + 7FA5D71B2D140E5B0041B9E2 /* Mocks */, + ); name = RunnerTests; packageProductDependencies = ( ); @@ -494,23 +452,14 @@ buildActionMask = 2147483647; files = ( 033B94BE269C40A200B4DF97 /* CameraMethodChannelTests.m in Sources */, - 7F87E8502D0B30DD00A3549C /* MockCapturePhotoOutput.m in Sources */, E071CF7227B3061B006EF3BA /* FLTCamPhotoCaptureTests.m in Sources */, - 7F87E83B2D09B4A300A3549C /* MockPhotoData.m in Sources */, - 7F29385D2D10A728009D2F67 /* MockCaptureConnection.m in Sources */, E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */, - 7F87E8422D0AF98D00A3549C /* MockAssetWriter.m in Sources */, 03BB766B2665316900CE5A93 /* CameraFocusTests.m in Sources */, - 7F87E84D2D0B248A00A3549C /* MockCapturePhotoSettings.m in Sources */, - 7F87E8022D01FD6F00A3549C /* MockCaptureDeviceController.m in Sources */, 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */, E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */, 7F87E80E2D03283E00A3549C /* CameraExposureTests.m in Sources */, - 7F87E80C2D0325D900A3549C /* MockDeviceOrientationProvider.m in Sources */, - 7F87E8262D06EBCB00A3549C /* MockCameraDeviceDiscovery.m in Sources */, E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */, E04F108627A87CA600573D0C /* FLTSavePhotoDelegateTests.m in Sources */, - 7F87E81C2D06DE2400A3549C /* MockEventChannel.m in Sources */, 43ED1537282570DE00EB00DE /* AvailableCamerasTest.m in Sources */, E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */, 334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */, @@ -519,7 +468,6 @@ 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */, E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */, E0B0D2BB27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m in Sources */, - 7F87E8342D072F9A00A3549C /* MockCaptureSession.m in Sources */, E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */, ); runOnlyForDeploymentPostprocessing = 0; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m index 82a51ba3a346..a2db7ece30dc 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/AvailableCamerasTest.m @@ -27,7 +27,7 @@ - (void)setUp { messenger:nil globalAPI:nil deviceDiscovery:_mockDeviceDiscovery - sessionFactory:^id { + sessionFactory:^id { return nil; } deviceFactory:^id(NSString *name) { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m index c86346a2794c..978f63d2805b 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraMethodChannelTests.m @@ -27,7 +27,7 @@ - (void)testCreate_ShouldCallResultOnMainThread { messenger:nil globalAPI:nil deviceDiscovery:nil - sessionFactory:^id { + 
sessionFactory:^id { return avCaptureSessionMock; } deviceFactory:^id(NSString *name) { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m index 720648a83890..0ee82925dbb4 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraOrientationTests.m @@ -88,7 +88,7 @@ - (void)setUp { messenger:nil globalAPI:_eventAPI deviceDiscovery:_deviceDiscovery - sessionFactory:^id { + sessionFactory:^id { return weakSelf.captureSession; } deviceFactory:^id(NSString *name) { @@ -139,7 +139,7 @@ - (void)testOrientationNotificationsNotCalledForFaceDown { messenger:nil globalAPI:eventAPI deviceDiscovery:_deviceDiscovery - sessionFactory:^id { + sessionFactory:^id { return weakSelf.captureSession; } deviceFactory:^id(NSString *name) { @@ -183,7 +183,7 @@ - (void)testOrientationChanged_noRetainCycle { messenger:nil globalAPI:_eventAPI deviceDiscovery:_deviceDiscovery - sessionFactory:^id { + sessionFactory:^id { return weakSelf.captureSession; } deviceFactory:^id(NSString *name) { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m index 58e8cb33a237..e09c475d4808 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m @@ -67,11 +67,11 @@ - (void)unlockDevice:(AVCaptureDevice *)captureDevice { [_unlockExpectation fulfill]; } -- (void)beginConfigurationForSession:(id)videoCaptureSession { +- (void)beginConfigurationForSession:(id)videoCaptureSession { [_beginConfigurationExpectation fulfill]; } -- (void)commitConfigurationForSession:(id)videoCaptureSession { +- (void)commitConfigurationForSession:(id)videoCaptureSession { [_commitConfigurationExpectation fulfill]; } @@ -175,7 +175,7 @@ - (void)testSettings_ShouldBeSupportedByMethodCall { messenger:nil globalAPI:nil deviceDiscovery:nil - sessionFactory:^id { + sessionFactory:^id { return mockSession; } deviceFactory:^id(NSString *name) { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h index 47a94effe9e3..70cfd5ca3a49 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h @@ -31,7 +31,7 @@ extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessi /// @param captureSession AVCaptureSession for video /// @param resolutionPreset preset for camera's captureSession resolution /// @return an FLTCam object. -extern FLTCam *FLTCreateCamWithVideoCaptureSession(id captureSession, +extern FLTCam *FLTCreateCamWithVideoCaptureSession(id captureSession, FCPPlatformResolutionPreset resolutionPreset); /// Creates an `FLTCam` with a given captureSession and resolutionPreset. @@ -42,7 +42,7 @@ extern FLTCam *FLTCreateCamWithVideoCaptureSession(id /// @param videoDimensionsForFormat custom code to determine video dimensions /// @return an FLTCam object. 
extern FLTCam *FLTCreateCamWithVideoDimensionsForFormat( - id captureSession, FCPPlatformResolutionPreset resolutionPreset, + id captureSession, FCPPlatformResolutionPreset resolutionPreset, id captureDevice, VideoDimensionsForFormat videoDimensionsForFormat); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m index 1c6f8c3a4ee3..22def2bf0fcb 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m @@ -77,27 +77,27 @@ audioCaptureSession:audioSessionMock captureSessionQueue:captureSessionQueue captureDeviceFactory:captureDeviceFactory ?: ^id(void) { - return mockDevice; + return mockDevice; } audioCaptureDeviceFactory:captureDeviceFactory ?: ^id(void) { - return mockDevice; + return mockDevice; } videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { return CMVideoFormatDescriptionGetDimensions(format.formatDescription); } capturePhotoOutput:capturePhotoOutput assetWriterFactory:^id _Nonnull(NSURL *url, AVFileType fileType, NSError * _Nullable __autoreleasing * _Nullable error) { - return assetWriter; - } - pixelBufferAdaptorFactory:^id _Nonnull(id _Nonnull writerInput, NSDictionary * _Nullable source) { - return pixelBufferAdaptor; - } + return assetWriter; + } + pixelBufferAdaptorFactory:^id _Nonnull(id _Nonnull writerInput, NSDictionary * _Nullable source) { + return pixelBufferAdaptor; + } error:nil]; return fltCam; } -FLTCam *FLTCreateCamWithVideoCaptureSession(id captureSession, +FLTCam *FLTCreateCamWithVideoCaptureSession(id captureSession, FCPPlatformResolutionPreset resolutionPreset) { MockCaptureSession *audioSessionMock = [[MockCaptureSession alloc] init]; audioSessionMock.mockCanSetSessionPreset = YES; @@ -132,7 +132,7 @@ } FLTCam *FLTCreateCamWithVideoDimensionsForFormat( - id captureSession, FCPPlatformResolutionPreset resolutionPreset, + id captureSession, FCPPlatformResolutionPreset resolutionPreset, id captureDevice, VideoDimensionsForFormat videoDimensionsForFormat) { MockCaptureSession *audioSessionMock = [[MockCaptureSession alloc] init]; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m index c375b0f9413d..67d5c005c1e7 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m @@ -32,10 +32,10 @@ - (BOOL)lockDevice:(AVCaptureDevice *)captureDevice error:(NSError **)outError { - (void)unlockDevice:(AVCaptureDevice *)captureDevice { } -- (void)beginConfigurationForSession:(id)videoCaptureSession { +- (void)beginConfigurationForSession:(id)videoCaptureSession { } -- (void)commitConfigurationForSession:(id)videoCaptureSession { +- (void)commitConfigurationForSession:(id)videoCaptureSession { } - (void)setMinFrameDuration:(CMTime)duration onDevice:(AVCaptureDevice *)captureDevice { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.h similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.h rename to 
packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.h diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.m similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockAssetWriter.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockAssetWriter.m diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscovery.h similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.h rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscovery.h diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscovery.m similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCameraDeviceDiscovery.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCameraDeviceDiscovery.m diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureConnection.h similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.h rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureConnection.h diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureConnection.m similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureConnection.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureConnection.m diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceController.h similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.h rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceController.h diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceController.m similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureDeviceController.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureDeviceController.m diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoOutput.h similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.h rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoOutput.h diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.m 
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoOutput.m similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoOutput.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoOutput.m diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.h rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCapturePhotoSettings.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.h similarity index 92% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.h rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.h index fc2af24098cd..4fea3a6a1929 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.h @@ -7,7 +7,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface MockCaptureSession : NSObject +@interface MockCaptureSession : NSObject @property(nonatomic, copy) void (^beginConfigurationStub)(void); @property(nonatomic, copy) void (^commitConfigurationStub)(void); @property(nonatomic, copy) void (^startRunningStub)(void); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.m similarity index 86% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.m index 0653dd17a118..9adb54e90d6c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockCaptureSession.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCaptureSession.m @@ -46,10 +46,10 @@ - (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset { - (void)addConnection:(nonnull AVCaptureConnection *)connection { } -- (void)addInput:(nonnull id)input { +- (void)addInput:(nonnull AVCaptureInput *)input { } -- (void)addInputWithNoConnections:(nonnull id)input { +- (void)addInputWithNoConnections:(nonnull AVCaptureInput *)input { } - (void)addOutput:(nonnull AVCaptureOutput *)output { @@ -62,7 +62,7 @@ - (BOOL)canAddConnection:(nonnull AVCaptureConnection *)connection { return YES; } -- (BOOL)canAddInput:(nonnull id)input { +- (BOOL)canAddInput:(nonnull AVCaptureInput *)input { return YES; } @@ -70,7 +70,7 @@ - (BOOL)canAddOutput:(nonnull AVCaptureOutput *)output { return YES; } -- (void)removeInput:(nonnull id)input { +- (void)removeInput:(nonnull AVCaptureInput *)input { } - (void)removeOutput:(nonnull 
AVCaptureOutput *)output { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceOrientationProvider.h similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.h rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceOrientationProvider.h diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceOrientationProvider.m similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockDeviceOrientationProvider.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockDeviceOrientationProvider.m diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockEventChannel.h similarity index 84% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.h rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockEventChannel.h index 098b75b6982e..fe5b34cf7552 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockEventChannel.h @@ -7,7 +7,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface MockEventChannel : NSObject +@interface MockEventChannel : NSObject @property(nonatomic, copy) void (^setStreamHandlerStub)(NSObject *); @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockEventChannel.m similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockEventChannel.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockEventChannel.m diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockPhotoData.h similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.h rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockPhotoData.h diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockPhotoData.m similarity index 100% rename from packages/camera/camera_avfoundation/example/ios/RunnerTests/MockPhotoData.m rename to packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockPhotoData.m diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m index 4f4afa9bd123..882256710839 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m @@ -47,7 +47,7 @@ - (instancetype)initWithRegistry:(NSObject *)registry messenger:messenger globalAPI:[[FCPCameraGlobalEventApi alloc] initWithBinaryMessenger:messenger] deviceDiscovery:[[FLTDefaultCameraDeviceDiscovery alloc] init] - 
sessionFactory:^id(void) { + sessionFactory:^id(void) { return [[FLTDefaultCaptureSession alloc] initWithCaptureSession:[[AVCaptureSession alloc] init]]; } @@ -80,7 +80,6 @@ - (instancetype)initWithRegistry:(NSObject *)registry dispatch_queue_set_specific(_captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); - // TODO: use device orientation protocol [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(orientationChanged:) @@ -94,7 +93,6 @@ - (void)detachFromEngineForRegistrar:(NSObject *)registr } - (void)orientationChanged:(NSNotification *)note { - // TODO: change to protocol UIDevice *device = note.object; UIDeviceOrientation orientation = device.orientation; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 6c7973d9c12c..1400f2b1359e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -14,9 +14,9 @@ #import "./include/camera_avfoundation/Protocols/FLTCaptureConnection.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" #import "./include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h" -#import "./include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureSession.h" #import "./include/camera_avfoundation/Protocols/FLTDeviceOrientationProviding.h" -#import "./include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h" +#import "./include/camera_avfoundation/Protocols/FLTEventChannel.h" #import "./include/camera_avfoundation/QueueUtils.h" #import "./include/camera_avfoundation/messages.g.h" @@ -60,8 +60,8 @@ @interface FLTCam () videoCaptureSession; -@property(readonly, nonatomic) id audioCaptureSession; +@property(readonly, nonatomic) id videoCaptureSession; +@property(readonly, nonatomic) id audioCaptureSession; @property(readonly, nonatomic) id captureVideoInput; /// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback. 
@@ -123,57 +123,6 @@ @implementation FLTCam NSString *const errorMethod = @"error"; -//- (instancetype)initWithCameraName:(NSString *)cameraName -// mediaSettings:(FCPPlatformMediaSettings *)mediaSettings -// mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper -// orientation:(UIDeviceOrientation)orientation -// captureSessionQueue:(dispatch_queue_t)captureSessionQueue -// error:(NSError **)error { -// AVCaptureSession *videoSession = [[AVCaptureSession alloc] init]; -// AVCaptureSession *audioSession = [[AVCaptureSession alloc] init]; -// -// return [self -// initWithCameraName:cameraName -// mediaSettings:mediaSettings -// mediaSettingsAVWrapper:mediaSettingsAVWrapper -// orientation:orientation -// videoCaptureSession:[[FLTDefaultCaptureSession alloc] -// initWithCaptureSession:videoSession] audioCaptureSession:[[FLTDefaultCaptureSession -// alloc] initWithCaptureSession:audioSession] captureSessionQueue:captureSessionQueue -// error:error]; -//} - -//- (instancetype)initWithCameraName:(NSString *)cameraName -// mediaSettings:(FCPPlatformMediaSettings *)mediaSettings -// mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper -// orientation:(UIDeviceOrientation)orientation -// videoCaptureSession:(id)videoCaptureSession -// audioCaptureSession:(id)audioCaptureSession -// captureSessionQueue:(dispatch_queue_t)captureSessionQueue -// error:(NSError **)error { -// return [self initWithMediaSettings:mediaSettings -// mediaSettingsAVWrapper:mediaSettingsAVWrapper -// orientation:orientation -// videoCaptureSession:videoCaptureSession -// audioCaptureSession:videoCaptureSession -// captureSessionQueue:captureSessionQueue -// captureDeviceFactory:^id(void) { -// AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:cameraName]; -// return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:device]; -// } -// videoDimensionsForFormat:^CMVideoDimensions(id format) { -// return CMVideoFormatDescriptionGetDimensions(format.formatDescription); -// } -// capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] -// initWithPhotoOutput:[AVCapturePhotoOutput new]] -// assetWriterFactory:^id _Nonnull(NSURL * _Nonnull url, AVFileType -// _Nonnull fileType, NSError * _Nullable __autoreleasing * _Nullable error) { -// return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; -// -// } error:error -// ]; -//} - // Returns frame rate supported by format closest to targetFrameRate. static double bestFrameRateForFormat(id format, double targetFrameRate) { double bestFrameRate = 0; @@ -230,8 +179,8 @@ static void selectBestFormatForRequestedFrameRate( - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(id)videoCaptureSession - audioCaptureSession:(id)audioCaptureSession + videoCaptureSession:(id)videoCaptureSession + audioCaptureSession:(id)audioCaptureSession captureSessionQueue:(dispatch_queue_t)captureSessionQueue captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory audioCaptureDeviceFactory:(CaptureDeviceFactory)audioCaptureDeviceFactory @@ -1116,7 +1065,7 @@ - (void)setDescriptionWhileRecording:(NSString *)cameraName // Remove the old video capture connections. 
[_videoCaptureSession beginConfiguration]; - [_videoCaptureSession removeInput:_captureVideoInput]; + [_videoCaptureSession removeInput:_captureVideoInput.input]; [_videoCaptureSession removeOutput:_captureVideoOutput]; NSError *error = nil; @@ -1132,11 +1081,11 @@ - (void)setDescriptionWhileRecording:(NSString *)cameraName } // Add the new connections to the session. - if (![_videoCaptureSession canAddInput:_captureVideoInput]) + if (![_videoCaptureSession canAddInput:_captureVideoInput.input]) completion([FlutterError errorWithCode:@"VideoError" message:@"Unable switch video input" details:nil]); - [_videoCaptureSession addInputWithNoConnections:_captureVideoInput]; + [_videoCaptureSession addInputWithNoConnections:_captureVideoInput.input]; if (![_videoCaptureSession canAddOutput:_captureVideoOutput]) completion([FlutterError errorWithCode:@"VideoError" message:@"Unable switch video output" @@ -1238,7 +1187,7 @@ - (void)startImageStreamWithMessenger:(NSObject *)messen FlutterEventChannel *eventChannel = [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera_avfoundation/imageStream" binaryMessenger:messenger]; - id eventChannelProtocol = + id eventChannelProtocol = [[FLTDefaultEventChannel alloc] initWithEventChannel:eventChannel]; FLTThreadSafeEventChannel *threadSafeEventChannel = [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannelProtocol]; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m index 4d19027b9db9..0cc66b724240 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamMediaSettingsAVWrapper.m @@ -5,7 +5,7 @@ #import "./include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h" #import "./include/camera_avfoundation/Protocols/FLTAssetWriter.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureDeviceControlling.h" -#import "./include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" +#import "./include/camera_avfoundation/Protocols/FLTCaptureSession.h" @implementation FLTCamMediaSettingsAVWrapper @@ -18,11 +18,11 @@ - (void)unlockDevice:(id)captureDevice { return [captureDevice unlockForConfiguration]; } -- (void)beginConfigurationForSession:(id)videoCaptureSession { +- (void)beginConfigurationForSession:(id)videoCaptureSession { [videoCaptureSession beginConfiguration]; } -- (void)commitConfigurationForSession:(id)videoCaptureSession { +- (void)commitConfigurationForSession:(id)videoCaptureSession { [videoCaptureSession commitConfiguration]; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m index e96ebba411be..515fa75948d2 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTThreadSafeEventChannel.m @@ -3,16 +3,16 @@ // found in the LICENSE file. 
#import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" -#import "./include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h" +#import "./include/camera_avfoundation/Protocols/FLTEventChannel.h" #import "./include/camera_avfoundation/QueueUtils.h" @interface FLTThreadSafeEventChannel () -@property(nonatomic, strong) id channel; +@property(nonatomic, strong) id channel; @end @implementation FLTThreadSafeEventChannel -- (instancetype)initWithEventChannel:(id)channel { +- (instancetype)initWithEventChannel:(id)channel { self = [super init]; if (self) { _channel = channel; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSession.m similarity index 82% rename from packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m rename to packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSession.m index 260bcd574165..241fe223658d 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCaptureSession.m @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#import "../include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h" +#import "../include/camera_avfoundation/Protocols/FLTCaptureSession.h" #import "../include/camera_avfoundation/Protocols/FLTCaptureConnection.h" @interface FLTDefaultCaptureSession () @@ -39,8 +39,8 @@ - (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset { return [_captureSession canSetSessionPreset:preset]; } -- (void)addInputWithNoConnections:(id)input { - [_captureSession addInputWithNoConnections:input.input]; +- (void)addInputWithNoConnections:(AVCaptureInput *)input { + [_captureSession addInputWithNoConnections:input]; } - (void)addOutputWithNoConnections:(AVCaptureOutput *)output { @@ -55,8 +55,8 @@ - (void)addOutput:(AVCaptureOutput *)output { [_captureSession addOutput:output]; } -- (void)removeInput:(id)input { - [_captureSession removeInput:input.input]; +- (void)removeInput:(AVCaptureInput *)input { + [_captureSession removeInput:input]; } - (void)removeOutput:(AVCaptureOutput *)output { @@ -79,8 +79,8 @@ - (AVCaptureSessionPreset)sessionPreset { return _captureSession.outputs; } -- (BOOL)canAddInput:(id)input { - return [_captureSession canAddInput:input.input]; +- (BOOL)canAddInput:(AVCaptureInput *)input { + return [_captureSession canAddInput:input]; } - (BOOL)canAddOutput:(AVCaptureOutput *)output { @@ -91,8 +91,8 @@ - (BOOL)canAddConnection:(id)connection { return [_captureSession canAddConnection:connection.connection]; } -- (void)addInput:(id)input { - [_captureSession addInput:input.input]; +- (void)addInput:(AVCaptureInput *)input { + [_captureSession addInput:input]; } @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannelProtocol.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannel.m similarity index 94% rename from 
packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannelProtocol.m rename to packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannel.m index 7ddb68e07b05..c3eb17a6dc2e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannelProtocol.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTEventChannel.m @@ -4,7 +4,7 @@ @import Flutter; -#import "FLTEventChannelProtocol.h" +#import "FLTEventChannel.h" @interface FLTDefaultEventChannel () @property(nonatomic, strong) FlutterEventChannel *channel; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap index b49a1b0a0039..75604fa61bb7 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap @@ -15,9 +15,9 @@ framework module camera_avfoundation { header "FLTCaptureDeviceControlling.h" header "FLTCameraPermissionManager.h" header "FLTDeviceOrientationProviding.h" - header "FLTEventChannelProtocol.h" + header "FLTEventChannel.h" header "FLTCameraDeviceDiscovery.h" - header "FLTCaptureSessionProtocol.h" + header "FLTCaptureSession.h" header "FLTCapturePhotoSettings.h" header "FLTCapturePhotoOutput.h" header "FLTPhotoData.h" diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h index cff761883573..5a1310931067 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h @@ -7,9 +7,9 @@ #import "messages.g.h" #import "FLTCaptureDeviceControlling.h" -#import "FLTCaptureSessionProtocol.h" +#import "FLTCaptureSession.h" -typedef id (^CaptureSessionFactory)(void); +typedef id (^CaptureSessionFactory)(void); typedef id (^CaptureNamedDeviceFactory)(NSString *name); @interface CameraPlugin : NSObject diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h index f30e614db3ee..e664abe21fc1 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h @@ -51,39 +51,19 @@ typedef CMVideoDimensions (^VideoDimensionsForFormat)(id @property(assign, nonatomic) CGFloat maximumAvailableZoomFactor; /// Initializes an `FLTCam` instance. -/// @param cameraName a name used to uniquely identify the camera. 
+/// Allows for testing with specified resolution, audio preference, orientation, +/// and direct access to capture sessions and blocks. /// @param mediaSettings the media settings configuration parameters /// @param mediaSettingsAVWrapper AVFoundation wrapper to perform media settings related operations /// (for dependency injection in unit tests). /// @param orientation the orientation of camera /// @param captureSessionQueue the queue on which camera's capture session operations happen. /// @param error report to the caller if any error happened creating the camera. -//- (instancetype)initWithCameraName:(NSString *)cameraName -// mediaSettings:(FCPPlatformMediaSettings *)mediaSettings -// mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper -// orientation:(UIDeviceOrientation)orientation -// captureSessionQueue:(dispatch_queue_t)captureSessionQueue -// error:(NSError **)error; - -/// Initializes a camera instance. -/// Allows for injecting dependencies that are usually internal. -//- (instancetype)initWithCameraName:(NSString *)cameraName -// mediaSettings:(FCPPlatformMediaSettings *)mediaSettings -// mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper -// orientation:(UIDeviceOrientation)orientation -// videoCaptureSession:(id)videoCaptureSession -// audioCaptureSession:(id)audioCaptureSession -// captureSessionQueue:(dispatch_queue_t)captureSessionQueue -// error:(NSError **)error; - -/// Initializes a camera instance. -/// Allows for testing with specified resolution, audio preference, orientation, -/// and direct access to capture sessions and blocks. - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(id)videoCaptureSession - audioCaptureSession:(id)audioCaptureSession + videoCaptureSession:(id)videoCaptureSession + audioCaptureSession:(id)audioCaptureSession captureSessionQueue:(dispatch_queue_t)captureSessionQueue captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory audioCaptureDeviceFactory:(CaptureDeviceFactory)audioCaptureDeviceFactory diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h index d926d0cc97ab..767ab63e5bab 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamMediaSettingsAVWrapper.h @@ -7,7 +7,7 @@ #import "FLTAssetWriter.h" #import "FLTCaptureDeviceControlling.h" -#import "FLTCaptureSessionProtocol.h" +#import "FLTCaptureSession.h" NS_ASSUME_NONNULL_BEGIN @@ -45,7 +45,7 @@ NS_ASSUME_NONNULL_BEGIN * operations on a running session into atomic updates. * @param videoCaptureSession The video capture session. */ -- (void)beginConfigurationForSession:(id)videoCaptureSession; +- (void)beginConfigurationForSession:(id)videoCaptureSession; /** * @method commitConfigurationForSession: @@ -53,7 +53,7 @@ NS_ASSUME_NONNULL_BEGIN * operations on a running session into atomic updates. * @param videoCaptureSession The video capture session. 
*/ -- (void)commitConfigurationForSession:(id)videoCaptureSession; +- (void)commitConfigurationForSession:(id)videoCaptureSession; /** * @method setMinFrameDuration:onDevice: diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h index dac11d4ed44d..39ae6f49cdf8 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam_Test.h @@ -6,7 +6,7 @@ #import "FLTCaptureConnection.h" #import "FLTCaptureDeviceControlling.h" #import "FLTCapturePhotoOutput.h" -#import "FLTCaptureSessionProtocol.h" +#import "FLTCaptureSession.h" #import "FLTSavePhotoDelegate.h" @interface FLTImageStreamHandler : NSObject diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h index e7cf7f90c74b..b942b687dc6d 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTThreadSafeEventChannel.h @@ -4,7 +4,7 @@ #import -#import "FLTEventChannelProtocol.h" +#import "FLTEventChannel.h" NS_ASSUME_NONNULL_BEGIN @@ -14,7 +14,7 @@ NS_ASSUME_NONNULL_BEGIN /// Creates a FLTThreadSafeEventChannel by wrapping a FlutterEventChannel object. /// @param channel The FlutterEventChannel object to be wrapped. -- (instancetype)initWithEventChannel:(id)channel; +- (instancetype)initWithEventChannel:(id)channel; /// Registers a handler on the main thread for stream setup requests from the Flutter side. /// The completion block runs on the main thread. 
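The FLTEventChannel protocol introduced by this rename has a single requirement, -setStreamHandler:, so code that exercises FLTThreadSafeEventChannel can supply a very small test double. A rough sketch of such a double follows; the StubEventChannel name is illustrative, the handler parameter type is assumed to be NSObject<FlutterStreamHandler> * to match FlutterEventChannel's own API, and this is not necessarily identical to the MockEventChannel used by the RunnerTests.

@interface StubEventChannel : NSObject <FLTEventChannel>
@property(nonatomic, strong, nullable) NSObject<FlutterStreamHandler> *lastHandler;
@end

@implementation StubEventChannel
- (void)setStreamHandler:(nullable NSObject<FlutterStreamHandler> *)handler {
  // Record the handler so a test can assert that FLTThreadSafeEventChannel
  // forwarded it (on the main thread) to the wrapped channel.
  self.lastHandler = handler;
}
@end
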
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSession.h similarity index 85% rename from packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h rename to packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSession.h index 44e8baacb7c6..9428b57d414f 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSessionProtocol.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCaptureSession.h @@ -8,7 +8,11 @@ NS_ASSUME_NONNULL_BEGIN -@protocol FLTCaptureSessionProtocol +@protocol FLTCaptureSession + +@property(nonatomic, copy) AVCaptureSessionPreset sessionPreset; +@property(nonatomic, readonly) NSArray *inputs; +@property(nonatomic, readonly) NSArray *outputs; - (void)beginConfiguration; - (void)commitConfiguration; @@ -19,19 +23,16 @@ NS_ASSUME_NONNULL_BEGIN - (void)addOutputWithNoConnections:(AVCaptureOutput *)output; - (void)addConnection:(AVCaptureConnection *)connection; - (void)addOutput:(AVCaptureOutput *)output; -- (void)removeInput:(id)input; +- (void)removeInput:(AVCaptureInput *)input; - (void)removeOutput:(AVCaptureOutput *)output; -- (BOOL)canAddInput:(id)input; +- (BOOL)canAddInput:(AVCaptureInput *)input; - (BOOL)canAddOutput:(AVCaptureOutput *)output; - (BOOL)canAddConnection:(AVCaptureConnection *)connection; -- (void)addInput:(id)input; -@property(nonatomic, copy) AVCaptureSessionPreset sessionPreset; -@property(nonatomic, readonly) NSArray *inputs; -@property(nonatomic, readonly) NSArray *outputs; +- (void)addInput:(AVCaptureInput *)input; @end -@interface FLTDefaultCaptureSession : NSObject +@interface FLTDefaultCaptureSession : NSObject - (instancetype)initWithCaptureSession:(AVCaptureSession *)session; @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannel.h similarity index 79% rename from packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h rename to packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannel.h index eefdd2a0a6fa..fa509c04dea6 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannelProtocol.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTEventChannel.h @@ -6,12 +6,12 @@ NS_ASSUME_NONNULL_BEGIN -@protocol FLTEventChannelProtocol +@protocol FLTEventChannel - (void)setStreamHandler:(nullable NSObject *)handler; @end /// The default method channel that wraps FlutterMethodChannel -@interface FLTDefaultEventChannel : NSObject +@interface 
FLTDefaultEventChannel : NSObject - (instancetype)initWithEventChannel:(FlutterEventChannel *)channel; @end From eaf80cf4d9163a2b767cd41affe87a79a6530a2e Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Thu, 19 Dec 2024 14:20:15 +0100 Subject: [PATCH 12/16] Add protocol for AVCapturePhotoSettings --- .../example/ios/RunnerTests/CameraTestUtils.m | 4 ++++ .../ios/RunnerTests/FLTCamPhotoCaptureTests.m | 23 ------------------- .../Mocks/MockCapturePhotoSettings.h | 5 ++++ .../Mocks/MockCapturePhotoSettings.m | 12 ++++++++++ .../camera_avfoundation/CameraPlugin.m | 1 + .../Sources/camera_avfoundation/FLTCam.m | 13 ++++++----- .../Protocols/FLTCapturePhotoSettings.m | 12 ++++++++++ .../include/camera_avfoundation/FLTCam.h | 1 + .../Protocols/FLTCapturePhotoSettings.h | 8 +++++++ 9 files changed, 50 insertions(+), 29 deletions(-) diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m index 22def2bf0fcb..931b30be014b 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m @@ -10,6 +10,7 @@ #import "MockAssetWriter.h" #import "MockCaptureDeviceController.h" #import "MockCaptureSession.h" +#import "MockCapturePhotoSettings.h" static FCPPlatformMediaSettings *FCPGetDefaultMediaSettings( FCPPlatformResolutionPreset resolutionPreset) { @@ -92,6 +93,7 @@ pixelBufferAdaptorFactory:^id _Nonnull(id _Nonnull writerInput, NSDictionary * _Nullable source) { return pixelBufferAdaptor; } + photoSettingsFactory: [[MockCapturePhotoSettingsFactory alloc] init] error:nil]; return fltCam; @@ -128,6 +130,7 @@ NSDictionary *_Nullable source) { return [[MockPixelBufferAdaptor alloc] init]; } + photoSettingsFactory: [[MockCapturePhotoSettingsFactory alloc] init] error:nil]; } @@ -162,6 +165,7 @@ NSDictionary *_Nullable source) { return [[MockPixelBufferAdaptor alloc] init]; } + photoSettingsFactory: [[MockCapturePhotoSettingsFactory alloc] init] error:nil]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m index e4e99e9f6a63..bb7960c7e395 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m @@ -29,10 +29,6 @@ - (void)testCaptureToFile_mustReportErrorToResultIfSavePhotoDelegateCompletionsW (void *)FLTCaptureSessionQueueSpecific, NULL); FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - // AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - // id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - // OCMStub([mockSettings photoSettings]).andReturn(settings); - NSError *error = [NSError errorWithDomain:@"test" code:0 userInfo:nil]; MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; @@ -70,10 +66,6 @@ - (void)testCaptureToFile_mustReportPathToResultIfSavePhotoDelegateCompletionsWi (void *)FLTCaptureSessionQueueSpecific, NULL); FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - // AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - // id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - // OCMStub([mockSettings photoSettings]).andReturn(settings); - NSString *filePath = @"test"; 
MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; @@ -108,13 +100,6 @@ - (void)testCaptureToFile_mustReportFileExtensionWithHeifWhenHEVCIsAvailableAndF FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); [cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; - // AVCapturePhotoSettings *settings = - // [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : - // AVVideoCodecTypeHEVC}]; - // - // id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - // OCMStub([mockSettings photoSettingsWithFormat:OCMOCK_ANY]).andReturn(settings); - MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; // Set availablePhotoCodecTypes to HEVC mockOutput.availablePhotoCodecTypes = @[ AVVideoCodecTypeHEVC ]; @@ -149,10 +134,6 @@ - (void)testCaptureToFile_mustReportFileExtensionWithJpgWhenHEVCNotAvailableAndF FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); [cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; - // AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - // id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - // OCMStub([mockSettings photoSettings]).andReturn(settings); - MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; mockOutput.capturePhotoWithSettingsStub = ^(id settings, id photoDelegate) { @@ -202,10 +183,6 @@ - (void)testCaptureToFile_handlesTorchMode { }, nil, nil, nil); - // AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings]; - // id mockSettings = OCMClassMock([AVCapturePhotoSettings class]); - // OCMStub([mockSettings photoSettings]).andReturn(settings); - NSString *filePath = @"test"; MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h index 522ec63bfa0c..c5c7b7b4aa5b 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h @@ -12,3 +12,8 @@ @property(nonatomic, assign) AVCaptureFlashMode flashMode; @property(nonatomic, assign) BOOL highResolutionPhotoEnabled; @end + +@interface MockCapturePhotoSettingsFactory : NSObject +@property(nonatomic, copy) id (^createPhotoSettingsStub)(void); +@property(nonatomic, copy) id (^createPhotoSettingsWithFormatStub)(NSDictionary *); +@end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m index 10d2daf45a7b..366d291934e2 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m @@ -5,5 +5,17 @@ #import "MockCapturePhotoSettings.h" @implementation MockCapturePhotoSettings +@end + + +@implementation MockCapturePhotoSettingsFactory + +- (id)createPhotoSettings { + return self.createPhotoSettingsStub ? self.createPhotoSettingsStub() : [[MockCapturePhotoSettings alloc] init]; +} + +- (id)createPhotoSettingsWithFormat:(NSDictionary *)format { + return self.createPhotoSettingsWithFormatStub ? 
self.createPhotoSettingsWithFormatStub(format) : [[MockCapturePhotoSettings alloc] init]; +} @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m index 882256710839..8daa442eeb5d 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m @@ -531,6 +531,7 @@ - (void)sessionQueueCreateCameraWithName:(NSString *)name initWithAssetWriterInput:assetWriterInput.input sourcePixelBufferAttributes:sourcePixelBufferAttributes]]; } + photoSettingsFactory:[[FLTDefaultCapturePhotoSettingsFactory alloc] init] error:&error]; if (error) { diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 1400f2b1359e..05bb5e784fd9 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -112,7 +112,9 @@ @interface FLTCam () photoSettingsFactory; @property(readonly, nonatomic) id deviceOrientationProvider; + /// Reports the given error message to the Dart side of the plugin. /// /// Can be called from any thread. @@ -188,6 +190,7 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings capturePhotoOutput:(id)capturePhotoOutput assetWriterFactory:(AssetWriterFactory)assetWriterFactory pixelBufferAdaptorFactory:(PixelBufferAdaptorFactory)pixelBufferAdaptorFactory + photoSettingsFactory:(id)photoSettingsFactory error:(NSError **)error { self = [super init]; NSAssert(self, @"super init cannot be nil"); @@ -214,7 +217,8 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings _fileFormat = FCPPlatformImageFileFormatJpeg; _assetWriterFactory = assetWriterFactory; _pixelBufferAdaptorFactory = pixelBufferAdaptorFactory; - + _photoSettingsFactory = photoSettingsFactory; + // To limit memory consumption, limit the number of frames pending processing. // After some testing, 4 was determined to be the best maximum value. 
// https://github.com/flutter/plugins/pull/4520#discussion_r766335637 @@ -391,8 +395,7 @@ - (void)updateOrientation:(UIDeviceOrientation)orientation - (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable, FlutterError *_Nullable))completion { - id settings = [[FLTDefaultCapturePhotoSettings alloc] - initWithSettings:[AVCapturePhotoSettings photoSettings]]; + id settings = [_photoSettingsFactory createPhotoSettings]; if (self.mediaSettings.resolutionPreset == FCPPlatformResolutionPresetMax) { [settings setHighResolutionPhotoEnabled:YES]; @@ -404,9 +407,7 @@ - (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable, [self.capturePhotoOutput.availablePhotoCodecTypes containsObject:AVVideoCodecTypeHEVC]; if (_fileFormat == FCPPlatformImageFileFormatHeif && isHEVCCodecAvailable) { - settings = [[FLTDefaultCapturePhotoSettings alloc] - initWithSettings:[AVCapturePhotoSettings - photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}]]; + settings = [_photoSettingsFactory createPhotoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}]; extension = @"heif"; } else { extension = @"jpg"; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m index 9ca4d1350c41..6fdf47d19da3 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m @@ -34,3 +34,15 @@ - (void)setHighResolutionPhotoEnabled:(BOOL)enabled { } @end + +@implementation FLTDefaultCapturePhotoSettingsFactory + +- (id)createPhotoSettings { + return [[FLTDefaultCapturePhotoSettings alloc] initWithSettings:[AVCapturePhotoSettings photoSettings]]; +} + +- (id)createPhotoSettingsWithFormat:(NSDictionary *)format { + return [[FLTDefaultCapturePhotoSettings alloc] initWithSettings:[AVCapturePhotoSettings photoSettingsWithFormat:format]]; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h index e664abe21fc1..c07d687aceef 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h @@ -71,6 +71,7 @@ typedef CMVideoDimensions (^VideoDimensionsForFormat)(id capturePhotoOutput:(id)capturePhotoOutput assetWriterFactory:(AssetWriterFactory)assetWriterFactory pixelBufferAdaptorFactory:(PixelBufferAdaptorFactory)pixelBufferAdaptorFactory + photoSettingsFactory:(id)photoSettingsFactory error:(NSError **)error; /// Informs the Dart side of the plugin of the current camera state and capabilities. 
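Injecting the settings factory lets a test observe exactly which photo settings FLTCam requests, without OCMock. A rough usage sketch built on the MockCapturePhotoSettingsFactory stubs added earlier in this patch; the exact stub block signature and the assertion are illustrative, not part of the patch.

MockCapturePhotoSettingsFactory *factory = [[MockCapturePhotoSettingsFactory alloc] init];
factory.createPhotoSettingsWithFormatStub = ^id<FLTCapturePhotoSettings>(NSDictionary *format) {
  // When the HEIF file format is selected and HEVC is available, FLTCam is
  // expected to request settings with the HEVC codec (see captureToFileWithCompletion:).
  XCTAssertEqualObjects(format[AVVideoCodecKey], AVVideoCodecTypeHEVC);
  return [[MockCapturePhotoSettings alloc] init];
};
// The factory is then handed to FLTCam through the new photoSettingsFactory: initializer argument.
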
diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h index 1a103e4f351d..1ed4397320e6 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/Protocols/FLTCapturePhotoSettings.h @@ -16,8 +16,16 @@ NS_ASSUME_NONNULL_BEGIN - (void)setHighResolutionPhotoEnabled:(BOOL)enabled; @end +@protocol FLTCapturePhotoSettingsFactory +- (id)createPhotoSettings; +- (id)createPhotoSettingsWithFormat:(NSDictionary *)format; +@end + @interface FLTDefaultCapturePhotoSettings : NSObject - (instancetype)initWithSettings:(AVCapturePhotoSettings *)settings; @end +@interface FLTDefaultCapturePhotoSettingsFactory : NSObject +@end + NS_ASSUME_NONNULL_END From 7ac93396366cba446e7819e078e0a2b1ce544e05 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Mon, 23 Dec 2024 11:39:57 +0100 Subject: [PATCH 13/16] Refactor CameraTestUtils --- .../ios/Runner.xcodeproj/project.pbxproj | 78 ++++++++- .../RunnerTests/CameraSessionPresetsTests.m | 48 ++++-- .../ios/RunnerTests/CameraSettingsTests.m | 12 +- .../example/ios/RunnerTests/CameraTestUtils.h | 44 +---- .../example/ios/RunnerTests/CameraTestUtils.m | 160 +++++------------- .../ios/RunnerTests/FLTCamPhotoCaptureTests.m | 91 +++++----- .../ios/RunnerTests/FLTCamSampleBufferTests.m | 21 ++- .../Mocks/MockCapturePhotoSettings.h | 3 +- .../Mocks/MockCapturePhotoSettings.m | 10 +- .../example/ios/RunnerTests/StreamingTest.m | 4 +- .../camera_avfoundation/CameraPlugin.m | 35 +--- .../Sources/camera_avfoundation/FLTCam.m | 58 +++---- .../camera_avfoundation/FLTCamConfiguration.m | 50 ++++++ .../Protocols/FLTCapturePhotoSettings.m | 9 +- .../include/CameraPlugin.modulemap | 1 + .../camera_avfoundation/CameraPlugin.h | 1 - .../include/camera_avfoundation/FLTCam.h | 38 +---- .../camera_avfoundation/FLTCamConfiguration.h | 62 +++++++ 18 files changed, 387 insertions(+), 338 deletions(-) create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamConfiguration.m create mode 100644 packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamConfiguration.h diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index 257a897fa208..eae4b99a59d9 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 70; + objectVersion = 54; objects = { /* Begin PBXBuildFile section */ @@ -17,6 +17,16 @@ 78A318202AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage in Frameworks */ = {isa = PBXBuildFile; productRef = 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */; }; 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */; }; 7F87E80E2D03283E00A3549C /* CameraExposureTests.m in 
Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */; }; + 7FF2D09B2D15AB7A0092C411 /* MockAssetWriter.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0872D15AB7A0092C411 /* MockAssetWriter.m */; }; + 7FF2D09C2D15AB7A0092C411 /* MockCameraDeviceDiscovery.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0892D15AB7A0092C411 /* MockCameraDeviceDiscovery.m */; }; + 7FF2D09D2D15AB7A0092C411 /* MockCaptureConnection.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D08B2D15AB7A0092C411 /* MockCaptureConnection.m */; }; + 7FF2D09E2D15AB7A0092C411 /* MockCaptureDeviceController.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D08D2D15AB7A0092C411 /* MockCaptureDeviceController.m */; }; + 7FF2D09F2D15AB7A0092C411 /* MockCapturePhotoOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D08F2D15AB7A0092C411 /* MockCapturePhotoOutput.m */; }; + 7FF2D0A02D15AB7A0092C411 /* MockCapturePhotoSettings.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0912D15AB7A0092C411 /* MockCapturePhotoSettings.m */; }; + 7FF2D0A12D15AB7A0092C411 /* MockCaptureSession.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0932D15AB7A0092C411 /* MockCaptureSession.m */; }; + 7FF2D0A22D15AB7A0092C411 /* MockDeviceOrientationProvider.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0952D15AB7A0092C411 /* MockDeviceOrientationProvider.m */; }; + 7FF2D0A32D15AB7A0092C411 /* MockEventChannel.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0972D15AB7A0092C411 /* MockEventChannel.m */; }; + 7FF2D0A42D15AB7A0092C411 /* MockPhotoData.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0992D15AB7A0092C411 /* MockPhotoData.m */; }; 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; @@ -80,6 +90,26 @@ 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraSettingsTests.m; sourceTree = ""; }; 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraExposureTests.m; sourceTree = ""; }; + 7FF2D0862D15AB7A0092C411 /* MockAssetWriter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockAssetWriter.h; sourceTree = ""; }; + 7FF2D0872D15AB7A0092C411 /* MockAssetWriter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockAssetWriter.m; sourceTree = ""; }; + 7FF2D0882D15AB7A0092C411 /* MockCameraDeviceDiscovery.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCameraDeviceDiscovery.h; sourceTree = ""; }; + 7FF2D0892D15AB7A0092C411 /* MockCameraDeviceDiscovery.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCameraDeviceDiscovery.m; sourceTree = ""; }; + 7FF2D08A2D15AB7A0092C411 /* MockCaptureConnection.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureConnection.h; sourceTree = ""; }; + 7FF2D08B2D15AB7A0092C411 /* 
MockCaptureConnection.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureConnection.m; sourceTree = ""; }; + 7FF2D08C2D15AB7A0092C411 /* MockCaptureDeviceController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureDeviceController.h; sourceTree = ""; }; + 7FF2D08D2D15AB7A0092C411 /* MockCaptureDeviceController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureDeviceController.m; sourceTree = ""; }; + 7FF2D08E2D15AB7A0092C411 /* MockCapturePhotoOutput.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCapturePhotoOutput.h; sourceTree = ""; }; + 7FF2D08F2D15AB7A0092C411 /* MockCapturePhotoOutput.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCapturePhotoOutput.m; sourceTree = ""; }; + 7FF2D0902D15AB7A0092C411 /* MockCapturePhotoSettings.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCapturePhotoSettings.h; sourceTree = ""; }; + 7FF2D0912D15AB7A0092C411 /* MockCapturePhotoSettings.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCapturePhotoSettings.m; sourceTree = ""; }; + 7FF2D0922D15AB7A0092C411 /* MockCaptureSession.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockCaptureSession.h; sourceTree = ""; }; + 7FF2D0932D15AB7A0092C411 /* MockCaptureSession.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockCaptureSession.m; sourceTree = ""; }; + 7FF2D0942D15AB7A0092C411 /* MockDeviceOrientationProvider.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockDeviceOrientationProvider.h; sourceTree = ""; }; + 7FF2D0952D15AB7A0092C411 /* MockDeviceOrientationProvider.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockDeviceOrientationProvider.m; sourceTree = ""; }; + 7FF2D0962D15AB7A0092C411 /* MockEventChannel.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockEventChannel.h; sourceTree = ""; }; + 7FF2D0972D15AB7A0092C411 /* MockEventChannel.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockEventChannel.m; sourceTree = ""; }; + 7FF2D0982D15AB7A0092C411 /* MockPhotoData.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockPhotoData.h; sourceTree = ""; }; + 7FF2D0992D15AB7A0092C411 /* MockPhotoData.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MockPhotoData.m; sourceTree = ""; }; 8FE1D7C55B3760E3991BE41E /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; }; 93DE3DA611CB15AE1AF7956C /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; @@ -104,10 +134,6 @@ E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPreviewPauseTests.m; sourceTree = ""; }; /* End PBXFileReference section */ -/* Begin PBXFileSystemSynchronizedRootGroup section */ - 
7FA5D71B2D140E5B0041B9E2 /* Mocks */ = {isa = PBXFileSystemSynchronizedRootGroup; explicitFileTypes = {}; explicitFolders = (); path = Mocks; sourceTree = ""; }; -/* End PBXFileSystemSynchronizedRootGroup section */ - /* Begin PBXFrameworksBuildPhase section */ 03BB76652665316900CE5A93 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; @@ -132,7 +158,7 @@ 03BB76692665316900CE5A93 /* RunnerTests */ = { isa = PBXGroup; children = ( - 7FA5D71B2D140E5B0041B9E2 /* Mocks */, + 7FF2D09A2D15AB7A0092C411 /* Mocks */, 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */, 03BB766A2665316900CE5A93 /* CameraFocusTests.m */, 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */, @@ -166,6 +192,33 @@ name = Frameworks; sourceTree = ""; }; + 7FF2D09A2D15AB7A0092C411 /* Mocks */ = { + isa = PBXGroup; + children = ( + 7FF2D0862D15AB7A0092C411 /* MockAssetWriter.h */, + 7FF2D0872D15AB7A0092C411 /* MockAssetWriter.m */, + 7FF2D0882D15AB7A0092C411 /* MockCameraDeviceDiscovery.h */, + 7FF2D0892D15AB7A0092C411 /* MockCameraDeviceDiscovery.m */, + 7FF2D08A2D15AB7A0092C411 /* MockCaptureConnection.h */, + 7FF2D08B2D15AB7A0092C411 /* MockCaptureConnection.m */, + 7FF2D08C2D15AB7A0092C411 /* MockCaptureDeviceController.h */, + 7FF2D08D2D15AB7A0092C411 /* MockCaptureDeviceController.m */, + 7FF2D08E2D15AB7A0092C411 /* MockCapturePhotoOutput.h */, + 7FF2D08F2D15AB7A0092C411 /* MockCapturePhotoOutput.m */, + 7FF2D0902D15AB7A0092C411 /* MockCapturePhotoSettings.h */, + 7FF2D0912D15AB7A0092C411 /* MockCapturePhotoSettings.m */, + 7FF2D0922D15AB7A0092C411 /* MockCaptureSession.h */, + 7FF2D0932D15AB7A0092C411 /* MockCaptureSession.m */, + 7FF2D0942D15AB7A0092C411 /* MockDeviceOrientationProvider.h */, + 7FF2D0952D15AB7A0092C411 /* MockDeviceOrientationProvider.m */, + 7FF2D0962D15AB7A0092C411 /* MockEventChannel.h */, + 7FF2D0972D15AB7A0092C411 /* MockEventChannel.m */, + 7FF2D0982D15AB7A0092C411 /* MockPhotoData.h */, + 7FF2D0992D15AB7A0092C411 /* MockPhotoData.m */, + ); + path = Mocks; + sourceTree = ""; + }; 9740EEB11CF90186004384FC /* Flutter */ = { isa = PBXGroup; children = ( @@ -250,9 +303,6 @@ dependencies = ( 03BB766E2665316900CE5A93 /* PBXTargetDependency */, ); - fileSystemSynchronizedGroups = ( - 7FA5D71B2D140E5B0041B9E2 /* Mocks */, - ); name = RunnerTests; packageProductDependencies = ( ); @@ -467,6 +517,16 @@ E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */, 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */, E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */, + 7FF2D09B2D15AB7A0092C411 /* MockAssetWriter.m in Sources */, + 7FF2D09C2D15AB7A0092C411 /* MockCameraDeviceDiscovery.m in Sources */, + 7FF2D09D2D15AB7A0092C411 /* MockCaptureConnection.m in Sources */, + 7FF2D09E2D15AB7A0092C411 /* MockCaptureDeviceController.m in Sources */, + 7FF2D09F2D15AB7A0092C411 /* MockCapturePhotoOutput.m in Sources */, + 7FF2D0A02D15AB7A0092C411 /* MockCapturePhotoSettings.m in Sources */, + 7FF2D0A12D15AB7A0092C411 /* MockCaptureSession.m in Sources */, + 7FF2D0A22D15AB7A0092C411 /* MockDeviceOrientationProvider.m in Sources */, + 7FF2D0A32D15AB7A0092C411 /* MockEventChannel.m in Sources */, + 7FF2D0A42D15AB7A0092C411 /* MockPhotoData.m in Sources */, E0B0D2BB27DFF2AF00E71E4B /* FLTCameraPermissionManagerTests.m in Sources */, E01EE4A82799F3A5008C1950 /* QueueUtilsTests.m in Sources */, ); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m 
b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m index 766d287dc8eb..4c270cd864b6 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSessionPresetsTests.m @@ -23,26 +23,33 @@ - (void)testResolutionPresetWithBestFormat_mustUpdateCaptureSessionPreset { NSString *expectedPreset = AVCaptureSessionPresetInputPriority; XCTestExpectation *presetExpectation = [self expectationWithDescription:@"Expected preset set"]; - MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMax); + MockCaptureDeviceController *captureDeviceMock = [[MockCaptureDeviceController alloc] init]; MockCaptureDeviceFormat *fakeFormat = [[MockCaptureDeviceFormat alloc] init]; captureDeviceMock.formats = @[ fakeFormat ]; captureDeviceMock.activeFormat = fakeFormat; + configuration.captureDeviceFactory = ^id _Nonnull { + return captureDeviceMock; + }; + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { if (preset == expectedPreset) { [presetExpectation fulfill]; } }; + configuration.videoCaptureSession = videoSessionMock; - FLTCreateCamWithVideoDimensionsForFormat(videoSessionMock, FCPPlatformResolutionPresetMax, - captureDeviceMock, - ^CMVideoDimensions(id format) { - CMVideoDimensions videoDimensions; - videoDimensions.width = 1; - videoDimensions.height = 1; - return videoDimensions; - }); + configuration.videoDimensionsForFormat = ^CMVideoDimensions(id format) { + CMVideoDimensions videoDimensions; + videoDimensions.width = 1; + videoDimensions.height = 1; + return videoDimensions; + }; + + FLTCreateCamWithConfiguration(configuration); [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -51,17 +58,24 @@ - (void)testResolutionPresetWithCanSetSessionPresetMax_mustUpdateCaptureSessionP NSString *expectedPreset = AVCaptureSessionPreset3840x2160; XCTestExpectation *expectation = [self expectationWithDescription:@"Expected preset set"]; + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; // Make sure that setting resolution preset for session always succeeds. 
videoSessionMock.mockCanSetSessionPreset = YES; - videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { if (preset == expectedPreset) { [expectation fulfill]; } }; - FLTCreateCamWithVideoCaptureSession(videoSessionMock, FCPPlatformResolutionPresetMax); + configuration.videoCaptureSession = videoSessionMock; + configuration.mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMax); + configuration.captureDeviceFactory = ^id _Nonnull { + return [[MockCaptureDeviceController alloc] init]; + }; + + FLTCreateCamWithConfiguration(configuration); [self waitForExpectationsWithTimeout:1 handler:nil]; } @@ -70,11 +84,11 @@ - (void)testResolutionPresetWithCanSetSessionPresetUltraHigh_mustUpdateCaptureSe NSString *expectedPreset = AVCaptureSessionPreset3840x2160; XCTestExpectation *expectation = [self expectationWithDescription:@"Expected preset set"]; - MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; // Make sure that setting resolution preset for session always succeeds. videoSessionMock.mockCanSetSessionPreset = YES; - // Expect that setting "ultraHigh" resolutionPreset correctly updates videoCaptureSession. videoSessionMock.setSessionPresetStub = ^(AVCaptureSessionPreset _Nonnull preset) { if (preset == expectedPreset) { @@ -82,7 +96,13 @@ - (void)testResolutionPresetWithCanSetSessionPresetUltraHigh_mustUpdateCaptureSe } }; - FLTCreateCamWithVideoCaptureSession(videoSessionMock, FCPPlatformResolutionPresetUltraHigh); + configuration.videoCaptureSession = videoSessionMock; + configuration.mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetUltraHigh); + configuration.captureDeviceFactory = ^id _Nonnull { + return [[MockCaptureDeviceController alloc] init]; + }; + + FLTCreateCamWithConfiguration(configuration); [self waitForExpectationsWithTimeout:1 handler:nil]; } diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m index e09c475d4808..cc4daf6fda9c 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraSettingsTests.m @@ -143,8 +143,11 @@ - (void)testSettings_shouldPassConfigurationToCameraDeviceAndWriter { TestMediaSettingsAVWrapper *injectedWrapper = [[TestMediaSettingsAVWrapper alloc] initWithTestCase:self]; - FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_create("test", NULL), settings, injectedWrapper, nil, nil, nil, nil); + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.mediaSettings = settings; + configuration.mediaSettingsWrapper = injectedWrapper; + + FLTCam *camera = FLTCreateCamWithConfiguration(configuration); // Expect FPS configuration is passed to camera device. 
[self waitForExpectations:@[ @@ -213,9 +216,10 @@ - (void)testSettings_ShouldSelectFormatWhichSupports60FPS { videoBitrate:@(gTestVideoBitrate) audioBitrate:@(gTestAudioBitrate) enableAudio:gTestEnableAudio]; + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.mediaSettings = settings; - FLTCam *camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_create("test", NULL), settings, nil, nil, nil, nil, nil); + FLTCam *camera = FLTCreateCamWithConfiguration(configuration); id range = camera.captureDevice.activeFormat.videoSupportedFrameRateRanges[0]; XCTAssertLessThanOrEqual(range.minFrameRate, 60); diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h index 70cfd5ca3a49..dd00c6b24ff8 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h @@ -9,42 +9,14 @@ NS_ASSUME_NONNULL_BEGIN -/// Creates an `FLTCam` that runs its capture session operations on a given queue. -/// @param captureSessionQueue the capture session queue -/// @param mediaSettings media settings configuration parameters -/// @param mediaSettingsAVWrapper provider to perform media settings operations (for unit test -/// dependency injection). -/// @param captureDeviceFactory a callback to create capture device instances -/// @return an FLTCam object. -extern FLTCam *_Nullable FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_t _Nullable captureSessionQueue, - FCPPlatformMediaSettings *_Nullable mediaSettings, - FLTCamMediaSettingsAVWrapper *_Nullable mediaSettingsAVWrapper, - CaptureDeviceFactory _Nullable captureDeviceFactory, - id _Nullable capturePhotoOutput, - id _Nullable assetWriter, - id _Nullable pixelBufferAdaptor); - -extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue); - -/// Creates an `FLTCam` with a given captureSession and resolutionPreset -/// @param captureSession AVCaptureSession for video -/// @param resolutionPreset preset for camera's captureSession resolution -/// @return an FLTCam object. -extern FLTCam *FLTCreateCamWithVideoCaptureSession(id captureSession, - FCPPlatformResolutionPreset resolutionPreset); - -/// Creates an `FLTCam` with a given captureSession and resolutionPreset. -/// Allows to inject a capture device and a block to compute the video dimensions. -/// @param captureSession AVCaptureSession for video -/// @param resolutionPreset preset for camera's captureSession resolution -/// @param captureDevice AVCaptureDevice to be used -/// @param videoDimensionsForFormat custom code to determine video dimensions -/// @return an FLTCam object. -extern FLTCam *FLTCreateCamWithVideoDimensionsForFormat( - id captureSession, FCPPlatformResolutionPreset resolutionPreset, - id captureDevice, - VideoDimensionsForFormat videoDimensionsForFormat); +extern FCPPlatformMediaSettings *FCPGetDefaultMediaSettings( + FCPPlatformResolutionPreset resolutionPreset); + +/// Creates a test `FLTCamConfiguration` that has a default mock setup. +extern FLTCamConfiguration *FLTCreateTestConfiguration(void); + +/// Creates an `FLTCam` with a test configuration. +extern FLTCam *FLTCreateCamWithConfiguration(FLTCamConfiguration *configuration); /// Creates a test sample buffer. /// @return a test sample buffer. 
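The slimmed-down header leaves tests with a single pattern: create the shared configuration, override only the pieces under test, and build the camera from it. A condensed sketch of that pattern, mirroring the updated CameraSessionPresetsTests in this patch (the chosen preset and the final assertion step are illustrative):

FLTCamConfiguration *configuration = FLTCreateTestConfiguration();
configuration.mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMax);

MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init];
videoSessionMock.mockCanSetSessionPreset = YES;
configuration.videoCaptureSession = videoSessionMock;

FLTCam *cam = FLTCreateCamWithConfiguration(configuration);
// Exercise `cam` and assert on the injected mocks, as the tests above do.
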
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m index 931b30be014b..b445701d01e6 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m @@ -9,11 +9,10 @@ #import "MockAssetWriter.h" #import "MockCaptureDeviceController.h" -#import "MockCaptureSession.h" #import "MockCapturePhotoSettings.h" +#import "MockCaptureSession.h" -static FCPPlatformMediaSettings *FCPGetDefaultMediaSettings( - FCPPlatformResolutionPreset resolutionPreset) { +FCPPlatformMediaSettings *FCPGetDefaultMediaSettings(FCPPlatformResolutionPreset resolutionPreset) { return [FCPPlatformMediaSettings makeWithResolutionPreset:resolutionPreset framesPerSecond:nil videoBitrate:nil @@ -21,31 +20,13 @@ enableAudio:YES]; } -FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue) { - return FLTCreateCamWithCaptureSessionQueueAndMediaSettings(captureSessionQueue, nil, nil, nil, - nil, nil, nil); -} - -FLTCam *FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - dispatch_queue_t captureSessionQueue, FCPPlatformMediaSettings *mediaSettings, - FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper, CaptureDeviceFactory captureDeviceFactory, - id capturePhotoOutput, id assetWriter, - id pixelBufferAdaptor) { - if (!mediaSettings) { - mediaSettings = FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMedium); - } - - if (!mediaSettingsAVWrapper) { - mediaSettingsAVWrapper = [[FLTCamMediaSettingsAVWrapper alloc] init]; - } - - if (!assetWriter) { - assetWriter = [[MockAssetWriter alloc] init]; - } - - if (!pixelBufferAdaptor) { - pixelBufferAdaptor = [[MockPixelBufferAdaptor alloc] init]; - } +FLTCamConfiguration *FLTCreateTestConfiguration(void) { + FCPPlatformMediaSettings *mediaSettings = + FCPGetDefaultMediaSettings(FCPPlatformResolutionPresetMedium); + FLTCamMediaSettingsAVWrapper *mediaSettingsAVWrapper = + [[FLTCamMediaSettingsAVWrapper alloc] init]; + MockAssetWriter *assetWriter = [[MockAssetWriter alloc] init]; + MockPixelBufferAdaptor *pixelBufferAdaptor = [[MockPixelBufferAdaptor alloc] init]; MockCaptureSession *videoSessionMock = [[MockCaptureSession alloc] init]; videoSessionMock.mockCanSetSessionPreset = YES; @@ -71,102 +52,37 @@ mockDevice.activeFormat = captureDeviceFormatMock1; mockDevice.inputToReturn = inputMock; - id fltCam = [[FLTCam alloc] initWithMediaSettings:mediaSettings - mediaSettingsAVWrapper:mediaSettingsAVWrapper - orientation:UIDeviceOrientationPortrait - videoCaptureSession:videoSessionMock - audioCaptureSession:audioSessionMock - captureSessionQueue:captureSessionQueue - captureDeviceFactory:captureDeviceFactory ?: ^id(void) { - return mockDevice; - } - audioCaptureDeviceFactory:captureDeviceFactory ?: ^id(void) { - return mockDevice; - } - videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { - return CMVideoFormatDescriptionGetDimensions(format.formatDescription); - } - capturePhotoOutput:capturePhotoOutput - assetWriterFactory:^id _Nonnull(NSURL *url, AVFileType fileType, NSError * _Nullable __autoreleasing * _Nullable error) { - return assetWriter; - } - pixelBufferAdaptorFactory:^id _Nonnull(id _Nonnull writerInput, NSDictionary * _Nullable source) { - return pixelBufferAdaptor; - } - photoSettingsFactory: [[MockCapturePhotoSettingsFactory alloc] init] - error:nil]; - - return fltCam; + FLTCamConfiguration 
*configuration = + [[FLTCamConfiguration alloc] initWithMediaSettings:mediaSettings + mediaSettingsWrapper:mediaSettingsAVWrapper + captureDeviceFactory:^id(void) { + return mockDevice; + } + captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) + captureSessionFactory:^id _Nonnull { + return videoSessionMock; + } + audioCaptureDeviceFactory:^id _Nonnull { + return mockDevice; + }]; + configuration.capturePhotoOutput = + [[FLTDefaultCapturePhotoOutput alloc] initWithPhotoOutput:[AVCapturePhotoOutput new]]; + configuration.orientation = UIDeviceOrientationPortrait; + configuration.assetWriterFactory = ^id _Nonnull( + NSURL *_Nonnull url, AVFileType _Nonnull fileType, NSError **error) { + return assetWriter; + }; + configuration.pixelBufferAdaptorFactory = ^id _Nonnull( + id _Nonnull input, NSDictionary *_Nullable settings) { + return pixelBufferAdaptor; + }; + configuration.photoSettingsFactory = [[MockCapturePhotoSettingsFactory alloc] init]; + + return configuration; } -FLTCam *FLTCreateCamWithVideoCaptureSession(id captureSession, - FCPPlatformResolutionPreset resolutionPreset) { - MockCaptureSession *audioSessionMock = [[MockCaptureSession alloc] init]; - audioSessionMock.mockCanSetSessionPreset = YES; - - return [[FLTCam alloc] initWithMediaSettings:FCPGetDefaultMediaSettings(resolutionPreset) - mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init] - orientation:UIDeviceOrientationPortrait - videoCaptureSession:captureSession - audioCaptureSession:audioSessionMock - captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) - captureDeviceFactory:^id(void) { - return [[MockCaptureDeviceController alloc] init]; - } - audioCaptureDeviceFactory:^id(void) { - return [[MockCaptureDeviceController alloc] init]; - } - videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) { - return CMVideoFormatDescriptionGetDimensions(format.formatDescription); - } - capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] - initWithPhotoOutput:[AVCapturePhotoOutput new]] - assetWriterFactory:^id _Nonnull( - NSURL *url, AVFileType fileType, NSError *_Nullable __autoreleasing *_Nullable error) { - return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; - } - pixelBufferAdaptorFactory:^id _Nonnull( - id _Nonnull writerInput, - NSDictionary *_Nullable source) { - return [[MockPixelBufferAdaptor alloc] init]; - } - photoSettingsFactory: [[MockCapturePhotoSettingsFactory alloc] init] - error:nil]; -} - -FLTCam *FLTCreateCamWithVideoDimensionsForFormat( - id captureSession, FCPPlatformResolutionPreset resolutionPreset, - id captureDevice, - VideoDimensionsForFormat videoDimensionsForFormat) { - MockCaptureSession *audioSessionMock = [[MockCaptureSession alloc] init]; - audioSessionMock.mockCanSetSessionPreset = YES; - - return [[FLTCam alloc] initWithMediaSettings:FCPGetDefaultMediaSettings(resolutionPreset) - mediaSettingsAVWrapper:[[FLTCamMediaSettingsAVWrapper alloc] init] - orientation:UIDeviceOrientationPortrait - videoCaptureSession:captureSession - audioCaptureSession:audioSessionMock - captureSessionQueue:dispatch_queue_create("capture_session_queue", NULL) - captureDeviceFactory:^id(void) { - return captureDevice; - } - audioCaptureDeviceFactory:^id(void) { - return [[MockCaptureDeviceController alloc] init]; - } - videoDimensionsForFormat:videoDimensionsForFormat - capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] - initWithPhotoOutput:[AVCapturePhotoOutput new]] - assetWriterFactory:^id _Nonnull( - 
NSURL *url, AVFileType fileType, NSError *_Nullable __autoreleasing *_Nullable error) { - return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; - } - pixelBufferAdaptorFactory:^id _Nonnull( - id _Nonnull writerInput, - NSDictionary *_Nullable source) { - return [[MockPixelBufferAdaptor alloc] init]; - } - photoSettingsFactory: [[MockCapturePhotoSettingsFactory alloc] init] - error:nil]; +FLTCam *FLTCreateCamWithConfiguration(FLTCamConfiguration *configuration) { + return [[FLTCam alloc] initWithConfiguration:configuration error:nil]; } CMSampleBufferRef FLTCreateTestSampleBuffer(void) { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m index bb7960c7e395..212f4ae6ce95 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamPhotoCaptureTests.m @@ -14,27 +14,35 @@ /// Includes test cases related to photo capture operations for FLTCam class. @interface FLTCamPhotoCaptureTests : XCTestCase - +@property(readonly, nonatomic) FLTCam *cam; +@property(readonly, nonatomic) dispatch_queue_t captureSessionQueue; @end @implementation FLTCamPhotoCaptureTests +- (void)setUp { + _captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); + dispatch_queue_set_specific(_captureSessionQueue, FLTCaptureSessionQueueSpecific, + (void *)FLTCaptureSessionQueueSpecific, NULL); + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.captureSessionQueue = _captureSessionQueue; + _cam = FLTCreateCamWithConfiguration(configuration); +} + - (void)testCaptureToFile_mustReportErrorToResultIfSavePhotoDelegateCompletionsWithError { XCTestExpectation *errorExpectation = [self expectationWithDescription: @"Must send error to result if save photo delegate completes with error."]; - dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); - dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, - (void *)FLTCaptureSessionQueueSpecific, NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - NSError *error = [NSError errorWithDomain:@"test" code:0 userInfo:nil]; + __weak typeof(self) weakSelf = self; + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; mockOutput.capturePhotoWithSettingsStub = ^(id settings, id captureDelegate) { - FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; + FLTSavePhotoDelegate *delegate = + weakSelf.cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ @@ -42,11 +50,11 @@ - (void)testCaptureToFile_mustReportErrorToResultIfSavePhotoDelegateCompletionsW }); }; - cam.capturePhotoOutput = mockOutput; + _cam.capturePhotoOutput = mockOutput; // `FLTCam::captureToFile` runs on capture session queue. 
- dispatch_async(captureSessionQueue, ^{ - [cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) { + dispatch_async(_captureSessionQueue, ^{ + [weakSelf.cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) { XCTAssertNil(result); XCTAssertNotNil(error); [errorExpectation fulfill]; @@ -61,28 +69,26 @@ - (void)testCaptureToFile_mustReportPathToResultIfSavePhotoDelegateCompletionsWi [self expectationWithDescription: @"Must send file path to result if save photo delegate completes with file path."]; - dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); - dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, - (void *)FLTCaptureSessionQueueSpecific, NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - NSString *filePath = @"test"; + __weak typeof(self) weakSelf = self; + MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; mockOutput.capturePhotoWithSettingsStub = ^(id settings, id captureDelegate) { - FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; + FLTSavePhotoDelegate *delegate = + weakSelf.cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(filePath, nil); }); }; - cam.capturePhotoOutput = mockOutput; + _cam.capturePhotoOutput = mockOutput; // `FLTCam::captureToFile` runs on capture session queue. - dispatch_async(captureSessionQueue, ^{ - [cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) { + dispatch_async(_captureSessionQueue, ^{ + [weakSelf.cam captureToFileWithCompletion:^(NSString *result, FlutterError *error) { XCTAssertEqual(result, filePath); [pathExpectation fulfill]; }]; @@ -94,29 +100,29 @@ - (void)testCaptureToFile_mustReportFileExtensionWithHeifWhenHEVCIsAvailableAndF XCTestExpectation *expectation = [self expectationWithDescription: @"Test must set extension to heif if availablePhotoCodecTypes contains HEVC."]; - dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); - dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, - (void *)FLTCaptureSessionQueueSpecific, NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - [cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; + [_cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; + + __weak typeof(self) weakSelf = self; + // Set availablePhotoCodecTypes to HEVC mockOutput.availablePhotoCodecTypes = @[ AVVideoCodecTypeHEVC ]; mockOutput.capturePhotoWithSettingsStub = ^(id settings, id photoDelegate) { - FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; + FLTSavePhotoDelegate *delegate = + weakSelf.cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ delegate.completionHandler(delegate.filePath, nil); }); }; - cam.capturePhotoOutput = mockOutput; + _cam.capturePhotoOutput = mockOutput; // `FLTCam::captureToFile` runs on capture session queue. 
- dispatch_async(captureSessionQueue, ^{ - [cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) { + dispatch_async(_captureSessionQueue, ^{ + [weakSelf.cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) { XCTAssertEqualObjects([filePath pathExtension], @"heif"); [expectation fulfill]; }]; @@ -128,16 +134,15 @@ - (void)testCaptureToFile_mustReportFileExtensionWithJpgWhenHEVCNotAvailableAndF XCTestExpectation *expectation = [self expectationWithDescription: @"Test must set extension to jpg if availablePhotoCodecTypes does not contain HEVC."]; - dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); - dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, - (void *)FLTCaptureSessionQueueSpecific, NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); - [cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; + [_cam setImageFileFormat:FCPPlatformImageFileFormatHeif]; + + __weak typeof(self) weakSelf = self; MockCapturePhotoOutput *mockOutput = [[MockCapturePhotoOutput alloc] init]; mockOutput.capturePhotoWithSettingsStub = ^(id settings, id photoDelegate) { - FLTSavePhotoDelegate *delegate = cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; + FLTSavePhotoDelegate *delegate = + weakSelf.cam.inProgressSavePhotoDelegates[@(settings.uniqueID)]; // Completion runs on IO queue. dispatch_queue_t ioQueue = dispatch_queue_create("io_queue", NULL); dispatch_async(ioQueue, ^{ @@ -145,11 +150,11 @@ - (void)testCaptureToFile_mustReportFileExtensionWithJpgWhenHEVCNotAvailableAndF }); }; - cam.capturePhotoOutput = mockOutput; + _cam.capturePhotoOutput = mockOutput; // `FLTCam::captureToFile` runs on capture session queue. - dispatch_async(captureSessionQueue, ^{ - [cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) { + dispatch_async(_captureSessionQueue, ^{ + [weakSelf.cam captureToFileWithCompletion:^(NSString *filePath, FlutterError *error) { XCTAssertEqualObjects([filePath pathExtension], @"jpg"); [expectation fulfill]; }]; @@ -176,12 +181,12 @@ - (void)testCaptureToFile_handlesTorchMode { dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, (void *)FLTCaptureSessionQueueSpecific, NULL); - FLTCam *cam = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - captureSessionQueue, nil, nil, - ^id(void) { - return captureDeviceMock; - }, - nil, nil, nil); + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.captureSessionQueue = captureSessionQueue; + configuration.captureDeviceFactory = ^id(void) { + return captureDeviceMock; + }; + FLTCam *cam = FLTCreateCamWithConfiguration(configuration); NSString *filePath = @"test"; diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m index 67d5c005c1e7..72aaecddfc4d 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m @@ -85,14 +85,27 @@ - (void)setUp { _adaptorMock = [[MockPixelBufferAdaptor alloc] init]; _mediaSettingsWrapper = [[FakeMediaSettingsAVWrapper alloc] initWithInputMock:_inputMock]; - _camera = FLTCreateCamWithCaptureSessionQueueAndMediaSettings( - _captureSessionQueue, + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + 
configuration.captureSessionQueue = _captureSessionQueue; + configuration.mediaSettings = [FCPPlatformMediaSettings makeWithResolutionPreset:FCPPlatformResolutionPresetMedium framesPerSecond:nil videoBitrate:nil audioBitrate:nil - enableAudio:YES], - _mediaSettingsWrapper, nil, nil, _writerMock, _adaptorMock); + enableAudio:YES]; + configuration.mediaSettingsWrapper = _mediaSettingsWrapper; + + __weak typeof(self) weakSelf = self; + configuration.assetWriterFactory = + ^id _Nonnull(NSURL *url, AVFileType fileType, NSError **error) { + return weakSelf.writerMock; + }; + configuration.pixelBufferAdaptorFactory = ^id _Nonnull( + id input, NSDictionary *settings) { + return weakSelf.adaptorMock; + }; + + _camera = FLTCreateCamWithConfiguration(configuration); } - (void)testSampleBufferCallbackQueueMustBeCaptureSessionQueue { diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h index c5c7b7b4aa5b..066cb94cb531 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.h @@ -15,5 +15,6 @@ @interface MockCapturePhotoSettingsFactory : NSObject @property(nonatomic, copy) id (^createPhotoSettingsStub)(void); -@property(nonatomic, copy) id (^createPhotoSettingsWithFormatStub)(NSDictionary *); +@property(nonatomic, copy) id (^createPhotoSettingsWithFormatStub) + (NSDictionary *); @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m index 366d291934e2..851334265cba 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/Mocks/MockCapturePhotoSettings.m @@ -7,15 +7,17 @@ @implementation MockCapturePhotoSettings @end - @implementation MockCapturePhotoSettingsFactory - (id)createPhotoSettings { - return self.createPhotoSettingsStub ? self.createPhotoSettingsStub() : [[MockCapturePhotoSettings alloc] init]; + return self.createPhotoSettingsStub ? self.createPhotoSettingsStub() + : [[MockCapturePhotoSettings alloc] init]; } -- (id)createPhotoSettingsWithFormat:(NSDictionary *)format { - return self.createPhotoSettingsWithFormatStub ? self.createPhotoSettingsWithFormatStub(format) : [[MockCapturePhotoSettings alloc] init]; +- (id)createPhotoSettingsWithFormat: + (NSDictionary *)format { + return self.createPhotoSettingsWithFormatStub ? 
self.createPhotoSettingsWithFormatStub(format) + : [[MockCapturePhotoSettings alloc] init]; } @end diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m index b0c504181d69..6974235bd7b7 100644 --- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/StreamingTest.m @@ -66,7 +66,9 @@ - (void)setUp { dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL); _mockStreamHandler = [[MockImageStreamHandler alloc] initWithCaptureSessionQueue:captureSessionQueue]; - _camera = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); + FLTCamConfiguration *configuration = FLTCreateTestConfiguration(); + configuration.captureSessionQueue = captureSessionQueue; + _camera = FLTCreateCamWithConfiguration(configuration); _sampleBuffer = FLTCreateTestSampleBuffer(); _messengerMock = [[MockFlutterBinaryMessenger alloc] init]; } diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m index 8daa442eeb5d..558ccd245c14 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/CameraPlugin.m @@ -500,39 +500,20 @@ - (void)sessionQueueCreateCameraWithName:(NSString *)name NSError *error; __weak typeof(self) weakSelf = self; - FLTCam *cam = [[FLTCam alloc] initWithMediaSettings:settings - mediaSettingsAVWrapper:mediaSettingsAVWrapper - orientation:[[UIDevice currentDevice] orientation] - videoCaptureSession:_captureSessionFactory() - audioCaptureSession:_captureSessionFactory() - captureSessionQueue:self.captureSessionQueue + + FLTCamConfiguration *configuration = [[FLTCamConfiguration alloc] initWithMediaSettings:settings + mediaSettingsWrapper:mediaSettingsAVWrapper captureDeviceFactory:^id _Nonnull { return weakSelf.captureDeviceFactory(name); } + captureSessionQueue:self.captureSessionQueue + captureSessionFactory:_captureSessionFactory audioCaptureDeviceFactory:^id _Nonnull { return [[FLTDefaultCaptureDeviceController alloc] initWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]]; - } - videoDimensionsForFormat:^CMVideoDimensions(id _Nonnull format) { - return CMVideoFormatDescriptionGetDimensions(format.formatDescription); - } - capturePhotoOutput:[[FLTDefaultCapturePhotoOutput alloc] - initWithPhotoOutput:[AVCapturePhotoOutput new]] - assetWriterFactory:^id _Nonnull( - NSURL *_Nonnull url, AVFileType _Nonnull fileType, - NSError *_Nullable __autoreleasing *_Nullable error) { - return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; - } - pixelBufferAdaptorFactory:^id _Nonnull( - id _Nonnull assetWriterInput, - NSDictionary *_Nullable sourcePixelBufferAttributes) { - return [[FLTDefaultPixelBufferAdaptor alloc] - initWithAdaptor:[[AVAssetWriterInputPixelBufferAdaptor alloc] - initWithAssetWriterInput:assetWriterInput.input - sourcePixelBufferAttributes:sourcePixelBufferAttributes]]; - } - photoSettingsFactory:[[FLTDefaultCapturePhotoSettingsFactory alloc] init] - error:&error]; + }]; + + FLTCam *cam = [[FLTCam alloc] initWithConfiguration:configuration error:&error]; if (error) { completion(nil, 
FlutterErrorFromNSError(error)); diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m index 05bb5e784fd9..086f1e446cff 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCam.m @@ -9,6 +9,7 @@ @import Flutter; #import +#import "./include/camera_avfoundation/FLTCamConfiguration.h" #import "./include/camera_avfoundation/FLTSavePhotoDelegate.h" #import "./include/camera_avfoundation/FLTThreadSafeEventChannel.h" #import "./include/camera_avfoundation/Protocols/FLTCaptureConnection.h" @@ -178,47 +179,35 @@ static void selectBestFormatForRequestedFrameRate( mediaSettings.framesPerSecond = @(bestFrameRate); } -- (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(id)videoCaptureSession - audioCaptureSession:(id)audioCaptureSession - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory - audioCaptureDeviceFactory:(CaptureDeviceFactory)audioCaptureDeviceFactory - videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat - capturePhotoOutput:(id)capturePhotoOutput - assetWriterFactory:(AssetWriterFactory)assetWriterFactory - pixelBufferAdaptorFactory:(PixelBufferAdaptorFactory)pixelBufferAdaptorFactory - photoSettingsFactory:(id)photoSettingsFactory - error:(NSError **)error { +- (nonnull instancetype)initWithConfiguration:(nonnull FLTCamConfiguration *)configuration + error:(NSError **)error { self = [super init]; NSAssert(self, @"super init cannot be nil"); - _mediaSettings = mediaSettings; - _mediaSettingsAVWrapper = mediaSettingsAVWrapper; + _mediaSettings = configuration.mediaSettings; + _mediaSettingsAVWrapper = configuration.mediaSettingsWrapper; - _captureSessionQueue = captureSessionQueue; + _captureSessionQueue = configuration.captureSessionQueue; _pixelBufferSynchronizationQueue = dispatch_queue_create("io.flutter.camera.pixelBufferSynchronizationQueue", NULL); _photoIOQueue = dispatch_queue_create("io.flutter.camera.photoIOQueue", NULL); - _videoCaptureSession = videoCaptureSession; - _audioCaptureSession = audioCaptureSession; - _captureDeviceFactory = captureDeviceFactory; - _captureDevice = captureDeviceFactory(); - _audioCaptureDeviceFactory = audioCaptureDeviceFactory; - _videoDimensionsForFormat = videoDimensionsForFormat; + _videoCaptureSession = configuration.videoCaptureSession; + _audioCaptureSession = configuration.audioCaptureSession; + _captureDeviceFactory = configuration.captureDeviceFactory; + _captureDevice = _captureDeviceFactory(); + _audioCaptureDeviceFactory = configuration.audioCaptureDeviceFactory; + _videoDimensionsForFormat = configuration.videoDimensionsForFormat; _flashMode = _captureDevice.hasFlash ? 
FCPPlatformFlashModeAuto : FCPPlatformFlashModeOff; _exposureMode = FCPPlatformExposureModeAuto; _focusMode = FCPPlatformFocusModeAuto; _lockedCaptureOrientation = UIDeviceOrientationUnknown; - _deviceOrientation = orientation; + _deviceOrientation = configuration.orientation; _videoFormat = kCVPixelFormatType_32BGRA; _inProgressSavePhotoDelegates = [NSMutableDictionary dictionary]; _fileFormat = FCPPlatformImageFileFormatJpeg; - _assetWriterFactory = assetWriterFactory; - _pixelBufferAdaptorFactory = pixelBufferAdaptorFactory; - _photoSettingsFactory = photoSettingsFactory; - + _assetWriterFactory = configuration.assetWriterFactory; + _pixelBufferAdaptorFactory = configuration.pixelBufferAdaptorFactory; + _photoSettingsFactory = configuration.photoSettingsFactory; + // To limit memory consumption, limit the number of frames pending processing. // After some testing, 4 was determined to be the best maximum value. // https://github.com/flutter/plugins/pull/4520#discussion_r766335637 @@ -237,18 +226,18 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings [_videoCaptureSession addOutputWithNoConnections:_captureVideoOutput]; [_videoCaptureSession addConnection:connection]; - _capturePhotoOutput = capturePhotoOutput; + _capturePhotoOutput = configuration.capturePhotoOutput; [_capturePhotoOutput setHighResolutionCaptureEnabled:YES]; [_videoCaptureSession addOutput:_capturePhotoOutput.photoOutput]; _motionManager = [[CMMotionManager alloc] init]; [_motionManager startAccelerometerUpdates]; - _deviceOrientationProvider = [[FLTDefaultDeviceOrientationProvider alloc] init]; + _deviceOrientationProvider = configuration.deviceOrientationProvider; if (_mediaSettings.framesPerSecond) { // The frame rate can be changed only on a locked for configuration device. 
- if ([mediaSettingsAVWrapper lockDevice:_captureDevice error:error]) { + if ([_mediaSettingsAVWrapper lockDevice:_captureDevice error:error]) { [_mediaSettingsAVWrapper beginConfigurationForSession:_videoCaptureSession]; // Possible values for presets are hard-coded in FLT interface having @@ -269,8 +258,8 @@ - (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings int fpsNominator = floor([_mediaSettings.framesPerSecond doubleValue] * 10.0); CMTime duration = CMTimeMake(10, fpsNominator); - [mediaSettingsAVWrapper setMinFrameDuration:duration onDevice:_captureDevice]; - [mediaSettingsAVWrapper setMaxFrameDuration:duration onDevice:_captureDevice]; + [_mediaSettingsAVWrapper setMinFrameDuration:duration onDevice:_captureDevice]; + [_mediaSettingsAVWrapper setMaxFrameDuration:duration onDevice:_captureDevice]; [_mediaSettingsAVWrapper commitConfigurationForSession:_videoCaptureSession]; [_mediaSettingsAVWrapper unlockDevice:_captureDevice]; @@ -407,7 +396,8 @@ - (void)captureToFileWithCompletion:(void (^)(NSString *_Nullable, [self.capturePhotoOutput.availablePhotoCodecTypes containsObject:AVVideoCodecTypeHEVC]; if (_fileFormat == FCPPlatformImageFileFormatHeif && isHEVCCodecAvailable) { - settings = [_photoSettingsFactory createPhotoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}]; + settings = [_photoSettingsFactory + createPhotoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}]; extension = @"heif"; } else { extension = @"jpg"; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamConfiguration.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamConfiguration.m new file mode 100644 index 000000000000..a047f71a488c --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/FLTCamConfiguration.m @@ -0,0 +1,50 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#import "./include/camera_avfoundation/FLTCamConfiguration.h" + +@implementation FLTCamConfiguration + +- (nonnull instancetype) + initWithMediaSettings:(nonnull FCPPlatformMediaSettings *)mediaSettings + mediaSettingsWrapper:(nonnull FLTCamMediaSettingsAVWrapper *)mediaSettingsWrapper + captureDeviceFactory:(nonnull CaptureDeviceFactory)captureDeviceFactory + captureSessionQueue:(nonnull dispatch_queue_t)captureSessionQueue + captureSessionFactory:(nonnull CaptureSessionFactory)captureSessionFactory + audioCaptureDeviceFactory:(nonnull AudioCaptureDeviceFactory)audioCaptureDeviceFactory { + self = [super init]; + if (self) { + _mediaSettings = mediaSettings; + _mediaSettingsWrapper = mediaSettingsWrapper; + _captureSessionQueue = captureSessionQueue; + _videoCaptureSession = captureSessionFactory(); + _audioCaptureSession = captureSessionFactory(); + _captureDeviceFactory = captureDeviceFactory; + _audioCaptureDeviceFactory = audioCaptureDeviceFactory; + _orientation = [[UIDevice currentDevice] orientation]; + _capturePhotoOutput = + [[FLTDefaultCapturePhotoOutput alloc] initWithPhotoOutput:[AVCapturePhotoOutput new]]; + _deviceOrientationProvider = [[FLTDefaultDeviceOrientationProvider alloc] init]; + _assetWriterFactory = + ^id _Nonnull(NSURL *_Nonnull url, AVFileType _Nonnull fileType, + NSError *_Nullable __autoreleasing *_Nullable error) { + return [[FLTDefaultAssetWriter alloc] initWithURL:url fileType:fileType error:error]; + }; + _pixelBufferAdaptorFactory = ^id( + id _Nonnull assetWriterInput, + NSDictionary *_Nullable sourcePixelBufferAttributes) { + return [[FLTDefaultPixelBufferAdaptor alloc] + initWithAdaptor:[[AVAssetWriterInputPixelBufferAdaptor alloc] + initWithAssetWriterInput:assetWriterInput.input + sourcePixelBufferAttributes:sourcePixelBufferAttributes]]; + }; + _photoSettingsFactory = [[FLTDefaultCapturePhotoSettingsFactory alloc] init]; + _videoDimensionsForFormat = ^CMVideoDimensions(id _Nonnull format) { + return CMVideoFormatDescriptionGetDimensions(format.formatDescription); + }; + } + return self; +} + +@end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m index 6fdf47d19da3..8ca88eb6fa15 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/Protocols/FLTCapturePhotoSettings.m @@ -38,11 +38,14 @@ - (void)setHighResolutionPhotoEnabled:(BOOL)enabled { @implementation FLTDefaultCapturePhotoSettingsFactory - (id)createPhotoSettings { - return [[FLTDefaultCapturePhotoSettings alloc] initWithSettings:[AVCapturePhotoSettings photoSettings]]; + return [[FLTDefaultCapturePhotoSettings alloc] + initWithSettings:[AVCapturePhotoSettings photoSettings]]; } -- (id)createPhotoSettingsWithFormat:(NSDictionary *)format { - return [[FLTDefaultCapturePhotoSettings alloc] initWithSettings:[AVCapturePhotoSettings photoSettingsWithFormat:format]]; +- (id)createPhotoSettingsWithFormat: + (NSDictionary *)format { + return [[FLTDefaultCapturePhotoSettings alloc] + initWithSettings:[AVCapturePhotoSettings photoSettingsWithFormat:format]]; } @end diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap 
b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap index 75604fa61bb7..18f45ce1be3e 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/CameraPlugin.modulemap @@ -9,6 +9,7 @@ framework module camera_avfoundation { header "CameraProperties.h" header "FLTCam.h" header "FLTCam_Test.h" + header "FLTCamConfiguration.h" header "FLTSavePhotoDelegate_Test.h" header "FLTThreadSafeEventChannel.h" header "FLTPermissionService.h" diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h index 5a1310931067..0ccff032d1ca 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/CameraPlugin.h @@ -9,7 +9,6 @@ #import "FLTCaptureDeviceControlling.h" #import "FLTCaptureSession.h" -typedef id (^CaptureSessionFactory)(void); typedef id (^CaptureNamedDeviceFactory)(NSString *name); @interface CameraPlugin : NSObject diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h index c07d687aceef..831e760faa9a 100644 --- a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCam.h @@ -8,29 +8,15 @@ #import "CameraProperties.h" #import "FLTAssetWriter.h" +#import "FLTCamConfiguration.h" #import "FLTCamMediaSettingsAVWrapper.h" #import "FLTCaptureDeviceControlling.h" #import "FLTCapturePhotoOutput.h" +#import "FLTDeviceOrientationProviding.h" #import "messages.g.h" NS_ASSUME_NONNULL_BEGIN -/// Factory block returning an AVCaptureDevice. -/// Used in tests to inject a device into FLTCam. -typedef id _Nonnull (^CaptureDeviceFactory)(void); - -typedef id _Nonnull (^AudioCaptureDeviceFactory)(void); - -typedef id _Nonnull (^AssetWriterFactory)(NSURL *, AVFileType, - NSError *_Nullable *_Nullable); - -typedef id _Nonnull (^PixelBufferAdaptorFactory)( - id, NSDictionary *_Nullable); - -/// Determines the video dimensions (width and height) for a given capture device format. -/// Used in tests to mock CMVideoFormatDescriptionGetDimensions. -typedef CMVideoDimensions (^VideoDimensionsForFormat)(id); - /// A class that manages camera's state and performs camera operations. @interface FLTCam : NSObject @@ -53,26 +39,8 @@ typedef CMVideoDimensions (^VideoDimensionsForFormat)(id /// Initializes an `FLTCam` instance. /// Allows for testing with specified resolution, audio preference, orientation, /// and direct access to capture sessions and blocks. -/// @param mediaSettings the media settings configuration parameters -/// @param mediaSettingsAVWrapper AVFoundation wrapper to perform media settings related operations -/// (for dependency injection in unit tests). 
-/// @param orientation the orientation of camera -/// @param captureSessionQueue the queue on which camera's capture session operations happen. /// @param error report to the caller if any error happened creating the camera. -- (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings - mediaSettingsAVWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsAVWrapper - orientation:(UIDeviceOrientation)orientation - videoCaptureSession:(id)videoCaptureSession - audioCaptureSession:(id)audioCaptureSession - captureSessionQueue:(dispatch_queue_t)captureSessionQueue - captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory - audioCaptureDeviceFactory:(CaptureDeviceFactory)audioCaptureDeviceFactory - videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat - capturePhotoOutput:(id)capturePhotoOutput - assetWriterFactory:(AssetWriterFactory)assetWriterFactory - pixelBufferAdaptorFactory:(PixelBufferAdaptorFactory)pixelBufferAdaptorFactory - photoSettingsFactory:(id)photoSettingsFactory - error:(NSError **)error; +- (instancetype)initWithConfiguration:(FLTCamConfiguration *)configuration error:(NSError **)error; /// Informs the Dart side of the plugin of the current camera state and capabilities. - (void)reportInitializationState; diff --git a/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamConfiguration.h b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamConfiguration.h new file mode 100644 index 000000000000..9b59b9b5e715 --- /dev/null +++ b/packages/camera/camera_avfoundation/ios/camera_avfoundation/Sources/camera_avfoundation/include/camera_avfoundation/FLTCamConfiguration.h @@ -0,0 +1,62 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +@import AVFoundation; +@import Foundation; +@import Flutter; + +#import "CameraProperties.h" +#import "FLTAssetWriter.h" +#import "FLTCamMediaSettingsAVWrapper.h" +#import "FLTCaptureDeviceControlling.h" +#import "FLTCapturePhotoOutput.h" +#import "FLTDeviceOrientationProviding.h" + +NS_ASSUME_NONNULL_BEGIN + +/// Factory block returning an AVCaptureDevice. +/// Used in tests to inject a device into FLTCam. +typedef id _Nonnull (^CaptureDeviceFactory)(void); + +typedef id _Nonnull (^AudioCaptureDeviceFactory)(void); + +typedef id _Nonnull (^AssetWriterFactory)(NSURL *, AVFileType, + NSError *_Nullable *_Nullable); + +typedef id _Nonnull (^PixelBufferAdaptorFactory)( + id, NSDictionary *_Nullable); + +typedef id _Nonnull (^CaptureSessionFactory)(void); + +/// Determines the video dimensions (width and height) for a given capture device format. +/// Used in tests to mock CMVideoFormatDescriptionGetDimensions. 
+typedef CMVideoDimensions (^VideoDimensionsForFormat)(id); + +@interface FLTCamConfiguration : NSObject + +- (instancetype)initWithMediaSettings:(FCPPlatformMediaSettings *)mediaSettings + mediaSettingsWrapper:(FLTCamMediaSettingsAVWrapper *)mediaSettingsWrapper + captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory + captureSessionQueue:(dispatch_queue_t)captureSessionQueue + captureSessionFactory:(CaptureSessionFactory)captureSessionFactory + audioCaptureDeviceFactory:(AudioCaptureDeviceFactory)audioCaptureDeviceFactory; + +@property(nonatomic, strong) id deviceOrientationProvider; +@property(nonatomic, strong) id videoCaptureSession; +@property(nonatomic, strong) id audioCaptureSession; +@property(nonatomic, strong) dispatch_queue_t captureSessionQueue; +@property(nonatomic, strong) FCPPlatformMediaSettings *mediaSettings; +@property(nonatomic, strong) FLTCamMediaSettingsAVWrapper *mediaSettingsWrapper; +@property(nonatomic, strong) id capturePhotoOutput; +@property(nonatomic, copy) AssetWriterFactory assetWriterFactory; +@property(nonatomic, copy) PixelBufferAdaptorFactory pixelBufferAdaptorFactory; +@property(nonatomic, strong) id photoSettingsFactory; +@property(nonatomic, copy) CaptureDeviceFactory captureDeviceFactory; +@property(nonatomic, copy) CaptureDeviceFactory audioCaptureDeviceFactory; +@property(nonatomic, copy) VideoDimensionsForFormat videoDimensionsForFormat; +@property(nonatomic, assign) UIDeviceOrientation orientation; + +@end + +NS_ASSUME_NONNULL_END From 222d30ae110d5d790c462dd9aea152a7e6608873 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Mon, 23 Dec 2024 11:51:12 +0100 Subject: [PATCH 14/16] Add versioning --- .../camera/camera_avfoundation/CHANGELOG.md | 117 +++++++++--------- 1 file changed, 59 insertions(+), 58 deletions(-) diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md index 34de3bf88e46..bef115dc1dea 100644 --- a/packages/camera/camera_avfoundation/CHANGELOG.md +++ b/packages/camera/camera_avfoundation/CHANGELOG.md @@ -1,195 +1,196 @@ ## NEXT -* Updates minimum supported SDK version to Flutter 3.22/Dart 3.4. +- Updates minimum supported SDK version to Flutter 3.22/Dart 3.4. +- Removes OCMock from tests. ## 0.9.17+5 -* Adds ability to use any supported FPS and fixes crash when using unsupported FPS. +- Adds ability to use any supported FPS and fixes crash when using unsupported FPS. ## 0.9.17+4 -* Updates Pigeon for non-nullable collection type support. -* Updates minimum supported SDK version to Flutter 3.19/Dart 3.3. +- Updates Pigeon for non-nullable collection type support. +- Updates minimum supported SDK version to Flutter 3.19/Dart 3.3. ## 0.9.17+3 -* Fixes deallocation of camera on dispose. +- Fixes deallocation of camera on dispose. ## 0.9.17+2 -* Fixes stopVideoRecording waiting indefinitely and lag at start of video. +- Fixes stopVideoRecording waiting indefinitely and lag at start of video. ## 0.9.17+1 -* Fixes a crash due to appending sample buffers when readyForMoreMediaData is NO. +- Fixes a crash due to appending sample buffers when readyForMoreMediaData is NO. ## 0.9.17 -* Adds Swift Package Manager compatibility. +- Adds Swift Package Manager compatibility. ## 0.9.16+3 -* Removes unused `maxVideoDuration` code. +- Removes unused `maxVideoDuration` code. ## 0.9.16+2 -* Fixes regression taking a picture in torch mode. +- Fixes regression taking a picture in torch mode. ## 0.9.16+1 -* Fixes sample times not being numeric after pause/resume. 
+- Fixes sample times not being numeric after pause/resume. ## 0.9.16 -* Converts Dart-to-host communcation to Pigeon. -* Fixes a race condition in camera disposal. +- Converts Dart-to-host communcation to Pigeon. +- Fixes a race condition in camera disposal. ## 0.9.15+4 -* Converts host-to-Dart communcation to Pigeon. +- Converts host-to-Dart communcation to Pigeon. ## 0.9.15+3 -* Moves `pigeon` to `dev_dependencies`. +- Moves `pigeon` to `dev_dependencies`. ## 0.9.15+2 -* Converts camera query to Pigeon. +- Converts camera query to Pigeon. ## 0.9.15+1 -* Simplifies internal handling of method channel responses. +- Simplifies internal handling of method channel responses. ## 0.9.15 -* Adds support to control video FPS and bitrate. See `CameraController.withSettings`. +- Adds support to control video FPS and bitrate. See `CameraController.withSettings`. ## 0.9.14+2 -* Removes `_ambiguate` methods from example code. +- Removes `_ambiguate` methods from example code. ## 0.9.14+1 -* Fixes bug where max resolution preset does not produce highest available resolution on iOS. +- Fixes bug where max resolution preset does not produce highest available resolution on iOS. ## 0.9.14 -* Adds support to HEIF format. +- Adds support to HEIF format. ## 0.9.13+11 -* Fixes a memory leak of sample buffer when pause and resume the video recording. -* Removes development team from example app. -* Updates minimum iOS version to 12.0 and minimum Flutter version to 3.16.6. +- Fixes a memory leak of sample buffer when pause and resume the video recording. +- Removes development team from example app. +- Updates minimum iOS version to 12.0 and minimum Flutter version to 3.16.6. ## 0.9.13+10 -* Adds privacy manifest. +- Adds privacy manifest. ## 0.9.13+9 -* Fixes new lint warnings. +- Fixes new lint warnings. ## 0.9.13+8 -* Updates example app to use non-deprecated video_player method. -* Updates minimum supported SDK version to Flutter 3.10/Dart 3.0. +- Updates example app to use non-deprecated video_player method. +- Updates minimum supported SDK version to Flutter 3.10/Dart 3.0. ## 0.9.13+7 -* Fixes inverted orientation strings. +- Fixes inverted orientation strings. ## 0.9.13+6 -* Fixes incorrect use of `NSError` that could cause crashes on launch. +- Fixes incorrect use of `NSError` that could cause crashes on launch. ## 0.9.13+5 -* Ignores audio samples until the first video sample arrives. +- Ignores audio samples until the first video sample arrives. ## 0.9.13+4 -* Adds pub topics to package metadata. -* Updates minimum supported SDK version to Flutter 3.7/Dart 2.19. +- Adds pub topics to package metadata. +- Updates minimum supported SDK version to Flutter 3.7/Dart 2.19. ## 0.9.13+3 -* Migrates `styleFrom` usage in examples off of deprecated `primary` and `onPrimary` parameters. -* Fixes unawaited_futures violations. +- Migrates `styleFrom` usage in examples off of deprecated `primary` and `onPrimary` parameters. +- Fixes unawaited_futures violations. ## 0.9.13+2 -* Removes obsolete null checks on non-nullable values. -* Updates minimum supported SDK version to Flutter 3.3/Dart 2.18. +- Removes obsolete null checks on non-nullable values. +- Updates minimum supported SDK version to Flutter 3.3/Dart 2.18. ## 0.9.13+1 -* Clarifies explanation of endorsement in README. +- Clarifies explanation of endorsement in README. ## 0.9.13 -* Allows camera to be switched while video recording. -* Aligns Dart and Flutter SDK constraints. +- Allows camera to be switched while video recording. 
+- Aligns Dart and Flutter SDK constraints. ## 0.9.12 -* Updates minimum Flutter version to 3.3 and iOS 11. +- Updates minimum Flutter version to 3.3 and iOS 11. ## 0.9.11+1 -* Updates links for the merge of flutter/plugins into flutter/packages. +- Updates links for the merge of flutter/plugins into flutter/packages. ## 0.9.11 -* Adds back use of Optional type. -* Updates minimum Flutter version to 3.0. +- Adds back use of Optional type. +- Updates minimum Flutter version to 3.0. ## 0.9.10+2 -* Updates code for stricter lint checks. +- Updates code for stricter lint checks. ## 0.9.10+1 -* Updates code for stricter lint checks. +- Updates code for stricter lint checks. ## 0.9.10 -* Remove usage of deprecated quiver Optional type. +- Remove usage of deprecated quiver Optional type. ## 0.9.9 -* Implements option to also stream when recording a video. +- Implements option to also stream when recording a video. ## 0.9.8+6 -* Updates code for `no_leading_underscores_for_local_identifiers` lint. -* Updates minimum Flutter version to 2.10. +- Updates code for `no_leading_underscores_for_local_identifiers` lint. +- Updates minimum Flutter version to 2.10. ## 0.9.8+5 -* Fixes a regression introduced in 0.9.8+4 where the stream handler is not set. +- Fixes a regression introduced in 0.9.8+4 where the stream handler is not set. ## 0.9.8+4 -* Fixes a crash due to sending orientation change events when the engine is torn down. +- Fixes a crash due to sending orientation change events when the engine is torn down. ## 0.9.8+3 -* Fixes avoid_redundant_argument_values lint warnings and minor typos. -* Ignores missing return warnings in preparation for [upcoming analysis changes](https://github.com/flutter/flutter/issues/105750). +- Fixes avoid_redundant_argument_values lint warnings and minor typos. +- Ignores missing return warnings in preparation for [upcoming analysis changes](https://github.com/flutter/flutter/issues/105750). ## 0.9.8+2 -* Fixes exception in registerWith caused by the switch to an in-package method channel. +- Fixes exception in registerWith caused by the switch to an in-package method channel. ## 0.9.8+1 -* Ignores deprecation warnings for upcoming styleFrom button API changes. +- Ignores deprecation warnings for upcoming styleFrom button API changes. ## 0.9.8 -* Switches to internal method channel implementation. +- Switches to internal method channel implementation. ## 0.9.7+1 -* Splits from `camera` as a federated implementation. +- Splits from `camera` as a federated implementation. From 0088ad64ef724703512dca3d0935c36900bd3aa5 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Mon, 23 Dec 2024 11:52:07 +0100 Subject: [PATCH 15/16] Revert Info.plist change --- .../camera_avfoundation/example/ios/Runner/Info.plist | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist index b263ffe1e5a9..adb62fb7803d 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist +++ b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist @@ -30,8 +30,6 @@ Can I use the camera please? 
Only for demo purpose of the app NSMicrophoneUsageDescription Only for demo purpose of the app - UIApplicationSupportsIndirectInputEvents - UILaunchStoryboardName LaunchScreen UIMainStoryboardFile @@ -54,5 +52,9 @@ UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight + CADisableMinimumFrameDurationOnPhone + + UIApplicationSupportsIndirectInputEvents + From 81143f5b904f2ef859a4aae136ed91ccaf0e9944 Mon Sep 17 00:00:00 2001 From: Marcin Chudy Date: Mon, 23 Dec 2024 12:03:33 +0100 Subject: [PATCH 16/16] Fix project.pbxproj --- .../ios/Runner.xcodeproj/project.pbxproj | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj index eae4b99a59d9..dbcaf65cfe78 100644 --- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj @@ -12,9 +12,10 @@ 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; 334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */; }; 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; + 3C036CAD15FEA6FC964935EE /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 8FE1D7C55B3760E3991BE41E /* libPods-Runner.a */; }; 43ED1537282570DE00EB00DE /* AvailableCamerasTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 43ED1536282570DE00EB00DE /* AvailableCamerasTest.m */; }; 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; }; - 78A318202AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage in Frameworks */ = {isa = PBXBuildFile; productRef = 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */; }; + 78A318202AECB46A00862997 /* BuildFile in Frameworks */ = {isa = PBXBuildFile; }; 7D5FCCD42AEF9D0200FB7108 /* CameraSettingsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7D5FCCD32AEF9D0200FB7108 /* CameraSettingsTests.m */; }; 7F87E80E2D03283E00A3549C /* CameraExposureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F87E80D2D03283E00A3549C /* CameraExposureTests.m */; }; 7FF2D09B2D15AB7A0092C411 /* MockAssetWriter.m in Sources */ = {isa = PBXBuildFile; fileRef = 7FF2D0872D15AB7A0092C411 /* MockAssetWriter.m */; }; @@ -32,7 +33,6 @@ 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; - ABA022F748E0C5AECBCD8F5F /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 8FE1D7C55B3760E3991BE41E /* libPods-Runner.a */; }; CEF6611A2B5E36A500D33FD4 /* CameraSessionPresetsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = CEF661192B5E36A500D33FD4 /* CameraSessionPresetsTests.m */; }; E01EE4A82799F3A5008C1950 /* 
QueueUtilsTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */; }; E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */; }; @@ -147,8 +147,8 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - 78A318202AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage in Frameworks */, - 236906D1621AE863A5B2E770 /* libPods-Runner.a in Frameworks */, + 78A318202AECB46A00862997 /* BuildFile in Frameworks */, + 3C036CAD15FEA6FC964935EE /* libPods-Runner.a in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -304,8 +304,6 @@ 03BB766E2665316900CE5A93 /* PBXTargetDependency */, ); name = RunnerTests; - packageProductDependencies = ( - ); productName = camera_exampleTests; productReference = 03BB76682665316900CE5A93 /* RunnerTests.xctest */; productType = "com.apple.product-type.bundle.unit-test"; @@ -328,9 +326,6 @@ dependencies = ( ); name = Runner; - packageProductDependencies = ( - 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */, - ); productName = Runner; productReference = 97C146EE1CF9000F007C117D /* Runner.app */; productType = "com.apple.product-type.application"; @@ -364,7 +359,6 @@ Base, ); mainGroup = 97C146E51CF9000F007C117D; - packageReferences = (); productRefGroup = 97C146EF1CF9000F007C117D /* Products */; projectDirPath = ""; projectRoot = ""; @@ -574,7 +568,7 @@ /* Begin XCBuildConfiguration section */ 03BB766F2665316900CE5A93 /* Debug */ = { isa = XCBuildConfiguration; - baseConfigurationReference = E27055DF15226B1DFE032420 /* Pods-RunnerTests.debug.xcconfig */; + baseConfigurationReference = 73BD4FD74789D3EB46FB5774 /* Pods-RunnerTests.debug.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; @@ -604,7 +598,7 @@ }; 03BB76702665316900CE5A93 /* Release */ = { isa = XCBuildConfiguration; - baseConfigurationReference = CB65379B3085E03D11D2786A /* Pods-RunnerTests.release.xcconfig */; + baseConfigurationReference = 5A32C345E4881D9C7CE9479C /* Pods-RunnerTests.release.xcconfig */; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;