@@ -19,11 +19,6 @@ @interface FLTSavePhotoDelegate : NSObject <AVCapturePhotoCaptureDelegate>
 @property(readonly, nonatomic) FlutterResult result;
 @property(readonly, nonatomic) CMMotionManager *motionManager;
 @property(readonly, nonatomic) AVCaptureDevicePosition cameraPosition;
-
-- initWithPath:(NSString *)filename
-        result:(FlutterResult)result
- motionManager:(CMMotionManager *)motionManager
-cameraPosition:(AVCaptureDevicePosition)cameraPosition;
 @end
 
 @interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
@@ -68,7 +63,7 @@ - (void)captureOutput:(AVCapturePhotoOutput *)output
                 previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
                         resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
                          bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
-                                   error:(NSError *)error {
+                                   error:(NSError *)error API_AVAILABLE(ios(10)) {
   selfReference = nil;
   if (error) {
     _result(getFlutterError(error));
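Annotation note: `API_AVAILABLE(ios(10))` marks this delegate callback as requiring iOS 10, which silences availability warnings for the `AVCapturePhotoOutput` types in its signature and makes the compiler flag any caller that could reach it on iOS 9. A minimal sketch of the pattern (the `PhotoHelper` class and `configureOutput:` method are illustrative, not part of the plugin):

```objc
#import <AVFoundation/AVFoundation.h>

@interface PhotoHelper : NSObject
// Declares the iOS 10 requirement; callers on older deployment
// targets must wrap calls in `if (@available(iOS 10.0, *))`.
- (void)configureOutput:(AVCapturePhotoOutput *)output API_AVAILABLE(ios(10));
@end

@implementation PhotoHelper
// The availability attribute is inherited from the declaration.
- (void)configureOutput:(AVCapturePhotoOutput *)output {
  output.highResolutionCaptureEnabled = YES;
}
@end
```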
@@ -160,14 +155,14 @@ @interface FLTCam : NSObject <FlutterTexture,
                               AVCaptureAudioDataOutputSampleBufferDelegate,
                               FlutterStreamHandler>
 @property(readonly, nonatomic) int64_t textureId;
-@property(nonatomic, copy) void (^onFrameAvailable)();
+@property(nonatomic, copy) void (^onFrameAvailable)(void);
 @property BOOL enableAudio;
 @property(nonatomic) FlutterEventChannel *eventChannel;
 @property(nonatomic) FLTImageStreamHandler *imageStreamHandler;
 @property(nonatomic) FlutterEventSink eventSink;
 @property(readonly, nonatomic) AVCaptureSession *captureSession;
 @property(readonly, nonatomic) AVCaptureDevice *captureDevice;
-@property(readonly, nonatomic) AVCapturePhotoOutput *capturePhotoOutput;
+@property(readonly, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10));
 @property(readonly, nonatomic) AVCaptureVideoDataOutput *captureVideoOutput;
 @property(readonly, nonatomic) AVCaptureInput *captureVideoInput;
 @property(readonly) CVPixelBufferRef volatile latestPixelBuffer;
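The `(^onFrameAvailable)()` to `(^onFrameAvailable)(void)` change is not cosmetic: in C, and therefore in Objective-C block types, `()` declares an *unspecified* parameter list, which `-Wstrict-prototypes` warns about; `(void)` is the explicit no-argument spelling. A self-contained sketch:

```objc
#import <Foundation/Foundation.h>

int main(void) {
  // `()` would leave the parameter list unspecified (and warn under
  // -Wstrict-prototypes); `(void)` declares "takes no arguments".
  void (^onFrame)(void) = ^{
    NSLog(@"frame available");
  };
  onFrame();
  return 0;
}
```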
@@ -192,19 +187,6 @@ @interface FLTCam : NSObject <FlutterTexture,
 @property(assign, nonatomic) CMTime audioTimeOffset;
 @property(nonatomic) CMMotionManager *motionManager;
 @property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
-- (instancetype)initWithCameraName:(NSString *)cameraName
-                  resolutionPreset:(NSString *)resolutionPreset
-                       enableAudio:(BOOL)enableAudio
-                     dispatchQueue:(dispatch_queue_t)dispatchQueue
-                             error:(NSError **)error;
-
-- (void)start;
-- (void)stop;
-- (void)startVideoRecordingAtPath:(NSString *)path result:(FlutterResult)result;
-- (void)stopVideoRecordingWithResult:(FlutterResult)result;
-- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger;
-- (void)stopImageStream;
-- (void)captureToFile:(NSString *)filename result:(FlutterResult)result;
 @end
 
 @implementation FLTCam {
@@ -254,9 +236,12 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
   [_captureSession addInputWithNoConnections:_captureVideoInput];
   [_captureSession addOutputWithNoConnections:_captureVideoOutput];
   [_captureSession addConnection:connection];
-  _capturePhotoOutput = [AVCapturePhotoOutput new];
-  [_capturePhotoOutput setHighResolutionCaptureEnabled:YES];
-  [_captureSession addOutput:_capturePhotoOutput];
+
+  if (@available(iOS 10.0, *)) {
+    _capturePhotoOutput = [AVCapturePhotoOutput new];
+    [_capturePhotoOutput setHighResolutionCaptureEnabled:YES];
+    [_captureSession addOutput:_capturePhotoOutput];
+  }
   _motionManager = [[CMMotionManager alloc] init];
   [_motionManager startAccelerometerUpdates];
 
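Guard note: this `@available(iOS 10.0, *)` block is the runtime half of the `API_AVAILABLE` annotation on the `capturePhotoOutput` property above; inside the block the compiler treats iOS 10 API as safe, and on iOS 9 the session is simply configured without a photo output. A body-fragment sketch of the same shape (the local `session`/`photoOutput` names and the `canAddOutput:` check are illustrative additions):

```objc
AVCaptureSession *session = [[AVCaptureSession alloc] init];
if (@available(iOS 10.0, *)) {
  // Compiler knows iOS 10+ API is safe inside this block.
  AVCapturePhotoOutput *photoOutput = [AVCapturePhotoOutput new];
  photoOutput.highResolutionCaptureEnabled = YES;
  if ([session canAddOutput:photoOutput]) {
    [session addOutput:photoOutput];
  }
}
// On iOS 9 the session has no photo output; takePicture answers
// FlutterMethodNotImplemented instead (see the handler below).
```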
@@ -272,7 +257,7 @@ - (void)stop {
   [_captureSession stopRunning];
 }
 
-- (void)captureToFile:(NSString *)path result:(FlutterResult)result {
+- (void)captureToFile:(NSString *)path result:(FlutterResult)result API_AVAILABLE(ios(10)) {
   AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
   if (_resolutionPreset == max) {
     [settings setHighResolutionPhotoEnabled:YES];
@@ -288,19 +273,21 @@ - (void)captureToFile:(NSString *)path result:(FlutterResult)result {
 - (void)setCaptureSessionPreset:(ResolutionPreset)resolutionPreset {
   switch (resolutionPreset) {
     case max:
+    case ultraHigh:
+      if (@available(iOS 9.0, *)) {
+        if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
+          _captureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
+          _previewSize = CGSizeMake(3840, 2160);
+          break;
+        }
+      }
       if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
         _captureSession.sessionPreset = AVCaptureSessionPresetHigh;
         _previewSize =
             CGSizeMake(_captureDevice.activeFormat.highResolutionStillImageDimensions.width,
                        _captureDevice.activeFormat.highResolutionStillImageDimensions.height);
         break;
       }
-    case ultraHigh:
-      if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
-        _captureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
-        _previewSize = CGSizeMake(3840, 2160);
-        break;
-      }
     case veryHigh:
       if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
         _captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
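Reordering note: moving `case ultraHigh:` up next to `case max:` turns the switch into a deliberate waterfall: each case tries its preset, `break`s on success, and otherwise falls through to the next lower case. The 4K preset additionally needs the `@available(iOS 9.0, *)` check because `AVCaptureSessionPreset3840x2160` was only introduced in iOS 9. The shape of the pattern, reduced to two tiers as a method-body fragment (`preset`/`session` condensed for illustration):

```objc
// Deliberate fallthrough: try the best preset first and drop to the
// next lower tier whenever the session can't provide it.
switch (preset) {
  case ultraHigh:
    if (@available(iOS 9.0, *)) {
      if ([session canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
        session.sessionPreset = AVCaptureSessionPreset3840x2160;
        break;
      }
    }
    // falls through
  case veryHigh:
    if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
      session.sessionPreset = AVCaptureSessionPreset1920x1080;
      break;
    }
    // falls through
  default:
    session.sessionPreset = AVCaptureSessionPresetHigh;
}
```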
@@ -495,7 +482,7 @@ - (void)captureOutput:(AVCaptureOutput *)output
   }
 }
 
-- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset {
+- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset CF_RETURNS_RETAINED {
   CMItemCount count;
   CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
   CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
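Ownership note: `adjustTime:by:` hands back a buffer created by `CMSampleBufferCreateCopyWithNewTiming`, i.e. a +1 reference, but nothing in the selector name tells the static analyzer that the caller owns it; `CF_RETURNS_RETAINED` states that contract explicitly. Caller-side sketch (the `sampleBuffer`, `_videoTimeOffset`, and `writerInput` names are illustrative):

```objc
// CF_RETURNS_RETAINED documents a +1 return: the caller owns the
// buffer and must CFRelease it once it has been consumed.
CMSampleBufferRef adjusted = [self adjustTime:sampleBuffer by:_videoTimeOffset];
[writerInput appendSampleBuffer:adjusted];
CFRelease(adjusted);
```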
@@ -801,33 +788,37 @@ - (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result
 
 - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result {
   if ([@"availableCameras" isEqualToString:call.method]) {
-    AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
-        discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
-                              mediaType:AVMediaTypeVideo
-                               position:AVCaptureDevicePositionUnspecified];
-    NSArray<AVCaptureDevice *> *devices = discoverySession.devices;
-    NSMutableArray<NSDictionary<NSString *, NSObject *> *> *reply =
-        [[NSMutableArray alloc] initWithCapacity:devices.count];
-    for (AVCaptureDevice *device in devices) {
-      NSString *lensFacing;
-      switch ([device position]) {
-        case AVCaptureDevicePositionBack:
-          lensFacing = @"back";
-          break;
-        case AVCaptureDevicePositionFront:
-          lensFacing = @"front";
-          break;
-        case AVCaptureDevicePositionUnspecified:
-          lensFacing = @"external";
-          break;
+    if (@available(iOS 10.0, *)) {
+      AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
+          discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
+                                mediaType:AVMediaTypeVideo
+                                 position:AVCaptureDevicePositionUnspecified];
+      NSArray<AVCaptureDevice *> *devices = discoverySession.devices;
+      NSMutableArray<NSDictionary<NSString *, NSObject *> *> *reply =
+          [[NSMutableArray alloc] initWithCapacity:devices.count];
+      for (AVCaptureDevice *device in devices) {
+        NSString *lensFacing;
+        switch ([device position]) {
+          case AVCaptureDevicePositionBack:
+            lensFacing = @"back";
+            break;
+          case AVCaptureDevicePositionFront:
+            lensFacing = @"front";
+            break;
+          case AVCaptureDevicePositionUnspecified:
+            lensFacing = @"external";
+            break;
+        }
+        [reply addObject:@{
+          @"name" : [device uniqueID],
+          @"lensFacing" : lensFacing,
+          @"sensorOrientation" : @90,
+        }];
       }
-      [reply addObject:@{
-        @"name" : [device uniqueID],
-        @"lensFacing" : lensFacing,
-        @"sensorOrientation" : @90,
-      }];
+      result(reply);
+    } else {
+      result(FlutterMethodNotImplemented);
     }
-    result(reply);
   } else if ([@"initialize" isEqualToString:call.method]) {
     NSString *cameraName = call.arguments[@"cameraName"];
     NSString *resolutionPreset = call.arguments[@"resolutionPreset"];
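Fallback note: `AVCaptureDeviceDiscoverySession` only exists on iOS 10+, so the pre-iOS-10 path now answers with `FlutterMethodNotImplemented`, which surfaces on the Dart side as a `MissingPluginException` rather than a native crash. A condensed handler showing the same guard-or-decline shape (the `handleAvailableCameras:` method name is hypothetical, and the real handler returns per-device maps, not a count):

```objc
- (void)handleAvailableCameras:(FlutterResult)result {
  if (@available(iOS 10.0, *)) {
    AVCaptureDeviceDiscoverySession *discovery = [AVCaptureDeviceDiscoverySession
        discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
                              mediaType:AVMediaTypeVideo
                               position:AVCaptureDevicePositionUnspecified];
    result(@(discovery.devices.count));
  } else {
    // Dart receives MissingPluginException instead of a native crash.
    result(FlutterMethodNotImplemented);
  }
}
```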
@@ -846,8 +837,9 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result
     }
     int64_t textureId = [_registry registerTexture:cam];
     _camera = cam;
+    __weak CameraPlugin *weakSelf = self;
     cam.onFrameAvailable = ^{
-      [_registry textureFrameAvailable:textureId];
+      [weakSelf.registry textureFrameAvailable:textureId];
     };
     FlutterEventChannel *eventChannel = [FlutterEventChannel
         eventChannelWithName:[NSString
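Retain-cycle note: the old block read the `_registry` ivar, which implicitly captures `self` strongly; since the plugin (indirectly) owns the camera and the camera owns the block, that formed a cycle. Capturing a `__weak` pointer breaks it. A slightly more defensive variant of the same fix, assuming the `registry` property used above:

```objc
__weak typeof(self) weakSelf = self;
cam.onFrameAvailable = ^{
  // Promote to a strong reference for the duration of the callback.
  __strong typeof(weakSelf) strongSelf = weakSelf;
  if (!strongSelf) return;  // plugin already deallocated
  [strongSelf.registry textureFrameAvailable:textureId];
};
```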
@@ -880,9 +872,12 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result
   } else {
     NSDictionary *argsMap = call.arguments;
     NSUInteger textureId = ((NSNumber *)argsMap[@"textureId"]).unsignedIntegerValue;
-
     if ([@"takePicture" isEqualToString:call.method]) {
-      [_camera captureToFile:call.arguments[@"path"] result:result];
+      if (@available(iOS 10.0, *)) {
+        [_camera captureToFile:call.arguments[@"path"] result:result];
+      } else {
+        result(FlutterMethodNotImplemented);
+      }
     } else if ([@"dispose" isEqualToString:call.method]) {
       [_registry unregisterTexture:textureId];
       [_camera close];