2 changes: 1 addition & 1 deletion .cirrus.yml
@@ -157,7 +157,7 @@ task:
- find . -name "*.podspec" | xargs grep -l "osx" | xargs rm
# Skip the dummy podspecs used to placate the tool.
- find . -name "*_web*.podspec" -o -name "*_mac*.podspec" | xargs rm
- ./script/incremental_build.sh podspecs --no-analyze camera --ignore-warnings camera
- ./script/incremental_build.sh podspecs
- name: build-ipas+drive-examples
env:
PATH: $PATH:/usr/local/bin
6 changes: 5 additions & 1 deletion packages/camera/CHANGELOG.md
@@ -1,10 +1,14 @@
## 0.5.8+5

* Fix compilation/availability issues on iOS.

## 0.5.8+4

* Fixed bug caused by casting a `CameraAccessException` on Android.

## 0.5.8+3

* Fix bug in usage example in README.md
* Fix bug in usage example in README.md

## 0.5.8+2

115 changes: 55 additions & 60 deletions packages/camera/ios/Classes/CameraPlugin.m
@@ -19,11 +19,6 @@ @interface FLTSavePhotoDelegate : NSObject <AVCapturePhotoCaptureDelegate>
@property(readonly, nonatomic) FlutterResult result;
@property(readonly, nonatomic) CMMotionManager *motionManager;
@property(readonly, nonatomic) AVCaptureDevicePosition cameraPosition;

- initWithPath:(NSString *)filename
result:(FlutterResult)result
motionManager:(CMMotionManager *)motionManager
cameraPosition:(AVCaptureDevicePosition)cameraPosition;
@end

@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
@@ -68,7 +63,7 @@ - (void)captureOutput:(AVCapturePhotoOutput *)output
previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
error:(NSError *)error {
error:(NSError *)error API_AVAILABLE(ios(10)) {
selfReference = nil;
if (error) {
_result(getFlutterError(error));
@@ -160,14 +155,14 @@ @interface FLTCam : NSObject <FlutterTexture,
AVCaptureAudioDataOutputSampleBufferDelegate,
FlutterStreamHandler>
@property(readonly, nonatomic) int64_t textureId;
@property(nonatomic, copy) void (^onFrameAvailable)();
@property(nonatomic, copy) void (^onFrameAvailable)(void);
@property BOOL enableAudio;
@property(nonatomic) FlutterEventChannel *eventChannel;
@property(nonatomic) FLTImageStreamHandler *imageStreamHandler;
@property(nonatomic) FlutterEventSink eventSink;
@property(readonly, nonatomic) AVCaptureSession *captureSession;
@property(readonly, nonatomic) AVCaptureDevice *captureDevice;
@property(readonly, nonatomic) AVCapturePhotoOutput *capturePhotoOutput;
@property(readonly, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10));
@property(readonly, nonatomic) AVCaptureVideoDataOutput *captureVideoOutput;
@property(readonly, nonatomic) AVCaptureInput *captureVideoInput;
@property(readonly) CVPixelBufferRef volatile latestPixelBuffer;
@@ -192,19 +187,6 @@ @interface FLTCam : NSObject <FlutterTexture,
@property(assign, nonatomic) CMTime audioTimeOffset;
@property(nonatomic) CMMotionManager *motionManager;
@property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
- (instancetype)initWithCameraName:(NSString *)cameraName
resolutionPreset:(NSString *)resolutionPreset
enableAudio:(BOOL)enableAudio
dispatchQueue:(dispatch_queue_t)dispatchQueue
error:(NSError **)error;

- (void)start;
- (void)stop;
- (void)startVideoRecordingAtPath:(NSString *)path result:(FlutterResult)result;
- (void)stopVideoRecordingWithResult:(FlutterResult)result;
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger;
- (void)stopImageStream;
- (void)captureToFile:(NSString *)filename result:(FlutterResult)result;
@end

@implementation FLTCam {
@@ -254,9 +236,12 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
[_captureSession addInputWithNoConnections:_captureVideoInput];
[_captureSession addOutputWithNoConnections:_captureVideoOutput];
[_captureSession addConnection:connection];
_capturePhotoOutput = [AVCapturePhotoOutput new];
[_capturePhotoOutput setHighResolutionCaptureEnabled:YES];
[_captureSession addOutput:_capturePhotoOutput];

if (@available(iOS 10.0, *)) {
_capturePhotoOutput = [AVCapturePhotoOutput new];
[_capturePhotoOutput setHighResolutionCaptureEnabled:YES];
[_captureSession addOutput:_capturePhotoOutput];
}
_motionManager = [[CMMotionManager alloc] init];
[_motionManager startAccelerometerUpdates];

@@ -272,7 +257,7 @@ - (void)stop {
[_captureSession stopRunning];
}

- (void)captureToFile:(NSString *)path result:(FlutterResult)result {
- (void)captureToFile:(NSString *)path result:(FlutterResult)result API_AVAILABLE(ios(10)) {
AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
if (_resolutionPreset == max) {
[settings setHighResolutionPhotoEnabled:YES];
@@ -288,19 +273,21 @@ - (void)setCaptureSessionPreset:(ResolutionPreset)resolutionPreset {
- (void)setCaptureSessionPreset:(ResolutionPreset)resolutionPreset {
switch (resolutionPreset) {
case max:
case ultraHigh:
if (@available(iOS 9.0, *)) {
if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
_captureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
_previewSize = CGSizeMake(3840, 2160);
break;
}
}
if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
_captureSession.sessionPreset = AVCaptureSessionPresetHigh;
_previewSize =
CGSizeMake(_captureDevice.activeFormat.highResolutionStillImageDimensions.width,
_captureDevice.activeFormat.highResolutionStillImageDimensions.height);
break;
}
case ultraHigh:
if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
_captureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
_previewSize = CGSizeMake(3840, 2160);
break;
}
case veryHigh:
if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
_captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
@@ -495,7 +482,7 @@ - (void)captureOutput:(AVCaptureOutput *)output
}
}

- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset {
- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset CF_RETURNS_RETAINED {
CMItemCount count;
CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
@@ -801,33 +788,37 @@ - (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result

- (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result {
if ([@"availableCameras" isEqualToString:call.method]) {
AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionUnspecified];
NSArray<AVCaptureDevice *> *devices = discoverySession.devices;
NSMutableArray<NSDictionary<NSString *, NSObject *> *> *reply =
[[NSMutableArray alloc] initWithCapacity:devices.count];
for (AVCaptureDevice *device in devices) {
NSString *lensFacing;
switch ([device position]) {
case AVCaptureDevicePositionBack:
lensFacing = @"back";
break;
case AVCaptureDevicePositionFront:
lensFacing = @"front";
break;
case AVCaptureDevicePositionUnspecified:
lensFacing = @"external";
break;
if (@available(iOS 10.0, *)) {
AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionUnspecified];
NSArray<AVCaptureDevice *> *devices = discoverySession.devices;
NSMutableArray<NSDictionary<NSString *, NSObject *> *> *reply =
[[NSMutableArray alloc] initWithCapacity:devices.count];
for (AVCaptureDevice *device in devices) {
NSString *lensFacing;
switch ([device position]) {
case AVCaptureDevicePositionBack:
lensFacing = @"back";
break;
case AVCaptureDevicePositionFront:
lensFacing = @"front";
break;
case AVCaptureDevicePositionUnspecified:
lensFacing = @"external";
break;
}
[reply addObject:@{
@"name" : [device uniqueID],
@"lensFacing" : lensFacing,
@"sensorOrientation" : @90,
}];
}
[reply addObject:@{
@"name" : [device uniqueID],
@"lensFacing" : lensFacing,
@"sensorOrientation" : @90,
}];
result(reply);
} else {
result(FlutterMethodNotImplemented);
Comment on lines +819 to +820

Member: I think what you had before would have fallen through to FlutterMethodNotImplemented, whatever you prefer though.

Contributor Author: Yeah, but it'd make the other else blocks not run on iOS 10+ :\ (a sketch of the resulting dispatch shape follows this file's diff).

}
result(reply);
} else if ([@"initialize" isEqualToString:call.method]) {
NSString *cameraName = call.arguments[@"cameraName"];
NSString *resolutionPreset = call.arguments[@"resolutionPreset"];
@@ -846,8 +837,9 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re
}
int64_t textureId = [_registry registerTexture:cam];
_camera = cam;
__weak CameraPlugin *weakSelf = self;
cam.onFrameAvailable = ^{
[_registry textureFrameAvailable:textureId];
[weakSelf.registry textureFrameAvailable:textureId];
};
FlutterEventChannel *eventChannel = [FlutterEventChannel
eventChannelWithName:[NSString
@@ -880,9 +872,12 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re
} else {
NSDictionary *argsMap = call.arguments;
NSUInteger textureId = ((NSNumber *)argsMap[@"textureId"]).unsignedIntegerValue;

if ([@"takePicture" isEqualToString:call.method]) {
[_camera captureToFile:call.arguments[@"path"] result:result];
if (@available(iOS 10.0, *)) {
[_camera captureToFile:call.arguments[@"path"] result:result];
} else {
result(FlutterMethodNotImplemented);
}
} else if ([@"dispose" isEqualToString:call.method]) {
[_registry unregisterTexture:textureId];
[_camera close];
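A minimal sketch of the dispatch shape discussed in the review thread above, assuming a hypothetical class name (AvailabilitySketch) and placeholder reply values rather than the plugin's real camera enumeration: each branch that needs an iOS 10-only API guards only its own body with @available, so the remaining else-if branches stay reachable on every iOS version instead of relying on a single fall-through to FlutterMethodNotImplemented.

#import <Flutter/Flutter.h>

// Sketch only: illustrates the per-branch availability guard, not the
// plugin's full implementation.
@interface AvailabilitySketch : NSObject
- (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result;
@end

@implementation AvailabilitySketch
- (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result {
  if ([@"availableCameras" isEqualToString:call.method]) {
    if (@available(iOS 10.0, *)) {
      // iOS 10+ path: the real code enumerates cameras with
      // AVCaptureDeviceDiscoverySession; an empty list stands in here.
      result(@[]);
    } else {
      // Only this one method reports "not implemented" on iOS 9.
      result(FlutterMethodNotImplemented);
    }
  } else if ([@"initialize" isEqualToString:call.method]) {
    // Still reachable on iOS 9 and iOS 10+, which is why the explicit else
    // above was kept instead of a single outer fall-through.
    result(nil);
  } else {
    result(FlutterMethodNotImplemented);
  }
}
@end

This keeps the availability decision local to the one call site that needs AVCaptureDeviceDiscoverySession, matching the author's reply in the thread above.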
16 changes: 9 additions & 7 deletions packages/camera/ios/camera.podspec
@@ -4,19 +4,21 @@
Pod::Spec.new do |s|
s.name = 'camera'
s.version = '0.0.1'
s.summary = 'A new flutter plugin project.'
s.summary = 'Flutter Camera'
s.description = <<-DESC
A new flutter plugin project.
A Flutter plugin to use the camera from your Flutter app.
DESC
s.homepage = 'http://example.com'
s.license = { :file => '../LICENSE' }
s.author = { 'Your Company' => 'email@example.com' }
s.source = { :path => '.' }
s.homepage = 'https://github.com/flutter/plugins'
s.license = { :type => 'BSD', :file => '../LICENSE' }
s.author = { 'Flutter Dev Team' => 'flutter-dev@googlegroups.com' }
s.source = { :http => 'https://github.com/flutter/plugins/tree/master/packages/camera' }
s.documentation_url = 'https://pub.dev/packages/camera'
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'

s.platform = :ios, '8.0'
s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS' => 'armv7 arm64 x86_64' }
s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' }

s.test_spec 'Tests' do |test_spec|
test_spec.source_files = 'Tests/**/*'
2 changes: 1 addition & 1 deletion packages/camera/pubspec.yaml
@@ -2,7 +2,7 @@ name: camera
description: A Flutter plugin for getting information about and controlling the
camera on Android and iOS. Supports previewing the camera feed, capturing images, capturing video,
and streaming image buffers to dart.
version: 0.5.8+4
version: 0.5.8+5

homepage: https://github.com/flutter/plugins/tree/master/packages/camera
