diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
index e8bd622b73dc..011d9c59dc04 100644
--- a/packages/camera/camera_avfoundation/CHANGELOG.md
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.9.13+5
+
+* Ignores audio samples until the first video sample arrives.
+
 ## 0.9.13+4

 * Adds pub topics to package metadata.
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
index f2d46114a0c5..0c7e62f9fbb5 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.h
@@ -15,4 +15,8 @@ extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessi
 /// @return a test sample buffer.
 extern CMSampleBufferRef FLTCreateTestSampleBuffer(void);

+/// Creates a test audio sample buffer.
+/// @return a test audio sample buffer.
+extern CMSampleBufferRef FLTCreateTestAudioSampleBuffer(void);
+
 NS_ASSUME_NONNULL_END
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
index b42aa34e2a17..bb98f7cf71e9 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
@@ -47,3 +47,22 @@ CMSampleBufferRef FLTCreateTestSampleBuffer(void) {
   CFRelease(formatDescription);
   return sampleBuffer;
 }
+
+CMSampleBufferRef FLTCreateTestAudioSampleBuffer(void) {
+  CMBlockBufferRef blockBuffer;
+  CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, NULL, 100, kCFAllocatorDefault, NULL, 0,
+                                     100, kCMBlockBufferAssureMemoryNowFlag, &blockBuffer);
+
+  CMFormatDescriptionRef formatDescription;
+  AudioStreamBasicDescription basicDescription = {44100, kAudioFormatLinearPCM, 0, 1, 1, 1, 1, 8};
+  CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &basicDescription, 0, NULL, 0, NULL, NULL,
+                                 &formatDescription);
+
+  CMSampleBufferRef sampleBuffer;
+  CMAudioSampleBufferCreateReadyWithPacketDescriptions(
+      kCFAllocatorDefault, blockBuffer, formatDescription, 1, kCMTimeZero, NULL, &sampleBuffer);
+
+  CFRelease(blockBuffer);
+  CFRelease(formatDescription);
+  return sampleBuffer;
+}
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
index 94426ab3aeb8..6f0a4edab080 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/FLTCamSampleBufferTests.m
@@ -38,4 +38,63 @@ - (void)testCopyPixelBuffer {
   CFRelease(deliveriedPixelBuffer);
 }

+- (void)testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples {
+  FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("testing", NULL));
+  CMSampleBufferRef videoSample = FLTCreateTestSampleBuffer();
+  CMSampleBufferRef audioSample = FLTCreateTestAudioSampleBuffer();
+
+  id connectionMock = OCMClassMock([AVCaptureConnection class]);
+
+  id writerMock = OCMClassMock([AVAssetWriter class]);
+  OCMStub([writerMock alloc]).andReturn(writerMock);
+  OCMStub([writerMock initWithURL:OCMOCK_ANY fileType:OCMOCK_ANY error:[OCMArg setTo:nil]])
+      .andReturn(writerMock);
+  __block AVAssetWriterStatus status = AVAssetWriterStatusUnknown;
+  OCMStub([writerMock startWriting]).andDo(^(NSInvocation *invocation) {
+    status = AVAssetWriterStatusWriting;
+  });
+  OCMStub([writerMock status]).andDo(^(NSInvocation *invocation) {
+    [invocation setReturnValue:&status];
+  });
+
+  __block NSArray *writtenSamples = @[];
+
+  id videoMock = OCMClassMock([AVAssetWriterInputPixelBufferAdaptor class]);
+  OCMStub([videoMock assetWriterInputPixelBufferAdaptorWithAssetWriterInput:OCMOCK_ANY
+                                                sourcePixelBufferAttributes:OCMOCK_ANY])
+      .andReturn(videoMock);
+  OCMStub([videoMock appendPixelBuffer:[OCMArg anyPointer] withPresentationTime:kCMTimeZero])
+      .ignoringNonObjectArgs()
+      .andDo(^(NSInvocation *invocation) {
+        writtenSamples = [writtenSamples arrayByAddingObject:@"video"];
+      });
+
+  id audioMock = OCMClassMock([AVAssetWriterInput class]);
+  OCMStub([audioMock assetWriterInputWithMediaType:[OCMArg isEqual:AVMediaTypeAudio]
+                                    outputSettings:OCMOCK_ANY])
+      .andReturn(audioMock);
+  OCMStub([audioMock isReadyForMoreMediaData]).andReturn(YES);
+  OCMStub([audioMock appendSampleBuffer:[OCMArg anyPointer]]).andDo(^(NSInvocation *invocation) {
+    writtenSamples = [writtenSamples arrayByAddingObject:@"audio"];
+  });
+
+  FLTThreadSafeFlutterResult *result =
+      [[FLTThreadSafeFlutterResult alloc] initWithResult:^(id result){
+      }];
+  [cam startVideoRecordingWithResult:result];
+
+  [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
+  [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
+  [cam captureOutput:cam.captureVideoOutput
+      didOutputSampleBuffer:videoSample
+             fromConnection:connectionMock];
+  [cam captureOutput:nil didOutputSampleBuffer:audioSample fromConnection:connectionMock];
+
+  NSArray *expectedSamples = @[ @"video", @"audio" ];
+  XCTAssertEqualObjects(writtenSamples, expectedSamples, @"First appended sample must be video.");
+
+  CFRelease(videoSample);
+  CFRelease(audioSample);
+}
+
 @end
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
index 31bffc917947..e0f03000d458 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
@@ -500,6 +500,12 @@ - (void)captureOutput:(AVCaptureOutput *)output
       return;
     }

+    // ignore audio samples until the first video sample arrives to avoid black frames
+    // https://github.com/flutter/flutter/issues/57831
+    if (_videoWriter.status != AVAssetWriterStatusWriting && output != _captureVideoOutput) {
+      return;
+    }
+
     CFRetain(sampleBuffer);
     CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml
index d986235f8e5c..e13e957cad7b 100644
--- a/packages/camera/camera_avfoundation/pubspec.yaml
+++ b/packages/camera/camera_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@ name: camera_avfoundation
 description: iOS implementation of the camera plugin.
 repository: https://github.com/flutter/packages/tree/main/packages/camera/camera_avfoundation
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.13+4
+version: 0.9.13+5
 environment:
   sdk: ">=2.19.0 <4.0.0"
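
For reference, the guard added to FLTCam.m above reduces to a single predicate over the asset writer's status and the capture output that produced the buffer. The sketch below is illustrative only and not part of the patch; FLTShouldDropSampleBuffer is a hypothetical helper name, and its parameters stand in for the _videoWriter.status, output, and _captureVideoOutput values used in the real callback.

#import <AVFoundation/AVFoundation.h>

// Returns YES when a sample buffer should be ignored: the writer has not started
// writing yet (startWriting is only triggered once the first video sample is
// processed) and this buffer did not come from the video output. Per the comment
// in the patch, appending such early audio samples caused black frames at the
// start of recordings (https://github.com/flutter/flutter/issues/57831).
static BOOL FLTShouldDropSampleBuffer(AVAssetWriterStatus writerStatus,
                                      AVCaptureOutput *output,
                                      AVCaptureOutput *captureVideoOutput) {
  return writerStatus != AVAssetWriterStatusWriting && output != captureVideoOutput;
}

The new testDidOutputSampleBufferIgnoreAudioSamplesBeforeVideoSamples test exercises this path end to end: two audio buffers delivered before any video buffer are dropped, and only the first video buffer followed by the next audio buffer reach the writer inputs.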