diff --git a/packages/camera/camera_android/CHANGELOG.md b/packages/camera/camera_android/CHANGELOG.md
index 4609b402058a..f7f0b2a0343a 100644
--- a/packages/camera/camera_android/CHANGELOG.md
+++ b/packages/camera/camera_android/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.10.5
+
+* Allows the camera to be switched while recording video.
+
## 0.10.4
* Temporarily fixes issue with requested video profiles being null by falling back to deprecated behavior in that case.
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
index b02d6864b5b7..c2255e23273a 100644
--- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
@@ -115,13 +115,28 @@ class Camera
* Holds all of the camera features/settings and will be used to update the request builder when
* one changes.
*/
- private final CameraFeatures cameraFeatures;
+ private CameraFeatures cameraFeatures;
+
+ private String imageFormatGroup;
+
+ /**
+ * Takes an input/output surface and orients the recording correctly. This is needed because
+ * switching cameras while recording causes the wrong orientation.
+ */
+ private VideoRenderer videoRenderer;
+
+ /**
+ * The lens direction the camera was facing when recording started. Used to detect whether the
+ * camera has been flipped mid-recording, in which case the rendered video must be rotated an
+ * extra 180 degrees.
+ */
+ private int initialCameraFacing;
private final SurfaceTextureEntry flutterTexture;
+ private final ResolutionPreset resolutionPreset;
private final boolean enableAudio;
private final Context applicationContext;
private final DartMessenger dartMessenger;
- private final CameraProperties cameraProperties;
+ private CameraProperties cameraProperties;
private final CameraFeatureFactory cameraFeatureFactory;
private final Activity activity;
/** A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture. */
@@ -211,6 +226,7 @@ public Camera(
this.applicationContext = activity.getApplicationContext();
this.cameraProperties = cameraProperties;
this.cameraFeatureFactory = cameraFeatureFactory;
+ this.resolutionPreset = resolutionPreset;
this.cameraFeatures =
CameraFeatures.init(
cameraFeatureFactory, cameraProperties, activity, dartMessenger, resolutionPreset);
@@ -251,6 +267,7 @@ private void prepareMediaRecorder(String outputFilePath) throws IOException {
if (mediaRecorder != null) {
mediaRecorder.release();
}
+ closeRenderer();
final PlatformChannel.DeviceOrientation lockedOrientation =
((SensorOrientationFeature) cameraFeatures.getSensorOrientation())
@@ -279,6 +296,7 @@ private void prepareMediaRecorder(String outputFilePath) throws IOException {
@SuppressLint("MissingPermission")
public void open(String imageFormatGroup) throws CameraAccessException {
+ this.imageFormatGroup = imageFormatGroup;
final ResolutionFeature resolutionFeature = cameraFeatures.getResolution();
if (!resolutionFeature.checkIsSupported()) {
@@ -323,14 +341,16 @@ public void onOpened(@NonNull CameraDevice device) {
cameraDevice = new DefaultCameraDeviceWrapper(device);
try {
startPreview();
+ if (!recordingVideo) // Only send the initialization event if we weren't already recording and switching cameras.
dartMessenger.sendCameraInitializedEvent(
- resolutionFeature.getPreviewSize().getWidth(),
- resolutionFeature.getPreviewSize().getHeight(),
- cameraFeatures.getExposureLock().getValue(),
- cameraFeatures.getAutoFocus().getValue(),
- cameraFeatures.getExposurePoint().checkIsSupported(),
- cameraFeatures.getFocusPoint().checkIsSupported());
- } catch (CameraAccessException e) {
+ resolutionFeature.getPreviewSize().getWidth(),
+ resolutionFeature.getPreviewSize().getHeight(),
+ cameraFeatures.getExposureLock().getValue(),
+ cameraFeatures.getAutoFocus().getValue(),
+ cameraFeatures.getExposurePoint().checkIsSupported(),
+ cameraFeatures.getFocusPoint().checkIsSupported());
+
+ } catch (CameraAccessException | InterruptedException e) {
dartMessenger.sendCameraErrorEvent(e.getMessage());
close();
}
@@ -340,7 +360,8 @@ public void onOpened(@NonNull CameraDevice device) {
public void onClosed(@NonNull CameraDevice camera) {
Log.i(TAG, "open | onClosed");
- // Prevents calls to methods that would otherwise result in IllegalStateException exceptions.
+ // Prevents calls to methods that would otherwise result in IllegalStateException
+ // exceptions.
cameraDevice = null;
closeCaptureSession();
dartMessenger.sendCameraClosingEvent();
@@ -756,7 +777,7 @@ public void startVideoRecording(
if (imageStreamChannel != null) {
setStreamHandler(imageStreamChannel);
}
-
+ initialCameraFacing = cameraProperties.getLensFacing();
recordingVideo = true;
try {
startCapture(true, imageStreamChannel != null);
@@ -768,6 +789,13 @@ public void startVideoRecording(
}
}
+ private void closeRenderer() {
+ if (videoRenderer != null) {
+ videoRenderer.close();
+ videoRenderer = null;
+ }
+ }
+
public void stopVideoRecording(@NonNull final Result result) {
if (!recordingVideo) {
result.success(null);
@@ -778,6 +806,7 @@ public void stopVideoRecording(@NonNull final Result result) {
cameraFeatureFactory.createAutoFocusFeature(cameraProperties, false));
recordingVideo = false;
try {
+ closeRenderer();
captureSession.abortCaptures();
mediaRecorder.stop();
} catch (CameraAccessException | IllegalStateException e) {
@@ -786,7 +815,7 @@ public void stopVideoRecording(@NonNull final Result result) {
mediaRecorder.reset();
try {
startPreview();
- } catch (CameraAccessException | IllegalStateException e) {
+ } catch (CameraAccessException | IllegalStateException | InterruptedException e) {
result.error("videoRecordingFailed", e.getMessage(), null);
return;
}
@@ -1070,13 +1099,51 @@ public void resumePreview() {
null, (code, message) -> dartMessenger.sendCameraErrorEvent(message));
}
- public void startPreview() throws CameraAccessException {
+ public void startPreview() throws CameraAccessException, InterruptedException {
+ // If recording is already in progress, the camera is being flipped, so send the capture
+ // through the VideoRenderer to keep the correct orientation.
+ if (recordingVideo) {
+ startPreviewWithVideoRendererStream();
+ } else {
+ startRegularPreview();
+ }
+ }
+
+ private void startRegularPreview() throws CameraAccessException {
if (pictureImageReader == null || pictureImageReader.getSurface() == null) return;
Log.i(TAG, "startPreview");
-
createCaptureSession(CameraDevice.TEMPLATE_PREVIEW, pictureImageReader.getSurface());
}
+ private void startPreviewWithVideoRendererStream()
+ throws CameraAccessException, InterruptedException {
+ if (videoRenderer == null) return;
+
+ // get rotation for rendered video
+ final PlatformChannel.DeviceOrientation lockedOrientation =
+ ((SensorOrientationFeature) cameraFeatures.getSensorOrientation())
+ .getLockedCaptureOrientation();
+ DeviceOrientationManager orientationManager =
+ cameraFeatures.getSensorOrientation().getDeviceOrientationManager();
+
+ int rotation = 0;
+ if (orientationManager != null) {
+ rotation =
+ lockedOrientation == null
+ ? orientationManager.getVideoOrientation()
+ : orientationManager.getVideoOrientation(lockedOrientation);
+ }
+
+ if (cameraProperties.getLensFacing() != initialCameraFacing) {
+
+ // If the new camera is facing the opposite direction from the camera that started the
+ // recording, the rotation should be flipped 180 degrees.
+ rotation = (rotation + 180) % 360;
+ }
+ videoRenderer.setRotation(rotation);
+
+ createCaptureSession(CameraDevice.TEMPLATE_RECORD, videoRenderer.getInputSurface());
+ }
+
public void startPreviewWithImageStream(EventChannel imageStreamChannel)
throws CameraAccessException {
setStreamHandler(imageStreamChannel);
@@ -1200,17 +1267,7 @@ private void closeCaptureSession() {
public void close() {
Log.i(TAG, "close");
- if (cameraDevice != null) {
- cameraDevice.close();
- cameraDevice = null;
-
- // Closing the CameraDevice without closing the CameraCaptureSession is recommended
- // for quickly closing the camera:
- // https://developer.android.com/reference/android/hardware/camera2/CameraCaptureSession#close()
- captureSession = null;
- } else {
- closeCaptureSession();
- }
+ stopAndReleaseCamera();
if (pictureImageReader != null) {
pictureImageReader.close();
@@ -1229,6 +1286,66 @@ public void close() {
stopBackgroundThread();
}
+ private void stopAndReleaseCamera() {
+ if (cameraDevice != null) {
+ cameraDevice.close();
+ cameraDevice = null;
+
+ // Closing the CameraDevice without closing the CameraCaptureSession is recommended
+ // for quickly closing the camera:
+ // https://developer.android.com/reference/android/hardware/camera2/CameraCaptureSession#close()
+ captureSession = null;
+ } else {
+ closeCaptureSession();
+ }
+ }
+
+ private void prepareVideoRenderer() {
+ if (videoRenderer != null) return;
+ final ResolutionFeature resolutionFeature = cameraFeatures.getResolution();
+
+ // handle videoRenderer errors
+ Thread.UncaughtExceptionHandler videoRendererUncaughtExceptionHandler =
+ new Thread.UncaughtExceptionHandler() {
+ @Override
+ public void uncaughtException(Thread thread, Throwable ex) {
+ dartMessenger.sendCameraErrorEvent(
+ "Failed to process frames after camera was flipped.");
+ }
+ };
+
+ videoRenderer =
+ new VideoRenderer(
+ mediaRecorder.getSurface(),
+ resolutionFeature.getCaptureSize().getWidth(),
+ resolutionFeature.getCaptureSize().getHeight(),
+ videoRendererUncaughtExceptionHandler);
+ }
+
+ public void setDescriptionWhileRecording(
+ @NonNull final Result result, CameraProperties properties) {
+
+ if (!recordingVideo) {
+ result.error("setDescriptionWhileRecordingFailed", "Device was not recording", null);
+ return;
+ }
+
+ stopAndReleaseCamera();
+ prepareVideoRenderer();
+ cameraProperties = properties;
+ cameraFeatures =
+ CameraFeatures.init(
+ cameraFeatureFactory, cameraProperties, activity, dartMessenger, resolutionPreset);
+ cameraFeatures.setAutoFocus(
+ cameraFeatureFactory.createAutoFocusFeature(cameraProperties, true));
+ try {
+ open(imageFormatGroup);
+ } catch (CameraAccessException e) {
+ result.error("setDescriptionWhileRecordingFailed", e.getMessage(), null);
+ }
+ result.success(null);
+ }
+
public void dispose() {
Log.i(TAG, "dispose");
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
index 432344ade8cd..aad62bbaba85 100644
--- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
@@ -354,6 +354,18 @@ public void onMethodCall(@NonNull MethodCall call, @NonNull final Result result)
result.success(null);
break;
}
+ case "setDescriptionWhileRecording":
+ {
+ try {
+ String cameraName = call.argument("cameraName");
+ CameraProperties cameraProperties =
+ new CameraPropertiesImpl(cameraName, CameraUtils.getCameraManager(activity));
+ camera.setDescriptionWhileRecording(result, cameraProperties);
+ } catch (Exception e) {
+ handleException(e, result);
+ }
+ break;
+ }
case "dispose":
{
if (camera != null) {
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java
new file mode 100644
index 000000000000..b7128373b101
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java
@@ -0,0 +1,365 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static android.os.SystemClock.uptimeMillis;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.GLUtils;
+import android.opengl.Matrix;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Log;
+import android.view.Surface;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Renders video onto a texture after performing a matrix rotation on each frame.
+ *
+ * <p>VideoRenderer is needed because when switching between cameras mid-recording, the orientation
+ * of the recording from the new camera usually becomes flipped. MediaRecorder has
+ * setOrientationHint, but that cannot be called mid recording and therefore isn't useful. Android
+ * Camera2 has no setDisplayOrientation on the camera itself as it is supposed to 'just work' (see
+ * https://stackoverflow.com/questions/33479004/what-is-the-camera2-api-equivalent-of-setdisplayorientation).
+ * Therefore it cannot be used to set the camera's orientation either.
+ *
+ * <p>This leaves the solution of routing the recording through a surface texture and performing
+ * a matrix transformation on it manually to get the correct orientation. This only happens when
+ * setDescription is called mid video recording.
+ */
+public class VideoRenderer {
+
+ private static final String TAG = "VideoRenderer";
+
+ private static final String vertexShaderCode =
+ " precision highp float;\n"
+ + " attribute vec3 vertexPosition;\n"
+ + " attribute vec2 uvs;\n"
+ + " varying vec2 varUvs;\n"
+ + " uniform mat4 texMatrix;\n"
+ + " uniform mat4 mvp;\n"
+ + "\n"
+ + " void main()\n"
+ + " {\n"
+ + " varUvs = (texMatrix * vec4(uvs.x, uvs.y, 0, 1.0)).xy;\n"
+ + " gl_Position = mvp * vec4(vertexPosition, 1.0);\n"
+ + " }";
+
+ private static final String fragmentShaderCode =
+ " #extension GL_OES_EGL_image_external : require\n"
+ + " precision mediump float;\n"
+ + "\n"
+ + " varying vec2 varUvs;\n"
+ + " uniform samplerExternalOES texSampler;\n"
+ + "\n"
+ + " void main()\n"
+ + " {\n"
+ + " vec4 c = texture2D(texSampler, varUvs);\n"
+ + " gl_FragColor = vec4(c.r, c.g, c.b, c.a);\n"
+ + " }";
+
+ private final int[] textureHandles = new int[1];
+
+ private final float[] vertices =
+ new float[] {
+ -1.0f, -1.0f, 0.0f, 0f, 0f, -1.0f, 1.0f, 0.0f, 0f, 1f, 1.0f, 1.0f, 0.0f, 1f, 1f, 1.0f,
+ -1.0f, 0.0f, 1f, 0f
+ };
+
+ private final int[] indices = new int[] {2, 1, 0, 0, 3, 2};
+
+ private int program;
+ private int vertexHandle = 0;
+ private final int[] bufferHandles = new int[2];
+ private int uvsHandle = 0;
+ private int texMatrixHandle = 0;
+ private int mvpHandle = 0;
+
+ EGLDisplay display;
+ EGLContext context;
+ EGLSurface surface;
+ private Thread thread;
+ private final Surface outputSurface;
+ private SurfaceTexture inputSurfaceTexture;
+ private Surface inputSurface;
+
+ private HandlerThread surfaceTextureFrameAvailableHandler;
+ private final Object surfaceTextureAvailableFrameLock = new Object();
+ private Boolean surfaceTextureFrameAvailable = false;
+
+ private final int recordingWidth;
+ private final int recordingHeight;
+ private int rotation = 0;
+
+ private final Object lock = new Object();
+
+ private final Thread.UncaughtExceptionHandler uncaughtExceptionHandler;
+
+ /** Gets surface for input. Blocks until surface is ready. */
+ public Surface getInputSurface() throws InterruptedException {
+ synchronized (lock) {
+ while (inputSurface == null) {
+ lock.wait();
+ }
+ }
+ return inputSurface;
+ }
+
+ public VideoRenderer(
+ Surface outputSurface,
+ int recordingWidth,
+ int recordingHeight,
+ Thread.UncaughtExceptionHandler uncaughtExceptionHandler) {
+ this.outputSurface = outputSurface;
+ this.recordingHeight = recordingHeight;
+ this.recordingWidth = recordingWidth;
+ this.uncaughtExceptionHandler = uncaughtExceptionHandler;
+ startOpenGL();
+ Log.d(TAG, "VideoRenderer setup complete");
+ }
+
+ /** Stop rendering and cleanup resources. */
+ public void close() {
+ thread.interrupt();
+ surfaceTextureFrameAvailableHandler.quitSafely();
+ cleanupOpenGL();
+ inputSurfaceTexture.release();
+ }
+
+ private void cleanupOpenGL() {
+ GLES20.glDeleteBuffers(2, bufferHandles, 0);
+ GLES20.glDeleteTextures(1, textureHandles, 0);
+ EGL14.eglDestroyContext(display, context);
+ EGL14.eglDestroySurface(display, surface);
+ GLES20.glDeleteProgram(program);
+ }
+
+ /** Configures OpenGL. Must be called on the same thread that calls draw. */
+ private void configureOpenGL() {
+ synchronized (lock) {
+ display = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (display == EGL14.EGL_NO_DISPLAY)
+ throw new RuntimeException(
+ "eglDisplay == EGL14.EGL_NO_DISPLAY: "
+ + GLUtils.getEGLErrorString(EGL14.eglGetError()));
+
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(display, version, 0, version, 1))
+ throw new RuntimeException(
+ "eglInitialize(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
+
+ String eglExtensions = EGL14.eglQueryString(display, EGL14.EGL_EXTENSIONS);
+ if (!eglExtensions.contains("EGL_ANDROID_presentation_time"))
+ throw new RuntimeException(
+ "cannot configure OpenGL. missing EGL_ANDROID_presentation_time");
+
+ int[] attribList =
+ new int[] {
+ EGL14.EGL_RED_SIZE, 8,
+ EGL14.EGL_GREEN_SIZE, 8,
+ EGL14.EGL_BLUE_SIZE, 8,
+ EGL14.EGL_ALPHA_SIZE, 8,
+ EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+ EGLExt.EGL_RECORDABLE_ANDROID, 1,
+ EGL14.EGL_NONE
+ };
+
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(display, attribList, 0, configs, 0, configs.length, numConfigs, 0))
+ throw new RuntimeException(GLUtils.getEGLErrorString(EGL14.eglGetError()));
+
+ int err = EGL14.eglGetError();
+ if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
+
+ int[] ctxAttribs = new int[] {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
+ context = EGL14.eglCreateContext(display, configs[0], EGL14.EGL_NO_CONTEXT, ctxAttribs, 0);
+
+ err = EGL14.eglGetError();
+ if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
+
+ int[] surfaceAttribs = new int[] {EGL14.EGL_NONE};
+
+ surface = EGL14.eglCreateWindowSurface(display, configs[0], outputSurface, surfaceAttribs, 0);
+
+ err = EGL14.eglGetError();
+ if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
+
+ if (!EGL14.eglMakeCurrent(display, surface, surface, context))
+ throw new RuntimeException(
+ "eglMakeCurrent(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
+
+ ByteBuffer vertexBuffer = ByteBuffer.allocateDirect(vertices.length * 4);
+ vertexBuffer.order(ByteOrder.nativeOrder());
+ vertexBuffer.asFloatBuffer().put(vertices);
+ vertexBuffer.asFloatBuffer().position(0);
+
+ ByteBuffer indexBuffer = ByteBuffer.allocateDirect(indices.length * 4);
+ indexBuffer.order(ByteOrder.nativeOrder());
+ indexBuffer.asIntBuffer().put(indices);
+ indexBuffer.position(0);
+
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
+ int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
+
+ program = GLES20.glCreateProgram();
+
+ GLES20.glAttachShader(program, vertexShader);
+ GLES20.glAttachShader(program, fragmentShader);
+ GLES20.glLinkProgram(program);
+
+ deleteShader(vertexShader);
+ deleteShader(fragmentShader);
+
+ vertexHandle = GLES20.glGetAttribLocation(program, "vertexPosition");
+ uvsHandle = GLES20.glGetAttribLocation(program, "uvs");
+ texMatrixHandle = GLES20.glGetUniformLocation(program, "texMatrix");
+ mvpHandle = GLES20.glGetUniformLocation(program, "mvp");
+
+ // Initialize buffers
+ GLES20.glGenBuffers(2, bufferHandles, 0);
+
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0]);
+ GLES20.glBufferData(
+ GLES20.GL_ARRAY_BUFFER, vertices.length * 4, vertexBuffer, GLES20.GL_DYNAMIC_DRAW);
+
+ GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1]);
+ GLES20.glBufferData(
+ GLES20.GL_ELEMENT_ARRAY_BUFFER, indices.length * 4, indexBuffer, GLES20.GL_DYNAMIC_DRAW);
+
+ // Init texture that will receive decoded frames
+ GLES20.glGenTextures(1, textureHandles, 0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureHandles[0]);
+
+ inputSurfaceTexture = new SurfaceTexture(getTexId());
+ inputSurfaceTexture.setDefaultBufferSize(recordingWidth, recordingHeight);
+ surfaceTextureFrameAvailableHandler = new HandlerThread("FrameHandlerThread");
+ surfaceTextureFrameAvailableHandler.start();
+ inputSurface = new Surface(inputSurfaceTexture);
+
+ inputSurfaceTexture.setOnFrameAvailableListener(
+ new SurfaceTexture.OnFrameAvailableListener() {
+ @Override
+ public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+ synchronized (surfaceTextureAvailableFrameLock) {
+ if (surfaceTextureFrameAvailable)
+ Log.w(TAG, "Frame available before processing other frames. dropping frames");
+ surfaceTextureFrameAvailable = true;
+ surfaceTextureAvailableFrameLock.notifyAll();
+ }
+ }
+ },
+ new Handler(surfaceTextureFrameAvailableHandler.getLooper()));
+ lock.notifyAll();
+ }
+ }
+
+ /** Starts and configures Video Renderer. */
+ private void startOpenGL() {
+ Log.d(TAG, "Starting OpenGL Thread");
+ thread =
+ new Thread() {
+ @Override
+ public void run() {
+
+ configureOpenGL();
+
+ try {
+ // Continuously pull frames from the input surface texture and re-render them
+ // with the corrected rotation.
+ while (!Thread.interrupted()) {
+
+ synchronized (surfaceTextureAvailableFrameLock) {
+ while (!surfaceTextureFrameAvailable) {
+ surfaceTextureAvailableFrameLock.wait(500);
+ }
+ surfaceTextureFrameAvailable = false;
+ }
+
+ inputSurfaceTexture.updateTexImage();
+
+ float[] surfaceTextureMatrix = new float[16];
+ inputSurfaceTexture.getTransformMatrix(surfaceTextureMatrix);
+
+ draw(recordingWidth, recordingHeight, surfaceTextureMatrix);
+ }
+ } catch (InterruptedException e) {
+ Log.d(TAG, "thread interrupted while waiting for frames");
+ }
+ }
+ };
+ thread.setUncaughtExceptionHandler(uncaughtExceptionHandler);
+ thread.start();
+ }
+
+ public int getTexId() {
+ return textureHandles[0];
+ }
+
+ public float[] moveMatrix() {
+ float[] m = new float[16];
+ Matrix.setIdentityM(m, 0);
+ Matrix.rotateM(m, 0, rotation, 0, 0, 1);
+ return m;
+ }
+
+ public void setRotation(int rotation) {
+ this.rotation = rotation;
+ }
+
+ private int loadShader(int type, String code) {
+
+ int shader = GLES20.glCreateShader(type);
+
+ GLES20.glShaderSource(shader, code);
+ GLES20.glCompileShader(shader);
+ return shader;
+ }
+
+ private void deleteShader(int shader) {
+ GLES20.glDeleteShader(shader);
+ }
+
+ public void draw(int viewportWidth, int viewportHeight, float[] texMatrix) {
+
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
+ GLES20.glClearColor(0f, 0f, 0f, 0f);
+
+ GLES20.glViewport(0, 0, viewportWidth, viewportHeight);
+
+ GLES20.glUseProgram(program);
+
+ // Pass transformations to shader
+ GLES20.glUniformMatrix4fv(texMatrixHandle, 1, false, texMatrix, 0);
+ GLES20.glUniformMatrix4fv(mvpHandle, 1, false, moveMatrix(), 0);
+
+ // Prepare buffers with vertices and indices & draw
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0]);
+ GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1]);
+
+ GLES20.glEnableVertexAttribArray(vertexHandle);
+ GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 4 * 5, 0);
+
+ GLES20.glEnableVertexAttribArray(uvsHandle);
+ GLES20.glVertexAttribPointer(uvsHandle, 2, GLES20.GL_FLOAT, false, 4 * 5, 3 * 4);
+
+ GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_INT, 0);
+
+ EGLExt.eglPresentationTimeANDROID(display, surface, uptimeMillis() * 1000000);
+ if (!EGL14.eglSwapBuffers(display, surface)) {
+ throw new RuntimeException(
+ "eglSwapBuffers()" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
+ }
+ }
+}
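
The threading in `VideoRenderer` hinges on a single handshake: the `SurfaceTexture` callback (running on `FrameHandlerThread`) sets `surfaceTextureFrameAvailable` under `surfaceTextureAvailableFrameLock` and notifies, while the GL thread waits, clears the flag, and only then calls `updateTexImage()` and `draw(...)`. A stripped-down sketch of just that handshake, with all EGL setup and drawing omitted (names mirror the class above, but this is not the plugin's code):

```java
// Minimal sketch of VideoRenderer's frame-available handshake between the
// SurfaceTexture callback thread (producer) and the GL thread (consumer).
final class FrameHandshakeSketch {
  private final Object lock = new Object();
  private boolean frameAvailable = false;

  // Producer: invoked from SurfaceTexture.OnFrameAvailableListener.
  void onFrameAvailable() {
    synchronized (lock) {
      frameAvailable = true;
      lock.notifyAll();
    }
  }

  // Consumer: one iteration of the GL thread's loop. Throws
  // InterruptedException when close() interrupts the thread.
  void awaitFrame() throws InterruptedException {
    synchronized (lock) {
      while (!frameAvailable) {
        // The 500 ms timeout bounds how long a missed notify could stall the
        // loop; interruption surfaces here as InterruptedException.
        lock.wait(500);
      }
      frameAvailable = false; // consume exactly one frame
    }
  }
}
```

Because the producer only flips a boolean, a frame arriving while the previous one is still being drawn overwrites the signal — exactly the dropped-frame case the `Log.w` in `onFrameAvailable` reports.
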
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
index 9a679017ded2..9a6f7dc20d22 100644
--- a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
@@ -602,6 +602,115 @@ public void resumeVideoRecording_shouldCallPauseWhenRecordingAndOnAPIN() {
verify(mockResult, never()).error(any(), any(), any());
}
+ @Test
+ public void setDescriptionWhileRecording() {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ VideoRenderer mockVideoRenderer = mock(VideoRenderer.class);
+ TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setPrivateField(camera, "videoRenderer", mockVideoRenderer);
+
+ final CameraProperties newCameraProperties = mock(CameraProperties.class);
+ camera.setDescriptionWhileRecording(mockResult, newCameraProperties);
+
+ verify(mockResult, times(1)).success(null);
+ verify(mockResult, never()).error(any(), any(), any());
+ }
+
+ @Test
+ public void startPreview_shouldPullStreamFromVideoRenderer()
+ throws InterruptedException, CameraAccessException {
+ VideoRenderer mockVideoRenderer = mock(VideoRenderer.class);
+ ArrayList<CaptureRequest.Builder> mockRequestBuilders = new ArrayList<>();
+ mockRequestBuilders.add(mock(CaptureRequest.Builder.class));
+ SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class);
+ Size mockSize = mock(Size.class);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setPrivateField(camera, "videoRenderer", mockVideoRenderer);
+ CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders);
+ TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera);
+
+ TextureRegistry.SurfaceTextureEntry cameraFlutterTexture =
+ (TextureRegistry.SurfaceTextureEntry) TestUtils.getPrivateField(camera, "flutterTexture");
+ ResolutionFeature resolutionFeature =
+ (ResolutionFeature)
+ TestUtils.getPrivateField(mockCameraFeatureFactory, "mockResolutionFeature");
+
+ when(cameraFlutterTexture.surfaceTexture()).thenReturn(mockSurfaceTexture);
+ when(resolutionFeature.getPreviewSize()).thenReturn(mockSize);
+
+ camera.startPreview();
+ verify(mockVideoRenderer, times(1))
+ .getInputSurface(); // stream pulled from videoRenderer's surface.
+ }
+
+ @Test
+ public void startPreview_shouldPullStreamFromImageReader()
+ throws InterruptedException, CameraAccessException {
+ ArrayList<CaptureRequest.Builder> mockRequestBuilders = new ArrayList<>();
+ mockRequestBuilders.add(mock(CaptureRequest.Builder.class));
+ SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class);
+ Size mockSize = mock(Size.class);
+ ImageReader mockImageReader = mock(ImageReader.class);
+ TestUtils.setPrivateField(camera, "recordingVideo", false);
+ TestUtils.setPrivateField(camera, "pictureImageReader", mockImageReader);
+ CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders);
+ TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera);
+
+ TextureRegistry.SurfaceTextureEntry cameraFlutterTexture =
+ (TextureRegistry.SurfaceTextureEntry) TestUtils.getPrivateField(camera, "flutterTexture");
+ ResolutionFeature resolutionFeature =
+ (ResolutionFeature)
+ TestUtils.getPrivateField(mockCameraFeatureFactory, "mockResolutionFeature");
+
+ when(cameraFlutterTexture.surfaceTexture()).thenReturn(mockSurfaceTexture);
+ when(resolutionFeature.getPreviewSize()).thenReturn(mockSize);
+
+ camera.startPreview();
+ verify(mockImageReader, times(1))
+ .getSurface(); // stream pulled from regular imageReader's surface.
+ }
+
+ @Test
+ public void startPreview_shouldFlipRotation() throws InterruptedException, CameraAccessException {
+ VideoRenderer mockVideoRenderer = mock(VideoRenderer.class);
+ ArrayList<CaptureRequest.Builder> mockRequestBuilders = new ArrayList<>();
+ mockRequestBuilders.add(mock(CaptureRequest.Builder.class));
+ SurfaceTexture mockSurfaceTexture = mock(SurfaceTexture.class);
+ Size mockSize = mock(Size.class);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setPrivateField(camera, "videoRenderer", mockVideoRenderer);
+ TestUtils.setPrivateField(camera, "initialCameraFacing", CameraMetadata.LENS_FACING_BACK);
+ CameraDeviceWrapper fakeCamera = new FakeCameraDeviceWrapper(mockRequestBuilders);
+ TestUtils.setPrivateField(camera, "cameraDevice", fakeCamera);
+
+ TextureRegistry.SurfaceTextureEntry cameraFlutterTexture =
+ (TextureRegistry.SurfaceTextureEntry) TestUtils.getPrivateField(camera, "flutterTexture");
+ ResolutionFeature resolutionFeature =
+ (ResolutionFeature)
+ TestUtils.getPrivateField(mockCameraFeatureFactory, "mockResolutionFeature");
+
+ when(cameraFlutterTexture.surfaceTexture()).thenReturn(mockSurfaceTexture);
+ when(resolutionFeature.getPreviewSize()).thenReturn(mockSize);
+ when(mockCameraProperties.getLensFacing()).thenReturn(CameraMetadata.LENS_FACING_FRONT);
+
+ camera.startPreview();
+ verify(mockVideoRenderer, times(1)).setRotation(180);
+ }
+
+ @Test
+ public void setDescriptionWhileRecording_shouldErrorWhenNotRecording() {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ TestUtils.setPrivateField(camera, "recordingVideo", false);
+ final CameraProperties newCameraProperties = mock(CameraProperties.class);
+ camera.setDescriptionWhileRecording(mockResult, newCameraProperties);
+
+ verify(mockResult, times(1))
+ .error("setDescriptionWhileRecordingFailed", "Device was not recording", null);
+ verify(mockResult, never()).success(any());
+ }
+
@Test
public void
resumeVideoRecording_shouldSendVideoRecordingFailedErrorWhenVersionCodeSmallerThanN() {
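
The tests above reach into `Camera`'s private state (`mediaRecorder`, `recordingVideo`, `videoRenderer`, `initialCameraFacing`) through `TestUtils.setPrivateField` and `getPrivateField`, which live elsewhere in the plugin's test sources and are not part of this diff. For readers following along, a sketch of the shape such reflection helpers usually take (assumed; the real implementation may differ):

```java
import java.lang.reflect.Field;

// Assumed shape of the TestUtils reflection helpers used by CameraTest.
final class TestUtilsSketch {
  static void setPrivateField(Object target, String fieldName, Object value) {
    try {
      Field field = target.getClass().getDeclaredField(fieldName);
      field.setAccessible(true); // bypass the private modifier for test setup
      field.set(target, value);
    } catch (ReflectiveOperationException e) {
      throw new AssertionError("Could not set field: " + fieldName, e);
    }
  }

  static Object getPrivateField(Object target, String fieldName) {
    try {
      Field field = target.getClass().getDeclaredField(fieldName);
      field.setAccessible(true);
      return field.get(target);
    } catch (ReflectiveOperationException e) {
      throw new AssertionError("Could not read field: " + fieldName, e);
    }
  }
}
```
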
diff --git a/packages/camera/camera_android/example/integration_test/camera_test.dart b/packages/camera/camera_android/example/integration_test/camera_test.dart
index e499872da5f3..517a50d02cc5 100644
--- a/packages/camera/camera_android/example/integration_test/camera_test.dart
+++ b/packages/camera/camera_android/example/integration_test/camera_test.dart
@@ -205,6 +205,51 @@ void main() {
expect(duration, lessThan(recordingTime - timePaused));
});
+ testWidgets('Set description while recording', (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.length < 2) {
+ return;
+ }
+
+ final CameraController controller = CameraController(
+ cameras[0],
+ ResolutionPreset.low,
+ enableAudio: false,
+ );
+
+ await controller.initialize();
+ await controller.prepareForVideoRecording();
+
+ await controller.startVideoRecording();
+ sleep(const Duration(milliseconds: 500));
+ await controller.setDescription(cameras[1]);
+ sleep(const Duration(milliseconds: 500));
+
+ expect(controller.description, cameras[1]);
+ });
+
+ testWidgets('Set description', (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.length < 2) {
+ return;
+ }
+
+ final CameraController controller = CameraController(
+ cameras[0],
+ ResolutionPreset.low,
+ enableAudio: false,
+ );
+
+ await controller.initialize();
+ sleep(const Duration(milliseconds: 500));
+ await controller.setDescription(cameras[1]);
+ sleep(const Duration(milliseconds: 500));
+
+ expect(controller.description, cameras[1]);
+ });
+
testWidgets(
'image streaming',
(WidgetTester tester) async {
diff --git a/packages/camera/camera_android/example/lib/camera_controller.dart b/packages/camera/camera_android/example/lib/camera_controller.dart
index 8139dcdb0220..fd4f09a027b9 100644
--- a/packages/camera/camera_android/example/lib/camera_controller.dart
+++ b/packages/camera/camera_android/example/lib/camera_controller.dart
@@ -24,6 +24,7 @@ class CameraValue {
required this.exposureMode,
required this.focusMode,
required this.deviceOrientation,
+ required this.description,
this.lockedCaptureOrientation,
this.recordingOrientation,
this.isPreviewPaused = false,
@@ -31,7 +32,7 @@ class CameraValue {
});
/// Creates a new camera controller state for an uninitialized controller.
- const CameraValue.uninitialized()
+ const CameraValue.uninitialized(CameraDescription description)
: this(
isInitialized: false,
isRecordingVideo: false,
@@ -43,6 +44,7 @@ class CameraValue {
focusMode: FocusMode.auto,
deviceOrientation: DeviceOrientation.portraitUp,
isPreviewPaused: false,
+ description: description,
);
/// True after [CameraController.initialize] has completed successfully.
@@ -92,6 +94,9 @@ class CameraValue {
/// The orientation of the currently running video recording.
final DeviceOrientation? recordingOrientation;
+ /// The properties of the camera device controlled by this controller.
+ final CameraDescription description;
+
/// Creates a modified copy of the object.
///
/// Explicitly specified fields get the specified value, all other fields get
@@ -112,6 +117,7 @@ class CameraValue {
Optional<DeviceOrientation>? lockedCaptureOrientation,
Optional<DeviceOrientation>? recordingOrientation,
bool? isPreviewPaused,
+ CameraDescription? description,
Optional<DeviceOrientation>? previewPauseOrientation,
}) {
return CameraValue(
@@ -132,6 +138,7 @@ class CameraValue {
? this.recordingOrientation
: recordingOrientation.orNull,
isPreviewPaused: isPreviewPaused ?? this.isPreviewPaused,
+ description: description ?? this.description,
previewPauseOrientation: previewPauseOrientation == null
? this.previewPauseOrientation
: previewPauseOrientation.orNull,
@@ -165,14 +172,14 @@ class CameraValue {
class CameraController extends ValueNotifier<CameraValue> {
/// Creates a new camera controller in an uninitialized state.
CameraController(
- this.description,
+ CameraDescription cameraDescription,
this.resolutionPreset, {
this.enableAudio = true,
this.imageFormatGroup,
- }) : super(const CameraValue.uninitialized());
+ }) : super(CameraValue.uninitialized(cameraDescription));
/// The properties of the camera device controlled by this controller.
- final CameraDescription description;
+ CameraDescription get description => value.description;
/// The resolution this controller is targeting.
///
@@ -202,7 +209,9 @@ class CameraController extends ValueNotifier<CameraValue> {
int get cameraId => _cameraId;
/// Initializes the camera on the device.
- Future<void> initialize() async {
+ Future<void> initialize() => _initializeWithDescription(description);
+
+ Future<void> _initializeWithDescription(CameraDescription description) async {
final Completer<CameraInitializedEvent> initializeCompleter =
Completer<CameraInitializedEvent>();
@@ -234,6 +243,7 @@ class CameraController extends ValueNotifier<CameraValue> {
value = value.copyWith(
isInitialized: true,
+ description: description,
previewSize: await initializeCompleter.future
.then((CameraInitializedEvent event) => Size(
event.previewWidth,
@@ -274,6 +284,16 @@ class CameraController extends ValueNotifier<CameraValue> {
previewPauseOrientation: const Optional.absent());
}
+ /// Sets the description of the camera.
+ Future<void> setDescription(CameraDescription description) async {
+ if (value.isRecordingVideo) {
+ await CameraPlatform.instance.setDescriptionWhileRecording(description);
+ value = value.copyWith(description: description);
+ } else {
+ await _initializeWithDescription(description);
+ }
+ }
+
/// Captures an image and returns the file where it was saved.
///
/// Throws a [CameraException] if the capture fails.
diff --git a/packages/camera/camera_android/example/lib/main.dart b/packages/camera/camera_android/example/lib/main.dart
index 4d98aed9a4c2..3731325a49fd 100644
--- a/packages/camera/camera_android/example/lib/main.dart
+++ b/packages/camera/camera_android/example/lib/main.dart
@@ -123,7 +123,7 @@ class _CameraExampleHomeState extends State<CameraExampleHome>
if (state == AppLifecycleState.inactive) {
cameraController.dispose();
} else if (state == AppLifecycleState.resumed) {
- onNewCameraSelected(cameraController.description);
+ _initializeCameraController(cameraController.description);
}
}
@@ -603,10 +603,7 @@ class _CameraExampleHomeState extends State<CameraExampleHome>
title: Icon(getCameraLensIcon(cameraDescription.lensDirection)),
groupValue: controller?.description,
value: cameraDescription,
- onChanged:
- controller != null && controller!.value.isRecordingVideo
- ? null
- : onChanged,
+ onChanged: onChanged,
),
),
);
@@ -639,17 +636,15 @@ class _CameraExampleHomeState extends State
}
Future<void> onNewCameraSelected(CameraDescription cameraDescription) async {
- final CameraController? oldController = controller;
- if (oldController != null) {
- // `controller` needs to be set to null before getting disposed,
- // to avoid a race condition when we use the controller that is being
- // disposed. This happens when camera permission dialog shows up,
- // which triggers `didChangeAppLifecycleState`, which disposes and
- // re-creates the controller.
- controller = null;
- await oldController.dispose();
+ if (controller != null) {
+ return controller!.setDescription(cameraDescription);
+ } else {
+ return _initializeCameraController(cameraDescription);
}
+ }
+ Future<void> _initializeCameraController(
+ CameraDescription cameraDescription) async {
final CameraController cameraController = CameraController(
cameraDescription,
kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium,
diff --git a/packages/camera/camera_android/example/pubspec.yaml b/packages/camera/camera_android/example/pubspec.yaml
index e23e31a886de..08f94ced1f31 100644
--- a/packages/camera/camera_android/example/pubspec.yaml
+++ b/packages/camera/camera_android/example/pubspec.yaml
@@ -14,7 +14,7 @@ dependencies:
# The example app is bundled with the plugin so we use a path dependency on
# the parent directory to use the current plugin's version.
path: ../
- camera_platform_interface: ^2.3.1
+ camera_platform_interface: ^2.4.0
flutter:
sdk: flutter
path_provider: ^2.0.0
@@ -32,3 +32,4 @@ dev_dependencies:
flutter:
uses-material-design: true
+
diff --git a/packages/camera/camera_android/lib/src/android_camera.dart b/packages/camera/camera_android/lib/src/android_camera.dart
index 9ab9b578616a..eca1003247c6 100644
--- a/packages/camera/camera_android/lib/src/android_camera.dart
+++ b/packages/camera/camera_android/lib/src/android_camera.dart
@@ -505,6 +505,17 @@ class AndroidCamera extends CameraPlatform {
);
}
+ @override
+ Future<void> setDescriptionWhileRecording(
+ CameraDescription description) async {
+ await _channel.invokeMethod<void>(
+ 'setDescriptionWhileRecording',
+ <String, dynamic>{
+ 'cameraName': description.name,
+ },
+ );
+ }
+
@override
Widget buildPreview(int cameraId) {
return Texture(textureId: cameraId);
diff --git a/packages/camera/camera_android/pubspec.yaml b/packages/camera/camera_android/pubspec.yaml
index fb3371912911..637658f4e691 100644
--- a/packages/camera/camera_android/pubspec.yaml
+++ b/packages/camera/camera_android/pubspec.yaml
@@ -2,7 +2,7 @@ name: camera_android
description: Android implementation of the camera plugin.
repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_android
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.10.4
+version: 0.10.5
environment:
sdk: ">=2.14.0 <3.0.0"
@@ -18,7 +18,7 @@ flutter:
dartPluginClass: AndroidCamera
dependencies:
- camera_platform_interface: ^2.3.1
+ camera_platform_interface: ^2.4.0
flutter:
sdk: flutter
flutter_plugin_android_lifecycle: ^2.0.2
diff --git a/packages/camera/camera_android/test/android_camera_test.dart b/packages/camera/camera_android/test/android_camera_test.dart
index d80bd9cac7a3..b56aa4e352aa 100644
--- a/packages/camera/camera_android/test/android_camera_test.dart
+++ b/packages/camera/camera_android/test/android_camera_test.dart
@@ -700,6 +700,29 @@ void main() {
]);
});
+ test('Should set the description while recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setDescriptionWhileRecording': null},
+ );
+ const CameraDescription camera2Description = CameraDescription(
+ name: 'Test2',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 0);
+
+ // Act
+ await camera.setDescriptionWhileRecording(camera2Description);
+
+ // Assert
+ expect(channel.log, [
+ isMethodCall('setDescriptionWhileRecording',
+ arguments: <String, Object?>{
+ 'cameraName': camera2Description.name,
+ }),
+ ]);
+ });
+
test('Should set the flash mode', () async {
// Arrange
final MethodChannelMock channel = MethodChannelMock(
diff --git a/packages/camera/camera_avfoundation/CHANGELOG.md b/packages/camera/camera_avfoundation/CHANGELOG.md
index f0605b7914cc..169596fa647e 100644
--- a/packages/camera/camera_avfoundation/CHANGELOG.md
+++ b/packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.9.12
+
+* Allows the camera to be switched while recording video.
+
## 0.9.11
* Adds back use of Optional type.
diff --git a/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart b/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart
index 34d460d44ec7..5a6935a90114 100644
--- a/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart
+++ b/packages/camera/camera_avfoundation/example/integration_test/camera_test.dart
@@ -198,6 +198,51 @@ void main() {
expect(duration, lessThan(recordingTime - timePaused));
});
+ testWidgets('Set description while recording', (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.length < 2) {
+ return;
+ }
+
+ final CameraController controller = CameraController(
+ cameras[0],
+ ResolutionPreset.low,
+ enableAudio: false,
+ );
+
+ await controller.initialize();
+ await controller.prepareForVideoRecording();
+
+ await controller.startVideoRecording();
+ sleep(const Duration(milliseconds: 500));
+ await controller.setDescription(cameras[1]);
+ sleep(const Duration(milliseconds: 500));
+
+ expect(controller.description, cameras[1]);
+ });
+
+ testWidgets('Set description', (WidgetTester tester) async {
+ final List<CameraDescription> cameras =
+ await CameraPlatform.instance.availableCameras();
+ if (cameras.length < 2) {
+ return;
+ }
+
+ final CameraController controller = CameraController(
+ cameras[0],
+ ResolutionPreset.low,
+ enableAudio: false,
+ );
+
+ await controller.initialize();
+ sleep(const Duration(milliseconds: 500));
+ await controller.setDescription(cameras[1]);
+ sleep(const Duration(milliseconds: 500));
+
+ expect(controller.description, cameras[1]);
+ });
+
/// Starts streaming, specifying the ImageFormatGroup.
Future<void> startStreaming(List<CameraDescription> cameras,
ImageFormatGroup? imageFormatGroup) async {
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
index 03c80d79c578..c63d00860204 100644
--- a/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
+++ b/packages/camera/camera_avfoundation/example/ios/Runner.xcodeproj/project.pbxproj
@@ -3,7 +3,7 @@
archiveVersion = 1;
classes = {
};
- objectVersion = 46;
+ objectVersion = 50;
objects = {
/* Begin PBXBuildFile section */
diff --git a/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist
index ff2e341a1803..c50ce989f0c2 100644
--- a/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist
+++ b/packages/camera/camera_avfoundation/example/ios/Runner/Info.plist
@@ -52,5 +52,7 @@
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
+ <key>CADisableMinimumFrameDurationOnPhone</key>
+ <true/>
diff --git a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
index 0ae4887eb631..b42aa34e2a17 100644
--- a/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
+++ b/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraTestUtils.m
@@ -11,15 +11,20 @@
OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]])
.andReturn(inputMock);
- id sessionMock = OCMClassMock([AVCaptureSession class]);
- OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]); // no-op
- OCMStub([sessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
+ id videoSessionMock = OCMClassMock([AVCaptureSession class]);
+ OCMStub([videoSessionMock addInputWithNoConnections:[OCMArg any]]); // no-op
+ OCMStub([videoSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
+
+ id audioSessionMock = OCMClassMock([AVCaptureSession class]);
+ OCMStub([audioSessionMock addInputWithNoConnections:[OCMArg any]]); // no-op
+ OCMStub([audioSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);
return [[FLTCam alloc] initWithCameraName:@"camera"
resolutionPreset:@"medium"
enableAudio:true
orientation:UIDeviceOrientationPortrait
- captureSession:sessionMock
+ videoCaptureSession:videoSessionMock
+ audioCaptureSession:audioSessionMock
captureSessionQueue:captureSessionQueue
error:nil];
}
diff --git a/packages/camera/camera_avfoundation/example/lib/camera_controller.dart b/packages/camera/camera_avfoundation/example/lib/camera_controller.dart
index 524186816aab..6e1804328d52 100644
--- a/packages/camera/camera_avfoundation/example/lib/camera_controller.dart
+++ b/packages/camera/camera_avfoundation/example/lib/camera_controller.dart
@@ -24,6 +24,7 @@ class CameraValue {
required this.exposureMode,
required this.focusMode,
required this.deviceOrientation,
+ required this.description,
this.lockedCaptureOrientation,
this.recordingOrientation,
this.isPreviewPaused = false,
@@ -31,7 +32,7 @@ class CameraValue {
});
/// Creates a new camera controller state for an uninitialized controller.
- const CameraValue.uninitialized()
+ const CameraValue.uninitialized(CameraDescription description)
: this(
isInitialized: false,
isRecordingVideo: false,
@@ -43,6 +44,7 @@ class CameraValue {
focusMode: FocusMode.auto,
deviceOrientation: DeviceOrientation.portraitUp,
isPreviewPaused: false,
+ description: description,
);
/// True after [CameraController.initialize] has completed successfully.
@@ -92,6 +94,9 @@ class CameraValue {
/// The orientation of the currently running video recording.
final DeviceOrientation? recordingOrientation;
+ /// The properties of the camera device controlled by this controller.
+ final CameraDescription description;
+
/// Creates a modified copy of the object.
///
/// Explicitly specified fields get the specified value, all other fields get
@@ -112,6 +117,7 @@ class CameraValue {
Optional<DeviceOrientation>? lockedCaptureOrientation,
Optional<DeviceOrientation>? recordingOrientation,
bool? isPreviewPaused,
+ CameraDescription? description,
Optional<DeviceOrientation>? previewPauseOrientation,
}) {
return CameraValue(
@@ -132,6 +138,7 @@ class CameraValue {
? this.recordingOrientation
: recordingOrientation.orNull,
isPreviewPaused: isPreviewPaused ?? this.isPreviewPaused,
+ description: description ?? this.description,
previewPauseOrientation: previewPauseOrientation == null
? this.previewPauseOrientation
: previewPauseOrientation.orNull,
@@ -165,14 +172,14 @@ class CameraValue {
class CameraController extends ValueNotifier<CameraValue> {
/// Creates a new camera controller in an uninitialized state.
CameraController(
- this.description,
+ CameraDescription cameraDescription,
this.resolutionPreset, {
this.enableAudio = true,
this.imageFormatGroup,
- }) : super(const CameraValue.uninitialized());
+ }) : super(CameraValue.uninitialized(cameraDescription));
/// The properties of the camera device controlled by this controller.
- final CameraDescription description;
+ CameraDescription get description => value.description;
/// The resolution this controller is targeting.
///
@@ -202,7 +209,9 @@ class CameraController extends ValueNotifier<CameraValue> {
int get cameraId => _cameraId;
/// Initializes the camera on the device.
- Future<void> initialize() async {
+ Future<void> initialize() => _initializeWithDescription(description);
+
+ Future<void> _initializeWithDescription(CameraDescription description) async {
final Completer<CameraInitializedEvent> initializeCompleter =
Completer<CameraInitializedEvent>();
@@ -234,6 +243,7 @@ class CameraController extends ValueNotifier<CameraValue> {
value = value.copyWith(
isInitialized: true,
+ description: description,
previewSize: await initializeCompleter.future
.then((CameraInitializedEvent event) => Size(
event.previewWidth,
@@ -274,6 +284,16 @@ class CameraController extends ValueNotifier<CameraValue> {
previewPauseOrientation: const Optional.absent());
}
+ /// Sets the description of the camera.
+ Future<void> setDescription(CameraDescription description) async {
+ if (value.isRecordingVideo) {
+ await CameraPlatform.instance.setDescriptionWhileRecording(description);
+ value = value.copyWith(description: description);
+ } else {
+ await _initializeWithDescription(description);
+ }
+ }
+
/// Captures an image and returns the file where it was saved.
///
/// Throws a [CameraException] if the capture fails.
diff --git a/packages/camera/camera_avfoundation/example/lib/main.dart b/packages/camera/camera_avfoundation/example/lib/main.dart
index 4d98aed9a4c2..3731325a49fd 100644
--- a/packages/camera/camera_avfoundation/example/lib/main.dart
+++ b/packages/camera/camera_avfoundation/example/lib/main.dart
@@ -123,7 +123,7 @@ class _CameraExampleHomeState extends State<CameraExampleHome>
if (state == AppLifecycleState.inactive) {
cameraController.dispose();
} else if (state == AppLifecycleState.resumed) {
- onNewCameraSelected(cameraController.description);
+ _initializeCameraController(cameraController.description);
}
}
@@ -603,10 +603,7 @@ class _CameraExampleHomeState extends State<CameraExampleHome>
title: Icon(getCameraLensIcon(cameraDescription.lensDirection)),
groupValue: controller?.description,
value: cameraDescription,
- onChanged:
- controller != null && controller!.value.isRecordingVideo
- ? null
- : onChanged,
+ onChanged: onChanged,
),
),
);
@@ -639,17 +636,15 @@ class _CameraExampleHomeState extends State
}
Future<void> onNewCameraSelected(CameraDescription cameraDescription) async {
- final CameraController? oldController = controller;
- if (oldController != null) {
- // `controller` needs to be set to null before getting disposed,
- // to avoid a race condition when we use the controller that is being
- // disposed. This happens when camera permission dialog shows up,
- // which triggers `didChangeAppLifecycleState`, which disposes and
- // re-creates the controller.
- controller = null;
- await oldController.dispose();
+ if (controller != null) {
+ return controller!.setDescription(cameraDescription);
+ } else {
+ return _initializeCameraController(cameraDescription);
}
+ }
+ Future<void> _initializeCameraController(
+ CameraDescription cameraDescription) async {
final CameraController cameraController = CameraController(
cameraDescription,
kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium,
diff --git a/packages/camera/camera_avfoundation/example/pubspec.yaml b/packages/camera/camera_avfoundation/example/pubspec.yaml
index 7c85ba807193..872a22021d2e 100644
--- a/packages/camera/camera_avfoundation/example/pubspec.yaml
+++ b/packages/camera/camera_avfoundation/example/pubspec.yaml
@@ -14,7 +14,7 @@ dependencies:
# The example app is bundled with the plugin so we use a path dependency on
# the parent directory to use the current plugin's version.
path: ../
- camera_platform_interface: ^2.2.0
+ camera_platform_interface: ^2.4.0
flutter:
sdk: flutter
path_provider: ^2.0.0
diff --git a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
index b85f68d1f957..f9b2a911b67d 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/CameraPlugin.m
@@ -261,6 +261,8 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call
[_camera pausePreviewWithResult:result];
} else if ([@"resumePreview" isEqualToString:call.method]) {
[_camera resumePreviewWithResult:result];
+ } else if ([@"setDescriptionWhileRecording" isEqualToString:call.method]) {
+ [_camera setDescriptionWhileRecording:(call.arguments[@"cameraName"]) result:result];
} else {
[result sendNotImplemented];
}
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h
index 85b8e2ae06f2..df2a155855dd 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.h
@@ -95,6 +95,8 @@ NS_ASSUME_NONNULL_BEGIN
- (void)applyFocusMode:(FLTFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice;
- (void)pausePreviewWithResult:(FLTThreadSafeFlutterResult *)result;
- (void)resumePreviewWithResult:(FLTThreadSafeFlutterResult *)result;
+- (void)setDescriptionWhileRecording:(NSString *)cameraName
+ result:(FLTThreadSafeFlutterResult *)result;
- (void)setExposurePointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y;
- (void)setFocusPointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y;
- (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset:(double)offset;
diff --git a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
index a7d6cd24be3c..d5247e00382e 100644
--- a/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
+++ b/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m
@@ -43,7 +43,8 @@ @interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
diff --git a/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart b/packages/camera/camera_avfoundation/lib/src/avfoundation_camera.dart
+ @override
+ Future<void> setDescriptionWhileRecording(
+ CameraDescription description) async {
+ await _channel.invokeMethod<void>(
+ 'setDescriptionWhileRecording',
+ <String, dynamic>{
+ 'cameraName': description.name,
+ },
+ );
+ }
+
@override
Widget buildPreview(int cameraId) {
return Texture(textureId: cameraId);
diff --git a/packages/camera/camera_avfoundation/pubspec.yaml b/packages/camera/camera_avfoundation/pubspec.yaml
index b272a4c5c68d..78c9156a7b79 100644
--- a/packages/camera/camera_avfoundation/pubspec.yaml
+++ b/packages/camera/camera_avfoundation/pubspec.yaml
@@ -2,7 +2,7 @@ name: camera_avfoundation
description: iOS implementation of the camera plugin.
repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_avfoundation
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.11
+version: 0.9.12
environment:
sdk: ">=2.14.0 <3.0.0"
@@ -17,7 +17,7 @@ flutter:
dartPluginClass: AVFoundationCamera
dependencies:
- camera_platform_interface: ^2.3.1
+ camera_platform_interface: ^2.4.0
flutter:
sdk: flutter
stream_transform: ^2.0.0
diff --git a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart
index 5d0b74cf0c0c..e756f38ff122 100644
--- a/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart
+++ b/packages/camera/camera_avfoundation/test/avfoundation_camera_test.dart
@@ -701,6 +701,29 @@ void main() {
]);
});
+ test('Should set the description while recording', () async {
+ // Arrange
+ final MethodChannelMock channel = MethodChannelMock(
+ channelName: _channelName,
+ methods: <String, dynamic>{'setDescriptionWhileRecording': null},
+ );
+ const CameraDescription camera2Description = CameraDescription(
+ name: 'Test2',
+ lensDirection: CameraLensDirection.front,
+ sensorOrientation: 0);
+
+ // Act
+ await camera.setDescriptionWhileRecording(camera2Description);
+
+ // Assert
+ expect(channel.log, [
+ isMethodCall('setDescriptionWhileRecording',
+ arguments: <String, Object?>{
+ 'cameraName': camera2Description.name,
+ }),
+ ]);
+ });
+
test('Should set the flash mode', () async {
// Arrange
final MethodChannelMock channel = MethodChannelMock(