diff --git a/CHANGELOG.md b/CHANGELOG.md
index 59e6f237f7..75e1d415e5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,25 @@
 # Changelog
 --------------------------------------------
 
+[0.12.5+hotfix.2] - 2024-12-25
+
+* [iOS] fix: Audio route issue for iOS.
+
+[0.12.5+hotfix.1] - 2024-12-25
+
+* [iOS/macOS] fix: Pass MediaConstraints for getUserAudio.
+
+[0.12.5] - 2024-12-23
+
+* [iOS/Android] Fixed bug for screen capture.
+* [Android] Fixed first frame flickering.
+
+[0.12.4] - 2024-12-16
+
+* [iOS/Android] add FocusMode/ExposureMode settings for mobile. (#1435)
+* [Dart] fix compiler errors.
+* [eLinux] add $ORIGIN to rpath in elinux (#1720).
+
 [0.12.3+1] - 2024-12-18
 
 * [iOS/Android] Added clone() method to MediaStreamTrack.
diff --git a/Documentation/E2EE.md b/Documentation/E2EE.md
new file mode 100644
index 0000000000..a91c8233f7
--- /dev/null
+++ b/Documentation/E2EE.md
@@ -0,0 +1,92 @@
+# End-to-End Encryption
+
+E2EE is an AES-GCM encryption layer applied to each RTP payload after it is packaged for sending and before it is handed back on receive, ensuring that media cannot be eavesdropped on while it passes through an SFU or any other public transport network. It coexists with DTLS-SRTP, giving you two layers of encryption. Because you control the keys, ratcheting, and the other FrameCryptor operations yourself, no third party can monitor your tracks.
+
+## Process of enabling E2EE
+
+1. Prepare the key provider.
+
+`ratchetSalt` is mixed in whenever a key is ratcheted or an AES key is derived.
+`aesKey` is the plaintext secret you provide; the actual encryption key is derived from it.
+
+```dart
+  final aesKey = 'your-private-key-here'.codeUnits;
+  final ratchetSalt = 'flutter-webrtc-ratchet-salt';
+
+  var keyProviderOptions = KeyProviderOptions(
+    sharedKey: true,
+    ratchetSalt: Uint8List.fromList(ratchetSalt.codeUnits),
+    ratchetWindowSize: 16,
+    failureTolerance: -1,
+  );
+
+  var keyProvider = await frameCyrptorFactory.createDefaultKeyProvider(keyProviderOptions);
+  /// set the shared key for all tracks; the default index is 0.
+  /// you can also set multiple keys under different indexes.
+  await keyProvider.setSharedKey(key: aesKey);
+```
+
+2. Create the PeerConnection.
+
+When you use E2EE on the web, add `encodedInsertableStreams` to the configuration:
+
+```dart
+var pc = await createPeerConnection({
+  'encodedInsertableStreams': true,
+});
+```
+
+3. Enable a FrameCryptor for the RTPSender.
+
+```dart
+var stream = await navigator.mediaDevices
+    .getUserMedia({'audio': true, 'video': false});
+var audioTrack = stream.getAudioTracks().first;
+var sender = await pc.addTrack(audioTrack, stream);
+
+var trackId = audioTrack.id;
+var id = 'audio_' + trackId! + '_sender';
+
+var frameCryptor =
+    await frameCyrptorFactory.createFrameCryptorForRtpSender(
+        participantId: id,
+        sender: sender,
+        algorithm: Algorithm.kAesGcm,
+        keyProvider: keyProvider!);
+/// print the FrameCryptor state
+frameCryptor.onFrameCryptorStateChanged = (participantId, state) =>
+    print('EN onFrameCryptorStateChanged $participantId $state');
+
+/// set the currently shared key index
+await frameCryptor.setKeyIndex(0);
+
+/// enable encryption now.
+await frameCryptor.setEnabled(true);
+```
+
+4. Enable a FrameCryptor for the RTPReceiver.
+
+```dart
+pc.onTrack = (RTCTrackEvent event) async {
+  var receiver = event.receiver;
+  var trackId = event.track.id;
+  var id = event.track.kind! + '_' + trackId! + '_receiver';
+
+  var frameCryptor =
+      await frameCyrptorFactory.createFrameCryptorForRtpReceiver(
+          participantId: id,
+          receiver: receiver,
+          algorithm: Algorithm.kAesGcm,
+          keyProvider: keyProvider);
+
+  frameCryptor.onFrameCryptorStateChanged = (participantId, state) =>
+      print('DE onFrameCryptorStateChanged $participantId $state');
+
+  /// set the currently shared key index
+  await frameCryptor.setKeyIndex(0);
+
+  /// enable encryption now.
+  await frameCryptor.setEnabled(true);
+};
+```
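+## Rotating keys
+
+When membership changes (for example, a participant leaves), rotate or ratchet the shared key so that departed participants can no longer decrypt new media. The snippet below is a minimal sketch: it assumes that `setSharedKey` accepts an `index` argument (as the multiple-indexes note in step 1 suggests) and that the key provider exposes a shared-key ratchet; verify both against the current `KeyProvider` API before relying on them.
+
+```dart
+// Install a new shared key at the next index, then point every
+// FrameCryptor (senders and receivers) at that index.
+final newAesKey = 'your-new-private-key'.codeUnits;
+await keyProvider.setSharedKey(key: newAesKey, index: 1);
+await frameCryptor.setKeyIndex(1);
+
+// Alternatively, derive the next key from the current one
+// (assumed API; gives forward secrecy without redistributing a secret):
+// await keyProvider.ratchetSharedKey(index: 1);
+```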
diff --git a/android/src/main/java/io/getstream/webrtc/flutter/GetUserMediaImpl.java b/android/src/main/java/io/getstream/webrtc/flutter/GetUserMediaImpl.java
index 6abe2b411a..b7006720e3 100755
--- a/android/src/main/java/io/getstream/webrtc/flutter/GetUserMediaImpl.java
+++ b/android/src/main/java/io/getstream/webrtc/flutter/GetUserMediaImpl.java
@@ -9,15 +9,7 @@
 import android.content.Intent;
 import android.content.pm.PackageManager;
 import android.graphics.Point;
-import android.graphics.Rect;
-import android.hardware.Camera;
-import android.hardware.Camera.Parameters;
-import android.hardware.camera2.CameraAccessException;
-import android.hardware.camera2.CameraCaptureSession;
-import android.hardware.camera2.CameraCharacteristics;
-import android.hardware.camera2.CameraDevice;
 import android.hardware.camera2.CameraManager;
-import android.hardware.camera2.CaptureRequest;
 import android.media.AudioDeviceInfo;
 import android.media.projection.MediaProjection;
 import android.media.projection.MediaProjectionManager;
@@ -31,10 +23,8 @@
 import android.provider.MediaStore;
 import android.util.Log;
 import android.util.Pair;
-import android.util.Range;
 import android.util.SparseArray;
 import android.view.Display;
-import android.view.Surface;
 import android.view.WindowManager;
 
 import androidx.annotation.Nullable;
@@ -58,6 +48,7 @@
 import io.getstream.webrtc.flutter.videoEffects.VideoEffectProcessor;
 import io.getstream.webrtc.flutter.videoEffects.ProcessorProvider;
 import io.getstream.webrtc.flutter.video.LocalVideoTrack;
+import io.getstream.webrtc.flutter.video.VideoCapturerInfo;
 
 import org.webrtc.AudioSource;
 import org.webrtc.AudioTrack;
@@ -67,7 +58,6 @@
 import org.webrtc.Camera2Capturer;
 import org.webrtc.Camera2Enumerator;
 import org.webrtc.Camera2Helper;
-import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
 import org.webrtc.CameraEnumerator;
 import org.webrtc.CameraVideoCapturer;
 import org.webrtc.MediaConstraints;
@@ -82,7 +72,6 @@
 import org.webrtc.audio.JavaAudioDeviceModule;
 
 import java.io.File;
-import java.lang.reflect.Field;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -96,7 +85,7 @@
  * The implementation of {@code getUserMedia} extracted into a separate file in order to reduce
  * complexity and to (somewhat) separate concerns.
 */
-class GetUserMediaImpl {
+public class GetUserMediaImpl {
   private static final int DEFAULT_WIDTH = 1280;
   private static final int DEFAULT_HEIGHT = 720;
   private static final int DEFAULT_FPS = 30;
@@ -113,7 +102,7 @@ class GetUserMediaImpl {
 
   static final String TAG = FlutterWebRTCPlugin.TAG;
 
-  private final Map<String, VideoCapturerInfo> mVideoCapturers = new HashMap<>();
+  private final Map<String, VideoCapturerInfoEx> mVideoCapturers = new HashMap<>();
   private final Map<String, SurfaceTextureHelper> mSurfaceTextureHelpers = new HashMap<>();
   private final Map<String, VideoSource> mVideoSources = new HashMap<>();
   private final Map<String, AudioSource> mAudioSources = new HashMap<>();
@@ -131,6 +120,7 @@ class GetUserMediaImpl {
   private boolean isTorchOn;
   private Intent mediaProjectionData = null;
 
+
   public void screenRequestPermissions(ResultReceiver resultReceiver) {
     mediaProjectionData = null;
     final Activity activity = stateProvider.getActivity();
@@ -566,7 +556,7 @@ public void onStop() {
           Point size = new Point();
           display.getRealSize(size);
 
-          VideoCapturerInfo info = new VideoCapturerInfo();
+          VideoCapturerInfoEx info = new VideoCapturerInfoEx();
           info.width = size.x;
           info.height = size.y;
           info.fps = DEFAULT_FPS;
@@ -605,6 +595,7 @@ public void onStop() {
             track_.putBoolean("remote", false);
             videoTracks.pushMap(track_);
+            mediaStream.addTrack(displayTrack);
           }
 
           String streamId = mediaStream.getId();
@@ -815,7 +806,7 @@ private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream medi
     videoCapturer.initialize(
         surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver());
 
-    VideoCapturerInfo info = new VideoCapturerInfo();
+    VideoCapturerInfoEx info = new VideoCapturerInfoEx();
     Integer videoWidth = getConstrainInt(videoConstraintsMap, "width");
     int targetWidth = videoWidth != null
@@ -842,6 +833,7 @@ private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream medi
     info.height = targetHeight;
     info.fps = targetFps;
     info.capturer = videoCapturer;
+    info.cameraName = deviceId;
 
     // Find actual capture format.
Size actualSize = null; @@ -863,6 +855,7 @@ private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream medi cameraEventsHandler.waitForCameraOpen(); + String trackId = stateProvider.getNextTrackUUID(); mVideoCapturers.put(trackId, info); mSurfaceTextureHelpers.put(trackId, surfaceTextureHelper); @@ -900,7 +893,7 @@ private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream medi } void removeVideoCapturer(String id) { - VideoCapturerInfo info = mVideoCapturers.get(id); + VideoCapturerInfoEx info = mVideoCapturers.get(id); if (info != null) { try { info.capturer.stopCapture(); @@ -1017,7 +1010,6 @@ void switchCamera(String id, Result result) { @Override public void onCameraSwitchDone(boolean b) { isFacing = !isFacing; - isTorchOn = false; result.success(b); } @@ -1075,302 +1067,10 @@ void stopRecording(Integer id) { } } - void hasTorch(String trackId, Result result) { - VideoCapturerInfo info = mVideoCapturers.get(trackId); - if (info == null) { - resultError("hasTorch", "Video capturer not found for id: " + trackId, result); - return; - } - - if (VERSION.SDK_INT >= VERSION_CODES.LOLLIPOP && info.capturer instanceof Camera2Capturer) { - CameraManager manager; - CameraDevice cameraDevice; - - try { - Object session = - getPrivateProperty( - Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); - manager = - (CameraManager) - getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); - cameraDevice = - (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - - boolean flashIsAvailable; - try { - CameraCharacteristics characteristics = - manager.getCameraCharacteristics(cameraDevice.getId()); - flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE); - } catch (CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); - } - result.success(flashIsAvailable); - return; - } - - if (info.capturer instanceof Camera1Capturer) { - Camera camera; - - try { - Object session = - getPrivateProperty( - Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera1Capturer class have changed - resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - - Parameters params = camera.getParameters(); - List supportedModes = params.getSupportedFlashModes(); - - result.success( - supportedModes != null && supportedModes.contains(Parameters.FLASH_MODE_TORCH)); - return; - } - - resultError("hasTorch", "[TORCH] Video capturer not compatible", result); - } - - @RequiresApi(api = VERSION_CODES.LOLLIPOP) - void setZoom(String trackId, double zoomLevel, Result result) { - VideoCapturerInfo info = mVideoCapturers.get(trackId); - if (info == null) { - resultError("setZoom", "Video capturer not found for id: " + trackId, result); - return; - } - - if (info.capturer instanceof Camera2Capturer) { - CameraCaptureSession captureSession; - CameraDevice cameraDevice; - CaptureFormat captureFormat; - int fpsUnitFactor; - Surface surface; - Handler cameraThreadHandler; - 
CameraManager manager; - - try { - Object session = - getPrivateProperty( - Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); - manager = - (CameraManager) - getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); - captureSession = - (CameraCaptureSession) - getPrivateProperty(session.getClass(), session, "captureSession"); - cameraDevice = - (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - captureFormat = - (CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); - fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); - surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); - cameraThreadHandler = - (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - - try { - final CaptureRequest.Builder captureRequestBuilder = - cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); - - final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); - final Rect rect = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); - final double maxZoomLevel = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); - - final double desiredZoomLevel = Math.max(1.0, Math.min(zoomLevel, maxZoomLevel)); - - float ratio = 1.0f / (float)desiredZoomLevel; - - if (rect != null) { - int croppedWidth = rect.width() - Math.round((float) rect.width() * ratio); - int croppedHeight = rect.height() - Math.round((float) rect.height() * ratio); - final Rect desiredRegion = new Rect(croppedWidth / 2, croppedHeight / 2, rect.width() - croppedWidth / 2, rect.height() - croppedHeight / 2); - captureRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, desiredRegion); - } - - captureRequestBuilder.set( - CaptureRequest.FLASH_MODE, - isTorchOn ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, - new Range<>( - captureFormat.framerate.min / fpsUnitFactor, - captureFormat.framerate.max / fpsUnitFactor)); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); - captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); - captureRequestBuilder.addTarget(surface); - captureSession.setRepeatingRequest( - captureRequestBuilder.build(), null, cameraThreadHandler); - } catch (CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); - } - - - result.success(null); - return; - } - - if (info.capturer instanceof Camera1Capturer) { - Camera camera; - try { - Object session = - getPrivateProperty( - Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera1Capturer class have changed - resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - - Camera.Parameters params = camera.getParameters(); - params.setFlashMode( - isTorchOn ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); - if(params.isZoomSupported()) { - int maxZoom = params.getMaxZoom(); - double desiredZoom = Math.max(0, Math.min(zoomLevel, maxZoom)); - params.setZoom((int)desiredZoom); - result.success(null); - return; - } - } - resultError("setZoom", "[ZOOM] Video capturer not compatible", result); - } - - @RequiresApi(api = VERSION_CODES.LOLLIPOP) - void setTorch(String trackId, boolean torch, Result result) { - VideoCapturerInfo info = mVideoCapturers.get(trackId); - if (info == null) { - resultError("setTorch", "Video capturer not found for id: " + trackId, result); - return; - } - - if (info.capturer instanceof Camera2Capturer) { - CameraCaptureSession captureSession; - CameraDevice cameraDevice; - CaptureFormat captureFormat; - int fpsUnitFactor; - Surface surface; - Handler cameraThreadHandler; - - try { - Object session = - getPrivateProperty( - Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); - CameraManager manager = - (CameraManager) - getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); - captureSession = - (CameraCaptureSession) - getPrivateProperty(session.getClass(), session, "captureSession"); - cameraDevice = - (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); - captureFormat = - (CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); - fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); - surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); - cameraThreadHandler = - (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera2Capturer class have changed - resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - - try { - final CaptureRequest.Builder captureRequestBuilder = - cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); - captureRequestBuilder.set( - CaptureRequest.FLASH_MODE, - torch ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, - new Range<>( - captureFormat.framerate.min / fpsUnitFactor, - captureFormat.framerate.max / fpsUnitFactor)); - captureRequestBuilder.set( - CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); - captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); - captureRequestBuilder.addTarget(surface); - captureSession.setRepeatingRequest( - captureRequestBuilder.build(), null, cameraThreadHandler); - } catch (CameraAccessException e) { - // Should never happen since we are already accessing the camera - throw new RuntimeException(e); - } - - result.success(null); - isTorchOn = torch; - return; - } - - if (info.capturer instanceof Camera1Capturer) { - Camera camera; - try { - Object session = - getPrivateProperty( - Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); - camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); - } catch (NoSuchFieldWithNameException e) { - // Most likely the upstream Camera1Capturer class have changed - resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); - return; - } - - Camera.Parameters params = camera.getParameters(); - params.setFlashMode( - torch ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); - camera.setParameters(params); - - result.success(null); - isTorchOn = torch; - return; - } - resultError("setTorch", "[TORCH] Video capturer not compatible", result); - } - - private Object getPrivateProperty(Class klass, Object object, String fieldName) - throws NoSuchFieldWithNameException { - try { - Field field = klass.getDeclaredField(fieldName); - field.setAccessible(true); - return field.get(object); - } catch (NoSuchFieldException e) { - throw new NoSuchFieldWithNameException(klass.getName(), fieldName, e); - } catch (IllegalAccessException e) { - // Should never happen since we are calling `setAccessible(true)` - throw new RuntimeException(e); - } - } - - private class NoSuchFieldWithNameException extends NoSuchFieldException { - - String className; - String fieldName; - - NoSuchFieldWithNameException(String className, String fieldName, NoSuchFieldException e) { - super(e.getMessage()); - this.className = className; - this.fieldName = fieldName; - } - } public void reStartCamera(IsCameraEnabled getCameraId) { - for (Map.Entry item : mVideoCapturers.entrySet()) { + for (Map.Entry item : mVideoCapturers.entrySet()) { if (!item.getValue().isScreenCapture && getCameraId.isEnabled(item.getKey())) { item.getValue().capturer.startCapture( item.getValue().width, @@ -1385,13 +1085,12 @@ public interface IsCameraEnabled { boolean isEnabled(String id); } - public static class VideoCapturerInfo { - VideoCapturer capturer; - int width; - int height; - int fps; - boolean isScreenCapture = false; - CameraEventsHandler cameraEventsHandler; + public static class VideoCapturerInfoEx extends VideoCapturerInfo { + public CameraEventsHandler cameraEventsHandler; + } + + public VideoCapturerInfoEx getCapturerInfo(String trackId) { + return mVideoCapturers.get(trackId); } @RequiresApi(api = VERSION_CODES.M) diff --git a/android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java b/android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java index dceb816bc4..ee1051796c 100644 --- a/android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java +++ b/android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java @@ -36,6 +36,9 @@ import io.getstream.webrtc.flutter.utils.ObjectType; import io.getstream.webrtc.flutter.utils.PermissionUtils; import io.getstream.webrtc.flutter.utils.Utils; +import io.getstream.webrtc.flutter.video.VideoCapturerInfo; +import io.getstream.webrtc.flutter.video.camera.CameraUtils; +import io.getstream.webrtc.flutter.video.camera.Point; import io.getstream.webrtc.flutter.video.LocalVideoTrack; import com.twilio.audioswitch.AudioDevice; @@ -115,6 +118,8 @@ public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider { */ private GetUserMediaImpl getUserMediaImpl; + private CameraUtils cameraUtils; + private AudioDeviceModule audioDeviceModule; private FlutterRTCFrameCryptor frameCryptor; @@ -167,6 +172,8 @@ private void initialize(boolean bypassVoiceProcessing, int networkIgnoreMask, bo getUserMediaImpl = new GetUserMediaImpl(this, context); + cameraUtils = new CameraUtils(getUserMediaImpl, activity); + frameCryptor = new FlutterRTCFrameCryptor(this); AudioAttributes audioAttributes = null; @@ -632,19 +639,51 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) { } case "mediaStreamTrackHasTorch": { String trackId = call.argument("trackId"); - getUserMediaImpl.hasTorch(trackId, result); + 
cameraUtils.hasTorch(trackId, result); break; } case "mediaStreamTrackSetTorch": { String trackId = call.argument("trackId"); boolean torch = call.argument("torch"); - getUserMediaImpl.setTorch(trackId, torch, result); + cameraUtils.setTorch(trackId, torch, result); break; } case "mediaStreamTrackSetZoom": { String trackId = call.argument("trackId"); double zoomLevel = call.argument("zoomLevel"); - getUserMediaImpl.setZoom(trackId, zoomLevel, result); + cameraUtils.setZoom(trackId, zoomLevel, result); + break; + } + case "mediaStreamTrackSetFocusMode": { + cameraUtils.setFocusMode(call, result); + break; + } + case "mediaStreamTrackSetFocusPoint":{ + Map focusPoint = call.argument("focusPoint"); + Boolean reset = (Boolean)focusPoint.get("reset"); + Double x = null; + Double y = null; + if (reset == null || !reset) { + x = (Double)focusPoint.get("x"); + y = (Double)focusPoint.get("y"); + } + cameraUtils.setFocusPoint(call, new Point(x, y), result); + break; + } + case "mediaStreamTrackSetExposureMode": { + cameraUtils.setExposureMode(call, result); + break; + } + case "mediaStreamTrackSetExposurePoint": { + Map exposurePoint = call.argument("exposurePoint"); + Boolean reset = (Boolean)exposurePoint.get("reset"); + Double x = null; + Double y = null; + if (reset == null || !reset) { + x = (Double)exposurePoint.get("x"); + y = (Double)exposurePoint.get("y"); + } + cameraUtils.setExposurePoint(call, new Point(x, y), result); break; } case "mediaStreamTrackSwitchCamera": { diff --git a/android/src/main/java/io/getstream/webrtc/flutter/SurfaceTextureRenderer.java b/android/src/main/java/io/getstream/webrtc/flutter/SurfaceTextureRenderer.java index be058ba341..f7e7dff853 100755 --- a/android/src/main/java/io/getstream/webrtc/flutter/SurfaceTextureRenderer.java +++ b/android/src/main/java/io/getstream/webrtc/flutter/SurfaceTextureRenderer.java @@ -95,6 +95,10 @@ public void pauseVideo() { // VideoSink interface. @Override public void onFrame(VideoFrame frame) { + if(!isFirstFrameRendered) { + texture.setDefaultBufferSize(frame.getRotatedWidth(), frame.getRotatedHeight()); + createEglSurface(texture); + } updateFrameDimensionsAndReportEvents(frame); super.onFrame(frame); } @@ -104,7 +108,6 @@ public void onFrame(VideoFrame frame) { public void surfaceCreated(final SurfaceTexture texture) { ThreadUtils.checkIsOnMainThread(); this.texture = texture; - createEglSurface(texture); } public void surfaceDestroyed() { diff --git a/android/src/main/java/io/getstream/webrtc/flutter/video/VideoCapturerInfo.java b/android/src/main/java/io/getstream/webrtc/flutter/video/VideoCapturerInfo.java new file mode 100644 index 0000000000..20cf9a724a --- /dev/null +++ b/android/src/main/java/io/getstream/webrtc/flutter/video/VideoCapturerInfo.java @@ -0,0 +1,12 @@ +package io.getstream.webrtc.flutter.video; + +import org.webrtc.VideoCapturer; + +public class VideoCapturerInfo { + public VideoCapturer capturer; + public int width; + public int height; + public int fps; + public boolean isScreenCapture = false; + public String cameraName; +} \ No newline at end of file diff --git a/android/src/main/java/io/getstream/webrtc/flutter/video/camera/CameraRegionUtils.java b/android/src/main/java/io/getstream/webrtc/flutter/video/camera/CameraRegionUtils.java new file mode 100644 index 0000000000..3d8c1c9862 --- /dev/null +++ b/android/src/main/java/io/getstream/webrtc/flutter/video/camera/CameraRegionUtils.java @@ -0,0 +1,205 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package io.getstream.webrtc.flutter.video.camera; + +import android.annotation.TargetApi; +import android.graphics.Rect; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; +import android.util.Size; +import androidx.annotation.NonNull; +import androidx.annotation.VisibleForTesting; +import io.flutter.embedding.engine.systemchannels.PlatformChannel; +import java.util.Arrays; + +/** + * Utility class offering functions to calculate values regarding the camera boundaries. + * + *
<p>
The functions are used to calculate focus and exposure settings. + */ +public final class CameraRegionUtils { + + @NonNull + public static Size getCameraBoundaries( + @NonNull CameraCharacteristics cameraCharacteristics, @NonNull CaptureRequest.Builder requestBuilder) { + if (SdkCapabilityChecker.supportsDistortionCorrection() + && supportsDistortionCorrection(cameraCharacteristics)) { + // Get the current distortion correction mode. + Integer distortionCorrectionMode = + requestBuilder.get(CaptureRequest.DISTORTION_CORRECTION_MODE); + + // Return the correct boundaries depending on the mode. + android.graphics.Rect rect; + if (distortionCorrectionMode == null + || distortionCorrectionMode == CaptureRequest.DISTORTION_CORRECTION_MODE_OFF) { + rect = getSensorInfoPreCorrectionActiveArraySize(cameraCharacteristics); + } else { + rect = getSensorInfoActiveArraySize(cameraCharacteristics); + } + + return SizeFactory.create(rect.width(), rect.height()); + } else { + // No distortion correction support. + return getSensorInfoPixelArraySize(cameraCharacteristics); + } + } + + @TargetApi(Build.VERSION_CODES.P) + private static boolean supportsDistortionCorrection(CameraCharacteristics cameraCharacteristics) { + int[] availableDistortionCorrectionModes = getDistortionCorrectionAvailableModes(cameraCharacteristics); + if (availableDistortionCorrectionModes == null) { + availableDistortionCorrectionModes = new int[0]; + } + long nonOffModesSupported = + Arrays.stream(availableDistortionCorrectionModes) + .filter((value) -> value != CaptureRequest.DISTORTION_CORRECTION_MODE_OFF) + .count(); + return nonOffModesSupported > 0; + } + + static public int[] getDistortionCorrectionAvailableModes(CameraCharacteristics cameraCharacteristics) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) { + return cameraCharacteristics.get(CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES); + } + return null; + } + + public static Rect getSensorInfoActiveArraySize(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + } + + public static Size getSensorInfoPixelArraySize(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE); + } + + @NonNull + public static Rect getSensorInfoPreCorrectionActiveArraySize(CameraCharacteristics cameraCharacteristics) { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + return cameraCharacteristics.get( + CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE); + } + return getSensorInfoActiveArraySize(cameraCharacteristics); + } + + public static Integer getControlMaxRegionsAutoExposure(CameraCharacteristics cameraCharacteristics) { + return cameraCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE); + } + + /** + * Converts a point into a {@link MeteringRectangle} with the supplied coordinates as the center + * point. + * + *
<p>
Since the Camera API (due to cross-platform constraints) only accepts a point when + * configuring a specific focus or exposure area and Android requires a rectangle to configure + * these settings there is a need to convert the point into a rectangle. This method will create + * the required rectangle with an arbitrarily size that is a 10th of the current viewport and the + * coordinates as the center point. + * + * @param boundaries - The camera boundaries to calculate the metering rectangle for. + * @param x x - 1 >= coordinate >= 0. + * @param y y - 1 >= coordinate >= 0. + * @return The dimensions of the metering rectangle based on the supplied coordinates and + * boundaries. + */ + @NonNull + public static MeteringRectangle convertPointToMeteringRectangle( + @NonNull Size boundaries, + double x, + double y, + @NonNull PlatformChannel.DeviceOrientation orientation) { + assert (boundaries.getWidth() > 0 && boundaries.getHeight() > 0); + assert (x >= 0 && x <= 1); + assert (y >= 0 && y <= 1); + // Rotate the coordinates to match the device orientation. + double oldX = x, oldY = y; + switch (orientation) { + case PORTRAIT_UP: // 90 ccw. + y = 1 - oldX; + x = oldY; + break; + case PORTRAIT_DOWN: // 90 cw. + x = 1 - oldY; + y = oldX; + break; + case LANDSCAPE_LEFT: + // No rotation required. + break; + case LANDSCAPE_RIGHT: // 180. + x = 1 - x; + y = 1 - y; + break; + } + // Interpolate the target coordinate. + int targetX = (int) Math.round(x * ((double) (boundaries.getWidth() - 1))); + int targetY = (int) Math.round(y * ((double) (boundaries.getHeight() - 1))); + // Determine the dimensions of the metering rectangle (10th of the viewport). + int targetWidth = (int) Math.round(((double) boundaries.getWidth()) / 10d); + int targetHeight = (int) Math.round(((double) boundaries.getHeight()) / 10d); + // Adjust target coordinate to represent top-left corner of metering rectangle. + targetX -= targetWidth / 2; + targetY -= targetHeight / 2; + // Adjust target coordinate as to not fall out of bounds. + if (targetX < 0) { + targetX = 0; + } + if (targetY < 0) { + targetY = 0; + } + int maxTargetX = boundaries.getWidth() - 1 - targetWidth; + int maxTargetY = boundaries.getHeight() - 1 - targetHeight; + if (targetX > maxTargetX) { + targetX = maxTargetX; + } + if (targetY > maxTargetY) { + targetY = maxTargetY; + } + // Build the metering rectangle. + return MeteringRectangleFactory.create(targetX, targetY, targetWidth, targetHeight, 1); + } + + /** Factory class that assists in creating a {@link MeteringRectangle} instance. */ + static class MeteringRectangleFactory { + /** + * Creates a new instance of the {@link MeteringRectangle} class. + * + *
<p>
This method is visible for testing purposes only and should never be used outside this * + * class. + * + * @param x coordinate >= 0. + * @param y coordinate >= 0. + * @param width width >= 0. + * @param height height >= 0. + * @param meteringWeight weight between {@value MeteringRectangle#METERING_WEIGHT_MIN} and + * {@value MeteringRectangle#METERING_WEIGHT_MAX} inclusively. + * @return new instance of the {@link MeteringRectangle} class. + * @throws IllegalArgumentException if any of the parameters were negative. + */ + @VisibleForTesting + public static MeteringRectangle create( + int x, int y, int width, int height, int meteringWeight) { + return new MeteringRectangle(x, y, width, height, meteringWeight); + } + } + + /** Factory class that assists in creating a {@link Size} instance. */ + static class SizeFactory { + /** + * Creates a new instance of the {@link Size} class. + * + *
<p>
This method is visible for testing purposes only and should never be used outside this * + * class. + * + * @param width width >= 0. + * @param height height >= 0. + * @return new instance of the {@link Size} class. + */ + @VisibleForTesting + public static Size create(int width, int height) { + return new Size(width, height); + } + } +} diff --git a/android/src/main/java/io/getstream/webrtc/flutter/video/camera/CameraUtils.java b/android/src/main/java/io/getstream/webrtc/flutter/video/camera/CameraUtils.java new file mode 100644 index 0000000000..efc131f8ca --- /dev/null +++ b/android/src/main/java/io/getstream/webrtc/flutter/video/camera/CameraUtils.java @@ -0,0 +1,723 @@ +package io.getstream.webrtc.flutter.video.camera; + +import android.app.Activity; +import android.graphics.Rect; +import android.hardware.Camera; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.params.MeteringRectangle; +import android.os.Build; +import android.os.Handler; +import android.util.Log; +import android.util.Range; +import android.util.Size; +import android.view.Surface; + +import androidx.annotation.NonNull; +import androidx.annotation.RequiresApi; + +import io.getstream.webrtc.flutter.GetUserMediaImpl; +import io.getstream.webrtc.flutter.utils.AnyThreadResult; +import io.getstream.webrtc.flutter.video.VideoCapturerInfo; + +import org.webrtc.Camera1Capturer; +import org.webrtc.Camera2Capturer; +import org.webrtc.CameraEnumerationAndroid; + +import java.lang.reflect.Field; +import java.util.List; + +import io.flutter.embedding.engine.systemchannels.PlatformChannel; +import io.flutter.plugin.common.MethodCall; +import io.flutter.plugin.common.MethodChannel; + +public class CameraUtils { + private static final String TAG = "CameraUtils"; + Activity activity; + private GetUserMediaImpl getUserMediaImpl; + private boolean isTorchOn = false; + private DeviceOrientationManager deviceOrientationManager; + public CameraUtils(GetUserMediaImpl getUserMediaImpl, Activity activity) { + this.getUserMediaImpl = getUserMediaImpl; + this.activity = activity; + this.deviceOrientationManager = new DeviceOrientationManager(activity, 0); + this.deviceOrientationManager.start(); + } + + public void setFocusMode(MethodCall call, AnyThreadResult result) { + String trackId = call.argument("trackId"); + String mode = call.argument("mode"); + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setFocusMode", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + 
captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + switch (mode) { + case "locked": + // When locking the auto-focus the camera device should do a one-time focus and afterwards + // set the auto-focus to idle. This is accomplished by setting the CONTROL_AF_MODE to + // CONTROL_AF_MODE_AUTO. + captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO); + break; + case "auto": + captureRequestBuilder.set( + CaptureRequest.CONTROL_AF_MODE, + CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO); + break; + default: + break; + } + + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, + isTorchOn ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + + //captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF);
+      if (!params.getSupportedFocusModes().isEmpty()) {
+        switch (mode) {
+          case "locked":
+            params.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
+            break;
+          case "auto":
+            params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
+            break;
+          default:
+            break;
+        }
+        // Apply the updated parameters; without this the mode change is silently dropped.
+        camera.setParameters(params);
+        result.success(null);
+        return;
+      }
+    }
+    resultError("setFocusMode", "[FocusMode] Video capturer not compatible", result);
+  }
+
+  public void setFocusPoint(MethodCall call, Point focusPoint, AnyThreadResult result) {
+    String trackId = call.argument("trackId");
+    VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId);
+    if (info == null) {
+      resultError("setFocusPoint", "Video capturer not found for id: " + trackId, result);
+      return;
+    }
+
+    if (info.capturer instanceof Camera2Capturer) {
+      CameraCaptureSession captureSession;
+      CameraDevice cameraDevice;
+      CameraEnumerationAndroid.CaptureFormat captureFormat;
+      int fpsUnitFactor;
+      Surface surface;
+      Handler cameraThreadHandler;
+      CameraManager manager;
+
+      try {
+        Object session =
+            getPrivateProperty(
+                Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession");
+        manager =
+            (CameraManager)
+                getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager");
+        captureSession =
+            (CameraCaptureSession)
+                getPrivateProperty(session.getClass(), session, "captureSession");
+        cameraDevice =
+            (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice");
+        captureFormat =
+            (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat");
+        fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor");
+        surface = (Surface) getPrivateProperty(session.getClass(), session, "surface");
+        cameraThreadHandler =
+            (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler");
+      } catch (NoSuchFieldWithNameException e) {
+        // Most likely the upstream Camera2Capturer class have changed
+        resultError("setFocusPoint", "[FocusPoint] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result);
+        return;
+      }
+
+      try {
+        final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId());
+        final CaptureRequest.Builder captureRequestBuilder =
+            cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+        MeteringRectangle focusRectangle = null;
+        Size cameraBoundaries = CameraRegionUtils.getCameraBoundaries(cameraCharacteristics, captureRequestBuilder);
+        PlatformChannel.DeviceOrientation orientation = deviceOrientationManager.getLastUIOrientation();
+        focusRectangle =
+            convertPointToMeteringRectangle(cameraBoundaries, focusPoint.x, focusPoint.y, orientation);
+
+        captureRequestBuilder.set(
+            CaptureRequest.CONTROL_AF_REGIONS,
+            focusRectangle == null ?
null : new MeteringRectangle[] {focusRectangle}); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setFocusMode", "[FocusMode] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + params.setFocusAreas(null); + + result.success(null); + return; + } + resultError("setFocusMode", "[FocusMode] Video capturer not compatible", result); + } + + public void setExposureMode(MethodCall call, AnyThreadResult result) {} + + public void setExposurePoint(MethodCall call,Point exposurePoint, AnyThreadResult result) { + String trackId = call.argument("trackId"); + String mode = call.argument("mode"); + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setExposurePoint", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setExposurePoint", "[setExposurePoint] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + + if(CameraRegionUtils.getControlMaxRegionsAutoExposure(cameraCharacteristics) <= 0) { + resultError("setExposurePoint", "[setExposurePoint] Camera does not support auto exposure", result); + return; + } + + MeteringRectangle exposureRectangle = null; + Size 
cameraBoundaries = CameraRegionUtils.getCameraBoundaries(cameraCharacteristics, captureRequestBuilder);
+        PlatformChannel.DeviceOrientation orientation = deviceOrientationManager.getLastUIOrientation();
+        exposureRectangle =
+            convertPointToMeteringRectangle(cameraBoundaries, exposurePoint.x, exposurePoint.y, orientation);
+        if (exposureRectangle != null) {
+          captureRequestBuilder.set(
+              CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] {exposureRectangle});
+        } else {
+          MeteringRectangle[] defaultRegions = captureRequestBuilder.get(CaptureRequest.CONTROL_AE_REGIONS);
+          captureRequestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, defaultRegions);
+        }
+
+        captureRequestBuilder.addTarget(surface);
+        captureSession.setRepeatingRequest(
+            captureRequestBuilder.build(), null, cameraThreadHandler);
+      } catch (CameraAccessException e) {
+        // Should never happen since we are already accessing the camera
+        throw new RuntimeException(e);
+      }
+
+      result.success(null);
+      return;
+    }
+
+    if (info.capturer instanceof Camera1Capturer) {
+      Camera camera;
+      try {
+        Object session =
+            getPrivateProperty(
+                Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession");
+        camera = (Camera) getPrivateProperty(session.getClass(), session, "camera");
+      } catch (NoSuchFieldWithNameException e) {
+        // Most likely the upstream Camera1Capturer class have changed
+        resultError("setExposurePoint", "[ExposurePoint] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result);
+        return;
+      }
+
+      Camera.Parameters params = camera.getParameters();
+      params.setFlashMode(
+          isTorchOn ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF);
+      // Clear any custom metering areas and apply the parameters.
+      params.setMeteringAreas(null);
+      camera.setParameters(params);
+
+      result.success(null);
+      return;
+    }
+    resultError("setExposurePoint", "[ExposurePoint] Video capturer not compatible", result);
+  }
+
+  public void hasTorch(String trackId, MethodChannel.Result result) {
+    VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId);
+    if (info == null) {
+      resultError("hasTorch", "Video capturer not found for id: " + trackId, result);
+      return;
+    }
+
+    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && info.capturer instanceof Camera2Capturer) {
+      CameraManager manager;
+      CameraDevice cameraDevice;
+
+      try {
+        Object session =
+            getPrivateProperty(
+                Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession");
+        manager =
+            (CameraManager)
+                getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager");
+        cameraDevice =
+            (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice");
+      } catch (NoSuchFieldWithNameException e) {
+        // Most likely the upstream Camera2Capturer class have changed
+        resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result);
+        return;
+      }
+
+      boolean flashIsAvailable;
+      try {
+        CameraCharacteristics characteristics =
+            manager.getCameraCharacteristics(cameraDevice.getId());
+        flashIsAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
+      } catch (CameraAccessException e) {
+        // Should never happen since we are already accessing the camera
+        throw new RuntimeException(e);
+      }
+
+      result.success(flashIsAvailable);
+      return;
+    }
+
+    if (info.capturer instanceof Camera1Capturer) {
+      Camera camera;
+
+      try {
+        Object session =
+            getPrivateProperty(
+                Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession");
+        camera = (Camera) getPrivateProperty(session.getClass(), session, "camera");
+      } catch (NoSuchFieldWithNameException e) {
+        // Most likely the upstream
Camera1Capturer class have changed + resultError("hasTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + List supportedModes = params.getSupportedFlashModes(); + + result.success( + supportedModes != null && supportedModes.contains(Camera.Parameters.FLASH_MODE_TORCH)); + return; + } + + resultError("hasTorch", "[TORCH] Video capturer not compatible", result); + } + + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + public void setZoom(String trackId, double zoomLevel, MethodChannel.Result result) { + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setZoom", "Video capturer not found for id: " + trackId, result); + return; + } + + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + CameraManager manager; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + + final CameraCharacteristics cameraCharacteristics = manager.getCameraCharacteristics(cameraDevice.getId()); + final Rect rect = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + final double maxZoomLevel = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); + + final double desiredZoomLevel = Math.max(1.0, Math.min(zoomLevel, maxZoomLevel)); + + float ratio = 1.0f / (float)desiredZoomLevel; + + if (rect != null) { + int croppedWidth = rect.width() - Math.round((float) rect.width() * ratio); + int croppedHeight = rect.height() - Math.round((float) rect.height() * ratio); + final Rect desiredRegion = new Rect(croppedWidth / 2, croppedHeight / 2, rect.width() - croppedWidth / 2, rect.height() - croppedHeight / 2); + captureRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, desiredRegion); + } + + captureRequestBuilder.set( + CaptureRequest.FLASH_MODE, + isTorchOn ? 
CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + + result.success(null); + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setZoom", "[ZOOM] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + isTorchOn ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + if(params.isZoomSupported()) { + int maxZoom = params.getMaxZoom(); + double desiredZoom = Math.max(0, Math.min(zoomLevel, maxZoom)); + params.setZoom((int)desiredZoom); + result.success(null); + return; + } + } + resultError("setZoom", "[ZOOM] Video capturer not compatible", result); + } + + @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP) + public void setTorch(String trackId, boolean torch, MethodChannel.Result result) { + VideoCapturerInfo info = getUserMediaImpl.getCapturerInfo(trackId); + if (info == null) { + resultError("setTorch", "Video capturer not found for id: " + trackId, result); + return; + } + if (info.capturer instanceof Camera2Capturer) { + CameraCaptureSession captureSession; + CameraDevice cameraDevice; + CameraEnumerationAndroid.CaptureFormat captureFormat; + int fpsUnitFactor; + Surface surface; + Handler cameraThreadHandler; + + try { + Object session = + getPrivateProperty( + Camera2Capturer.class.getSuperclass(), info.capturer, "currentSession"); + CameraManager manager = + (CameraManager) + getPrivateProperty(Camera2Capturer.class, info.capturer, "cameraManager"); + captureSession = + (CameraCaptureSession) + getPrivateProperty(session.getClass(), session, "captureSession"); + cameraDevice = + (CameraDevice) getPrivateProperty(session.getClass(), session, "cameraDevice"); + captureFormat = + (CameraEnumerationAndroid.CaptureFormat) getPrivateProperty(session.getClass(), session, "captureFormat"); + fpsUnitFactor = (int) getPrivateProperty(session.getClass(), session, "fpsUnitFactor"); + surface = (Surface) getPrivateProperty(session.getClass(), session, "surface"); + cameraThreadHandler = + (Handler) getPrivateProperty(session.getClass(), session, "cameraThreadHandler"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera2Capturer class have changed + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + try { + final CaptureRequest.Builder captureRequestBuilder = + cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); + captureRequestBuilder.set( + 
CaptureRequest.FLASH_MODE, + torch ? CaptureRequest.FLASH_MODE_TORCH : CaptureRequest.FLASH_MODE_OFF); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, + new Range<>( + captureFormat.framerate.min / fpsUnitFactor, + captureFormat.framerate.max / fpsUnitFactor)); + captureRequestBuilder.set( + CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON); + captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); + captureRequestBuilder.addTarget(surface); + captureSession.setRepeatingRequest( + captureRequestBuilder.build(), null, cameraThreadHandler); + } catch (CameraAccessException e) { + // Should never happen since we are already accessing the camera + throw new RuntimeException(e); + } + + result.success(null); + isTorchOn = torch; + return; + } + + if (info.capturer instanceof Camera1Capturer) { + Camera camera; + try { + Object session = + getPrivateProperty( + Camera1Capturer.class.getSuperclass(), info.capturer, "currentSession"); + camera = (Camera) getPrivateProperty(session.getClass(), session, "camera"); + } catch (NoSuchFieldWithNameException e) { + // Most likely the upstream Camera1Capturer class have changed + resultError("setTorch", "[TORCH] Failed to get `" + e.fieldName + "` from `" + e.className + "`", result); + return; + } + + Camera.Parameters params = camera.getParameters(); + params.setFlashMode( + torch ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); + camera.setParameters(params); + + result.success(null); + isTorchOn = torch; + return; + } + resultError("setTorch", "[TORCH] Video capturer not compatible", result); + } + + + private class NoSuchFieldWithNameException extends NoSuchFieldException { + + String className; + String fieldName; + + NoSuchFieldWithNameException(String className, String fieldName, NoSuchFieldException e) { + super(e.getMessage()); + this.className = className; + this.fieldName = fieldName; + } + } + static private void resultError(String method, String error, MethodChannel.Result result) { + String errorMsg = method + "(): " + error; + result.error(method, errorMsg, null); + Log.d(TAG, errorMsg); + } + private Object getPrivateProperty(Class klass, Object object, String fieldName) + throws NoSuchFieldWithNameException { + try { + Field field = klass.getDeclaredField(fieldName); + field.setAccessible(true); + return field.get(object); + } catch (NoSuchFieldException e) { + throw new NoSuchFieldWithNameException(klass.getName(), fieldName, e); + } catch (IllegalAccessException e) { + // Should never happen since we are calling `setAccessible(true)` + throw new RuntimeException(e); + } + } + @NonNull + public static MeteringRectangle convertPointToMeteringRectangle( + @NonNull Size boundaries, + double x, + double y, + @NonNull PlatformChannel.DeviceOrientation orientation) { + assert (boundaries.getWidth() > 0 && boundaries.getHeight() > 0); + assert (x >= 0 && x <= 1); + assert (y >= 0 && y <= 1); + // Rotate the coordinates to match the device orientation. + double oldX = x, oldY = y; + switch (orientation) { + case PORTRAIT_UP: // 90 ccw. + y = 1 - oldX; + x = oldY; + break; + case PORTRAIT_DOWN: // 90 cw. + x = 1 - oldY; + y = oldX; + break; + case LANDSCAPE_LEFT: + // No rotation required. + break; + case LANDSCAPE_RIGHT: // 180. + x = 1 - x; + y = 1 - y; + break; + } + // Interpolate the target coordinate. 
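+    // Worked example (hypothetical numbers): with 4000x3000 boundaries and
+    // (x, y) = (0.5, 0.5) after rotation, targetX = round(0.5 * 3999) = 2000
+    // and targetY = round(0.5 * 2999) = 1500; the 400x300 rectangle computed
+    // below is then shifted so that this point sits at its center.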
+  int targetX = (int) Math.round(x * ((double) (boundaries.getWidth() - 1)));
+  int targetY = (int) Math.round(y * ((double) (boundaries.getHeight() - 1)));
+  // Determine the dimensions of the metering rectangle (a 10th of the viewport).
+  int targetWidth = (int) Math.round(((double) boundaries.getWidth()) / 10d);
+  int targetHeight = (int) Math.round(((double) boundaries.getHeight()) / 10d);
+  // Adjust target coordinate to represent top-left corner of metering rectangle.
+  targetX -= targetWidth / 2;
+  targetY -= targetHeight / 2;
+  // Adjust target coordinate so as not to fall out of bounds.
+  if (targetX < 0) {
+    targetX = 0;
+  }
+  if (targetY < 0) {
+    targetY = 0;
+  }
+  int maxTargetX = boundaries.getWidth() - 1 - targetWidth;
+  int maxTargetY = boundaries.getHeight() - 1 - targetHeight;
+  if (targetX > maxTargetX) {
+    targetX = maxTargetX;
+  }
+  if (targetY > maxTargetY) {
+    targetY = maxTargetY;
+  }
+  // Build the metering rectangle.
+  return MeteringRectangleFactory.create(targetX, targetY, targetWidth, targetHeight, 1);
+}
+
+static class MeteringRectangleFactory {
+  public static MeteringRectangle create(
+      int x, int y, int width, int height, int meteringWeight) {
+    return new MeteringRectangle(x, y, width, height, meteringWeight);
+  }
+}
+}
+
diff --git a/android/src/main/java/io/getstream/webrtc/flutter/video/camera/DeviceOrientationManager.java b/android/src/main/java/io/getstream/webrtc/flutter/video/camera/DeviceOrientationManager.java
new file mode 100644
index 0000000000..dc87993a32
--- /dev/null
+++ b/android/src/main/java/io/getstream/webrtc/flutter/video/camera/DeviceOrientationManager.java
@@ -0,0 +1,188 @@
+package io.getstream.webrtc.flutter.video.camera;
+
+import android.app.Activity;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.res.Configuration;
+import android.view.Display;
+import android.view.Surface;
+import android.view.WindowManager;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.VisibleForTesting;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel;
+import io.flutter.embedding.engine.systemchannels.PlatformChannel.DeviceOrientation;
+
+/**
+ * Support class to help determine the media orientation based on the orientation of the device.
+ */
+public class DeviceOrientationManager {
+
+  private static final IntentFilter orientationIntentFilter =
+      new IntentFilter(Intent.ACTION_CONFIGURATION_CHANGED);
+
+  private final Activity activity;
+  private final int sensorOrientation;
+  private PlatformChannel.DeviceOrientation lastOrientation;
+  private BroadcastReceiver broadcastReceiver;
+
+  /** Factory method to create a device orientation manager.
*/ + @NonNull + public static DeviceOrientationManager create( + @NonNull Activity activity, + int sensorOrientation) { + return new DeviceOrientationManager(activity, sensorOrientation); + } + + DeviceOrientationManager( + @NonNull Activity activity, + int sensorOrientation) { + this.activity = activity; + this.sensorOrientation = sensorOrientation; + } + + public void start() { + if (broadcastReceiver != null) { + return; + } + broadcastReceiver = + new BroadcastReceiver() { + @Override + public void onReceive(Context context, Intent intent) { + handleUIOrientationChange(); + } + }; + activity.registerReceiver(broadcastReceiver, orientationIntentFilter); + broadcastReceiver.onReceive(activity, null); + } + + /** Stops listening for orientation updates. */ + public void stop() { + if (broadcastReceiver == null) { + return; + } + activity.unregisterReceiver(broadcastReceiver); + broadcastReceiver = null; + } + + + /** @return the last received UI orientation. */ + @Nullable + public PlatformChannel.DeviceOrientation getLastUIOrientation() { + return this.lastOrientation; + } + + /** + * Handles orientation changes based on change events triggered by the OrientationIntentFilter. + * + *
<p>
This method is visible for testing purposes only and should never be used outside this
+   * class.
+   */
+  @VisibleForTesting
+  void handleUIOrientationChange() {
+    PlatformChannel.DeviceOrientation orientation = getUIOrientation();
+    handleOrientationChange(orientation, lastOrientation);
+    lastOrientation = orientation;
+  }
+
+  @VisibleForTesting
+  static void handleOrientationChange(
+      DeviceOrientation newOrientation,
+      DeviceOrientation previousOrientation) {
+    // No-op hook: the latest orientation is tracked in lastOrientation by the caller.
+  }
+
+  @SuppressWarnings("deprecation")
+  @VisibleForTesting
+  PlatformChannel.DeviceOrientation getUIOrientation() {
+    final int rotation = getDisplay().getRotation();
+    final int orientation = activity.getResources().getConfiguration().orientation;
+
+    switch (orientation) {
+      case Configuration.ORIENTATION_PORTRAIT:
+        if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) {
+          return PlatformChannel.DeviceOrientation.PORTRAIT_UP;
+        } else {
+          return PlatformChannel.DeviceOrientation.PORTRAIT_DOWN;
+        }
+      case Configuration.ORIENTATION_LANDSCAPE:
+        if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) {
+          return PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT;
+        } else {
+          return PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT;
+        }
+      case Configuration.ORIENTATION_SQUARE:
+      case Configuration.ORIENTATION_UNDEFINED:
+      default:
+        return PlatformChannel.DeviceOrientation.PORTRAIT_UP;
+    }
+  }
+
+  /**
+   * Calculates the sensor orientation based on the supplied angle.
+   *
+   *
<p>
This method is visible for testing purposes only and should never be used outside this + * class. + * + * @param angle Orientation angle. + * @return The sensor orientation based on the supplied angle. + */ + @VisibleForTesting + PlatformChannel.DeviceOrientation calculateSensorOrientation(int angle) { + final int tolerance = 45; + angle += tolerance; + + // Orientation is 0 in the default orientation mode. This is portrait-mode for phones + // and landscape for tablets. We have to compensate for this by calculating the default + // orientation, and apply an offset accordingly. + int defaultDeviceOrientation = getDeviceDefaultOrientation(); + if (defaultDeviceOrientation == Configuration.ORIENTATION_LANDSCAPE) { + angle += 90; + } + // Determine the orientation + angle = angle % 360; + return new PlatformChannel.DeviceOrientation[] { + PlatformChannel.DeviceOrientation.PORTRAIT_UP, + PlatformChannel.DeviceOrientation.LANDSCAPE_LEFT, + PlatformChannel.DeviceOrientation.PORTRAIT_DOWN, + PlatformChannel.DeviceOrientation.LANDSCAPE_RIGHT, + } + [angle / 90]; + } + + /** + * Gets the default orientation of the device. + * + *
<p>
This method is visible for testing purposes only and should never be used outside this + * class. + * + * @return The default orientation of the device. + */ + @VisibleForTesting + int getDeviceDefaultOrientation() { + Configuration config = activity.getResources().getConfiguration(); + int rotation = getDisplay().getRotation(); + if (((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) + && config.orientation == Configuration.ORIENTATION_LANDSCAPE) + || ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) + && config.orientation == Configuration.ORIENTATION_PORTRAIT)) { + return Configuration.ORIENTATION_LANDSCAPE; + } else { + return Configuration.ORIENTATION_PORTRAIT; + } + } + + /** + * Gets an instance of the Android {@link android.view.Display}. + * + *
<p>
This method is visible for testing purposes only and should never be used outside this + * class. + * + * @return An instance of the Android {@link android.view.Display}. + */ + @SuppressWarnings("deprecation") + @VisibleForTesting + Display getDisplay() { + return ((WindowManager) activity.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay(); + } +} diff --git a/android/src/main/java/io/getstream/webrtc/flutter/video/camera/Point.java b/android/src/main/java/io/getstream/webrtc/flutter/video/camera/Point.java new file mode 100644 index 0000000000..9e5c7619e7 --- /dev/null +++ b/android/src/main/java/io/getstream/webrtc/flutter/video/camera/Point.java @@ -0,0 +1,14 @@ +package io.getstream.webrtc.flutter.video.camera; + +import androidx.annotation.Nullable; + +/** Represents a point on an x/y axis. */ +public class Point { + public final Double x; + public final Double y; + + public Point(@Nullable Double x, @Nullable Double y) { + this.x = x; + this.y = y; + } +} diff --git a/android/src/main/java/io/getstream/webrtc/flutter/video/camera/SdkCapabilityChecker.java b/android/src/main/java/io/getstream/webrtc/flutter/video/camera/SdkCapabilityChecker.java new file mode 100644 index 0000000000..d9d46a0143 --- /dev/null +++ b/android/src/main/java/io/getstream/webrtc/flutter/video/camera/SdkCapabilityChecker.java @@ -0,0 +1,60 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package io.getstream.webrtc.flutter.video.camera; + +import android.annotation.SuppressLint; +import android.os.Build; +import androidx.annotation.ChecksSdkIntAtLeast; +import androidx.annotation.VisibleForTesting; + +/** Abstracts SDK version checks, and allows overriding them in unit tests. */ +public class SdkCapabilityChecker { + /** The current SDK version, overridable for testing. 
*/
+  @SuppressLint("AnnotateVersionCheck")
+  @VisibleForTesting
+  public static int SDK_VERSION = Build.VERSION.SDK_INT;
+
+  @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.P)
+  public static boolean supportsDistortionCorrection() {
+    // See https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#DISTORTION_CORRECTION_AVAILABLE_MODES
+    return SDK_VERSION >= Build.VERSION_CODES.P;
+  }
+
+  @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.O)
+  public static boolean supportsEglRecordableAndroid() {
+    // See https://developer.android.com/reference/android/opengl/EGLExt#EGL_RECORDABLE_ANDROID
+    return SDK_VERSION >= Build.VERSION_CODES.O;
+  }
+
+  @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.S)
+  public static boolean supportsEncoderProfiles() {
+    // See https://developer.android.com/reference/android/media/EncoderProfiles
+    return SDK_VERSION >= Build.VERSION_CODES.S;
+  }
+
+  @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.M)
+  public static boolean supportsMarshmallowNoiseReductionModes() {
+    // See https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
+    return SDK_VERSION >= Build.VERSION_CODES.M;
+  }
+
+  @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.P)
+  public static boolean supportsSessionConfiguration() {
+    // See https://developer.android.com/reference/android/hardware/camera2/params/SessionConfiguration
+    return SDK_VERSION >= Build.VERSION_CODES.P;
+  }
+
+  @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.N)
+  public static boolean supportsVideoPause() {
+    // See https://developer.android.com/reference/androidx/camera/video/VideoRecordEvent.Pause
+    return SDK_VERSION >= Build.VERSION_CODES.N;
+  }
+
+  @ChecksSdkIntAtLeast(api = Build.VERSION_CODES.R)
+  public static boolean supportsZoomRatio() {
+    // See https://developer.android.com/reference/android/hardware/camera2/CaptureRequest#CONTROL_ZOOM_RATIO
+    return SDK_VERSION >= Build.VERSION_CODES.R;
+  }
+}
diff --git a/common/darwin/Classes/AudioUtils.m b/common/darwin/Classes/AudioUtils.m
index 5a48a7daae..a2a863b057 100644
--- a/common/darwin/Classes/AudioUtils.m
+++ b/common/darwin/Classes/AudioUtils.m
@@ -93,7 +93,7 @@ + (void)setSpeakerphoneOn:(BOOL)enable {
                                   AVAudioSessionCategoryOptionAllowBluetooth
                             error:&error];
-  success = [session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_Speaker
+  success = [session overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker
                                        error:&error];
   if (!success)
     NSLog(@"setSpeakerphoneOn: Port override failed due to: %@", error);
diff --git a/common/darwin/Classes/CameraUtils.h b/common/darwin/Classes/CameraUtils.h
new file mode 100644
index 0000000000..d6859d88ee
--- /dev/null
+++ b/common/darwin/Classes/CameraUtils.h
@@ -0,0 +1,43 @@
+#import <AVFoundation/AVFoundation.h>
+#import "FlutterWebRTCPlugin.h"
+
+@interface FlutterWebRTCPlugin (CameraUtils)
+
+- (void)mediaStreamTrackHasTorch:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetTorch:(nonnull RTCMediaStreamTrack*)track
+                           torch:(BOOL)torch
+                          result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetZoom:(nonnull RTCMediaStreamTrack*)track
+                      zoomLevel:(double)zoomLevel
+                         result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track
+                           focusMode:(nonnull NSString*)focusMode
+                              result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track
+                           focusPoint:(nonnull NSDictionary*)focusPoint
+
result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track
+                           exposureMode:(nonnull NSString*)exposureMode
+                                 result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track
+                           exposurePoint:(nonnull NSDictionary*)exposurePoint
+                                  result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSwitchCamera:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result;
+
+- (NSInteger)selectFpsForFormat:(nonnull AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps;
+
+- (nullable AVCaptureDeviceFormat*)selectFormatForDevice:(nonnull AVCaptureDevice*)device
+                                              targetWidth:(NSInteger)targetWidth
+                                             targetHeight:(NSInteger)targetHeight;
+
+- (nullable AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position;
+
+@end
diff --git a/common/darwin/Classes/CameraUtils.m b/common/darwin/Classes/CameraUtils.m
new file mode 100644
index 0000000000..e05d32e055
--- /dev/null
+++ b/common/darwin/Classes/CameraUtils.m
@@ -0,0 +1,350 @@
+#import "CameraUtils.h"
+
+@implementation FlutterWebRTCPlugin (CameraUtils)
+
+- (AVCaptureDevice*)currentDevice {
+  if (!self.videoCapturer) {
+    return nil;
+  }
+  if (self.videoCapturer.captureSession.inputs.count == 0) {
+    return nil;
+  }
+  AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0];
+  return deviceInput.device;
+}
+
+- (void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack*)track result:(FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't check torch");
+    result(@NO);
+    return;
+  }
+  result(@([device isTorchModeSupported:AVCaptureTorchModeOn]));
+#else
+  NSLog(@"Not supported on macOS. Can't check torch");
+  result(@NO);
+#endif
+}
+
+- (void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack*)track
+                           torch:(BOOL)torch
+                          result:(FlutterResult)result {
+  AVCaptureDevice* device = [self currentDevice];
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set torch");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device is nil" details:nil]);
+    return;
+  }
+
+  if (![device isTorchModeSupported:AVCaptureTorchModeOn]) {
+    NSLog(@"Current capture device does not support torch. Can't set torch");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:@"device does not support torch" details:nil]);
+    return;
+  }
+
+  NSError* error;
+  if ([device lockForConfiguration:&error] == NO) {
+    NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription);
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" message:error.localizedDescription details:nil]);
+    return;
+  }
+
+  device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff;
+  [device unlockForConfiguration];
+
+  result(nil);
+}
+
+- (void)mediaStreamTrackSetZoom:(RTCMediaStreamTrack*)track
+                      zoomLevel:(double)zoomLevel
+                         result:(FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set zoom");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:@"device is nil" details:nil]);
+    return;
+  }
+
+  NSError* error;
+  if ([device lockForConfiguration:&error] == NO) {
+    NSLog(@"Failed to acquire configuration lock. 
%@", error.localizedDescription);
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:error.localizedDescription details:nil]);
+    return;
+  }
+
+  CGFloat desiredZoomFactor = (CGFloat)zoomLevel;
+  device.videoZoomFactor = MAX(1.0, MIN(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor));
+  [device unlockForConfiguration];
+
+  result(nil);
+#else
+  NSLog(@"Not supported on macOS. Can't set zoom");
+  result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" message:@"Not supported on macOS" details:nil]);
+#endif
+}
+
+- (void)applyFocusMode:(NSString*)focusMode onDevice:(AVCaptureDevice*)captureDevice {
+#if TARGET_OS_IPHONE
+  [captureDevice lockForConfiguration:nil];
+  if ([@"locked" isEqualToString:focusMode]) {
+    if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
+      [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
+    }
+  } else if ([@"auto" isEqualToString:focusMode]) {
+    if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
+      [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+    } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
+      [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
+    }
+  }
+  [captureDevice unlockForConfiguration];
+#endif
+}
+
+- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track
+                           focusMode:(nonnull NSString*)focusMode
+                              result:(nonnull FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set focusMode");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" message:@"device is nil" details:nil]);
+    return;
+  }
+  self.focusMode = focusMode;
+  [self applyFocusMode:focusMode onDevice:device];
+  result(nil);
+#else
+  NSLog(@"Not supported on macOS. Can't set focusMode");
+  result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" message:@"Not supported on macOS" details:nil]);
+#endif
+}
+
+- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track
+                           focusPoint:(nonnull NSDictionary*)focusPoint
+                               result:(nonnull FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set focusPoint");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"device is nil" details:nil]);
+    return;
+  }
+  BOOL reset = ((NSNumber*)focusPoint[@"reset"]).boolValue;
+  double x = 0.5;
+  double y = 0.5;
+  if (!reset) {
+    x = ((NSNumber*)focusPoint[@"x"]).doubleValue;
+    y = ((NSNumber*)focusPoint[@"y"]).doubleValue;
+  }
+  if (!device.isFocusPointOfInterestSupported) {
+    NSLog(@"Focus point of interest is not supported. Can't set focusPoint");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Focus point of interest is not supported" details:nil]);
+    return;
+  }
+  UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
+  [device lockForConfiguration:nil];
+
+  [device setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
+                                                                         x:x
+                                                                         y:y]];
+  [device unlockForConfiguration];
+
+  [self applyFocusMode:self.focusMode onDevice:device];
+  result(nil);
+#else
+  NSLog(@"Not supported on macOS. Can't set focusPoint");
+  result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" message:@"Not supported on macOS" details:nil]);
+#endif
+}
+
+- (void)applyExposureMode:(NSString*)exposureMode onDevice:(AVCaptureDevice*)captureDevice {
+#if TARGET_OS_IPHONE
+  [captureDevice lockForConfiguration:nil];
+  if ([@"locked" isEqualToString:exposureMode]) {
+    if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
+      [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
+    }
+  } else if ([@"auto" isEqualToString:exposureMode]) {
+    if ([captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
+      [captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
+    } else if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
+      [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
+    }
+  }
+  [captureDevice unlockForConfiguration];
+#endif
+}
+
+- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track
+                           exposureMode:(nonnull NSString*)exposureMode
+                                 result:(nonnull FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+  if (!device) {
+    NSLog(@"Video capturer is null. Can't set exposureMode");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" message:@"device is nil" details:nil]);
+    return;
+  }
+  self.exposureMode = exposureMode;
+  [self applyExposureMode:exposureMode onDevice:device];
+  result(nil);
+#else
+  NSLog(@"Not supported on macOS. Can't set exposureMode");
+  result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" message:@"Not supported on macOS" details:nil]);
+#endif
+}
+
+#if TARGET_OS_IPHONE
+- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation
+                                            x:(double)x
+                                            y:(double)y {
+  double oldX = x, oldY = y;
+  switch (orientation) {
+    case UIDeviceOrientationPortrait: // 90 ccw
+      y = 1 - oldX;
+      x = oldY;
+      break;
+    case UIDeviceOrientationPortraitUpsideDown: // 90 cw
+      x = 1 - oldY;
+      y = oldX;
+      break;
+    case UIDeviceOrientationLandscapeRight: // 180
+      x = 1 - x;
+      y = 1 - y;
+      break;
+    case UIDeviceOrientationLandscapeLeft:
+    default:
+      // No rotation required
+      break;
+  }
+  return CGPointMake(x, y);
+}
+#endif
+
+- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track
+                           exposurePoint:(nonnull NSDictionary*)exposurePoint
+                                  result:(nonnull FlutterResult)result {
+#if TARGET_OS_IPHONE
+  AVCaptureDevice* device = [self currentDevice];
+
+  if (!device) {
+    NSLog(@"Video capturer is null. 
Can't set exposurePoint");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"device is nil" details:nil]);
+    return;
+  }
+
+  BOOL reset = ((NSNumber*)exposurePoint[@"reset"]).boolValue;
+  double x = 0.5;
+  double y = 0.5;
+  if (!reset) {
+    x = ((NSNumber*)exposurePoint[@"x"]).doubleValue;
+    y = ((NSNumber*)exposurePoint[@"y"]).doubleValue;
+  }
+  if (!device.isExposurePointOfInterestSupported) {
+    NSLog(@"Exposure point of interest is not supported. Can't set exposurePoint");
+    result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"Exposure point of interest is not supported" details:nil]);
+    return;
+  }
+  UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
+  [device lockForConfiguration:nil];
+  [device setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
+                                                                            x:x
+                                                                            y:y]];
+  [device unlockForConfiguration];
+
+  [self applyExposureMode:self.exposureMode onDevice:device];
+  result(nil);
+#else
+  NSLog(@"Not supported on macOS. Can't set exposurePoint");
+  result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" message:@"Not supported on macOS" details:nil]);
#endif
+}
+
+- (void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack*)track result:(FlutterResult)result {
+  if (!self.videoCapturer) {
+    NSLog(@"Video capturer is null. Can't switch camera");
+    // Complete the Flutter call instead of leaving the result dangling.
+    result([FlutterError errorWithCode:@"mediaStreamTrackSwitchCameraFailed" message:@"Video capturer is null" details:nil]);
+    return;
+  }
+#if TARGET_OS_IPHONE
+  [self.videoCapturer stopCapture];
+#endif
+  self._usingFrontCamera = !self._usingFrontCamera;
+  AVCaptureDevicePosition position =
+      self._usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
+  AVCaptureDevice* videoDevice = [self findDeviceForPosition:position];
+  AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice
+                                                          targetWidth:self._lastTargetWidth
+                                                         targetHeight:self._lastTargetHeight];
+  [self.videoCapturer startCaptureWithDevice:videoDevice
+                                      format:selectedFormat
+                                         fps:[self selectFpsForFormat:selectedFormat
+                                                            targetFps:self._lastTargetFps]
+                           completionHandler:^(NSError* error) {
+                             if (error != nil) {
+                               result([FlutterError errorWithCode:@"Error while switching camera"
+                                                          message:@"Error while switching camera"
+                                                          details:error]);
+                             } else {
+                               result([NSNumber numberWithBool:self._usingFrontCamera]);
+                             }
+                           }];
+}
+
+- (AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position {
+  if (position == AVCaptureDevicePositionUnspecified) {
+    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+  }
+  NSArray<AVCaptureDevice*>* captureDevices = [RTCCameraVideoCapturer captureDevices];
+  for (AVCaptureDevice* device in captureDevices) {
+    if (device.position == position) {
+      return device;
+    }
+  }
+  return captureDevices[0];
+}
+
+- (AVCaptureDeviceFormat*)selectFormatForDevice:(AVCaptureDevice*)device
+                                    targetWidth:(NSInteger)targetWidth
+                                   targetHeight:(NSInteger)targetHeight {
+  NSArray<AVCaptureDeviceFormat*>* formats =
+      [RTCCameraVideoCapturer supportedFormatsForDevice:device];
+  AVCaptureDeviceFormat* selectedFormat = nil;
+  long currentDiff = INT_MAX;
+  for (AVCaptureDeviceFormat* format in formats) {
+    CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+    FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+    //NSLog(@"AVCaptureDeviceFormats,fps %d, dimension: %dx%d", format.videoSupportedFrameRateRanges, dimension.width, dimension.height);
+    long diff = labs(targetWidth - dimension.width) + labs(targetHeight - dimension.height);
+    if (diff < 
currentDiff) { + selectedFormat = format; + currentDiff = diff; + } else if (diff == currentDiff && + pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { + selectedFormat = format; + } + } + return selectedFormat; +} + +- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps { + Float64 maxSupportedFramerate = 0; + for (AVFrameRateRange* fpsRange in format.videoSupportedFrameRateRanges) { + maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); + } + return fmin(maxSupportedFramerate, targetFps); +} + + +@end diff --git a/common/darwin/Classes/FlutterRTCDesktopCapturer.m b/common/darwin/Classes/FlutterRTCDesktopCapturer.m index f7a366af26..ee22a9bbd5 100644 --- a/common/darwin/Classes/FlutterRTCDesktopCapturer.m +++ b/common/darwin/Classes/FlutterRTCDesktopCapturer.m @@ -38,9 +38,9 @@ - (void)getDisplayMedia:(NSDictionary*)constraints result:(FlutterResult)result id screenCapturer; if (useBroadcastExtension) { - screenCapturer = [[FlutterBroadcastScreenCapturer alloc] initWithDelegate:videoSource]; + screenCapturer = [[FlutterBroadcastScreenCapturer alloc] initWithDelegate:videoProcessingAdapter]; } else { - screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource]; + screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoProcessingAdapter]; } [screenCapturer startCapture]; diff --git a/common/darwin/Classes/FlutterRTCMediaStream.h b/common/darwin/Classes/FlutterRTCMediaStream.h index dc37a10059..9bc4a1415d 100644 --- a/common/darwin/Classes/FlutterRTCMediaStream.h +++ b/common/darwin/Classes/FlutterRTCMediaStream.h @@ -15,18 +15,6 @@ - (void)getSources:(nonnull FlutterResult)result; -- (void)mediaStreamTrackHasTorch:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result; - -- (void)mediaStreamTrackSetTorch:(nonnull RTCMediaStreamTrack*)track - torch:(BOOL)torch - result:(nonnull FlutterResult)result; - -- (void)mediaStreamTrackSetZoom:(nonnull RTCMediaStreamTrack*)track - zoomLevel:(double)zoomLevel - result:(nonnull FlutterResult)result; - -- (void)mediaStreamTrackSwitchCamera:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result; - - (void)mediaStreamTrackCaptureFrame:(nonnull RTCMediaStreamTrack*)track toPath:(nonnull NSString*)path result:(nonnull FlutterResult)result; diff --git a/common/darwin/Classes/FlutterRTCMediaStream.m b/common/darwin/Classes/FlutterRTCMediaStream.m index 58906de79b..23d267aacf 100644 --- a/common/darwin/Classes/FlutterRTCMediaStream.m +++ b/common/darwin/Classes/FlutterRTCMediaStream.m @@ -1,5 +1,6 @@ #import #import "AudioUtils.h" +#import "CameraUtils.h" #import "FlutterRTCFrameCapturer.h" #import "FlutterRTCMediaStream.h" #import "FlutterRTCPeerConnection.h" @@ -50,6 +51,11 @@ - (NSDictionary*)defaultVideoConstraints { return @{@"minWidth" : @"1280", @"minHeight" : @"720", @"minFrameRate" : @"30"}; } +- (NSDictionary*)defaultAudioConstraints { + return @{}; +} + + - (RTCMediaConstraints*)defaultMediaStreamConstraints { RTCMediaConstraints* constraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:[self defaultVideoConstraints] @@ -113,7 +119,7 @@ - (void)getUserAudio:(NSDictionary*)constraints mediaStream:(RTCMediaStream*)mediaStream { id audioConstraints = constraints[@"audio"]; NSString* audioDeviceId = @""; - + RTCMediaConstraints *rtcConstraints; if ([audioConstraints isKindOfClass:[NSDictionary class]]) { // constraints.audio.deviceId NSString* deviceId = audioConstraints[@"deviceId"]; @@ 
-122,11 +128,12 @@ - (void)getUserAudio:(NSDictionary*)constraints audioDeviceId = deviceId; } + rtcConstraints = [self parseMediaConstraints:audioConstraints]; // constraints.audio.optional.sourceId - id optionalVideoConstraints = audioConstraints[@"optional"]; - if (optionalVideoConstraints && [optionalVideoConstraints isKindOfClass:[NSArray class]] && + id optionalConstraints = audioConstraints[@"optional"]; + if (optionalConstraints && [optionalConstraints isKindOfClass:[NSArray class]] && !deviceId) { - NSArray* options = optionalVideoConstraints; + NSArray* options = optionalConstraints; for (id item in options) { if ([item isKindOfClass:[NSDictionary class]]) { NSString* sourceId = ((NSDictionary*)item)[@"sourceId"]; @@ -136,6 +143,8 @@ - (void)getUserAudio:(NSDictionary*)constraints } } } + } else { + rtcConstraints = [self parseMediaConstraints:[self defaultAudioConstraints]]; } #if !defined(TARGET_OS_IPHONE) @@ -145,7 +154,8 @@ - (void)getUserAudio:(NSDictionary*)constraints #endif NSString* trackId = [[NSUUID UUID] UUIDString]; - RTCAudioTrack* audioTrack = [self.peerConnectionFactory audioTrackWithTrackId:trackId]; + RTCAudioSource *audioSource = [self.peerConnectionFactory audioSourceWithConstraints:rtcConstraints]; + RTCAudioTrack* audioTrack = [self.peerConnectionFactory audioTrackWithSource:audioSource trackId:trackId]; LocalAudioTrack *localAudioTrack = [[LocalAudioTrack alloc] initWithTrack:audioTrack]; audioTrack.settings = @{ @@ -957,34 +967,6 @@ - (void)mediaStreamTrackSetZoom:(RTCMediaStreamTrack*)track #endif } -- (void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack*)track result:(FlutterResult)result { - if (!self.videoCapturer) { - NSLog(@"Video capturer is null. Can't switch camera"); - return; - } - [self.videoCapturer stopCapture]; - self._usingFrontCamera = !self._usingFrontCamera; - AVCaptureDevicePosition position = - self._usingFrontCamera ? 
AVCaptureDevicePositionFront : AVCaptureDevicePositionBack; - AVCaptureDevice* videoDevice = [self findDeviceForPosition:position]; - AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice - targetWidth:self._lastTargetWidth - targetHeight:self._lastTargetHeight]; - [self.videoCapturer startCaptureWithDevice:videoDevice - format:selectedFormat - fps:[self selectFpsForFormat:selectedFormat - targetFps:self._lastTargetFps] - completionHandler:^(NSError* error) { - if (error != nil) { - result([FlutterError errorWithCode:@"Error while switching camera" - message:@"Error while switching camera" - details:error]); - } else { - result([NSNumber numberWithBool:self._usingFrontCamera]); - } - }]; -} - - (void)mediaStreamTrackCaptureFrame:(RTCVideoTrack*)track toPath:(NSString*)path result:(FlutterResult)result { diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.h b/common/darwin/Classes/FlutterWebRTCPlugin.h index 2c57f74bab..0e1a50a649 100644 --- a/common/darwin/Classes/FlutterWebRTCPlugin.h +++ b/common/darwin/Classes/FlutterWebRTCPlugin.h @@ -51,6 +51,9 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler); @property(nonatomic, strong) AVAudioSessionPort _Nullable preferredInput; @property (nonatomic, strong) VideoEffectProcessor* videoEffectProcessor; +@property(nonatomic, strong) NSString *focusMode; +@property(nonatomic, strong) NSString *exposureMode; + @property(nonatomic) BOOL _usingFrontCamera; @property(nonatomic) NSInteger _lastTargetWidth; @property(nonatomic) NSInteger _lastTargetHeight; diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.m b/common/darwin/Classes/FlutterWebRTCPlugin.m index 7228fbd33a..c3d3ebfed1 100644 --- a/common/darwin/Classes/FlutterWebRTCPlugin.m +++ b/common/darwin/Classes/FlutterWebRTCPlugin.m @@ -1,5 +1,6 @@ #import "FlutterWebRTCPlugin.h" #import "AudioUtils.h" +#import "CameraUtils.h" #import "FlutterRTCDataChannel.h" #import "FlutterRTCDesktopCapturer.h" #import "FlutterRTCMediaStream.h" @@ -188,6 +189,8 @@ - (instancetype)initWithChannel:(FlutterMethodChannel*)channel self.videoCapturerStopHandlers = [NSMutableDictionary new]; #if TARGET_OS_IPHONE + self.focusMode = @"locked"; + self.exposureMode = @"locked"; AVAudioSession* session = [AVAudioSession sharedInstance]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didSessionRouteChange:) @@ -891,12 +894,84 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { details:nil]); } } + } else if ([@"mediaStreamTrackSetFocusMode" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSString* focusMode = argsMap[@"focusMode"]; + id track = self.localTracks[trackId]; + if (track != nil && focusMode != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetFocusMode:videoTrack focusMode:focusMode result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetFocusPoint" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSDictionary* focusPoint = argsMap[@"focusPoint"]; + id track = self.localTracks[trackId]; + if (track != nil 
&& focusPoint != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetFocusPoint:videoTrack focusPoint:focusPoint result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetExposureMode" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSString* exposureMode = argsMap[@"exposureMode"]; + id track = self.localTracks[trackId]; + if (track != nil && exposureMode != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetExposureMode:videoTrack exposureMode:exposureMode result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetExposurePoint" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSDictionary* exposurePoint = argsMap[@"exposurePoint"]; + id track = self.localTracks[trackId]; + if (track != nil && exposurePoint != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetExposurePoint:videoTrack exposurePoint:exposurePoint result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } } else if ([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]) { NSDictionary* argsMap = call.arguments; NSString* trackId = argsMap[@"trackId"]; id track = self.localTracks[trackId]; if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { - RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; [self mediaStreamTrackSwitchCamera:videoTrack result:result]; } else { if (track == nil) { diff --git a/elinux/CMakeLists.txt b/elinux/CMakeLists.txt index 1cd46af760..6ae0ea6390 100644 --- a/elinux/CMakeLists.txt +++ b/elinux/CMakeLists.txt @@ -51,3 +51,10 @@ set(flutter_webrtc_bundled_libraries "${CMAKE_CURRENT_SOURCE_DIR}/../third_party/libwebrtc/lib/${FLUTTER_TARGET_PLATFORM}/libwebrtc.so" PARENT_SCOPE ) + +# Add $ORIGIN to RPATH so that lib/libflutter_webrtc_plugin.so can find lib/libwebrtc.so at runtime +set_property( + TARGET ${PLUGIN_NAME} + PROPERTY BUILD_RPATH + "\$ORIGIN" +) \ No newline at end of file diff --git a/example/lib/src/get_user_media_sample.dart b/example/lib/src/get_user_media_sample.dart index d57c1567f0..bfc2605f7b 100644 --- a/example/lib/src/get_user_media_sample.dart +++ b/example/lib/src/get_user_media_sample.dart @@ -1,5 +1,6 @@ import 'dart:core'; import 'dart:io'; +import 'dart:math'; import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; @@ -21,6 +22,7 @@ class _GetUserMediaSampleState extends State { final 
_localRenderer = RTCVideoRenderer();
   bool _inCalling = false;
   bool _isTorchOn = false;
+  bool _isFrontCamera = true;
   MediaRecorder? _mediaRecorder;
 
   bool get _isRec => _mediaRecorder != null;
@@ -127,6 +129,15 @@ class _GetUserMediaSampleState {
     });
   }
 
+  void onViewFinderTap(TapDownDetails details, BoxConstraints constraints) {
+    final point = Point(
+      details.localPosition.dx / constraints.maxWidth,
+      details.localPosition.dy / constraints.maxHeight,
+    );
+    Helper.setFocusPoint(_localStream!.getVideoTracks().first, point);
+    Helper.setExposurePoint(_localStream!.getVideoTracks().first, point);
+  }
+
   void _toggleTorch() async {
     if (_localStream == null) throw Exception('Stream is not initialized');
 
@@ -152,17 +163,19 @@
     final videoTrack = _localStream!
         .getVideoTracks()
         .firstWhere((track) => track.kind == 'video');
-    await WebRTC.invokeMethod('mediaStreamTrackSetZoom',
-        {'trackId': videoTrack.id, 'zoomLevel': zoomLevel});
+    await Helper.setZoom(videoTrack, zoomLevel);
   }
 
-  void _toggleCamera() async {
+  void _switchCamera() async {
     if (_localStream == null) throw Exception('Stream is not initialized');
 
     final videoTrack = _localStream!
        .getVideoTracks()
        .firstWhere((track) => track.kind == 'video');
     await Helper.switchCamera(videoTrack);
+    setState(() {
+      _isFrontCamera = !_isFrontCamera;
+    });
   }
 
   void _captureFrame() async {
@@ -199,7 +212,7 @@
             ),
             IconButton(
               icon: Icon(Icons.switch_video),
-              onPressed: _toggleCamera,
+              onPressed: _switchCamera,
             ),
             IconButton(
               icon: Icon(Icons.camera),
@@ -236,15 +249,20 @@
                 width: MediaQuery.of(context).size.width,
                 height: MediaQuery.of(context).size.height,
                 decoration: BoxDecoration(color: Colors.black54),
-                child: GestureDetector(
-                  onScaleStart: (details) {},
-                  onScaleUpdate: (details) {
-                    if (details.scale != 1.0) {
-                      setZoom(details.scale);
-                    }
-                  },
-                  child: RTCVideoView(_localRenderer, mirror: true),
-                ),
+                child: LayoutBuilder(
+                    builder: (BuildContext context, BoxConstraints constraints) {
+                  return GestureDetector(
+                    onScaleStart: (details) {},
+                    onScaleUpdate: (details) {
+                      if (details.scale != 1.0) {
+                        setZoom(details.scale);
+                      }
+                    },
+                    onTapDown: (TapDownDetails details) =>
+                        onViewFinderTap(details, constraints),
+                    child: RTCVideoView(_localRenderer, mirror: false),
+                  );
+                }),
               ));
         },
       ),
diff --git a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m
index 24d882803d..7485a3492a 100644
--- a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m
+++ b/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m
@@ -224,7 +224,7 @@ - (void)didCaptureVideoFrame:(CVPixelBufferRef)pixelBuffer
       break;
   }
 
-  RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
+  RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithBuffer:[rtcPixelBuffer toI420]
                                                            rotation:rotation
                                                         timeStampNs:frameTimeStampNs];
 
diff --git a/ios/Classes/CameraUtils.h b/ios/Classes/CameraUtils.h
new file mode 120000
index 0000000000..a31c2baab2
--- /dev/null
+++ b/ios/Classes/CameraUtils.h
@@ -0,0 +1 @@
+../../common/darwin/Classes/CameraUtils.h
\ No newline at end of file
diff --git a/ios/Classes/CameraUtils.m b/ios/Classes/CameraUtils.m
new file mode 120000
index 0000000000..336e1ea963
--- /dev/null
+++ b/ios/Classes/CameraUtils.m
@@ -0,0 +1 @@
+../../common/darwin/Classes/CameraUtils.m
\ No newline at end of file
diff --git 
a/lib/src/helper.dart b/lib/src/helper.dart
index 1dc5a52226..1ce4a42e4e 100644
--- a/lib/src/helper.dart
+++ b/lib/src/helper.dart
@@ -1,3 +1,5 @@
+import 'dart:math';
+
 import 'package:flutter/foundation.dart';
 
 import '../stream_webrtc_flutter.dart';
@@ -67,17 +69,24 @@ class Helper {
     return Future.value(true);
   }
 
-  static Future<void> setZoom(
-      MediaStreamTrack videoTrack, double zoomLevel) async {
-    if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) {
-      await WebRTC.invokeMethod(
-        'mediaStreamTrackSetZoom',
-        {'trackId': videoTrack.id, 'zoomLevel': zoomLevel},
-      );
-    } else {
-      throw Exception('setZoom only support for mobile devices!');
-    }
-  }
+  static Future<void> setZoom(MediaStreamTrack videoTrack, double zoomLevel) =>
+      CameraUtils.setZoom(videoTrack, zoomLevel);
+
+  static Future<void> setFocusMode(
+          MediaStreamTrack videoTrack, CameraFocusMode focusMode) =>
+      CameraUtils.setFocusMode(videoTrack, focusMode);
+
+  static Future<void> setFocusPoint(
+          MediaStreamTrack videoTrack, Point<double>? point) =>
+      CameraUtils.setFocusPoint(videoTrack, point);
+
+  static Future<void> setExposureMode(
+          MediaStreamTrack videoTrack, CameraExposureMode exposureMode) =>
+      CameraUtils.setExposureMode(videoTrack, exposureMode);
+
+  static Future<void> setExposurePoint(
+          MediaStreamTrack videoTrack, Point<double>? point) =>
+      CameraUtils.setExposurePoint(videoTrack, point);
 
   /// Used to select a specific audio output device.
   ///
diff --git a/lib/src/native/camera_utils.dart b/lib/src/native/camera_utils.dart
new file mode 100644
index 0000000000..3557a64e46
--- /dev/null
+++ b/lib/src/native/camera_utils.dart
@@ -0,0 +1,93 @@
+import 'dart:math';
+
+import 'package:webrtc_interface/webrtc_interface.dart';
+
+import 'utils.dart';
+
+enum CameraFocusMode { auto, locked }
+
+enum CameraExposureMode { auto, locked }
+
+class CameraUtils {
+  static Future<void> setZoom(
+      MediaStreamTrack videoTrack, double zoomLevel) async {
+    if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) {
+      await WebRTC.invokeMethod(
+        'mediaStreamTrackSetZoom',
+        {'trackId': videoTrack.id, 'zoomLevel': zoomLevel},
+      );
+    } else {
+      throw Exception('setZoom is only supported on mobile devices!');
+    }
+  }
+
+  /// Set the focus mode for the camera; focusMode can be:
+  /// 'auto', 'locked'
+  static Future<void> setFocusMode(
+      MediaStreamTrack videoTrack, CameraFocusMode focusMode) async {
+    if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) {
+      await WebRTC.invokeMethod(
+        'mediaStreamTrackSetFocusMode',
+        {
+          'trackId': videoTrack.id,
+          'focusMode': focusMode.name,
+        },
+      );
+    } else {
+      throw Exception('setFocusMode is only supported on mobile devices!');
+    }
+  }
+
+  static Future<void> setFocusPoint(
+      MediaStreamTrack videoTrack, Point<double>? point) async {
+    if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) {
+      await WebRTC.invokeMethod(
+        'mediaStreamTrackSetFocusPoint',
+        {
+          'trackId': videoTrack.id,
+          'focusPoint': {
+            'reset': point == null,
+            'x': point?.x,
+            'y': point?.y,
+          },
+        },
+      );
+    } else {
+      throw Exception('setFocusPoint is only supported on mobile devices!');
+    }
+  }
+
+  static Future<void> setExposureMode(
+      MediaStreamTrack videoTrack, CameraExposureMode exposureMode) async {
+    if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) {
+      await WebRTC.invokeMethod(
+        'mediaStreamTrackSetExposureMode',
+        {
+          'trackId': videoTrack.id,
+          'exposureMode': exposureMode.name,
+        },
+      );
+    } else {
+      throw Exception('setExposureMode is only supported on mobile devices!');
+    }
+  }
+
+  static Future<void> setExposurePoint(
+      MediaStreamTrack videoTrack, Point<double>? 
point) async {
+    if (WebRTC.platformIsAndroid || WebRTC.platformIsIOS) {
+      await WebRTC.invokeMethod(
+        'mediaStreamTrackSetExposurePoint',
+        {
+          'trackId': videoTrack.id,
+          'exposurePoint': {
+            'reset': point == null,
+            'x': point?.x,
+            'y': point?.y,
+          },
+        },
+      );
+    } else {
+      throw Exception('setExposurePoint is only supported on mobile devices!');
+    }
+  }
+}
diff --git a/lib/stream_webrtc_flutter.dart b/lib/stream_webrtc_flutter.dart
index 12921d598e..733ef3a036 100644
--- a/lib/stream_webrtc_flutter.dart
+++ b/lib/stream_webrtc_flutter.dart
@@ -15,6 +15,8 @@ export 'src/native/rtc_video_view_impl.dart'
     if (dart.library.html) 'src/web/rtc_video_view_impl.dart';
 export 'src/native/utils.dart' if (dart.library.html) 'src/web/utils.dart';
 export 'src/native/adapter_type.dart';
+export 'src/native/camera_utils.dart';
+export 'src/native/audio_management.dart';
 export 'src/native/android/audio_configuration.dart';
 export 'src/native/ios/audio_configuration.dart';
 export 'src/native/rtc_video_platform_view_controller.dart';
diff --git a/macos/Classes/CameraUtils.h b/macos/Classes/CameraUtils.h
new file mode 120000
index 0000000000..a31c2baab2
--- /dev/null
+++ b/macos/Classes/CameraUtils.h
@@ -0,0 +1 @@
+../../common/darwin/Classes/CameraUtils.h
\ No newline at end of file
diff --git a/macos/Classes/CameraUtils.m b/macos/Classes/CameraUtils.m
new file mode 120000
index 0000000000..336e1ea963
--- /dev/null
+++ b/macos/Classes/CameraUtils.m
@@ -0,0 +1 @@
+../../common/darwin/Classes/CameraUtils.m
\ No newline at end of file
diff --git a/pubspec.yaml b/pubspec.yaml
index 6f277251fa..af128ef160 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -1,6 +1,6 @@
 name: stream_webrtc_flutter
 description: Flutter WebRTC plugin for iOS/Android/Destkop/Web, based on GoogleWebRTC.
-version: 0.12.3+3
+version: 0.12.5+hotfix.2
 homepage: https://github.com/GetStream/webrtc-flutter
 environment:
   sdk: ">=3.3.0 <4.0.0"
@@ -8,7 +8,7 @@ environment:
 
 dependencies:
   collection: ^1.17.0
-  dart_webrtc: ^1.4.9
+  dart_webrtc: ^1.4.10
   flutter:
     sdk: flutter
   path_provider: ^2.0.2
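
For quick reference, the new mobile-only camera controls introduced by this release can be driven from Dart roughly as follows. This is a minimal usage sketch based on the `Helper` wrappers and the `CameraFocusMode`/`CameraExposureMode` enums added in the diff above; the `getUserMedia` setup mirrors the bundled example app, and the function name `demoCameraControls` is just illustrative. On non-mobile platforms these calls throw, and the native side clamps zoom and validates point-of-interest support.

```dart
import 'dart:math';

import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart';

Future<void> demoCameraControls() async {
  // Grab a local camera track; the controls below are iOS/Android only.
  final stream = await navigator.mediaDevices
      .getUserMedia({'audio': false, 'video': true});
  final videoTrack = stream.getVideoTracks().first;

  // Continuous autofocus / auto-exposure where the device supports it.
  await Helper.setFocusMode(videoTrack, CameraFocusMode.auto);
  await Helper.setExposureMode(videoTrack, CameraExposureMode.auto);

  // Meter on the image center; coordinates are normalized to 0..1.
  await Helper.setFocusPoint(videoTrack, Point(0.5, 0.5));
  await Helper.setExposurePoint(videoTrack, Point(0.5, 0.5));

  // Passing null sends {reset: true}, restoring the default point (0.5, 0.5).
  await Helper.setFocusPoint(videoTrack, null);

  // Zoom is clamped natively to the device's supported range.
  await Helper.setZoom(videoTrack, 2.0);
}
```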