diff --git a/.idea/camera_awesome.iml b/.idea/camera_awesome.iml
index 53716b82..8d63fc9b 100644
--- a/.idea/camera_awesome.iml
+++ b/.idea/camera_awesome.iml
@@ -3,73 +3,38 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
+
+
\ No newline at end of file
diff --git a/.idea/libraries/Dart_SDK.xml b/.idea/libraries/Dart_SDK.xml
index 91dae004..2421cae5 100644
--- a/.idea/libraries/Dart_SDK.xml
+++ b/.idea/libraries/Dart_SDK.xml
@@ -1,25 +1,25 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/.idea/libraries/Flutter_Plugins.xml b/.idea/libraries/Flutter_Plugins.xml
index 53449dae..6b7239fd 100644
--- a/.idea/libraries/Flutter_Plugins.xml
+++ b/.idea/libraries/Flutter_Plugins.xml
@@ -2,6 +2,11 @@
+
+
+
+
+
diff --git a/.idea/misc.xml b/.idea/misc.xml
index d900c2ad..7de1a832 100644
--- a/.idea/misc.xml
+++ b/.idea/misc.xml
@@ -3,4 +3,7 @@
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
index 35eb1ddf..83067447 100644
--- a/.idea/vcs.xml
+++ b/.idea/vcs.xml
@@ -2,5 +2,6 @@
+
\ No newline at end of file
diff --git a/.run/multi_camera.run.xml b/.run/multi_camera.run.xml
new file mode 100644
index 00000000..84afa9db
--- /dev/null
+++ b/.run/multi_camera.run.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
index b48ef1dd..90c1017d 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -17,6 +17,12 @@
"program": "example/lib/subroute_camera.dart",
"flutterMode": "profile"
},
+ {
+ "name": "Multi camera example",
+ "request": "launch",
+ "type": "dart",
+ "program": "example/lib/multi_camera.dart"
+ },
{
"name": "AI analysis - face detection",
"request": "launch",
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 278a2894..10b77e82 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,28 @@
+# 2.0.0 - Multi camera is here !
+
+Hello everyone 👋 !
+
+We are proud to announce the two most requested features on the official camera plugin:
+
+- Multi-camera 📹
+- Video settings 🎥
+
+This release introduces breaking changes in order to support above features.
+See the [migration guide](https://docs.page/Apparence-io/camera_awesome/migration_guide/from_1_to_2) for details.
+
+Here is the complete changelog:
+
+- ✨ Added multi-camera feature, allowing users to display multiple camera previews simultaneously. Note that this feature is currently in beta, and we do not recommend using it in production.
+- ✨ Users can now pass options (such as bitrate, fps, and quality) when recording a video.
+- ✨ You can now mirror video recording.
+- ✨🍏 Implemented brightness and exposure level settings on iOS / iPadOS.
+- ✨🤖 Added zoom indicator UI.
+- ✨🤖 Video recording is now mirrored if `mirrorFrontCamera` is set to true.
+- ♻️🍏 Completely reworked the code for increased clarity and performance.
+- 🐛 Fixed patrol tests.
+- 🐛 Fixed the use of capture button parameter in awesome bottom actions (thanks to @juliuszmandrosz).
+- 📝 Added Chinese README.md (thanks to @chyiiiiiiiiiiii).
+
# 1.4.0
- ✨ Add utilities to convert AnalysisImage into JPEG in order to display them using `toJpeg()`.
diff --git a/README.md b/README.md
index c954bae3..b567dfd7 100644
--- a/README.md
+++ b/README.md
@@ -52,14 +52,20 @@ Use our awesome built-in interface or customize it as you want.
+## Migration guide
+
+If you are migrating from version 1.x.x to 2.x.x, please read the [migration guide](https://docs.page/Apparence-io/camera_awesome/migration_guide/from_1_to_2).
+
+
## Native features
Here's all native features that cameraAwesome provides to the flutter side.
-| System | Android | iOS |
-| :--------------------------------------- | :-----: | :---: |
+| Features | Android | iOS |
+| :--------------------------------------- | :-----: | :---: |
| 🔖 Ask permissions | ✅ | ✅ |
| 🎥 Record video | ✅ | ✅ |
+| 📹 Multi camera (🚧 BETA) | ✅ | ✅ |
| 🔈 Enable/disable audio | ✅ | ✅ |
| 🎞 Take photos | ✅ | ✅ |
| 🌆 Photo live filters | ✅ | ✅ |
@@ -79,11 +85,11 @@ Here's all native features that cameraAwesome provides to the flutter side.
## 📖 Installation and usage
-### Add the package in your pubspec.yaml
+### Add the package in your `pubspec.yaml`
```yaml
dependencies:
- camerawesome: ^1.3.0
+ camerawesome: ^2.0.0-dev.1
...
```
@@ -123,7 +129,6 @@ the [official documentation](https://developer.android.com/training/data-storage
If you want to record videos with audio, add this permission to your `AndroidManifest.xml`:
```xml
-
@@ -188,13 +193,11 @@ import 'package:camerawesome/camerawesome_plugin.dart';
## 👌 Awesome built-in interface
Just use our builder.
-That's all you need to create a complete camera experience within you app.
+That's all you need to create a complete camera experience within your app.
```dart
CameraAwesomeBuilder.awesome(
- saveConfig: SaveConfig.image(
- pathBuilder: _path(),
- ),
+ saveConfig: SaveConfig.photoAndVideo(),
onMediaTap: (mediaCapture) {
OpenFile.open(mediaCapture.filePath);
},
@@ -205,20 +208,18 @@ CameraAwesomeBuilder.awesome(
This builder can be customized with various settings:
-- a theme
-- builders for each part of the screen
-- initial camera setup
-- preview positioning
-- additional preview decoration
-- and more!
+- A theme.
+- Builders for each part of the screen.
+- Initial camera setup.
+- Preview positioning.
+- Additional preview decoration.
+- And much more!
Here is an example:
![Customized UI](docs/img/custom_awesome_ui.jpg)
-Check
-the [full documentation](https://docs.page/Apparence-io/camera_awesome/getting_started/awesome-ui)
-to learn more.
+Check the [full documentation](https://docs.page/Apparence-io/camera_awesome/getting_started/awesome-ui) to learn more.
---
@@ -232,7 +233,7 @@ The camera preview will be visible behind what you will provide to the builder.
```dart
CameraAwesomeBuilder.custom(
- saveConfig: SaveConfig.image(pathBuilder: _path()),
+ saveConfig: SaveConfig.photo(),
builder: (state, previewSize, previewRect) {
// create your interface here
},
@@ -290,7 +291,7 @@ Use this to achieve:
- Facial recognition.
- AI object detection.
- Realtime video chats.
- And much more 🤩
+- And much more 🤩
![Face AI](docs/img/face_ai.gif)
@@ -308,9 +309,7 @@ the [documentation](https://docs.page/Apparence-io/camera_awesome/ai_with_mlkit/
```dart
CameraAwesomeBuilder.awesome(
- saveConfig: SaveConfig.image(
- pathBuilder: _path(),
- ),
+ saveConfig: SaveConfig.photo(),
onImageForAnalysis: analyzeImage,
imageAnalysisConfig: AnalysisConfig(
// Android specific options
@@ -323,11 +322,12 @@ CameraAwesomeBuilder.awesome(
// Max frames per second, null for no limit (default)
maxFramesPerSecond: 20,
),
+)
```
> MLkit recommends using nv21 format for Android.
> bgra8888 is the iOS format
-> For machine learning you don't need full full-resolution (720 or lower should be enough and makes computation easier)
+> For machine learning you don't need full-resolution images (720 or lower should be enough and makes computation easier)
Learn more about the image analysis configuration in
the [documentation](https://docs.page/Apparence-io/camera_awesome/ai_with_mlkit/image_analysis_configuration)
@@ -351,15 +351,14 @@ Through state you can access to a `SensorConfig` class.
-| Function | Comment |
-| -------------------- | ---------------------------------------------------------- |
-| setZoom | changing zoom |
-| setFlashMode | changing flash between NONE,ON,AUTO,ALWAYS |
-| setBrightness | change brightness level manually (better to let this auto) |
-| setMirrorFrontCamera | set mirroring for front camera |
+| Function | Comment |
+| ---------------------- | ---------------------------------------------------------- |
+| `setZoom` | change zoom |
+| `setFlashMode` | change flash between NONE,ON,AUTO,ALWAYS |
+| `setBrightness` | change brightness level manually (better to let this auto) |
+| `setMirrorFrontCamera` | set mirroring for front camera |
-All of these configurations are listenable through a stream so your UI can automatically get updated
-according to the actual configuration.
+All of these configurations are listenable through a stream so your UI can automatically get updated according to the actual configuration.
@@ -400,6 +399,37 @@ CameraAwesomeBuilder.custom(
See all available filters in the [documentation](https://docs.page/Apparence-io/camera_awesome/widgets/awesome_filters).
+
+## 📷 📷 Concurrent cameras
+
+![Concurrent cameras](docs/img/concurrent_cameras.gif)
+
+> 🚧 Feature in beta 🚧
+> Any feedback is welcome!
+
+In order to start using CamerAwesome with multiple cameras simultaneously, you need to define a `SensorConfig` that uses several sensors. You can use the `SensorConfig.multiple()` constructor for this:
+
+```dart
+CameraAwesomeBuilder.awesome(
+ sensorConfig: SensorConfig.multiple(
+ sensors: [
+ Sensor.position(SensorPosition.back),
+ Sensor.position(SensorPosition.front),
+ ],
+ flashMode: FlashMode.auto,
+ aspectRatio: CameraAspectRatios.ratio_16_9,
+ ),
+ // Other params
+)
+```
+
+This feature is not supported by all devices and even when it is, there are limitations that you must be aware of.
+
+Check the details in the [dedicated documentation](https://docs.page/Apparence-io/camera_awesome/getting_started/multicam).
+
+
+
+
diff --git a/android/build.gradle b/android/build.gradle
index 9e603bb8..a0f349d4 100644
--- a/android/build.gradle
+++ b/android/build.gradle
@@ -2,7 +2,7 @@ group 'com.apparence.camerawesome'
version '1.0'
buildscript {
- ext.kotlin_version = '1.7.10'
+ ext.kotlin_version = '1.8.10'
repositories {
google()
mavenCentral()
@@ -98,6 +98,7 @@ def compatibleVersion(prop, fallbackVersion, min = null, max = null) {
android {
compileSdkVersion compatibleVersion('compileSdkVersion', DEFAULT_COMPILE_SDK_VERSION).toInteger()
+ namespace 'io.apparence.camerawesome'
defaultConfig {
minSdkVersion compatibleVersion('minSdkVersion', DEFAULT_MIN_SDK_VERSION, 21).toInteger()
@@ -139,7 +140,7 @@ dependencies {
// Optional -- mockito-kotlin
testImplementation "org.mockito.kotlin:mockito-kotlin:4.0.0"
- def camerax_version = "1.2.2"
+ def camerax_version = "1.3.0-alpha06"
implementation "androidx.camera:camera-core:${camerax_version}"
implementation "androidx.camera:camera-camera2:${camerax_version}"
implementation "androidx.camera:camera-lifecycle:${camerax_version}"
diff --git a/android/src/main/kotlin/com/apparence/camerawesome/cameraX/CameraAwesomeX.kt b/android/src/main/kotlin/com/apparence/camerawesome/cameraX/CameraAwesomeX.kt
index b07d9cd3..c5a1686d 100644
--- a/android/src/main/kotlin/com/apparence/camerawesome/cameraX/CameraAwesomeX.kt
+++ b/android/src/main/kotlin/com/apparence/camerawesome/cameraX/CameraAwesomeX.kt
@@ -8,14 +8,12 @@ import android.content.pm.PackageManager
import android.graphics.*
import android.hardware.camera2.CameraCharacteristics
import android.location.Location
-import android.os.Build
-import android.os.CountDownTimer
-import android.os.Handler
-import android.os.Looper
-import android.os.Messenger
+import android.os.*
import android.util.Log
import android.util.Rational
import android.util.Size
+import androidx.camera.camera2.Camera2Config
+import androidx.camera.camera2.interop.ExperimentalCamera2Interop
import androidx.camera.core.*
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.video.FileOutputOptions
@@ -29,8 +27,8 @@ import com.apparence.camerawesome.buttons.PhysicalButtonMessageHandler
import com.apparence.camerawesome.buttons.PhysicalButtonsHandler
import com.apparence.camerawesome.buttons.PlayerService
import com.apparence.camerawesome.models.FlashMode
-import com.apparence.camerawesome.sensors.CameraSensor
import com.apparence.camerawesome.sensors.SensorOrientationListener
+import com.apparence.camerawesome.utils.isMultiCamSupported
import com.google.android.gms.location.FusedLocationProviderClient
import com.google.android.gms.location.LocationServices
import com.google.android.gms.location.Priority
@@ -43,13 +41,12 @@ import io.flutter.plugin.common.EventChannel
import io.flutter.view.TextureRegistry
import io.reactivex.rxjava3.disposables.Disposable
import io.reactivex.rxjava3.subjects.BehaviorSubject
-import kotlinx.coroutines.CoroutineScope
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.launch
+import kotlinx.coroutines.*
import java.io.File
import java.io.FileOutputStream
import java.io.IOException
import java.util.concurrent.TimeUnit
+import kotlin.coroutines.resume
import kotlin.math.roundToInt
@@ -61,7 +58,6 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
private lateinit var physicalButtonHandler: PhysicalButtonsHandler
private var binding: FlutterPluginBinding? = null
private var textureRegistry: TextureRegistry? = null
- private var textureEntry: TextureRegistry.SurfaceTextureEntry? = null
private var activity: Activity? = null
private lateinit var imageStreamChannel: EventChannel
private lateinit var orientationStreamChannel: EventChannel
@@ -73,8 +69,8 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
private lateinit var fusedLocationClient: FusedLocationProviderClient
private var exifPreferences = ExifPreferences(false)
private var cancellationTokenSource = CancellationTokenSource()
- private var lastRecordedVideo: BehaviorSubject? = null
- private var lastRecordedVideoSubscription: Disposable? = null
+ private var lastRecordedVideos: List>? = null
+ private var lastRecordedVideoSubscriptions: MutableList? = null
private var colorMatrix: List? = null
private val noneFilter: List = listOf(
@@ -100,9 +96,30 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
0.0
)
+ @SuppressLint("UnsafeOptInUsageError")
+ fun configureCameraXLogs() {
+ try {
+ ProcessCameraProvider.configureInstance(
+ CameraXConfig.Builder.fromConfig(Camera2Config.defaultConfig())
+ .setMinimumLoggingLevel(Log.ERROR).build()
+ )
+ } catch (e: IllegalStateException) {
+ // Ignore if trying to configure CameraX more than once
+ }
+ }
+
+ private fun getCameraProvider(): ProcessCameraProvider {
+ configureCameraXLogs()
+ val future = ProcessCameraProvider.getInstance(
+ activity!!
+ )
+ return future.get()
+ }
+
+
@SuppressLint("RestrictedApi")
override fun setupCamera(
- sensor: String,
+ sensors: List,
aspectRatio: String,
zoom: Double,
mirrorFrontCamera: Boolean,
@@ -111,6 +128,7 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
captureMode: String,
enableImageStream: Boolean,
exifPreferences: ExifPreferences,
+ videoOptions: VideoOptions?,
callback: (Result) -> Unit
) {
if (enablePhysicalButton) {
@@ -124,26 +142,23 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
activity!!.stopService(Intent(activity!!, PlayerService::class.java))
}
- val future = ProcessCameraProvider.getInstance(
- activity!!
- )
- val cameraProvider = future.get()
- textureEntry = textureRegistry!!.createSurfaceTexture()
-
- val cameraSelector =
- if (CameraSensor.valueOf(sensor) == CameraSensor.BACK) CameraSelector.DEFAULT_BACK_CAMERA else CameraSelector.DEFAULT_FRONT_CAMERA
+ val cameraProvider = getCameraProvider()
val mode = CaptureModes.valueOf(captureMode)
- cameraState = CameraXState(textureRegistry!!,
- textureEntry!!,
- cameraProvider = cameraProvider,
- cameraSelector = cameraSelector,
+ cameraState = CameraXState(cameraProvider = cameraProvider,
+ textureEntries = sensors.mapIndexed { index: Int, pigeonSensor: PigeonSensor ->
+ (pigeonSensor.deviceId
+ ?: index.toString()) to textureRegistry!!.createSurfaceTexture()
+ }.toMap(),
+ sensors = sensors,
mirrorFrontCamera = mirrorFrontCamera,
currentCaptureMode = mode,
enableImageStream = enableImageStream,
+ videoOptions = videoOptions?.android,
onStreamReady = { state -> state.updateLifecycle(activity!!) }).apply {
this.updateAspectRatio(aspectRatio)
this.flashMode = FlashMode.valueOf(flashMode)
+ this.enableAudioRecording = videoOptions?.enableAudio ?: true
}
this.exifPreferences = exifPreferences
orientationStreamListener =
@@ -155,7 +170,8 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
if (zoom > 0) {
// TODO Find a better way to set initial zoom than using a postDelayed
Handler(Looper.getMainLooper()).postDelayed({
- cameraState.previewCamera!!.cameraControl.setLinearZoom(zoom.toFloat())
+ (cameraState.concurrentCamera?.cameras?.firstOrNull()
+ ?: cameraState.previewCamera)?.cameraControl?.setLinearZoom(zoom.toFloat())
}, 200)
}
}
@@ -163,7 +179,7 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
callback(Result.success(true))
}
- override fun checkPermissions(): List {
+ override fun checkPermissions(permissions: List): List {
throw Exception("Not implemented on Android")
}
@@ -227,11 +243,10 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
}
override fun isVideoRecordingAndImageAnalysisSupported(
- sensor: String,
- callback: (Result) -> Unit
+ sensor: PigeonSensorPosition, callback: (Result) -> Unit
) {
val cameraSelector =
- if (CameraSensor.valueOf(sensor) == CameraSensor.BACK) CameraSelector.DEFAULT_BACK_CAMERA else CameraSelector.DEFAULT_FRONT_CAMERA
+ if (sensor == PigeonSensorPosition.BACK) CameraSelector.DEFAULT_BACK_CAMERA else CameraSelector.DEFAULT_FRONT_CAMERA
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
val cameraProvider = ProcessCameraProvider.getInstance(
@@ -240,8 +255,7 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
callback(
Result.success(
CameraCapabilities.getCameraLevel(
- cameraSelector,
- cameraProvider
+ cameraSelector, cameraProvider
) == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3
)
)
@@ -294,8 +308,8 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
)
}
- override fun getPreviewTextureId(): Long {
- return textureEntry!!.id()
+ override fun getPreviewTextureId(cameraPosition: Long): Long {
+ return cameraState.textureEntries[cameraPosition.toString()]!!.id()
}
/***
@@ -327,28 +341,48 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
}
}
- override fun takePhoto(path: String, callback: (Result) -> Unit) {
- val imageFile = File(path)
- imageFile.parentFile?.mkdirs()
+ override fun takePhoto(
+ sensors: List, paths: List, callback: (Result) -> Unit
+ ) {
+ if (sensors.size != paths.size) {
+ throw Exception("sensors and paths must have the same length")
+ }
+ if (paths.size != cameraState.imageCaptures.size) {
+ throw Exception("paths and imageCaptures must have the same length")
+ }
- takePhotoWith(imageFile, callback)
+ val sensorsMap = sensors.mapIndexed { index, pigeonSensor ->
+ pigeonSensor to paths[index]
+ }.toMap()
+ CoroutineScope(Dispatchers.Main).launch {
+ val res: MutableMap =
+ sensorsMap.mapValues { null }.toMutableMap()
+ for ((index, entry) in sensorsMap.entries.withIndex()) {
+ // On Android, path should be specified
+ val imageFile = File(entry.value!!)
+ imageFile.parentFile?.mkdirs()
+ // cameraState.imageCaptures must be in the same order as the sensors / paths lists
+ res[entry.key] = takePhotoWith(cameraState.imageCaptures[index], imageFile)
+ }
+ callback(Result.success(res.all { it.value == true }))
+ }
}
@SuppressLint("RestrictedApi")
- private fun takePhotoWith(
- imageFile: File, callback: (Result) -> Unit
- ) {
+ private suspend fun takePhotoWith(
+ imageCapture: ImageCapture, imageFile: File
+ ): Boolean = suspendCancellableCoroutine { continuation ->
val metadata = ImageCapture.Metadata()
- if (cameraState.cameraSelector.lensFacing == CameraSelector.LENS_FACING_FRONT) {
+ if (cameraState.sensors.size == 1 && cameraState.sensors.first().position == PigeonSensorPosition.FRONT) {
metadata.isReversedHorizontal = cameraState.mirrorFrontCamera
}
val outputFileOptions =
ImageCapture.OutputFileOptions.Builder(imageFile).setMetadata(metadata).build()
- cameraState.imageCapture!!.targetRotation = orientationStreamListener!!.surfaceOrientation
- cameraState.imageCapture!!.takePicture(outputFileOptions,
+// for (imageCapture in cameraState.imageCaptures) {
+ imageCapture.targetRotation = orientationStreamListener!!.surfaceOrientation
+ imageCapture.takePicture(outputFileOptions,
ContextCompat.getMainExecutor(activity!!),
object : ImageCapture.OnImageSavedCallback {
-
override fun onImageSaved(outputFileResults: ImageCapture.OutputFileResults) {
if (colorMatrix != null && noneFilter != colorMatrix) {
val exif = ExifInterface(outputFileResults.savedUri!!.path!!)
@@ -383,26 +417,38 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
val exif = ExifInterface(outputFileResults.savedUri!!.path!!)
outputFileOptions.metadata.location = it
exif.setGpsInfo(it)
+// Log.d("CAMERAX__EXIF", "GPS info saved ${it?.latitude} ${it?.longitude}")
// We need to actually save the exif data to the file system
exif.saveAttributes()
- callback(Result.success(true))
+ continuation.resume(true)
}
} else {
- callback(Result.success(true))
+ if (continuation.isActive) continuation.resume(true)
}
}
override fun onError(exception: ImageCaptureException) {
Log.e(CamerawesomePlugin.TAG, "Error capturing picture", exception)
- callback(Result.success(false))
+ continuation.resume(false)
}
})
+// }
}
@SuppressLint("RestrictedApi", "MissingPermission")
override fun recordVideo(
- path: String, options: VideoOptions?, callback: (Result) -> Unit
+ sensors: List, paths: List, callback: (Result) -> Unit
) {
+ if (sensors.size != paths.size) {
+ throw Exception("sensors and paths must have the same length")
+ }
+ if (paths.size != cameraState.videoCaptures.size) {
+ throw Exception("paths and videoCaptures must have the same length")
+ }
+
+ val requests = sensors.mapIndexed { index, pigeonSensor ->
+ pigeonSensor to paths[index]
+ }.toMap()
CoroutineScope(Dispatchers.Main).launch {
var ignoreAudio = false
if (cameraState.enableAudioRecording) {
@@ -423,65 +469,79 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
}
- lastRecordedVideoSubscription?.dispose()
- lastRecordedVideo = BehaviorSubject.create()
- val recordingListener = Consumer { event ->
- when (event) {
- is VideoRecordEvent.Start -> {
- Log.d(CamerawesomePlugin.TAG, "Capture Started")
- }
+ lastRecordedVideoSubscriptions?.forEach { it.dispose() }
+ lastRecordedVideos = buildList {
+ for (i in (0 until requests.size)) {
+ this.add(BehaviorSubject.create())
+ }
+ }
+ cameraState.recordings = mutableListOf()
+ lastRecordedVideoSubscriptions = mutableListOf()
+ for ((index, videoCapture) in cameraState.videoCaptures.values.withIndex()) {
+ val recordingListener = Consumer { event ->
+ when (event) {
+ is VideoRecordEvent.Start -> {
+ Log.d(CamerawesomePlugin.TAG, "Capture Started")
+ }
- is VideoRecordEvent.Finalize -> {
- if (!event.hasError()) {
- Log.d(
- CamerawesomePlugin.TAG,
- "Video capture succeeded: ${event.outputResults.outputUri}"
- )
- lastRecordedVideo!!.onNext(true)
- } else {
- // update app state when the capture failed.
- cameraState.apply {
- recording?.close()
- recording = null
+ is VideoRecordEvent.Finalize -> {
+ if (!event.hasError()) {
+ Log.d(
+ CamerawesomePlugin.TAG,
+ "Video capture succeeded: ${event.outputResults.outputUri}"
+ )
+ lastRecordedVideos!![index].onNext(true)
+ } else {
+ // update app state when the capture failed.
+ cameraState.apply {
+ recordings?.get(index)?.close()
+ if (recordings?.all {
+ it.isClosed
+ } == true) {
+ recordings = null
+ }
+ }
+ Log.e(
+ CamerawesomePlugin.TAG,
+ "Video capture ends with error: ${event.error}"
+ )
+ lastRecordedVideos!![index].onNext(false)
}
- Log.e(
- CamerawesomePlugin.TAG,
- "Video capture ends with error: ${event.error}"
- )
- lastRecordedVideo!!.onNext(false)
}
}
}
+ videoCapture.targetRotation = orientationStreamListener!!.surfaceOrientation
+ cameraState.recordings!!.add(videoCapture.output.prepareRecording(
+ activity!!, FileOutputOptions.Builder(File(paths[index]!!)).build()
+ ).apply { if (cameraState.enableAudioRecording && !ignoreAudio) withAudioEnabled() }
+ .start(cameraState.executor(activity!!), recordingListener))
}
- cameraState.videoCapture!!.targetRotation =
- orientationStreamListener!!.surfaceOrientation
- cameraState.recording = cameraState.videoCapture!!.output.prepareRecording(
- activity!!, FileOutputOptions.Builder(File(path)).build()
- ).apply { if (cameraState.enableAudioRecording && !ignoreAudio) withAudioEnabled() }
- .start(cameraState.executor(activity!!), recordingListener)
callback(Result.success(Unit))
}
}
override fun stopRecordingVideo(callback: (Result) -> Unit) {
var submitted = false
- val countDownTimer = object : CountDownTimer(5000, 5000) {
- override fun onTick(interval: Long) {}
- override fun onFinish() {
+ for (index in 0 until cameraState.recordings!!.size) {
+ val countDownTimer = object : CountDownTimer(5000, 5000) {
+ override fun onTick(interval: Long) {}
+ override fun onFinish() {
+ if (!submitted) {
+ submitted = true
+ callback(Result.success(false))
+ }
+ }
+ }
+ countDownTimer.start()
+
+ cameraState.recordings!![index].stop()
+ lastRecordedVideoSubscriptions!!.add(lastRecordedVideos!![index].subscribe({ it ->
+ countDownTimer.cancel()
if (!submitted) {
submitted = true
- callback(Result.success(false))
+ callback(Result.success(it))
}
- }
- }
- countDownTimer.start()
- cameraState.recording?.stop()
- lastRecordedVideoSubscription = lastRecordedVideo!!.subscribe {
- countDownTimer.cancel()
- if (!submitted) {
- submitted = true
- callback(Result.success(it))
- }
+ }, { error -> error.printStackTrace() }))
}
}
@@ -494,11 +554,11 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
}
override fun pauseVideoRecording() {
- cameraState.recording?.pause()
+ cameraState.recordings?.forEach { it.pause() }
}
override fun resumeVideoRecording() {
- cameraState.recording?.resume()
+ cameraState.recordings?.forEach { it.resume() }
}
override fun receivedImageFromStream() {
@@ -512,20 +572,25 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
}
override fun stop(): Boolean {
+ orientationStreamListener?.stop()
cameraState.stop()
return true
}
+ @SuppressLint("RestrictedApi")
override fun setFlashMode(mode: String) {
val flashMode = FlashMode.valueOf(mode)
cameraState.apply {
this.flashMode = flashMode
- this.imageCapture?.flashMode = when (flashMode) {
- FlashMode.ALWAYS, FlashMode.ON -> ImageCapture.FLASH_MODE_ON
- FlashMode.AUTO -> ImageCapture.FLASH_MODE_AUTO
- else -> ImageCapture.FLASH_MODE_OFF
+ for (imageCapture in cameraState.imageCaptures) {
+ imageCapture.flashMode = when (flashMode) {
+ FlashMode.ALWAYS, FlashMode.ON -> ImageCapture.FLASH_MODE_ON
+ FlashMode.AUTO -> ImageCapture.FLASH_MODE_AUTO
+ else -> ImageCapture.FLASH_MODE_OFF
+ }
}
- previewCamera?.cameraControl?.enableTorch(flashMode == FlashMode.ALWAYS)
+ (cameraState.concurrentCamera?.cameras?.firstOrNull()
+ ?: cameraState.previewCamera)?.cameraControl?.enableTorch(flashMode == FlashMode.ALWAYS)
}
}
@@ -538,37 +603,52 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
}
@SuppressLint("RestrictedApi")
- override fun setSensor(sensor: String, deviceId: String?) {
- val cameraSelector =
- if (CameraSensor.valueOf(sensor) == CameraSensor.BACK) CameraSelector.DEFAULT_BACK_CAMERA
- else CameraSelector.DEFAULT_FRONT_CAMERA
+ override fun setSensor(sensors: List) {
cameraState.apply {
- this.cameraSelector = cameraSelector
+ this.sensors = sensors
+ // TODO Make below variables parameters
// Also reset flash mode and aspect ratio
this.flashMode = FlashMode.NONE
this.aspectRatio = null
this.rational = Rational(3, 4)
- // Zoom should be reset automatically
-
updateLifecycle(activity!!)
}
}
+ @SuppressLint("RestrictedApi")
override fun setCorrection(brightness: Double) {
// TODO brightness calculation might not be the same as before CameraX
- val range = cameraState.previewCamera?.cameraInfo?.exposureState?.exposureCompensationRange
- if (range != null) {
- val actualBrightnessValue = brightness * (range.upper - range.lower) + range.lower
- cameraState.previewCamera?.cameraControl?.setExposureCompensationIndex(
- actualBrightnessValue.roundToInt()
- )
- }
+ val range = (cameraState.concurrentCamera?.cameras?.firstOrNull()
+ ?: cameraState.previewCamera!!).cameraInfo.exposureState.exposureCompensationRange
+ val actualBrightnessValue = brightness * (range.upper - range.lower) + range.lower
+ cameraState.previewCamera?.cameraControl?.setExposureCompensationIndex(
+ actualBrightnessValue.roundToInt()
+ )
}
+ /**
+ * This method must be called after bindToLifecycle has been called
+ *
+ * @return the max zoom ratio
+ */
override fun getMaxZoom(): Double {
return cameraState.maxZoomRatio
}
+ /**
+ * This method must be called after bindToLifecycle has been called
+ *
+ * @return the min zoom ratio
+ */
+ override fun getMinZoom(): Double {
+ return cameraState.minZoomRatio
+ }
+
+ fun convertLinearToRatio(linear: Double): Double {
+ // TODO Not sure if this is correct
+ return linear * getMaxZoom() / getMinZoom()
+ }
+
@Deprecated("Use focusOnPoint instead")
fun focus() {
val autoFocusPoint = SurfaceOrientedMeteringPointFactory(1f, 1f).createPoint(.5f, .5f)
@@ -599,7 +679,8 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
val autoFocusPoint = factory.createPoint(x.toFloat(), y.toFloat())
try {
- cameraState.previewCamera!!.cameraControl.startFocusAndMetering(
+ (cameraState.concurrentCamera?.cameras?.firstOrNull()
+ ?: cameraState.previewCamera!!).cameraControl.startFocusAndMetering(
FocusMeteringAction.Builder(
autoFocusPoint,
FocusMeteringAction.FLAG_AF or FocusMeteringAction.FLAG_AE or FocusMeteringAction.FLAG_AWB
@@ -624,8 +705,17 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
}
}
+
+ @SuppressLint("RestrictedApi")
+ @ExperimentalCamera2Interop
+ override fun isMultiCamSupported(): Boolean {
+ return getCameraProvider().isMultiCamSupported()
+ }
+
/// Changing the recording audio mode can't be changed once a recording has starded
- override fun setRecordingAudioMode(enableAudio: Boolean, callback: (Result) -> Unit) {
+ override fun setRecordingAudioMode(
+ enableAudio: Boolean, callback: (Result) -> Unit
+ ) {
CoroutineScope(Dispatchers.IO).launch {
cameraPermissions.requestPermissions(
activity!!,
@@ -657,12 +747,12 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
}
@SuppressLint("RestrictedApi")
- override fun getEffectivPreviewSize(): PreviewSize {
- val res = cameraState.preview!!.resolutionInfo?.resolution
+ override fun getEffectivPreviewSize(index: Long): PreviewSize {
+ val res = cameraState.previews!![index.toInt()].resolutionInfo?.resolution
return if (res != null) {
val rota90 = 90
val rota270 = 270
- when (cameraState.preview!!.resolutionInfo?.rotationDegrees) {
+ when (cameraState.previews!![index.toInt()].resolutionInfo?.rotationDegrees) {
rota90, rota270 -> {
PreviewSize(res.height.toDouble(), res.width.toDouble())
}
@@ -749,4 +839,5 @@ class CameraAwesomeX : CameraInterface, FlutterPlugin, ActivityAware {
cancellationTokenSource.cancel()
cameraPermissions.onCancel(null)
}
+
}
\ No newline at end of file
diff --git a/android/src/main/kotlin/com/apparence/camerawesome/cameraX/CameraXState.kt b/android/src/main/kotlin/com/apparence/camerawesome/cameraX/CameraXState.kt
index cbe9b0b8..358579c3 100644
--- a/android/src/main/kotlin/com/apparence/camerawesome/cameraX/CameraXState.kt
+++ b/android/src/main/kotlin/com/apparence/camerawesome/cameraX/CameraXState.kt
@@ -13,12 +13,12 @@ import androidx.camera.camera2.interop.Camera2CameraInfo
import androidx.camera.core.*
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.video.*
-import androidx.camera.video.VideoCapture
import androidx.core.content.ContextCompat
import androidx.lifecycle.LifecycleOwner
import com.apparence.camerawesome.CamerawesomePlugin
import com.apparence.camerawesome.models.FlashMode
import com.apparence.camerawesome.sensors.SensorOrientation
+import com.apparence.camerawesome.utils.isMultiCamSupported
import io.flutter.plugin.common.EventChannel
import io.flutter.view.TextureRegistry
import java.util.concurrent.Executor
@@ -26,18 +26,18 @@ import java.util.concurrent.Executor
/// Hold the settings of the camera and use cases in this class and
/// call updateLifecycle() to refresh the state
data class CameraXState(
- val textureRegistry: TextureRegistry,
- val textureEntry: TextureRegistry.SurfaceTextureEntry,
- var imageCapture: ImageCapture? = null,
- var cameraSelector: CameraSelector,
- private var recorder: Recorder? = null,
- var videoCapture: VideoCapture? = null,
- var preview: Preview? = null,
- var previewCamera: Camera? = null,
private var cameraProvider: ProcessCameraProvider,
+ val textureEntries: Map,
+// var cameraSelector: CameraSelector,
+ var sensors: List,
+ var imageCaptures: MutableList = mutableListOf(),
+ var videoCaptures: MutableMap> = mutableMapOf(),
+ var previews: MutableList? = null,
+ var concurrentCamera: ConcurrentCamera? = null,
+ var previewCamera: Camera? = null,
private var currentCaptureMode: CaptureModes,
var enableAudioRecording: Boolean = true,
- var recording: Recording? = null,
+ var recordings: MutableList? = null,
var enableImageStream: Boolean = false,
var photoSize: Size? = null,
var previewSize: Size? = null,
@@ -47,17 +47,39 @@ data class CameraXState(
var flashMode: FlashMode = FlashMode.NONE,
val onStreamReady: (state: CameraXState) -> Unit,
var mirrorFrontCamera: Boolean = false,
+ val videoOptions: AndroidVideoOptions?,
) : EventChannel.StreamHandler, SensorOrientation {
var imageAnalysisBuilder: ImageAnalysisBuilder? = null
- var imageAnalysis: ImageAnalysis? = null
+ private var imageAnalysis: ImageAnalysis? = null
+
+ private val mainCameraInfos: CameraInfo
+ @SuppressLint("RestrictedApi") get() {
+ if (previewCamera == null && concurrentCamera == null) {
+ throw Exception("Trying to access main camera infos before setting the preview")
+ }
+ return previewCamera?.cameraInfo ?: concurrentCamera?.cameras?.first()?.cameraInfo!!
+ }
+
+ private val mainCameraControl: CameraControl
+ @SuppressLint("RestrictedApi") get() {
+ if (previewCamera == null && concurrentCamera == null) {
+ throw Exception("Trying to access main camera control before setting the preview")
+ }
+ return previewCamera?.cameraControl
+ ?: concurrentCamera?.cameras?.first()?.cameraControl!!
+ }
val maxZoomRatio: Double
- get() = previewCamera!!.cameraInfo.zoomState.value!!.maxZoomRatio.toDouble()
+ @SuppressLint("RestrictedApi") get() = mainCameraInfos.zoomState.value!!.maxZoomRatio.toDouble()
+
+
+ val minZoomRatio: Double
+ get() = mainCameraInfos.zoomState.value!!.minZoomRatio.toDouble()
val portrait: Boolean
- get() = previewCamera!!.cameraInfo.sensorRotationDegrees % 180 == 0
+ get() = mainCameraInfos.sensorRotationDegrees % 180 == 0
fun executor(activity: Activity): Executor {
return ContextCompat.getMainExecutor(activity)
@@ -65,20 +87,131 @@ data class CameraXState(
@SuppressLint("RestrictedApi", "UnsafeOptInUsageError")
fun updateLifecycle(activity: Activity) {
- if (currentCaptureMode != CaptureModes.ANALYSIS_ONLY) {
- // Preview
- preview = if (aspectRatio != null) {
- Preview.Builder().setTargetAspectRatio(aspectRatio!!)
- .setCameraSelector(cameraSelector).build()
- } else {
- Preview.Builder().setCameraSelector(cameraSelector).build()
+ previews = mutableListOf()
+ imageCaptures.clear()
+ videoCaptures.clear()
+ if (cameraProvider.isMultiCamSupported() && sensors.size > 1) {
+ val singleCameraConfigs = mutableListOf()
+ var isFirst = true
+ for ((index, sensor) in sensors.withIndex()) {
+ val useCaseGroupBuilder = UseCaseGroup.Builder()
+
+ val cameraSelector =
+ if (isFirst) CameraSelector.DEFAULT_BACK_CAMERA else CameraSelector.DEFAULT_FRONT_CAMERA
+ // TODO Find cameraSelectors based on the sensor and the cameraProvider.availableConcurrentCameraInfos
+// val cameraSelector = CameraSelector.Builder()
+// .requireLensFacing(if (sensor.position == PigeonSensorPosition.FRONT) CameraSelector.LENS_FACING_FRONT else CameraSelector.LENS_FACING_BACK)
+// .addCameraFilter(CameraFilter { cameraInfos ->
+// val list = mutableListOf()
+// cameraInfos.forEach { cameraInfo ->
+// Camera2CameraInfo.from(cameraInfo).let {
+// if (it.getPigeonPosition() == sensor.position && (it.getSensorType() == sensor.type || it.getSensorType() == PigeonSensorType.UNKNOWN)) {
+// list.add(cameraInfo)
+// }
+// }
+// }
+// if (list.isEmpty()) {
+// // If no camera found, only filter based on the sensor position and ignore sensor type
+// cameraInfos.forEach { cameraInfo ->
+// Camera2CameraInfo.from(cameraInfo).let {
+// if (it.getPigeonPosition() == sensor.position) {
+// list.add(cameraInfo)
+// }
+// }
+// }
+// }
+// return@CameraFilter list
+// })
+// .build()
+
+
+ val preview = if (aspectRatio != null) {
+ Preview.Builder().setTargetAspectRatio(aspectRatio!!)
+ .setCameraSelector(cameraSelector).build()
+ } else {
+ Preview.Builder().setCameraSelector(cameraSelector).build()
+ }
+ preview.setSurfaceProvider(
+ surfaceProvider(executor(activity), sensor.deviceId ?: "$index")
+ )
+ useCaseGroupBuilder.addUseCase(preview)
+ previews!!.add(preview)
+
+ if (currentCaptureMode == CaptureModes.PHOTO) {
+ val imageCapture = ImageCapture.Builder().setCameraSelector(cameraSelector)
+// .setJpegQuality(100)
+ .apply {
+ //photoSize?.let { setTargetResolution(it) }
+ if (rational.denominator != rational.numerator) {
+ setTargetAspectRatio(aspectRatio ?: AspectRatio.RATIO_4_3)
+ }
+
+ setFlashMode(
+ if (isFirst) when (flashMode) {
+ FlashMode.ALWAYS, FlashMode.ON -> ImageCapture.FLASH_MODE_ON
+ FlashMode.AUTO -> ImageCapture.FLASH_MODE_AUTO
+ else -> ImageCapture.FLASH_MODE_OFF
+ }
+ else ImageCapture.FLASH_MODE_OFF
+ )
+ }.build()
+ useCaseGroupBuilder.addUseCase(imageCapture)
+ imageCaptures.add(imageCapture)
+ } else {
+ val videoCapture = buildVideoCapture(videoOptions)
+ useCaseGroupBuilder.addUseCase(videoCapture)
+ videoCaptures[sensor] = videoCapture
+ }
+ if (isFirst && enableImageStream && imageAnalysisBuilder != null) {
+ imageAnalysis = imageAnalysisBuilder!!.build()
+ useCaseGroupBuilder.addUseCase(imageAnalysis!!)
+ } else {
+ imageAnalysis = null
+ }
+
+ isFirst = false
+ useCaseGroupBuilder.setViewPort(
+ ViewPort.Builder(rational, Surface.ROTATION_0).build()
+ )
+ singleCameraConfigs.add(
+ ConcurrentCamera.SingleCameraConfig(
+ cameraSelector,
+ useCaseGroupBuilder.build(), activity as LifecycleOwner,
+ )
+ )
}
- preview!!.setSurfaceProvider(
- surfaceProvider(executor(activity))
+ cameraProvider.unbindAll()
+ previewCamera = null
+ concurrentCamera = cameraProvider.bindToLifecycle(
+ singleCameraConfigs
)
+ // Only set flash to the main camera (the first one)
+ concurrentCamera!!.cameras.first().cameraControl.enableTorch(flashMode == FlashMode.ALWAYS)
+ } else {
+ val useCaseGroupBuilder = UseCaseGroup.Builder()
+ // Handle single camera
+ val cameraSelector =
+ if (sensors.first().position == PigeonSensorPosition.FRONT) CameraSelector.DEFAULT_FRONT_CAMERA else CameraSelector.DEFAULT_BACK_CAMERA
+ // Preview
+ if (currentCaptureMode != CaptureModes.ANALYSIS_ONLY) {
+ previews!!.add(
+ if (aspectRatio != null) {
+ Preview.Builder().setTargetAspectRatio(aspectRatio!!)
+ .setCameraSelector(cameraSelector).build()
+ } else {
+ Preview.Builder().setCameraSelector(cameraSelector).build()
+ }
+ )
+
+ previews!!.first().setSurfaceProvider(
+ surfaceProvider(executor(activity), sensors.first().deviceId ?: "0")
+ )
+ useCaseGroupBuilder.addUseCase(previews!!.first())
+ }
+
if (currentCaptureMode == CaptureModes.PHOTO) {
- imageCapture = ImageCapture.Builder().setCameraSelector(cameraSelector)
+ val imageCapture = ImageCapture.Builder().setCameraSelector(cameraSelector)
// .setJpegQuality(100)
.apply {
//photoSize?.let { setTargetResolution(it) }
@@ -93,74 +226,100 @@ data class CameraXState(
}
)
}.build()
+ useCaseGroupBuilder.addUseCase(imageCapture)
+ imageCaptures.add(imageCapture)
} else if (currentCaptureMode == CaptureModes.VIDEO) {
- recorder =
- Recorder.Builder().setQualitySelector(QualitySelector.from(Quality.HIGHEST))
- .build()
- videoCapture = VideoCapture.withOutput(recorder!!)
+ val videoCapture = buildVideoCapture(videoOptions)
+ useCaseGroupBuilder.addUseCase(videoCapture)
+ videoCaptures[sensors.first()] = videoCapture
}
- }
- val addAnalysisUseCase = enableImageStream && imageAnalysisBuilder != null
- var useCases = mutableListOf(
- if (currentCaptureMode == CaptureModes.ANALYSIS_ONLY) null else preview,
- if (currentCaptureMode == CaptureModes.PHOTO) {
- imageCapture
- } else null,
- if (currentCaptureMode == CaptureModes.VIDEO) {
- videoCapture
- } else null,
- ).filterNotNull().toMutableList().apply {
+
+ val addAnalysisUseCase = enableImageStream && imageAnalysisBuilder != null
+ val cameraLevel = CameraCapabilities.getCameraLevel(
+ cameraSelector, cameraProvider
+ )
+ cameraProvider.unbindAll()
if (addAnalysisUseCase) {
- imageAnalysis = imageAnalysisBuilder!!.build()
- add(imageAnalysis!!)
+ if (currentCaptureMode == CaptureModes.VIDEO && cameraLevel < CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3) {
+ Log.w(
+ CamerawesomePlugin.TAG,
+ "Trying to bind too many use cases for this device (level $cameraLevel), ignoring image analysis"
+ )
+ } else {
+ imageAnalysis = imageAnalysisBuilder!!.build()
+ useCaseGroupBuilder.addUseCase(imageAnalysis!!)
+
+ }
} else {
imageAnalysis = null
}
- }
-
- val cameraLevel = CameraCapabilities.getCameraLevel(
- cameraSelector, cameraProvider
- )
- cameraProvider.unbindAll()
- if (currentCaptureMode == CaptureModes.VIDEO && addAnalysisUseCase && cameraLevel < CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3) {
- Log.w(
- CamerawesomePlugin.TAG,
- "Trying to bind too many use cases for this device (level $cameraLevel), ignoring image analysis"
+ // TODO Orientation might be wrong, to be verified
+ useCaseGroupBuilder.setViewPort(ViewPort.Builder(rational, Surface.ROTATION_0).build())
+ .build()
+
+ concurrentCamera = null
+ previewCamera = cameraProvider.bindToLifecycle(
+ activity as LifecycleOwner,
+ cameraSelector,
+ useCaseGroupBuilder.build(),
)
- useCases = useCases.filter { uc -> uc !is ImageAnalysis }.toMutableList()
+ previewCamera!!.cameraControl.enableTorch(flashMode == FlashMode.ALWAYS)
}
+ }
- previewCamera = cameraProvider.bindToLifecycle(
- activity as LifecycleOwner,
- cameraSelector,
- UseCaseGroup.Builder().apply {
- for (uc in useCases) addUseCase(uc)
+ private fun buildVideoCapture(videoOptions: AndroidVideoOptions?): VideoCapture {
+ val recorderBuilder = Recorder.Builder()
+ // Aspect ratio is handled by the setViewPort on the UseCaseGroup
+ if (videoOptions?.quality != null) {
+ val quality = when (videoOptions.quality) {
+ VideoRecordingQuality.LOWEST -> Quality.LOWEST
+ VideoRecordingQuality.SD -> Quality.SD
+ VideoRecordingQuality.HD -> Quality.HD
+ VideoRecordingQuality.FHD -> Quality.FHD
+ VideoRecordingQuality.UHD -> Quality.UHD
+ else -> Quality.HIGHEST
}
- // TODO Orientation might be wrong, to be verified
- .setViewPort(ViewPort.Builder(rational, Surface.ROTATION_0).build()).build(),
- )
-
- previewCamera!!.cameraControl.enableTorch(flashMode == FlashMode.ALWAYS)
+ recorderBuilder.setQualitySelector(
+ QualitySelector.from(
+ quality,
+ if (videoOptions.fallbackStrategy == QualityFallbackStrategy.LOWER) FallbackStrategy.lowerQualityOrHigherThan(
+ quality
+ )
+ else FallbackStrategy.higherQualityOrLowerThan(quality)
+ )
+ )
+ }
+ if (videoOptions?.bitrate != null) {
+ recorderBuilder.setTargetVideoEncodingBitRate(videoOptions.bitrate.toInt())
+ }
+ val recorder = recorderBuilder.build()
+ return VideoCapture.Builder(recorder)
+ .setMirrorMode(if (mirrorFrontCamera) MirrorMode.MIRROR_MODE_ON_FRONT_ONLY else MirrorMode.MIRROR_MODE_OFF)
+ .build()
}
@SuppressLint("RestrictedApi")
- private fun surfaceProvider(executor: Executor): Preview.SurfaceProvider {
+ private fun surfaceProvider(executor: Executor, cameraId: String): Preview.SurfaceProvider {
+// Log.d("SurfaceProviderCamX", "Creating surface provider for $cameraId")
return Preview.SurfaceProvider { request: SurfaceRequest ->
val resolution = request.resolution
- val texture = textureEntry.surfaceTexture()
+ val texture = textureEntries[cameraId]!!.surfaceTexture()
texture.setDefaultBufferSize(resolution.width, resolution.height)
val surface = Surface(texture)
- request.provideSurface(surface, executor) { }
+ request.provideSurface(surface, executor) {
+// Log.d("CameraX", "Surface request result: ${it.resultCode}")
+ surface.release()
+ }
}
}
fun setLinearZoom(zoom: Float) {
- previewCamera!!.cameraControl.setLinearZoom(zoom)
+ mainCameraControl.setLinearZoom(zoom)
}
fun startFocusAndMetering(autoFocusAction: FocusMeteringAction) {
- previewCamera!!.cameraControl.startFocusAndMetering(autoFocusAction)
+ mainCameraControl.startFocusAndMetering(autoFocusAction)
}
fun setCaptureMode(captureMode: CaptureModes) {
@@ -168,29 +327,27 @@ data class CameraXState(
when (currentCaptureMode) {
CaptureModes.PHOTO -> {
// Release video related stuff
- videoCapture = null
- recording?.close()
- recording = null
- recorder = null
+ videoCaptures.clear()
+ recordings?.forEach { it.close() }
+ recordings = null
}
CaptureModes.VIDEO -> {
// Release photo related stuff
- imageCapture = null
+ imageCaptures.clear()
}
else -> {
// Preview and analysis only modes
// Release video related stuff
- videoCapture = null
- recording?.close()
- recording = null
- recorder = null
+ videoCaptures.clear()
+ recordings?.forEach { it.close() }
+ recordings = null
// Release photo related stuff
- imageCapture = null
+ imageCaptures.clear()
}
}
}
@@ -198,14 +355,14 @@ data class CameraXState(
@SuppressLint("RestrictedApi", "UnsafeOptInUsageError")
fun previewSizes(): List {
val characteristics = CameraCharacteristicsCompat.toCameraCharacteristicsCompat(
- Camera2CameraInfo.extractCameraCharacteristics(previewCamera!!.cameraInfo),
-// Camera2CameraInfo.from(previewCamera!!.cameraInfo).cameraId
+ Camera2CameraInfo.extractCameraCharacteristics(mainCameraInfos),
+ Camera2CameraInfo.from(mainCameraInfos).cameraId
)
return CamcorderProfileResolutionQuirk(characteristics).supportedResolutions
}
fun qualityAvailableSizes(): List {
- val supportedQualities = QualitySelector.getSupportedQualities(previewCamera!!.cameraInfo)
+ val supportedQualities = QualitySelector.getSupportedQualities(mainCameraInfos)
return supportedQualities.map {
when (it) {
Quality.UHD -> {
@@ -285,6 +442,4 @@ data class CameraXState(
else -> Rational(3, 4)
}
}
-
-
}
\ No newline at end of file
diff --git a/android/src/main/kotlin/com/apparence/camerawesome/cameraX/ImageAnalysisBuilder.kt b/android/src/main/kotlin/com/apparence/camerawesome/cameraX/ImageAnalysisBuilder.kt
index 6c55eb1f..15f012b1 100644
--- a/android/src/main/kotlin/com/apparence/camerawesome/cameraX/ImageAnalysisBuilder.kt
+++ b/android/src/main/kotlin/com/apparence/camerawesome/cameraX/ImageAnalysisBuilder.kt
@@ -76,13 +76,14 @@ class ImageAnalysisBuilder private constructor(
imageProxy,
Rect(0, 0, imageProxy.width, imageProxy.height),
80,
-// imageProxy.imageInfo.rotationDegrees
+ imageProxy.imageInfo.rotationDegrees
)
val imageMap = imageProxyBaseAdapter(imageProxy)
imageMap["jpegImage"] = jpegImage
imageMap["cropRect"] = cropRect(imageProxy)
executor.execute { previewStreamSink?.success(imageMap) }
}
+
OutputImageFormat.YUV_420_888 -> {
val planes = imagePlanesAdapter(imageProxy)
val imageMap = imageProxyBaseAdapter(imageProxy)
@@ -90,6 +91,7 @@ class ImageAnalysisBuilder private constructor(
imageMap["cropRect"] = cropRect(imageProxy)
executor.execute { previewStreamSink?.success(imageMap) }
}
+
OutputImageFormat.NV21 -> {
val nv21Image = ImageUtil.yuv_420_888toNv21(imageProxy)
val planes = imagePlanesAdapter(imageProxy)
diff --git a/android/src/main/kotlin/com/apparence/camerawesome/cameraX/OrientationStreamListener.kt b/android/src/main/kotlin/com/apparence/camerawesome/cameraX/OrientationStreamListener.kt
index 66bec8b0..9c8981a1 100644
--- a/android/src/main/kotlin/com/apparence/camerawesome/cameraX/OrientationStreamListener.kt
+++ b/android/src/main/kotlin/com/apparence/camerawesome/cameraX/OrientationStreamListener.kt
@@ -15,19 +15,24 @@ class OrientationStreamListener(
in 225 until 315 -> {
Surface.ROTATION_90
}
+
in 135 until 225 -> {
Surface.ROTATION_180
}
+
in 45 until 135 -> {
Surface.ROTATION_270
}
+
else -> {
Surface.ROTATION_0
}
}
+ private val orientationEventListener: OrientationEventListener
+
init {
- val orientationEventListener: OrientationEventListener =
+ orientationEventListener =
object : OrientationEventListener(activity.applicationContext) {
override fun onOrientationChanged(i: Int) {
if (i == ORIENTATION_UNKNOWN) {
@@ -42,4 +47,8 @@ class OrientationStreamListener(
}
orientationEventListener.enable()
}
+
+ fun stop() {
+ orientationEventListener.disable()
+ }
}
\ No newline at end of file
diff --git a/android/src/main/kotlin/com/apparence/camerawesome/cameraX/Pigeon.kt b/android/src/main/kotlin/com/apparence/camerawesome/cameraX/Pigeon.kt
index bffcbafc..73b27b11 100644
--- a/android/src/main/kotlin/com/apparence/camerawesome/cameraX/Pigeon.kt
+++ b/android/src/main/kotlin/com/apparence/camerawesome/cameraX/Pigeon.kt
@@ -1,4 +1,4 @@
-// Autogenerated from Pigeon (v9.1.0), do not edit directly.
+// Autogenerated from Pigeon (v9.2.5), do not edit directly.
// See also: https://pub.dev/packages/pigeon
package com.apparence.camerawesome.cameraX
@@ -43,6 +43,89 @@ class FlutterError (
val details: Any? = null
) : Throwable()
+enum class PigeonSensorPosition(val raw: Int) {
+ BACK(0),
+ FRONT(1),
+ UNKNOWN(2);
+
+ companion object {
+ fun ofRaw(raw: Int): PigeonSensorPosition? {
+ return values().firstOrNull { it.raw == raw }
+ }
+ }
+}
+
+/**
+ * Video recording quality, from [sd] to [uhd], with [highest] and [lowest] to
+ * let the device choose the best/worst quality available.
+ * [highest] is the default quality.
+ *
+ * Qualities are defined like this:
+ * [sd] < [hd] < [fhd] < [uhd]
+ */
+enum class VideoRecordingQuality(val raw: Int) {
+ LOWEST(0),
+ SD(1),
+ HD(2),
+ FHD(3),
+ UHD(4),
+ HIGHEST(5);
+
+ companion object {
+ fun ofRaw(raw: Int): VideoRecordingQuality? {
+ return values().firstOrNull { it.raw == raw }
+ }
+ }
+}
+
+/**
+ * If the specified [VideoRecordingQuality] is not available on the device,
+ * the [VideoRecordingQuality] will fallback to [higher] or [lower] quality.
+ * [higher] is the default fallback strategy.
+ */
+enum class QualityFallbackStrategy(val raw: Int) {
+ HIGHER(0),
+ LOWER(1);
+
+ companion object {
+ fun ofRaw(raw: Int): QualityFallbackStrategy? {
+ return values().firstOrNull { it.raw == raw }
+ }
+ }
+}
+
+enum class CupertinoFileType(val raw: Int) {
+ QUICKTIMEMOVIE(0),
+ MPEG4(1),
+ APPLEM4V(2),
+ TYPE3GPP(3),
+ TYPE3GPP2(4);
+
+ companion object {
+ fun ofRaw(raw: Int): CupertinoFileType? {
+ return values().firstOrNull { it.raw == raw }
+ }
+ }
+}
+
+enum class CupertinoCodecType(val raw: Int) {
+ H264(0),
+ HEVC(1),
+ HEVCWITHALPHA(2),
+ JPEG(3),
+ APPLEPRORES4444(4),
+ APPLEPRORES422(5),
+ APPLEPRORES422HQ(6),
+ APPLEPRORES422LT(7),
+ APPLEPRORES422PROXY(8);
+
+ companion object {
+ fun ofRaw(raw: Int): CupertinoCodecType? {
+ return values().firstOrNull { it.raw == raw }
+ }
+ }
+}
+
enum class PigeonSensorType(val raw: Int) {
/**
* A built-in wide-angle camera.
@@ -151,23 +234,126 @@ data class ExifPreferences (
}
/** Generated class from Pigeon that represents data sent in messages. */
+data class PigeonSensor (
+ val position: PigeonSensorPosition,
+ val type: PigeonSensorType,
+ val deviceId: String? = null
+
+) {
+ companion object {
+ @Suppress("UNCHECKED_CAST")
+ fun fromList(list: List): PigeonSensor {
+ val position = PigeonSensorPosition.ofRaw(list[0] as Int)!!
+ val type = PigeonSensorType.ofRaw(list[1] as Int)!!
+ val deviceId = list[2] as String?
+ return PigeonSensor(position, type, deviceId)
+ }
+ }
+ fun toList(): List {
+ return listOf(
+ position.raw,
+ type.raw,
+ deviceId,
+ )
+ }
+}
+
+/**
+ * Video recording options. Some of them are specific to each platform.
+ *
+ * Generated class from Pigeon that represents data sent in messages.
+ */
data class VideoOptions (
- val fileType: String,
- val codec: String
+ /** Enable audio while video recording */
+ val enableAudio: Boolean,
+ val android: AndroidVideoOptions? = null,
+ val ios: CupertinoVideoOptions? = null
) {
companion object {
@Suppress("UNCHECKED_CAST")
fun fromList(list: List): VideoOptions {
- val fileType = list[0] as String
- val codec = list[1] as String
- return VideoOptions(fileType, codec)
+ val enableAudio = list[0] as Boolean
+ val android: AndroidVideoOptions? = (list[1] as List?)?.let {
+ AndroidVideoOptions.fromList(it)
+ }
+ val ios: CupertinoVideoOptions? = (list[2] as List?)?.let {
+ CupertinoVideoOptions.fromList(it)
+ }
+ return VideoOptions(enableAudio, android, ios)
}
}
fun toList(): List {
return listOf(
- fileType,
- codec,
+ enableAudio,
+ android?.toList(),
+ ios?.toList(),
+ )
+ }
+}
+
+/** Generated class from Pigeon that represents data sent in messages. */
+data class AndroidVideoOptions (
+ /**
+ * The bitrate of the video recording. Only set it if a custom bitrate is
+ * desired.
+ */
+ val bitrate: Long? = null,
+ /** The quality of the video recording, defaults to [VideoRecordingQuality.highest]. */
+ val quality: VideoRecordingQuality? = null,
+ val fallbackStrategy: QualityFallbackStrategy? = null
+
+) {
+ companion object {
+ @Suppress("UNCHECKED_CAST")
+ fun fromList(list: List): AndroidVideoOptions {
+ val bitrate = list[0].let { if (it is Int) it.toLong() else it as Long? }
+ val quality: VideoRecordingQuality? = (list[1] as Int?)?.let {
+ VideoRecordingQuality.ofRaw(it)
+ }
+ val fallbackStrategy: QualityFallbackStrategy? = (list[2] as Int?)?.let {
+ QualityFallbackStrategy.ofRaw(it)
+ }
+ return AndroidVideoOptions(bitrate, quality, fallbackStrategy)
+ }
+ }
+ fun toList(): List {
+ return listOf(
+ bitrate,
+ quality?.raw,
+ fallbackStrategy?.raw,
+ )
+ }
+}
+
+/** Generated class from Pigeon that represents data sent in messages. */
+data class CupertinoVideoOptions (
+ /** Specify video file type, defaults to [AVFileTypeQuickTimeMovie]. */
+ val fileType: CupertinoFileType? = null,
+ /** Specify video codec, defaults to [AVVideoCodecTypeH264]. */
+ val codec: CupertinoCodecType? = null,
+ /** Specify video fps, defaults to [30]. */
+ val fps: Long? = null
+
+) {
+ companion object {
+ @Suppress("UNCHECKED_CAST")
+ fun fromList(list: List): CupertinoVideoOptions {
+ val fileType: CupertinoFileType? = (list[0] as Int?)?.let {
+ CupertinoFileType.ofRaw(it)
+ }
+ val codec: CupertinoCodecType? = (list[1] as Int?)?.let {
+ CupertinoCodecType.ofRaw(it)
+ }
+ val fps = list[2].let { if (it is Int) it.toLong() else it as Long? }
+ return CupertinoVideoOptions(fileType, codec, fps)
+ }
+ }
+ fun toList(): List {
+ return listOf(
+ fileType?.raw,
+ codec?.raw,
+ fps,
)
}
}
@@ -487,25 +673,40 @@ private object CameraInterfaceCodec : StandardMessageCodec() {
}
129.toByte() -> {
return (readValue(buffer) as? List)?.let {
- ExifPreferences.fromList(it)
+ AndroidVideoOptions.fromList(it)
}
}
130.toByte() -> {
return (readValue(buffer) as? List)?.let {
- PigeonSensorTypeDevice.fromList(it)
+ CupertinoVideoOptions.fromList(it)
}
}
131.toByte() -> {
return (readValue(buffer) as? List)?.let {
- PreviewSize.fromList(it)
+ ExifPreferences.fromList(it)
}
}
132.toByte() -> {
return (readValue(buffer) as? List)?.let {
- PreviewSize.fromList(it)
+ PigeonSensor.fromList(it)
}
}
133.toByte() -> {
+ return (readValue(buffer) as? List)?.let {
+ PigeonSensorTypeDevice.fromList(it)
+ }
+ }
+ 134.toByte() -> {
+ return (readValue(buffer) as? List)?.let {
+ PreviewSize.fromList(it)
+ }
+ }
+ 135.toByte() -> {
+ return (readValue(buffer) as? List)?.let {
+ PreviewSize.fromList(it)
+ }
+ }
+ 136.toByte() -> {
return (readValue(buffer) as? List)?.let {
VideoOptions.fromList(it)
}
@@ -519,26 +720,38 @@ private object CameraInterfaceCodec : StandardMessageCodec() {
stream.write(128)
writeValue(stream, value.toList())
}
- is ExifPreferences -> {
+ is AndroidVideoOptions -> {
stream.write(129)
writeValue(stream, value.toList())
}
- is PigeonSensorTypeDevice -> {
+ is CupertinoVideoOptions -> {
stream.write(130)
writeValue(stream, value.toList())
}
- is PreviewSize -> {
+ is ExifPreferences -> {
stream.write(131)
writeValue(stream, value.toList())
}
- is PreviewSize -> {
+ is PigeonSensor -> {
stream.write(132)
writeValue(stream, value.toList())
}
- is VideoOptions -> {
+ is PigeonSensorTypeDevice -> {
stream.write(133)
writeValue(stream, value.toList())
}
+ is PreviewSize -> {
+ stream.write(134)
+ writeValue(stream, value.toList())
+ }
+ is PreviewSize -> {
+ stream.write(135)
+ writeValue(stream, value.toList())
+ }
+ is VideoOptions -> {
+ stream.write(136)
+ writeValue(stream, value.toList())
+ }
else -> super.writeValue(stream, value)
}
}
@@ -546,16 +759,16 @@ private object CameraInterfaceCodec : StandardMessageCodec() {
/** Generated interface from Pigeon that represents a handler of messages from Flutter. */
interface CameraInterface {
- fun setupCamera(sensor: String, aspectRatio: String, zoom: Double, mirrorFrontCamera: Boolean, enablePhysicalButton: Boolean, flashMode: String, captureMode: String, enableImageStream: Boolean, exifPreferences: ExifPreferences, callback: (Result) -> Unit)
- fun checkPermissions(): List
+ fun setupCamera(sensors: List, aspectRatio: String, zoom: Double, mirrorFrontCamera: Boolean, enablePhysicalButton: Boolean, flashMode: String, captureMode: String, enableImageStream: Boolean, exifPreferences: ExifPreferences, videoOptions: VideoOptions?, callback: (Result) -> Unit)
+ fun checkPermissions(permissions: List): List
/**
* Returns given [CamerAwesomePermission] list (as String). Location permission might be
* refused but the app should still be able to run.
*/
fun requestPermissions(saveGpsLocation: Boolean, callback: (Result>) -> Unit)
- fun getPreviewTextureId(): Long
- fun takePhoto(path: String, callback: (Result) -> Unit)
- fun recordVideo(path: String, options: VideoOptions?, callback: (Result) -> Unit)
+ fun getPreviewTextureId(cameraPosition: Long): Long
+ fun takePhoto(sensors: List, paths: List, callback: (Result) -> Unit)
+ fun recordVideo(sensors: List, paths: List, callback: (Result) -> Unit)
fun pauseVideoRecording()
fun resumeVideoRecording()
fun receivedImageFromStream()
@@ -575,14 +788,15 @@ interface CameraInterface {
fun focusOnPoint(previewSize: PreviewSize, x: Double, y: Double, androidFocusSettings: AndroidFocusSettings?)
fun setZoom(zoom: Double)
fun setMirrorFrontCamera(mirror: Boolean)
- fun setSensor(sensor: String, deviceId: String?)
+ fun setSensor(sensors: List)
fun setCorrection(brightness: Double)
+ fun getMinZoom(): Double
fun getMaxZoom(): Double
fun setCaptureMode(mode: String)
fun setRecordingAudioMode(enableAudio: Boolean, callback: (Result) -> Unit)
fun availableSizes(): List
fun refresh()
- fun getEffectivPreviewSize(): PreviewSize?
+ fun getEffectivPreviewSize(index: Long): PreviewSize?
fun setPhotoSize(size: PreviewSize)
fun setPreviewSize(size: PreviewSize)
fun setAspectRatio(aspectRatio: String)
@@ -591,7 +805,8 @@ interface CameraInterface {
fun startAnalysis()
fun stopAnalysis()
fun setFilter(matrix: List)
- fun isVideoRecordingAndImageAnalysisSupported(sensor: String, callback: (Result) -> Unit)
+ fun isVideoRecordingAndImageAnalysisSupported(sensor: PigeonSensorPosition, callback: (Result) -> Unit)
+ fun isMultiCamSupported(): Boolean
companion object {
/** The codec used by CameraInterface. */
@@ -606,7 +821,7 @@ interface CameraInterface {
if (api != null) {
channel.setMessageHandler { message, reply ->
val args = message as List
- val sensorArg = args[0] as String
+ val sensorsArg = args[0] as List
val aspectRatioArg = args[1] as String
val zoomArg = args[2] as Double
val mirrorFrontCameraArg = args[3] as Boolean
@@ -615,7 +830,8 @@ interface CameraInterface {
val captureModeArg = args[6] as String
val enableImageStreamArg = args[7] as Boolean
val exifPreferencesArg = args[8] as ExifPreferences
- api.setupCamera(sensorArg, aspectRatioArg, zoomArg, mirrorFrontCameraArg, enablePhysicalButtonArg, flashModeArg, captureModeArg, enableImageStreamArg, exifPreferencesArg) { result: Result ->
+ val videoOptionsArg = args[9] as VideoOptions?
+ api.setupCamera(sensorsArg, aspectRatioArg, zoomArg, mirrorFrontCameraArg, enablePhysicalButtonArg, flashModeArg, captureModeArg, enableImageStreamArg, exifPreferencesArg, videoOptionsArg) { result: Result ->
val error = result.exceptionOrNull()
if (error != null) {
reply.reply(wrapError(error))
@@ -632,10 +848,12 @@ interface CameraInterface {
run {
val channel = BasicMessageChannel(binaryMessenger, "dev.flutter.pigeon.CameraInterface.checkPermissions", codec)
if (api != null) {
- channel.setMessageHandler { _, reply ->
+ channel.setMessageHandler { message, reply ->
+ val args = message as List
+ val permissionsArg = args[0] as List
var wrapped: List
try {
- wrapped = listOf(api.checkPermissions())
+ wrapped = listOf(api.checkPermissions(permissionsArg))
} catch (exception: Throwable) {
wrapped = wrapError(exception)
}
@@ -668,10 +886,12 @@ interface CameraInterface {
run {
val channel = BasicMessageChannel(binaryMessenger, "dev.flutter.pigeon.CameraInterface.getPreviewTextureId", codec)
if (api != null) {
- channel.setMessageHandler { _, reply ->
+ channel.setMessageHandler { message, reply ->
+ val args = message as List
+ val cameraPositionArg = args[0].let { if (it is Int) it.toLong() else it as Long }
var wrapped: List
try {
- wrapped = listOf(api.getPreviewTextureId())
+ wrapped = listOf(api.getPreviewTextureId(cameraPositionArg))
} catch (exception: Throwable) {
wrapped = wrapError(exception)
}
@@ -686,8 +906,9 @@ interface CameraInterface {
if (api != null) {
channel.setMessageHandler { message, reply ->
val args = message as List
- val pathArg = args[0] as String
- api.takePhoto(pathArg) { result: Result ->
+ val sensorsArg = args[0] as List
+ val pathsArg = args[1] as List
+ api.takePhoto(sensorsArg, pathsArg) { result: Result ->
val error = result.exceptionOrNull()
if (error != null) {
reply.reply(wrapError(error))
@@ -706,9 +927,9 @@ interface CameraInterface {
if (api != null) {
channel.setMessageHandler { message, reply ->
val args = message as List
- val pathArg = args[0] as String
- val optionsArg = args[1] as VideoOptions?
- api.recordVideo(pathArg, optionsArg) { result: Result ->
+ val sensorsArg = args[0] as List
+ val pathsArg = args[1] as List
+ api.recordVideo(sensorsArg, pathsArg) { result: Result ->
val error = result.exceptionOrNull()
if (error != null) {
reply.reply(wrapError(error))
@@ -955,11 +1176,10 @@ interface CameraInterface {
if (api != null) {
channel.setMessageHandler { message, reply ->
val args = message as List
- val sensorArg = args[0] as String
- val deviceIdArg = args[1] as String?
+ val sensorsArg = args[0] as List
var wrapped: List
try {
- api.setSensor(sensorArg, deviceIdArg)
+ api.setSensor(sensorsArg)
wrapped = listOf(null)
} catch (exception: Throwable) {
wrapped = wrapError(exception)
@@ -989,6 +1209,22 @@ interface CameraInterface {
channel.setMessageHandler(null)
}
}
+ run {
+ val channel = BasicMessageChannel(binaryMessenger, "dev.flutter.pigeon.CameraInterface.getMinZoom", codec)
+ if (api != null) {
+ channel.setMessageHandler { _, reply ->
+ var wrapped: List
+ try {
+ wrapped = listOf(api.getMinZoom())
+ } catch (exception: Throwable) {
+ wrapped = wrapError(exception)
+ }
+ reply.reply(wrapped)
+ }
+ } else {
+ channel.setMessageHandler(null)
+ }
+ }
run {
val channel = BasicMessageChannel(binaryMessenger, "dev.flutter.pigeon.CameraInterface.getMaxZoom", codec)
if (api != null) {
@@ -1080,10 +1316,12 @@ interface CameraInterface {
run {
val channel = BasicMessageChannel(binaryMessenger, "dev.flutter.pigeon.CameraInterface.getEffectivPreviewSize", codec)
if (api != null) {
- channel.setMessageHandler { _, reply ->
+ channel.setMessageHandler { message, reply ->
+ val args = message as List
+ val indexArg = args[0].let { if (it is Int) it.toLong() else it as Long }
var wrapped: List
try {
- wrapped = listOf(api.getEffectivPreviewSize())
+ wrapped = listOf(api.getEffectivPreviewSize(indexArg))
} catch (exception: Throwable) {
wrapped = wrapError(exception)
}
@@ -1250,7 +1488,7 @@ interface CameraInterface {
if (api != null) {
channel.setMessageHandler { message, reply ->
val args = message as List
- val sensorArg = args[0] as String
+ val sensorArg = PigeonSensorPosition.ofRaw(args[0] as Int)!!
api.isVideoRecordingAndImageAnalysisSupported(sensorArg) { result: Result ->
val error = result.exceptionOrNull()
if (error != null) {
@@ -1265,6 +1503,22 @@ interface CameraInterface {
channel.setMessageHandler(null)
}
}
+ run {
+ val channel = BasicMessageChannel(binaryMessenger, "dev.flutter.pigeon.CameraInterface.isMultiCamSupported", codec)
+ if (api != null) {
+ channel.setMessageHandler { _, reply ->
+ var wrapped: List
+ try {
+ wrapped = listOf(api.isMultiCamSupported())
+ } catch (exception: Throwable) {
+ wrapped = wrapError(exception)
+ }
+ reply.reply(wrapped)
+ }
+ } else {
+ channel.setMessageHandler(null)
+ }
+ }
}
}
}
diff --git a/android/src/main/kotlin/com/apparence/camerawesome/utils/CameraCharactericitsUtils.kt b/android/src/main/kotlin/com/apparence/camerawesome/utils/CameraCharactericitsUtils.kt
new file mode 100644
index 00000000..5d64b6dc
--- /dev/null
+++ b/android/src/main/kotlin/com/apparence/camerawesome/utils/CameraCharactericitsUtils.kt
@@ -0,0 +1,76 @@
+package com.apparence.camerawesome.utils
+
+import android.hardware.camera2.CameraCharacteristics
+import android.util.Size
+import android.util.SizeF
+import androidx.camera.camera2.interop.Camera2CameraInfo
+import androidx.camera.camera2.interop.ExperimentalCamera2Interop
+import androidx.camera.core.CameraSelector.LENS_FACING_BACK
+import com.apparence.camerawesome.cameraX.PigeonSensorPosition
+import com.apparence.camerawesome.cameraX.PigeonSensorType
+import kotlin.math.max
+import kotlin.math.min
+
+// 35mm is 135 film format, a standard in which focal lengths are usually measured
+val Size35mm = Size(36, 24)
+
+/**
+ * Determine the sensor type (wide-angle, ultra-wide-angle or telephoto) from the camera's available focal lengths.
+ *
+ * Possible values for single cameras:
+ * * `"wide-angle-camera"`
+ * * `"ultra-wide-angle-camera"`
+ * * `"telephoto-camera"`
+ *
+ * Sources for the focal length categories:
+ * * [Telephoto Lens (wikipedia)](https://en.wikipedia.org/wiki/Telephoto_lens)
+ * * [Normal Lens (wikipedia)](https://en.wikipedia.org/wiki/Normal_lens)
+ * * [Wide-Angle Lens (wikipedia)](https://en.wikipedia.org/wiki/Wide-angle_lens)
+ * * [Ultra-Wide-Angle Lens (wikipedia)](https://en.wikipedia.org/wiki/Ultra_wide_angle_lens)
+ */
+@ExperimentalCamera2Interop
+fun Camera2CameraInfo.getSensorType(): PigeonSensorType {
+ val focalLengths =
+ this.getCameraCharacteristic(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)!!
+ val sensorSize =
+ this.getCameraCharacteristic(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE)!!
+
+ // To get valid focal length standards we have to upscale to the 35mm measurement (film standard)
+ val cropFactor = Size35mm.bigger / sensorSize.bigger
+
+
+ val containsTelephoto =
+ focalLengths.any { l -> (l * cropFactor) > 35 } // TODO: Telephoto lenses are > 85mm, but we don't have anything between that range..
+ // val containsNormalLens = focalLengths.any { l -> (l * cropFactor) > 35 && (l * cropFactor) <= 55 }
+ val containsWideAngle =
+ focalLengths.any { l -> (l * cropFactor) >= 24 && (l * cropFactor) <= 35 }
+ val containsUltraWideAngle = focalLengths.any { l -> (l * cropFactor) < 24 }
+
+ if (containsTelephoto)
+ return PigeonSensorType.TELEPHOTO
+ if (containsWideAngle)
+ return PigeonSensorType.WIDEANGLE
+ if (containsUltraWideAngle)
+ return PigeonSensorType.ULTRAWIDEANGLE
+ return PigeonSensorType.UNKNOWN
+}
+
+@ExperimentalCamera2Interop
+fun Camera2CameraInfo.getPigeonPosition(): PigeonSensorPosition {
+ val facing = this.getCameraCharacteristic(CameraCharacteristics.LENS_FACING)!!
+ return if (facing == LENS_FACING_BACK)
+ PigeonSensorPosition.BACK
+ else
+ PigeonSensorPosition.FRONT
+}
+
+
+val Size.bigger: Int
+ get() = max(this.width, this.height)
+val Size.smaller: Int
+ get() = min(this.width, this.height)
+
+val SizeF.bigger: Float
+ get() = max(this.width, this.height)
+val SizeF.smaller: Float
+ get() = min(this.width, this.height)
\ No newline at end of file
diff --git a/android/src/main/kotlin/com/apparence/camerawesome/utils/CameraProviderUtils.kt b/android/src/main/kotlin/com/apparence/camerawesome/utils/CameraProviderUtils.kt
new file mode 100644
index 00000000..5d2d35dc
--- /dev/null
+++ b/android/src/main/kotlin/com/apparence/camerawesome/utils/CameraProviderUtils.kt
@@ -0,0 +1,16 @@
+package com.apparence.camerawesome.utils
+
+import android.annotation.SuppressLint
+import androidx.camera.lifecycle.ProcessCameraProvider
+
+@SuppressLint("RestrictedApi")
+fun ProcessCameraProvider.isMultiCamSupported(): Boolean {
+ val concurrentInfos = availableConcurrentCameraInfos
+ var hasOnePair = false
+ for (cameraInfos in concurrentInfos) {
+ if (cameraInfos.size > 1) {
+ hasOnePair = true
+ }
+ }
+ return hasOnePair
+}
\ No newline at end of file
diff --git a/docs.json b/docs.json
index 7cc36dba..e0a07167 100644
--- a/docs.json
+++ b/docs.json
@@ -4,54 +4,29 @@
"theme": "#36B9B9",
"googleAnalytics": "G-J2YNQZ6BE8",
"sidebar": [
- [
- "Overview",
- "/index"
- ],
+ ["Overview", "/index"],
[
"Getting Started",
[
- [
- "Installing",
- "/getting_started/installing"
- ],
- [
- "Using the built-in UI ",
- "/getting_started/awesome-ui"
- ],
- [
- "Creating your own UI",
- "/getting_started/custom-ui"
- ]
+ ["Installing", "/getting_started/installing"],
+ ["Using the built-in UI ", "/getting_started/awesome-ui"],
+ ["Creating your own UI", "/getting_started/custom-ui"],
+ ["Multiple cameras at once (🚧 BETA)", "/getting_started/multicam"]
]
],
+ [
+ "Migration guides",
+ [["From 1.x.x to 2.x.x", "/migration_guides/from_1_to_2"]]
+ ],
[
"Widgets",
[
- [
- "Theming",
- "/widgets/theming"
- ],
- [
- "Buttons",
- "/widgets/awesome_buttons"
- ],
- [
- "Layout",
- "/widgets/layout"
- ],
- [
- "AwesomeOrientedWidget",
- "/widgets/awesome_oriented_widget"
- ],
- [
- "Filters",
- "/widgets/awesome_filters"
- ],
- [
- "AwesomeCameraModeSelector",
- "/widgets/awesome_camera_mode_selector"
- ]
+ ["Theming", "/widgets/theming"],
+ ["Buttons", "/widgets/awesome_buttons"],
+ ["Layout", "/widgets/layout"],
+ ["AwesomeOrientedWidget", "/widgets/awesome_oriented_widget"],
+ ["Filters", "/widgets/awesome_filters"],
+ ["AwesomeCameraModeSelector", "/widgets/awesome_camera_mode_selector"]
]
],
[
@@ -65,27 +40,15 @@
"Image analysis formats and conversion",
"/image_analysis/image_format_conversions"
],
- [
- "Reading barcodes",
- "/image_analysis/reading_barcodes"
- ],
- [
- "Detecting faces",
- "/image_analysis/detecting_faces"
- ]
+ ["Reading barcodes", "/image_analysis/reading_barcodes"],
+ ["Detecting faces", "/image_analysis/detecting_faces"]
]
],
[
"Appendix",
[
- [
- "License",
- "/appendix/license"
- ],
- [
- "Roadmap",
- "/appendix/roadmap"
- ]
+ ["License", "/appendix/license"],
+ ["Roadmap", "/appendix/roadmap"]
]
]
]
diff --git a/docs/getting_started/awesome-ui.mdx b/docs/getting_started/awesome-ui.mdx
index f55378af..9d2ced2c 100644
--- a/docs/getting_started/awesome-ui.mdx
+++ b/docs/getting_started/awesome-ui.mdx
@@ -4,75 +4,142 @@ CamerAwesome comes with a full built UI that you can use as is.
Use `CameraAwesomeBuilder.awesome()` to get a complete ready-to-use camera experience within your app.
-Here is a concrete example using **path_provider** to get valid paths and **better_open_file** to display the last media captured:
+Here is a concrete example using **better_open_file** to display the last media captured:
```dart
CameraAwesomeBuilder.awesome(
- saveConfig: SaveConfig.photoAndVideo(
- photoPathBuilder: () async {
- final Directory extDir = await getTemporaryDirectory();
- final testDir =
- await Directory('${extDir.path}/test').create(recursive: true);
- return '${testDir.path}/${DateTime.now().millisecondsSinceEpoch}.jpg';
- },
- videoPathBuilder: () async {
- final Directory extDir = await getTemporaryDirectory();
- final testDir =
- await Directory('${extDir.path}/test').create(recursive: true);
- return '${testDir.path}/${DateTime.now().millisecondsSinceEpoch}.mp4';
- },
- onMediaTap: (mediaCapture) {
- OpenFile.open(mediaCapture.filePath);
- },
- ),
+ saveConfig: SaveConfig.photoAndVideo(),
+ onMediaTap: (mediaCapture) {
+ OpenFile.open(mediaCapture.filePath);
+ },
)
```
![Base Awesome UI](/img/base_awesome_ui.jpg)
-## 📷 Initial Camera setup
+## 📁 Camera captures configuration
+
+`CameraAwesomeBuilder` requires a `SaveConfig` parameter.
+You can create one with one of the following factories:
+
+- `SaveConfig.photo()` if you only want to take photos
+- `SaveConfig.video()` to only take videos
+- `SaveConfig.photoAndVideo()` if you want to switch between photo and video modes.
+
+These factories don't require additional parameters, but you might want to customize a few things, like where to save your files.
-You can set the initial camera configuration using the `CameraAwesomeBuilder` parameters.
+Here is a complete example to overwrite the default behaviors:
```dart
-CameraAwesomeBuilder.awesome(
- aspectRatio: CameraAspectRatios.ratio_1_1,
- enableAudio: true,
- exifPreferences: ExifPreferences(
- saveGPSLocation: false,
- ),
- filter: Filters.none,
- flashMode: FlashMode.none,
- onMediaTap: (mediaCapture) {
- print('Tap on ${mediaCapture.filePath}');
+SaveConfig.photoAndVideo(
+ // 1.
+ initialCaptureMode: CaptureMode.photo,
+ // 2.
+ photoPathBuilder: (sensors) async {
+ ...
},
- saveConfig: SaveConfig.photoAndVideo(
- initialCaptureMode: CaptureMode.photo,
- photoPathBuilder: () async {
- return 'some/image/file/path.jpg';
- },
- videoPathBuilder: () async {
- return 'some/video/file/path.mp4';
- },
+ // 3.
+ videoPathBuilder: (sensors) async {
+ ...
+ },
+ // 4.
+ videoOptions: VideoOptions(
+ enableAudio: true,
+ ios: CupertinoVideoOptions(
+ fps: 10,
+ // TODODOC Add other possible params
+ ),
+ android: AndroidVideoOptions(
+ bitrate: 6000000,
+ quality: VideoRecordingQuality.fhd,
+ fallbackStrategy: QualityFallbackStrategy.lower,
+ ),
),
- sensor: Sensors.back,
- zoom: 0.0,
- // Other parameters
+ // 5.
+ exifPreferences: ExifPreferences(saveGPSLocation: true),
+ // 6.
+ mirrorFrontCamera: true,
+)
+```
+
+Let's break it down:
+
+1. When using `photoAndVideo` mode, you can choose which mode to start with. Here we start with photo mode (default).
+2. You can customize the path where your photos will be saved.
+3. You can also customize where to save your videos.
+4. The video recording can be customized using `VideoOptions`. Note that each platform has its own settings.
+5. You can also enable or disable the GPS location in the EXIF data of your photos.
+6. Set if you want the front camera pictures & videos to be mirrored like in the preview.
+
+A `photoPathBuilder` could look like this:
+
+```dart
+SaveConfig.photoAndVideo(
+ photoPathBuilder: (sensors) async {
+ // 1.
+ final Directory extDir = await getTemporaryDirectory();
+ final testDir = await Directory('${extDir.path}/camerawesome').create(recursive: true);
+
+ // 2.
+ if (sensors.length == 1) {
+ final String filePath =
+ '${testDir.path}/${DateTime.now().millisecondsSinceEpoch}.jpg';
+ // 3.
+ return SingleCaptureRequest(filePath, sensors.first);
+ } else {
+ // 4.
+ return MultipleCaptureRequest(
+ {
+ for (final sensor in sensors)
+ sensor:
+ '${testDir.path}/${sensor.position == SensorPosition.front ? 'front_' : "back_"}${DateTime.now().millisecondsSinceEpoch}.jpg',
+ },
+ );
+ }
+ },
+ // Other params
...
)
```
-| Parameter | Description |
-| ------------------- | ----------------------------------------------------------------------------------------------------------- |
-| **aspectRatio** | Initial aspect ratio of photos and videos taken |
-| **enableAudio** | Activate audio by default (only for video mode) |
-| **exifPreferences** | Activate or deactivate location in photo exif |
-| **filter** | Initial preview filter which will be applied to the photo |
-| **flashMode** | The initial flash mode |
-| **onMediaTap** | Choose what you want to do when user tap on the last media captured (when using the built-in image preview) |
-| **saveConfig** | Define if you want to take photos, videos or both and where to save them |
-| **sensor** | The initial camera sensor (Back or Front) |
-| **zoom** | A value between 0.0 (no zoom) and 1.0 (max zoom) |
+There are 4 steps in this code:
+
+1. Create a directory where photos will be saved (here we use the temporary directory, using `path_provider`).
+2. Since `CamerAwesome` supports taking pictures with both front and back cameras at the same time, we need to detect if there is only one picture to take or several ones.
+3. If there is only one sensor used, we can build a `SingleCaptureRequest` with the file path and the sensor.
+4. If there are several sensors, we need to build a `MultipleCaptureRequest` with a map of file paths and sensors. In this case, we create a different path based on whether it's the front or back sensor that takes the picture.
+
+The same logic goes for videos but we replace the `.jpg` extension with `.mp4`.
+
+## 📷 Initial camera configuration
+
+You can set the initial camera configuration using a `SensorConfig`.
+
+```dart
+CameraAwesomeBuilder.awesome(
+ sensorConfig: SensorConfig.single(
+ aspectRatio: CameraAspectRatios.ratio_4_3,
+ flashMode: FlashMode.auto,
+ sensor: Sensor.position(SensorPosition.back),
+ zoom: 0.0,
+ ),
+)
+```
+
+| Parameter | Description |
+| --------------- | ------------------------------------------------ |
+| **aspectRatio** | Initial aspect ratio of photos and videos taken |
+| **flashMode** | The initial flash mode |
+| **sensor** | The initial camera sensor (Back or Front) |
+| **zoom** | A value between 0.0 (no zoom) and 1.0 (max zoom) |
+
+Note: you might also notice the `SensorConfig.multiple()` constructor which lets you specify several sensors.
+This feature is in beta, but you can take a look at the [dedicated documentation](/getting_started/multicam).
+
+`CameraAwesomeBuilder` also provides a few more parameters:
+
+- `enablePhysicalButton` to enable the volume buttons to take pictures or record videos
+- `filter` to set an initial filter to the pictures
## 🎨 Customize the built-in UI
@@ -305,63 +372,52 @@ Here is an example showing the complete list of parameters you can set to custom
```dart
CameraAwesomeBuilder.awesome(
- // Define if you want to take photos, videos or both and where to save them
- saveConfig: SaveConfig.photoAndVideo(
- initialCaptureMode: CaptureMode.photo,
- photoPathBuilder: () async {
- // Return a valid file path (must be a jpg file)
- return 'some/image/file/path.jpg';
- },
- videoPathBuilder: () async {
- // Return a valid file path (must be a mp4 file)
- return 'some/video/file/path.mp4';
- },
- ),
- onMediaTap: (mediaCapture) {
- // Hande tap on the preview of the last media captured
- print('Tap on ${mediaCapture.filePath}');
+ // Bottom actions (take photo, switch camera...)
+ bottomActionsBuilder: (state) {
+ return AwesomeBottomActions(
+ state: state,
+ onMediaTap: _handleMediaTap,
+ );
},
- // Use back camera
- sensor: Sensors.back,
- // Use 1:1 aspect ratio
- aspectRatio: CameraAspectRatios.ratio_1_1,
- // Disable flash
- flashMode: FlashMode.none,
- // No zoom
- zoom: 0.0,
- // Exif settings
- exifPreferences: ExifPreferences(
- // Save GPS location when taking pictures (no effect with videos)
- saveGPSLocation: false,
- ),
- // Enable audio when recording a video
- enableAudio: true,
// Clicking on volume buttons will capture photo/video depending on the current mode
enablePhysicalButton: true,
- // Don't mirror the front camera
- mirrorFrontCamera: false,
- // Show a progress indicator while loading the camera
- progressIndicator: const Center(
- child: SizedBox(
- width: 100,
- height: 100,
- child: CircularProgressIndicator(),
- ),
- ),
- // Preview fit of the camera
- previewFit: CameraPreviewFit.fitWidth,
- // Image analysis configuration
+ // Filter to apply on the preview
+ filter: AwesomeFilter.AddictiveRed,
+ // Image analysis configuration
imageAnalysisConfig: AnalysisConfig(
androidOptions: const AndroidAnalysisOptions.nv21(
width: 1024,
),
autoStart: true,
),
+ // Middle content (filters, photo/video switcher...)
+ middleContentBuilder: (state) {
+ // Use this to add widgets on the middle of the preview
+ return Column(
+ children: [
+ const Spacer(),
+ AwesomeFilterWidget(state: state),
+ Builder(
+ builder: (context) => Container(
+ color: AwesomeThemeProvider.of(context)
+ .theme
+ .bottomActionsBackgroundColor,
+ height: 8,
+ ),
+ ),
+ AwesomeCameraModeSelector(state: state),
+ ],
+ );
+ },
// Handle image analysis
onImageForAnalysis: (analysisImage) {
// Do some stuff with the image (see example)
return processImage(analysisImage);
},
+ onMediaTap: (mediaCapture) {
+ // Handle tap on the preview of the last media captured
+ print('Tap on ${mediaCapture.filePath}');
+ },
// Handle gestures on the preview, such as tap to focus or scale to zoom
onPreviewTapBuilder: (state) => OnPreviewTap(
onTap: (position, flutterPreviewSize, pixelPreviewSize) {
@@ -395,12 +451,46 @@ CameraAwesomeBuilder.awesome(
state.sensorConfig.setZoom(scale);
},
),
- // Add your own decoration on top of the preview
+ // Alignment of the preview
+ previewAlignment: Alignment.center,
+ // Add your own decoration on top of the preview
previewDecoratorBuilder: (state, previewSize, previewRect) {
// This will be shown above the preview (in a Stack)
// It could be used in combination with MLKit to draw filters on faces for example
return PreviewDecorationWiget(previewRect);
},
+ // Preview fit of the camera
+ previewFit: CameraPreviewFit.fitWidth,
+ // Padding around the preview
+ previewPadding: const EdgeInsets.all(20),
+ // Show a progress indicator while loading the camera
+ progressIndicator: const Center(
+ child: SizedBox(
+ width: 100,
+ height: 100,
+ child: CircularProgressIndicator(),
+ ),
+ ),
+ // Define if you want to take photos, videos or both and where to save them
+ saveConfig: SaveConfig.photoAndVideo(
+ initialCaptureMode: CaptureMode.photo,
+ mirrorFrontCamera: true,
+ photoPathBuilder: (sensors) async {
+ // Return a valid file path (must be a jpg file)
+ return SingleCaptureRequest('some/image/file/path.jpg', sensors.first);
+ },
+ videoPathBuilder: (sensors) async {
+ // Return a valid file path (must be a mp4 file)
+ return SingleCaptureRequest('some/image/file/path.mp4', sensors.first);
+ },
+ ),
+ // Sensor initial configuration
+ sensorConfig: SensorConfig.single(
+ aspectRatio: CameraAspectRatios.ratio_4_3,
+ flashMode: FlashMode.auto,
+ sensor: Sensor.position(SensorPosition.back),
+ zoom: 0.0,
+ ),
// CamerAwesome theme used to customize the built-in UI
theme: AwesomeTheme(
// Background color of the bottom actions
@@ -432,73 +522,36 @@ CameraAwesomeBuilder.awesome(
},
),
),
- // Filter to apply on the preview
- filter: AwesomeFilter.AddictiveRed,
- // Padding around the preview
- previewPadding: const EdgeInsets.all(20),
- // Alignment of the preview
- previewAlignment: Alignment.center,
- // Bottom actions (take photo, switch camera...)
- bottomActionsBuilder: (state) {
- return AwesomeBottomActions(
- state: state,
- onMediaTap: _handleMediaTap,
- );
- },
// Top actions (flash, timer...)
topActionsBuilder: (state) {
return AwesomeTopActions(state: state);
},
- // Middle content (filters, photo/video switcher...)
- middleContentBuilder: (state) {
- // Use this to add widgets on the middle of the preview
- return Column(
- children: [
- const Spacer(),
- AwesomeFilterWidget(state: state),
- Builder(
- builder: (context) => Container(
- color: AwesomeThemeProvider.of(context)
- .theme
- .bottomActionsBackgroundColor,
- height: 8,
- ),
- ),
- AwesomeCameraModeSelector(state: state),
- ],
- );
- },
)
```
### 🔬 Full list of properties
-| Method | Comment |
-| --------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- |
-| **aspectRatio** | Initial aspect ratio of photos and videos taken |
-| **bottomActionsBuilder** | A widget builder used to show buttons on the bottom of the preview.
`AwesomeBottomActions` by default. |
-| **enableAudio** | Activate audio by default (only for video mode) |
-| **enablePhysicalButton** | When set to true, volume buttons will capture pictures/videos depending on the current mode. |
-| **exifPreferences** | Activate or deactivate location in photo exif |
-| **filter** | Initial preview filter which will be applied to the photo |
-| **flashMode** | The initial flash mode |
-| **imageAnalysisConfig** | Image format, resolution and autoStart (start analysis immediately or later) |
-| **middleContentBuilder** | A widget builder used to add widgets above the middle part of the preview (between bottom and top actions).
Shows the filter selector by default. |
-| **mirrorFrontCamera** | Activate to mirror the pictures taken with the front camera |
-| **onImageForAnalysis** | Callback that will provide an image stream for AI analysis |
-| **onMediaTap** | Choose what you want to do when user tap on the last media captured |
-| **onPreviewTapBuilder** | Customize the behavior when the camera preview is tapped (tap to focus by default) |
-| **onPreviewScaleBuilder** | Customize what to do when the user makes a pinch (pinch to zoom by default) |
-| **previewAlignment** | Alignment of the preview |
-| **previewDecoratorBuilder** | A widget builder used to draw elements around or on top of the preview |
-| **previewFit** | One of fitWidth, fitHeight, contain, cover |
-| **previewPadding** | Padding around the preview |
-| **progressIndicator** | Widget to show when loading |
-| **saveConfig** | Define if you want to take photos, videos or both and where to save them |
-| **sensor** | The initial camera sensor (Back or Front) |
-| **theme** | Theme used to customize the built-in UI |
-| **topActionsBuilder** | A widget builder used to show buttons on the top of the preview.
`AwesomeTopActions` by default. |
-| **zoom** | A value between 0.0 (no zoom) and 1.0 (max zoom) |
+| Method | Comment |
+| --------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| **aspectRatio** | Initial aspect ratio of photos and videos taken |
+| **bottomActionsBuilder** | A widget builder used to show buttons on the bottom of the preview.
`AwesomeBottomActions` by default. |
+| **enablePhysicalButton** | When set to true, volume buttons will capture pictures/videos depending on the current mode. |
+| **filter** | Initial preview filter which will be applied to the photo |
+| **imageAnalysisConfig** | Image format, resolution and autoStart (start analysis immediately or later) |
+| **middleContentBuilder** | A widget builder used to add widgets above the middle part of the preview (between bottom and top actions).
Shows the filter selector by default. |
+| **onImageForAnalysis** | Callback that will provide an image stream for AI analysis |
+| **onMediaTap** | Choose what you want to do when user tap on the last media captured |
+| **onPreviewTapBuilder** | Customize the behavior when the camera preview is tapped (tap to focus by default) |
+| **onPreviewScaleBuilder** | Customize what to do when the user makes a pinch (pinch to zoom by default) |
+| **previewAlignment** | Alignment of the preview |
+| **previewDecoratorBuilder** | A widget builder used to draw elements around or on top of the preview |
+| **previewFit** | One of fitWidth, fitHeight, contain, cover |
+| **previewPadding** | Padding around the preview |
+| **progressIndicator** | Widget to show when loading |
+| **saveConfig** | Define if you want to take photos, videos or both and where to save them. You can also set exif preferences, decide to mirror or not front camera outputs and set video recording settings. |
+| **sensorConfig** | The initial sensor configuration: aspect ratio, flash mode, which sensor to use and initial zoom. |
+| **theme** | Theme used to customize the built-in UI |
+| **topActionsBuilder** | A widget builder used to show buttons on the top of the preview.
`AwesomeTopActions` by default. |
## 🔨 Need more customization? Other use cases?
diff --git a/docs/getting_started/custom-ui.mdx b/docs/getting_started/custom-ui.mdx
index 9934f183..a82f5d0e 100644
--- a/docs/getting_started/custom-ui.mdx
+++ b/docs/getting_started/custom-ui.mdx
@@ -1,4 +1,4 @@
-## 🎨 Creating a custom UI
+# 🎨 Creating your own UI
If `CameraAwesomeBuilder.awesome()` doesn't fit your needs in terms of layout, you can create your own UI using the `CameraAwesomeBuilder.custom()` constructor.
@@ -6,14 +6,7 @@ The camera preview will be visible behind what you will provide to this builder.
```dart
CameraAwesomeBuilder.custom(
- saveConfig: SaveConfig.photoAndVideo(
- photoPathBuilder: () async {
- return "some/path.jpg";
- },
- videoPathBuilder: () async {
- return "some/path.mp4";
- },
- ),
+ saveConfig: SaveConfig.photoAndVideo(),
builder: (cameraState, previewSize, previewRect) {
// Return your UI (a Widget)
return cameraState.when(
@@ -28,59 +21,58 @@ CameraAwesomeBuilder.custom(
You can find more examples on the `example` folder.
-### Properties
-
-| Method | Comment |
-| --------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- |
-| **aspectRatio** | Initial aspect ratio of photos and videos taken |
-| **builder** | Create your own interface using the builder method. |
-| **enableAudio** | Activate audio by default (only for video mode) |
-| **enablePhysicalButton** | When set to true, volume buttons will capture pictures/videos depending on the current mode. |
-| **exifPreferences** | Activate or deactivate location in photo exif |
-| **filter** | Initial preview filter which will be applied to the photo |
-| **flashMode** | The initial flash mode |
-| **imageAnalysisConfig** | Image format, resolution and autoStart (start analysis immediately or later) |
-| **mirrorFrontCamera** | Activate to mirror the pictures taken with the front camera |
-| **onImageForAnalysis** | Callback that will provide an image stream for AI analysis |
-| **onPreviewTapBuilder** | Customize the behavior when the camera preview is tapped (tap to focus by default) |
-| **onPreviewScaleBuilder** | Customize what to do when the user makes a pinch (pinch to zoom by default) |
-| **previewAlignment** | Alignment of the preview |
-| **previewFit** | One of fitWidth, fitHeight, contain, cover |
-| **previewPadding** | Padding around the preview |
-| **progressIndicator** | Widget to show when loading |
-| **saveConfig** | Define if you want to take photos, videos or both and where to save them |
-| **sensor** | The initial camera sensor (Back or Front) |
-| **theme** | Theme used to customize the built-in UI |
-| **zoom** | A value between 0.0 (no zoom) and 1.0 (max zoom) |
-
-### Builder method
-
-The builder method is the main method here.
+## Properties
+
+| Method | Comment |
+| ------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| **aspectRatio** | Initial aspect ratio of photos and videos taken |
+| **builder** | Create your own interface using the builder method. |
+| **enablePhysicalButton** | When set to true, volume buttons will capture pictures/videos depending on the current mode. |
+| **filter** | Initial preview filter which will be applied to the photo |
+| **imageAnalysisConfig** | Image format, resolution and autoStart (start analysis immediately or later) |
+| **onImageForAnalysis** | Callback that will provide an image stream for AI analysis |
+| **onPreviewTapBuilder** | Customize the behavior when the camera preview is tapped (tap to focus by default) |
+| **onPreviewScaleBuilder** | Customize what to do when the user makes a pinch (pinch to zoom by default) |
+| **previewAlignment** | Alignment of the preview |
+| **previewFit** | One of fitWidth, fitHeight, contain, cover |
+| **previewPadding** | Padding around the preview |
+| **progressIndicator** | Widget to show when loading |
+| **saveConfig** | Define if you want to take photos, videos or both and where to save them. You can also set exif preferences, decide to mirror or not front camera outputs and set video recording settings. |
+| **sensorConfig** | The initial sensor configuration: aspect ratio, flash mode, which sensor to use and initial zoom. |
+| **theme** | Theme used to customize the built-in UI |
+
+## Builder method
+
+The `builder` method is the main method here.
```dart
typedef CameraLayoutBuilder = Widget Function(CameraState cameraModeState, PreviewSize previewSize, Rect previewRect);
```
-CamerAwesome works using a state pattern to make sure you can only call methods available on the camera current state.
+CamerAwesome works using a state pattern to make sure you can only call methods available on the current camera state.
The magic is that you don't have to do anything apart calling some methods using the camera state.
```dart
state.when(
+ onAnalysisOnlyMode: (analysisCameraState) => analysisCameraState.startAnalysis(),
onPhotoMode: (photoCameraState) => photoCameraState.takePhoto(),
onVideoMode: (videoCameraState) => videoCameraState.startRecording(),
onVideoRecordingMode: (videoRecordingCameraState) => videoRecordingCameraState.stopRecording(),
onPreparingCamera: (preparingCameraState) => Loader(),
+ onPreviewMode: (previewCameraState) => previewCameraState.focus(),
);
```
`previewSize` and `previewRect` are additional parameters that might be used to position your UI around or on top of the camera preview.
-#### CamerAwesome has 4 different states
+### CamerAwesome has 6 different states
- **PreparingCameraState** : camera is starting
- **PhotoCameraState** : camera is ready to take a photo
- **VideoCameraState** : camera is ready to take a video
- **VideoRecordingCameraState** : camera is taking a video
+- **PreviewCameraState** : camera is in preview only mode
+- **AnalysisCameraState** : camera is in analysis only mode
Here is a schema showing the interactions between states:
![Camera states interactions](/img/camera_states_interactions.png)
@@ -89,12 +81,12 @@ As you can see, after the initial `PreparingCameraState`, the new state is eithe
A `VideoRecordingCameraState` replaces the `VideoCameraState` when a recording starts. You can't start two recording at the same time thanks to this.
When the recording stops, a `VideoCameraState` replaces it again.
-#### You don't have to worry about state management here
+### You don't have to worry about state management here
`CameraAwesomeBuilder` calls the `builder` method each time you switch between camera states.
This way, you can react to these changes easily in your `builder` 👌
-### Creating my own widget
+## Creating my own widget
`CameraState` lets you build a reactive UI by providing you streams and setters to the various properties around the camera.
It should let you create everything you need in a reactive way without worrying about the camera flow.
@@ -112,31 +104,6 @@ class AwesomeFlashButton extends StatelessWidget {
required this.state,
});
- @override
- Widget build(BuildContext context) {
- return StreamBuilder(
- stream: state.sensorConfig.flashMode$, // Listen to the currently selected flash mode
- builder: (context, snapshot) {
- if (!snapshot.hasData) {
- return Container();
- }
- return _FlashButton.from(
- // Build your button differently based on the current Flash mode, with different icons for instance
- flashMode: snapshot.requireData,
- onTap: () => state.sensorConfig.switchCameraFlash(),
- );
- },
- );
- }
-}
-class AwesomeFlashButton extends StatelessWidget {
- final CameraState state;
-
- const AwesomeFlashButton({
- super.key,
- required this.state,
- });
-
@override
Widget build(BuildContext context) {
return StreamBuilder(
@@ -177,7 +144,7 @@ Since we want to listen to its changes, we use `flashMode$`, which is a `Stream`
>
> The equivalent without $ is the current value. You should not store these in variables since they may change over time.
-### Using provided widgets
+## Using provided widgets
You can find common widgets that you may want to use in the **Widgets** section.
@@ -189,14 +156,14 @@ Instead of handling it yourself, using the built-in widgets can let you rotate y
Check also built-in [buttons](/widgets/buttons) and the [camera mode selector](/widgets/camera_mode_selector).
-### Setting and reading camera properties
+## Setting and reading camera properties
If you need more customization, you can find details on how to access and update the properties of the camera below.
Note that we recommend to access properties via their `Stream` whenever possible.
If you need it to build your UI, just use it with a `StreamBuilder`.
-#### Camera sensor properties and methods
+### Camera sensor properties and methods
`CameraState` gives access to the current `SensorConfig` (via a Stream or a getter).
You will use this object to get or set different sensor related properties.
@@ -237,7 +204,7 @@ See the tables below for each use case.
| **Get** current brightness value | `state.sensorConfig.brightness` |
| **Stream** of the current brightness value | `state.sensorConfig.brightness$` |
-#### Methods and properties available to any CameraState
+### Methods and properties available to any CameraState
If you want to access more than just the current `SensorConfig`, you can explore what the different `CameraStates` provide.
@@ -252,7 +219,7 @@ First of all, they all give you the following features:
More features are available depending on which `CameraState` is in use.
-#### PhotoCameraState properties and methods
+### PhotoCameraState properties and methods
**Take a photo**
@@ -268,7 +235,7 @@ More features are available depending on which `CameraState` is in use.
| **Get** saveGpsLocation | `state.saveGpsLocation` |
| **Stream** of saveGpsLocation | `state.saveGpsLocation$` |
-#### VideoCameraState properties and methods
+### VideoCameraState properties and methods
In this state, you didn't start recording yet.
@@ -277,7 +244,7 @@ In this state, you didn't start recording yet.
| Start recording a video | `state.startRecording()` | This will push a `VideoRecordingCameraState` |
| Enable/Disable audio recording | `state.enableAudio()` | Must be set before starting a recording. Once started, it can't be changed for the current recording. |
-#### VideoRecordingCameraState properties and methods
+### VideoRecordingCameraState properties and methods
In this state, the video recording has started.
diff --git a/docs/getting_started/installing.mdx b/docs/getting_started/installing.mdx
index 5b2fbb72..8ea22878 100644
--- a/docs/getting_started/installing.mdx
+++ b/docs/getting_started/installing.mdx
@@ -4,7 +4,7 @@
```yaml
dependencies:
- camerawesome: ^1.4.0
+ camerawesome: ^2.0.0
...
```
diff --git a/docs/getting_started/multicam.mdx b/docs/getting_started/multicam.mdx
new file mode 100644
index 00000000..827e9dfc
--- /dev/null
+++ b/docs/getting_started/multicam.mdx
@@ -0,0 +1,200 @@
+# 📷 📷 Multiple cameras at once (🚧 BETA)
+
+To enable concurrent cameras feature, you need to give `CameraAwesomeBuilder` a `SensorConfig` with multiple sensors:
+
+```dart
+CameraAwesomeBuilder.awesome(
+ // 1.
+ sensorConfig: SensorConfig.multiple(
+ // 2.
+ sensors: [
+ Sensor.position(SensorPosition.back),
+ Sensor.position(SensorPosition.front),
+ ],
+ // 3.
+ flashMode: FlashMode.auto,
+ aspectRatio: CameraAspectRatios.ratio_16_9,
+ ),
+ // Other params
+)
+```
+
+The main points of interest are the following:
+
+1. Instead of using the `SensorConfig.single` constructor, use `SensorConfig.multiple`.
+2. This constructor lets you define a list of sensors instead of a single one.
+3. Then, you can set regular sensor parameters like `flashMode` or `aspectRatio`.
+
+## Feature support
+
+Not all devices support the concurrent cameras feature. Keep in mind that it can be resource intensive.
+
+Check the following method to determine if the feature is supported on the current device:
+
+```dart
+final isSupported = await CamerawesomePlugin.isMultiCamSupported();
+```
+
+## Customizing the picture-in-picture preview
+
+The `pictureInPictureConfigBuilder` parameter lets you customize the preview of the additional sensors.
+
+A `PictureInPictureConfigBuilder` is a function that is called with the index of the sensor and the sensor itself as parameters and returns a `PictureInPictureConfig` object.
+
+Here is a sample code taken from the `multi_camera.dart` example:
+
+```dart
+CameraAwesomeBuilder.awesome(
+ pictureInPictureConfigBuilder: (index, sensor) {
+ const width = 200.0;
+ return PictureInPictureConfig(
+ // 1.
+ isDraggable: false,
+ // 2.
+ startingPosition: Offset(
+ screenSize.width - width - 20.0 * index,
+ screenSize.height - 356,
+ ),
+ // 3.
+ sensor: sensor,
+ // 4.
+ onTap: (){
+ print('on preview tap');
+ },
+ // 5.
+ pictureInPictureBuilder: (preview, aspectRatio) {
+ return SizedBox(
+ width: width,
+ height: width,
+ child: ClipPath(
+ clipper: _MyCustomPipClipper(
+ width: width,
+ height: width * aspectRatio,
+ shape: shape,
+ ),
+ child: SizedBox(
+ width: width,
+ // 6.
+ child: preview,
+ ),
+ ),
+ );
+ },
+ );
+ },
+)
+```
+
+Let's break it down:
+
+1. Define if you want the preview to be draggable or not using the `isDraggable` parameter.
+2. Choose the `startingPosition` of the preview. You may adjust it depending on the index of the sensor.
+3. Set for which `sensor` this preview is.
+4. Add an `onTap` callback.
+5. Customize how you want the preview to be displayed using the `pictureInPictureBuilder`. This builder must display the `preview` widget. You may also use the `aspectRatio` of the preview to adjust the size of the widget.
+6. Place the `preview` widget inside your custom layout: it is the actual camera preview of the additional sensor.
+
+
+
+## Get the list of sensors
+
+You can get the list of all the sensors available on iOS with:
+
+```dart
+final sensorDeviceData = await CamerawesomePlugin.getSensors();
+```
+
+## Maximum number of concurrent cameras
+
+Although the code lets you define any number of sensors, each platform has its limits regarding the number of cameras you can open simultaneously.
+
+| Platform | Max number of cameras |
+| -------- | --------------------- |
+| Android | 2 |
+| iOS | 3 |
+
+Providing more cameras may result in unexpected behaviour.
+
+
+## Capturing multiple pictures
+
+You can capture multiple pictures at once with the regular `takePhoto()` method:
+
+```dart
+await photoCameraState.takePhoto();
+```
+
+Then, listen to `cameraState.captureState$` in order to retrieve the last media captured.
+
+A `MediaCapture` object contains a `CaptureRequest` which might be either a `SingleCaptureRequest` or a `MultipleCaptureRequest`, depending on the number of sensors used.
+
+You can use the `when` operator to deal with this or directly cast it to one of the mentioned classes.
+
+Here is an example which handles tapping on the captured media:
+
+```dart
+CameraAwesomeBuilder.awesome(
+ ...
+ onMediaTap: (mediaCapture) {
+ mediaCapture.captureRequest.when(
+ // 1.
+ single: (single) => OpenFile.open(single.file?.path),
+ // 2.
+ multiple: (multiple) => Navigator.of(context).pushNamed(
+ '/gallery',
+ arguments: multiple,
+ ),
+ );
+ },
+)
+```
+In this example, we use the `when` operator to handle each case:
+1. If it's a `SingleCaptureRequest`, we open the file directly.
+2. If it's a `MultipleCaptureRequest`, we navigate to a new page and pass the `MultipleCaptureRequest` object as an argument. This page could be used to display all the pictures taken for instance.
+
+
+
+## Capturing multiple videos
+
+Concurrent camera video recording support is not ready yet.
+
+
+## Limitations
+
+### Sensor settings
+
+Sensor settings like `flashMode` or `aspectRatio` are only applied to the first sensor in the list (let's call it the main sensor).
+
+### Picture-in-picture (lack of) customization
+
+As it's still a beta, the additional cameras are simply displayed in a picture-in-picture like floating window.
+
+It can be moved with a drag and drop, but you can't resize it or change its look yet.
+
+Feel free to share what you'd like to do with it in a [new issue](https://github.com/Apparence-io/CamerAwesome/issues/new/choose)!
+
+Of course, you can also provide your own PR directly and we'll be happy to review it.
+
+### Sensors used on Android
+
+The sensors used are not necessarily the ones given in the list of sensors.
+
+There is a concept of pairs of concurrent cameras on this platform which implies that only some specific pairs will be compatible with each other.
+
+For now, the sensors used are always one from the front and one from the back of the device.
+
+### Analysis mode with concurrent cameras
+
+This feature is not ready yet and might not be as good as you would expect: it would require even more resources.
+
+### Differences between pictures taken and Preview
+
+The preview shows the additional sensors as picture-in-picture.
+
+This is not what is captured by CamerAwesome: instead, a picture for each sensor is individually captured.
+
+For now, you are responsible for merging them into one picture (or use a `Widget` to position each image as you want).
+
+
+## 🗣️ Feedback
+
+If you are using this feature or have any feedback regarding it, please share it with us in a [new issue](https://github.com/Apparence-io/CamerAwesome/issues/new/choose).
\ No newline at end of file
diff --git a/docs/image_analysis/detecting_faces.mdx b/docs/image_analysis/detecting_faces.mdx
index a3389bae..2d7c02de 100644
--- a/docs/image_analysis/detecting_faces.mdx
+++ b/docs/image_analysis/detecting_faces.mdx
@@ -31,8 +31,10 @@ MLKit is now ready, let's setup CamerAwesome.
CameraAwesomeBuilder.previewOnly(
// 2.
previewFit: CameraPreviewFit.contain,
- aspectRatio: CameraAspectRatios.ratio_1_1,
- sensor: Sensors.front,
+ sensorConfig: SensorConfig.single(
+ sensor: Sensor.position(SensorPosition.front),
+ aspectRatio: CameraAspectRatios.ratio_1_1,
+ ),
// 3.
onImageForAnalysis: (img) => _analyzeImage(img),
// 4.
diff --git a/docs/image_analysis/image_analysis_configuration.mdx b/docs/image_analysis/image_analysis_configuration.mdx
index aa07146a..2643c04d 100644
--- a/docs/image_analysis/image_analysis_configuration.mdx
+++ b/docs/image_analysis/image_analysis_configuration.mdx
@@ -79,4 +79,29 @@ The `example` folder contains three examples using MLKit:
A detailed explanation of each example is available in [Reading barcodes](/image_analysis/reading_barcodes) and [Detecting faces](/image_analysis/detecting_faces).
-See also details on the [AnalysisImage format and conversions](/image_analysis/image_format_conversions).
\ No newline at end of file
+See also details on the [AnalysisImage format and conversions](/image_analysis/image_format_conversions).
+
+## iOS preview mode only publishing
+
+If you want to use the preview-only feature on iOS, you are not required to set the microphone description permission in your `Info.plist` file.
+However, keep in mind that the App Store has the ability to detect if your app is utilizing the AVAudioSession API (which is included by default in the CamerAwesome plugin).
+
+If your app does not plan to use the microphone at all and you want to use the preview-only feature, you can add the following to your `Podfile`:
+```
+post_install do |installer|
+ installer.pods_project.targets.each do |target|
+ flutter_additional_ios_build_settings(target)
+
+ # ADD THE NEXT SECTION
+ target.build_configurations.each do |config|
+ config.build_settings['GCC_PREPROCESSOR_DEFINITIONS'] ||= [
+ '$(inherited)',
+ 'AUDIO_SESSION_MICROPHONE=0'
+ ]
+ end
+
+ end
+end
+```
+
+This piece of code will remove all occurrences of the microphone API in the iOS project, and you will be able to pass the review without any problems.
\ No newline at end of file
diff --git a/docs/image_analysis/image_format_conversions.mdx b/docs/image_analysis/image_format_conversions.mdx
index 3a602640..2a10e4aa 100644
--- a/docs/image_analysis/image_format_conversions.mdx
+++ b/docs/image_analysis/image_format_conversions.mdx
@@ -120,8 +120,10 @@ class _CameraPageState extends State {
return Scaffold(
// 2.
body: CameraAwesomeBuilder.analysisOnly(
- aspectRatio: CameraAspectRatios.ratio_1_1,
- sensor: Sensors.front,
+ sensorConfig: SensorConfig.single(
+ sensor: Sensor.position(SensorPosition.front),
+ aspectRatio: CameraAspectRatios.ratio_1_1,
+ ),
// 3.
onImageForAnalysis: (img) async => _imageStreamController.add(img),
imageAnalysisConfig: AnalysisConfig(
diff --git a/docs/img/camera_states_interactions.png b/docs/img/camera_states_interactions.png
index b1f9f613..06fc3c3e 100644
Binary files a/docs/img/camera_states_interactions.png and b/docs/img/camera_states_interactions.png differ
diff --git a/docs/index.mdx b/docs/index.mdx
index 7ec7e96c..6a6f75ae 100644
--- a/docs/index.mdx
+++ b/docs/index.mdx
@@ -11,10 +11,11 @@ Use our awesome built in interface or customize it as you want.
## Native features
Here's all native features that CamerAwesome provides to the flutter side.
-| System | Android | iOS |
+| Features | Android | iOS |
| :--------------------------------------- | :-----: | :---: |
| 🔖 Ask permissions | ✅ | ✅ |
| 🎥 Record video | ✅ | ✅ |
+| 📹 Multi camera | ✅ | ✅ |
| 🔈 Enable/disable audio | ✅ | ✅ |
| 🎞 Take photos | ✅ | ✅ |
| 🌆 Photo live filters | ✅ | ✅ |
@@ -29,3 +30,6 @@ Here's all native features that CamerAwesome provides to the flutter side.
| 🤐 Background auto stop | ✅ | ✅ |
| 🔀 Sensor type switching | ⛔️ | ✅ |
| 🪞 Enable/disable front camera mirroring | ✅ | ✅ |
+
+
+After [installing](getting_started/installing) CamerAwesome, take a look at the [Awesome built-in UI](getting_started/awesome-ui) guide.
\ No newline at end of file
diff --git a/docs/migration_guides/from_1_to_2.mdx b/docs/migration_guides/from_1_to_2.mdx
new file mode 100644
index 00000000..8cf26492
--- /dev/null
+++ b/docs/migration_guides/from_1_to_2.mdx
@@ -0,0 +1,84 @@
+# Migrating from 1.x.x to 2.x.x
+
+CamerAwesome 2.0.0 is a major release that brings a lot of new features and improvements.
+
+The most important change is that you can use several sensors concurrently which implied several API changes to CamerAwesome.
+
+This guide will help you to migrate your code from 1.x.x to 2.x.x.
+
+## Breaking changes
+
+The initial settings of the `CameraAwesomeBuilder` have been moved to either `SaveConfig` or `SensorConfig`.
+
+See the code diff below:
+
+```diff
+CameraAwesomeBuilder.awesome(
+- sensor: Sensors.back,
+- flashMode: FlashMode.auto,
+- aspectRatio: CameraAspectRatios.ratio_4_3,
+- mirrorFrontCamera: true,
+- zoom: 0.0,
++ sensorConfig: SensorConfig.single(
++ sensor: Sensor.position(SensorPosition.back),
++ flashMode: FlashMode.auto,
++ aspectRatio: CameraAspectRatios.ratio_4_3,
++ zoom: 0.0,
++ ),
+- exifPreferences: ExifPreferences(saveGPSLocation: true),
+- enableAudio: true,
+ saveConfig: SaveConfig.photoAndVideo(
+ initialCaptureMode: CaptureMode.photo,
++ photoPathBuilder: (sensors) async {
++ final Directory extDir = await getTemporaryDirectory();
++ final testDir = await Directory(
++ '${extDir.path}/camerawesome',
++ ).create(recursive: true);
++ if (sensors.length == 1) {
++ final String filePath =
++ '${testDir.path}/${DateTime.now().millisecondsSinceEpoch}.jpg';
++ return SingleCaptureRequest(filePath, sensors.first);
++ } else {
++ // Separate pictures taken with front and back camera
++ return MultipleCaptureRequest(
++ {
++ for (final sensor in sensors)
++ sensor:
++ '${testDir.path}/${sensor.position == SensorPosition.front ? 'front_' : "back_"}${DateTime.now().millisecondsSinceEpoch}.jpg',
++ },
++ );
++ }
++ },
++ videoPathBuilder: (sensors) async {
++ // same logic as photoPathBuilder
++ },
++ videoOptions: VideoOptions(
++ enableAudio: true,
++ ios: CupertinoVideoOptions(
++ fps: 10,
++ ),
++ android: AndroidVideoOptions(
++ bitrate: 6000000,
++ quality: VideoRecordingQuality.fhd,
++ fallbackStrategy: QualityFallbackStrategy.lower,
++ ),
++ ),
++ exifPreferences: ExifPreferences(saveGPSLocation: true),
++ mirrorFrontCamera: true,
+ ),
+ ...
+)
+```
+
+
+## Changelog
+
+- ✨ Added multi-camera feature, allowing users to display multiple camera previews simultaneously. Note that this feature is currently in beta, and we do not recommend using it in production.
+- ✨ Users can now pass options (such as bitrate, fps, and quality) when recording a video.
+- ✨🍏 Implemented brightness and exposure level settings on iOS / iPadOS.
+- ✨🤖 Added zoom indicator UI.
+- ✨🤖 Video recording is now mirrored if `mirrorFrontCamera` is set to true.
+- ♻️🍏 Completely reworked the code for increased clarity and performance.
+- 🐛 Fixed patrol tests.
+- 🐛 Fixed the use of capture button parameter in awesome bottom actions (thanks to @juliuszmandrosz).
+- 📝 Added Chinese README.md (thanks to @chyiiiiiiiiiiii).
diff --git a/docs/widgets/widgets.mdx b/docs/widgets/widgets.mdx
index 354e9b69..8adcc708 100644
--- a/docs/widgets/widgets.mdx
+++ b/docs/widgets/widgets.mdx
@@ -4,19 +4,20 @@ CamerAwesome comes with a list of pre-built widgets to ease Camera integration i
Here is a table of all the widgets, with a description and screenshot when appropriate.
-|Widget|Description|Screenshot|
-|-|-|-|
-|CameraAwesomeBuilder|Main widget with which you should use CamerAwesome. Use either `CameraAwesomeBuilder.awesome()` to use CamerAwesome UI with a few customization or `CameraAwesomeBuilder.awesome()` if you want to entirely build your camera UI.|||
-|AwesomeCameraLayout|Layout used by CameraAwesomeBuilder.|||
-|AwesomeCameraActionsRow|||
-|AwesomeBottomActions|||
-|AwesomeFilterWidget|Expandable list of filters to use with CamerAwesome in picture mode.||
-|AwesomeAspectRatioButton|Button used to change aspect ratio.
You can customize its behaviour and its look.||
-|AwesomeCameraSwitchButton|Button used to switch between front and back camera.
You can customize its behaviour and its look.||
-|AwesomeFlashButton|Button used to switch between flash modes (none, auto, on, always).
You can customize its behaviour and its look.||
-|AwesomeLocationButton|Button used to toggle if location should be save in Exif metadata when taking a picture.
You can customize its behaviour and its look.||
-|AwesomePauseResumeButton|Button used to pause or resume a vide recording.
You can customize its behaviour and its look.||
-|AwesomeCameraModeSelector|PageView used to switch between picture mode and video recording mode.
You can customize its behaviour and its look.||
-|AwesomeMediaPreview|Preview of the last media captured.
You can customize its behaviour and its look.||
-|AwesomeSensorTypeSelector|Selector of sensor types (only iOS).
You can customize its behaviour and its look.||
-|AwesomeOrientedWidget|Its child rotates automatically with the camera.
It can be disabled.||
+| Widget | Description | Screenshot |
+| ------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------- | --- |
+| CameraAwesomeBuilder | Main widget with which you should use CamerAwesome. Use either `CameraAwesomeBuilder.awesome()` to use CamerAwesome UI with a few customizations or `CameraAwesomeBuilder.custom()` if you want to entirely build your camera UI. | | |
+| AwesomeCameraLayout | Layout used by CameraAwesomeBuilder. | | |
+| AwesomeCameraActionsRow | | |
+| AwesomeBottomActions | | |
+| AwesomeFilterWidget | Expandable list of filters to use with CamerAwesome in picture mode. | |
+| AwesomeAspectRatioButton | Button used to change aspect ratio.
You can customize its behaviour and its look. | |
+| AwesomeCameraSwitchButton | Button used to switch between front and back camera.
You can customize its behaviour and its look. | |
+| AwesomeFlashButton | Button used to switch between flash modes (none, auto, on, always).
You can customize its behaviour and its look. | |
+| AwesomeLocationButton | Button used to toggle if location should be saved in Exif metadata when taking a picture.
You can customize its behaviour and its look. | |
You can customize its behaviour and its look. | |
+| AwesomePauseResumeButton | Button used to pause or resume a video recording.
You can customize its behaviour and its look. | |
You can customize its behaviour and its look. | |
+| AwesomeCameraModeSelector | PageView used to switch between picture mode and video recording mode.
You can customize its behaviour and its look. | |
+| AwesomeMediaPreview | Preview of the last media captured.
You can customize its behaviour and its look. | |
+| AwesomeSensorTypeSelector | Selector of sensor types (only iOS).
You can customize its behaviour and its look. | |
+| AwesomeOrientedWidget | Its child rotates automatically with the camera.
It can be disabled. | |
+| AwesomeZoomSelector | Displays the current Zoom and allows to switch to min/max zoom (when min zoom < 1.0) |
diff --git a/example/android/app/build.gradle b/example/android/app/build.gradle
index be33ef5a..88728b53 100644
--- a/example/android/app/build.gradle
+++ b/example/android/app/build.gradle
@@ -48,7 +48,7 @@ android {
// You can update the following values to match your application needs.
// For more information, see: https://docs.flutter.dev/deployment/android#reviewing-the-build-configuration.
minSdkVersion 21
- targetSdkVersion 31
+ targetSdkVersion 33
versionCode flutterVersionCode.toInteger()
versionName flutterVersionName
@@ -70,9 +70,9 @@ flutter {
}
dependencies {
- implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
+ implementation "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version"
implementation 'com.google.mlkit:face-detection:16.1.5'
- implementation 'com.google.mlkit:vision-common:17.1.0'
+ implementation 'com.google.mlkit:vision-common:17.3.0'
testImplementation 'junit:junit:4.12'
diff --git a/example/android/build.gradle b/example/android/build.gradle
index cadf1cf0..3402903d 100644
--- a/example/android/build.gradle
+++ b/example/android/build.gradle
@@ -1,5 +1,5 @@
buildscript {
- ext.kotlin_version = '1.7.10'
+ ext.kotlin_version = '1.8.10'
repositories {
google()
mavenCentral()
@@ -26,6 +26,6 @@ subprojects {
project.evaluationDependsOn(':app')
}
-task clean(type: Delete) {
+tasks.register("clean", Delete) {
delete rootProject.buildDir
}
diff --git a/example/integration_test/bundled_test.dart b/example/integration_test/bundled_test.dart
index 2453f876..5ff5e905 100644
--- a/example/integration_test/bundled_test.dart
+++ b/example/integration_test/bundled_test.dart
@@ -4,7 +4,6 @@ import 'photo_test.dart' as photo_test;
import 'ui_test.dart' as ui_test;
import 'video_test.dart' as video_test;
-// TODO Run it on Firebase Test Lab https://patrol.leancode.co/ci
void main() {
group("Bundled tests > ", () {
ui_test.main();
diff --git a/example/integration_test/common.dart b/example/integration_test/common.dart
index 6fc19f77..60394f66 100644
--- a/example/integration_test/common.dart
+++ b/example/integration_test/common.dart
@@ -1,8 +1,11 @@
import 'dart:io';
+import 'package:camerawesome/camerawesome_plugin.dart';
+import 'package:meta/meta.dart';
import 'package:path_provider/path_provider.dart';
import 'package:patrol/patrol.dart';
+@isTest
void patrol(
String description,
Future Function(PatrolTester) callback, {
@@ -31,10 +34,13 @@ Future allowPermissionsIfNeeded(PatrolTester $) async {
}
}
-Future tempPath(String pictureName) async {
- final file = File(
- '${(await getTemporaryDirectory()).path}/test/$pictureName',
- );
- await file.create(recursive: true);
- return file.path;
+Future Function(List sensors) tempPath(
+ String pictureName) {
+ return (sensors) async {
+ final file = File(
+ '${(await getTemporaryDirectory()).path}/test/$pictureName',
+ );
+ await file.create(recursive: true);
+ return SingleCaptureRequest(file.path, sensors.first);
+ };
}
diff --git a/example/integration_test/concurrent_camera_test.dart b/example/integration_test/concurrent_camera_test.dart
new file mode 100644
index 00000000..835c753e
--- /dev/null
+++ b/example/integration_test/concurrent_camera_test.dart
@@ -0,0 +1,29 @@
+import 'package:camera_app/drivable_camera.dart';
+import 'package:camerawesome/camerawesome_plugin.dart';
+import 'package:flutter_test/flutter_test.dart';
+
+import 'common.dart';
+
+main() {
+ patrol('Concurrent > Basic run', ($) async {
+ await $.pumpWidgetAndSettle(
+ DrivableCamera(
+ sensors: [
+ Sensor.position(SensorPosition.back),
+ Sensor.position(SensorPosition.front)
+ ],
+ saveConfig: SaveConfig.photoAndVideo(
+ photoPathBuilder: tempPath('single_photo_back.jpg'),
+ videoPathBuilder: tempPath('single_video_back.mp4'),
+ ),
+ ),
+ );
+ await allowPermissionsIfNeeded($);
+ await $.pumpAndSettle();
+ // await $(AwesomeCaptureButton).tap(andSettle: false);
+
+ await $.pump(const Duration(seconds: 2));
+ await $.pump();
+ expect($(AwesomeCaptureButton), findsOneWidget);
+ });
+}
diff --git a/example/integration_test/photo_test.dart b/example/integration_test/photo_test.dart
index 225543ab..dfbea20c 100644
--- a/example/integration_test/photo_test.dart
+++ b/example/integration_test/photo_test.dart
@@ -13,22 +13,24 @@ void main() {
}
void photoTests() {
- for (var sensor in Sensors.values) {
+ for (var sensor in SensorPosition.values) {
patrol(
'Take pictures > single picture ${sensor.name} camera',
($) async {
+ final sensors = [Sensor.position(sensor)];
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: sensor,
+ sensors: sensors,
saveConfig: SaveConfig.photo(
- pathBuilder: () => tempPath('single_photo_back.jpg'),
+ pathBuilder: tempPath('single_photo_back.jpg'),
),
),
);
await allowPermissionsIfNeeded($);
- final filePath = await tempPath('single_photo_back.jpg');
+ final request = await tempPath('single_photo_back.jpg')(sensors);
+ final filePath = request.when(single: (single) => single.file!.path);
await $(AwesomeCaptureButton).tap();
expect(File(filePath).existsSync(), true);
@@ -42,15 +44,16 @@ void photoTests() {
($) async {
int idxPicture = 0;
const picturesToTake = 3;
+ final sensors = [Sensor.position(sensor)];
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: sensor,
+ sensors: sensors,
saveConfig: SaveConfig.photo(
- pathBuilder: () async {
- final path = await tempPath(
- 'multiple_photo_${sensor.name}_$idxPicture.jpg');
+ pathBuilder: (sensors) async {
+ final request = await tempPath(
+ 'multiple_photo_${sensor.name}_$idxPicture.jpg')(sensors);
idxPicture++;
- return path;
+ return request;
},
),
),
@@ -59,8 +62,9 @@ void photoTests() {
await allowPermissionsIfNeeded($);
for (int i = 0; i < picturesToTake; i++) {
- final filePath =
- await tempPath('multiple_photo_${sensor.name}_$idxPicture.jpg');
+ final request = await tempPath(
+ 'multiple_photo_${sensor.name}_$idxPicture.jpg')(sensors);
+ final filePath = request.when(single: (single) => single.file!.path);
await $(AwesomeCaptureButton).tap();
expect(File(filePath).existsSync(), true);
// File size should be quite high (at least more than 100)
@@ -71,23 +75,25 @@ void photoTests() {
}
patrol(
- 'Take pictures > One with ${Sensors.back} then one with ${Sensors.front}',
+ 'Take pictures > One with ${SensorPosition.back} then one with ${SensorPosition.front}',
($) async {
int idxSensor = 0;
- final sensors = [
- Sensors.back,
- Sensors.front,
- Sensors.back,
+ final switchingSensors = [
+ SensorPosition.back,
+ SensorPosition.front,
+ SensorPosition.back,
];
+ final initialSensors = [Sensor.position(SensorPosition.back)];
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: Sensors.back,
+ sensors: initialSensors,
saveConfig: SaveConfig.photo(
- pathBuilder: () async {
- final path = await tempPath(
- 'switch_sensor_photo_${idxSensor}_${sensors[idxSensor].name}.jpg');
+ pathBuilder: (sensors) async {
+ final request = await tempPath(
+ 'switch_sensor_photo_${idxSensor}_${switchingSensors[idxSensor].name}.jpg')(
+ sensors);
idxSensor++;
- return path;
+ return request;
},
),
),
@@ -95,11 +101,12 @@ void photoTests() {
await allowPermissionsIfNeeded($);
- for (int i = 0; i < sensors.length; i++) {
- final filePath = await tempPath(
- 'switch_sensor_photo_${idxSensor}_${sensors[idxSensor].name}.jpg');
-
- if (i > 0 && sensors[i - 1] != sensors[i]) {
+ for (int i = 0; i < switchingSensors.length; i++) {
+ final request = await tempPath(
+ 'switch_sensor_photo_${idxSensor}_${switchingSensors[idxSensor].name}.jpg')(
+ initialSensors);
+ final filePath = request.when(single: (single) => single.file!.path);
+ if (i > 0 && switchingSensors[i - 1] != switchingSensors[i]) {
await $.tester.pumpAndSettle();
final switchButton = find.byType(AwesomeCameraSwitchButton);
await $.tester.tap(switchButton, warnIfMissed: false);
diff --git a/example/integration_test/ui_test.dart b/example/integration_test/ui_test.dart
index bdb76ebc..b9c763a1 100644
--- a/example/integration_test/ui_test.dart
+++ b/example/integration_test/ui_test.dart
@@ -17,17 +17,17 @@ void main() {
($) async {
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: Sensors.back,
+ sensors: [Sensor.position(SensorPosition.back)],
saveConfig: SaveConfig.photoAndVideo(
- photoPathBuilder: () => tempPath('single_photo_back.jpg'),
- videoPathBuilder: () => tempPath('single_video_back.mp4'),
+ photoPathBuilder: tempPath('single_photo_back.jpg'),
+ videoPathBuilder: tempPath('single_video_back.mp4'),
),
),
);
await allowPermissionsIfNeeded($);
- expect($(#ratioButton), findsOneWidget);
+ expect($(AwesomeAspectRatioButton), findsOneWidget);
expect($(AwesomeFlashButton), findsOneWidget);
expect(
$(AwesomeLocationButton).$(AwesomeBouncingWidget),
@@ -46,7 +46,8 @@ void main() {
// Switch to video mode
await $.tap(find.text("VIDEO"));
- expect($(#ratioButton), findsNothing);
+ await $.pump(const Duration(milliseconds: 3000));
+ expect($(AwesomeAspectRatioButton), findsNothing);
expect($(AwesomeFlashButton), findsOneWidget);
expect(
$(AwesomeLocationButton).$(AwesomeBouncingWidget),
@@ -67,16 +68,16 @@ void main() {
($) async {
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: Sensors.back,
+ sensors: [Sensor.position(SensorPosition.back)],
saveConfig: SaveConfig.photo(
- pathBuilder: () => tempPath('single_photo_back.jpg'),
+ pathBuilder: tempPath('single_photo_back.jpg'),
),
),
);
await allowPermissionsIfNeeded($);
- expect($(#ratioButton), findsOneWidget);
+ expect($(AwesomeAspectRatioButton), findsOneWidget);
expect($(AwesomeFlashButton), findsOneWidget);
expect(
$(AwesomeLocationButton).$(AwesomeBouncingWidget),
@@ -96,21 +97,20 @@ void main() {
($) async {
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: Sensors.back,
+ sensors: [Sensor.position(SensorPosition.back)],
saveConfig: SaveConfig.photoAndVideo(
- photoPathBuilder: () => tempPath('single_photo_back.jpg'),
- videoPathBuilder: () => tempPath('single_video_back.mp4'),
+ photoPathBuilder: tempPath('single_photo_back.jpg'),
+ videoPathBuilder: tempPath('single_video_back.mp4'),
initialCaptureMode: CaptureMode.video,
),
),
);
await allowPermissionsIfNeeded($);
-
- await $.pump(const Duration(milliseconds: 2000));
+ await $.pump(const Duration(milliseconds: 1000));
// Ratio button is not visible in video mode
- expect($(#ratioButton), findsNothing);
+ expect($(AwesomeAspectRatioButton), findsNothing);
expect($(AwesomeFlashButton), findsOneWidget);
expect($(AwesomeLocationButton).$(AwesomeBouncingWidget), findsNothing);
expect($(AwesomeCameraSwitchButton), findsOneWidget);
@@ -121,11 +121,12 @@ void main() {
expect($(AwesomeCaptureButton), findsOneWidget);
expect($(AwesomeCameraModeSelector).$(PageView), findsOneWidget);
- await $(AwesomeCaptureButton).tap();
+ await $(AwesomeCaptureButton).tap(andSettle: false);
await allowPermissionsIfNeeded($);
+ await $.pump(const Duration(milliseconds: 2000));
- // Recording
- expect($(#ratioButton), findsNothing);
+ // // Recording
+ expect($(AwesomeAspectRatioButton), findsNothing);
expect($(AwesomeFlashButton), findsNothing);
expect($(AwesomeLocationButton).$(AwesomeBouncingWidget), findsNothing);
expect($(AwesomeCameraSwitchButton), findsNothing);
@@ -135,11 +136,11 @@ void main() {
expect($(AwesomeCaptureButton), findsOneWidget);
expect($(AwesomeCameraModeSelector).$(PageView), findsNothing);
- await $(AwesomeCaptureButton).tap();
- await $.pump(const Duration(milliseconds: 2000));
+ await $(AwesomeCaptureButton).tap(andSettle: false);
+ await $.pump(const Duration(milliseconds: 4000));
// Not recording
- expect($(#ratioButton), findsNothing);
+ expect($(AwesomeAspectRatioButton), findsNothing);
expect($(AwesomeFlashButton), findsOneWidget);
expect($(AwesomeLocationButton).$(AwesomeBouncingWidget), findsNothing);
expect($(AwesomeCameraSwitchButton), findsOneWidget);
@@ -161,9 +162,9 @@ void main() {
($) async {
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: Sensors.back,
+ sensors: [Sensor.position(SensorPosition.back)],
saveConfig: SaveConfig.photo(
- pathBuilder: () => tempPath('single_photo_back.jpg'),
+ pathBuilder: tempPath('single_photo_back.jpg'),
),
),
);
@@ -241,11 +242,12 @@ void main() {
patrol(
'Location > Do NOT save if not specified',
($) async {
+ final sensors = [Sensor.position(SensorPosition.back)];
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: Sensors.back,
+ sensors: sensors,
saveConfig: SaveConfig.photo(
- pathBuilder: () => tempPath('single_photo_back_no_gps.jpg'),
+ pathBuilder: tempPath('single_photo_back_no_gps.jpg'),
),
),
);
@@ -257,7 +259,8 @@ void main() {
// await $.native.pressBack();
await $(AwesomeCaptureButton).tap();
- final filePath = await tempPath('single_photo_back_no_gps.jpg');
+ final request = await tempPath('single_photo_back_no_gps.jpg')(sensors);
+ final filePath = request.when(single: (single) => single.file!.path);
final exif = await readExifFromFile(File(filePath));
final gpsTags = exif.entries.where(
(element) => element.key.contains('GPSDate'),
@@ -267,24 +270,34 @@ void main() {
},
);
+ // This test might not pass in Firebase Test Lab because location does not seem to be activated. It works on local device.
+ // TODO Try to use Patrol to enable location manually on the device
+
patrol(
'Location > Save if specified',
($) async {
+ final sensors = [Sensor.position(SensorPosition.back)];
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: Sensors.back,
+ sensors: sensors,
saveConfig: SaveConfig.photo(
- pathBuilder: () => tempPath('single_photo_back_gps.jpg'),
+ pathBuilder: tempPath('single_photo_back_gps.jpg'),
+ exifPreferences: ExifPreferences(saveGPSLocation: true),
),
- exifPreferences: ExifPreferences(saveGPSLocation: true),
),
);
await allowPermissionsIfNeeded($);
- await $(AwesomeCaptureButton).tap();
- final filePath = await tempPath('single_photo_back_gps.jpg');
+ await $(AwesomeCaptureButton).tap(andSettle: false);
+ // TODO Wait for media captured instead of a fixed duration (taking picture + retrieving location might take a lot of time)
+ await $.pump(const Duration(seconds: 4));
+ final request = await tempPath('single_photo_back_gps.jpg')(sensors);
+ final filePath = request.when(single: (single) => single.file!.path);
final exif = await readExifFromFile(File(filePath));
+ // for (final entry in exif.entries) {
+ // print('EXIF_PRINT > ${entry.key} : ${entry.value}');
+ // }
final gpsTags = exif.entries.where(
(element) => element.key.startsWith('GPS GPS'),
);
@@ -295,11 +308,12 @@ void main() {
patrol(
'Focus > On camera preview tap, show focus indicator for 2 seconds',
($) async {
+ final sensors = [Sensor.position(SensorPosition.back)];
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: Sensors.back,
+ sensors: sensors,
saveConfig: SaveConfig.photo(
- pathBuilder: () => tempPath('single_photo_back.jpg'),
+ pathBuilder: tempPath('single_photo_back.jpg'),
),
),
);
@@ -318,11 +332,12 @@ void main() {
patrol(
'Focus > On multiple focus, last more than 2 seconds',
($) async {
+ final sensors = [Sensor.position(SensorPosition.back)];
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: Sensors.back,
+ sensors: sensors,
saveConfig: SaveConfig.photo(
- pathBuilder: () => tempPath('single_photo_back.jpg'),
+ pathBuilder: tempPath('single_photo_back.jpg'),
),
),
);
diff --git a/example/integration_test/video_test.dart b/example/integration_test/video_test.dart
index 2bdf1a23..63123d6b 100644
--- a/example/integration_test/video_test.dart
+++ b/example/integration_test/video_test.dart
@@ -9,23 +9,24 @@ import 'common.dart';
// To run it, you have to use `patrol drive` instead of `flutter test`.
void main() {
- for (final sensor in Sensors.values) {
+ for (final sensor in SensorPosition.values) {
patrol(
- 'Record video > one with ${Sensors.back}',
+ 'Record video > one with ${SensorPosition.back}',
($) async {
+ final sensors = [Sensor.position(sensor)];
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: sensor,
+ sensors: sensors,
saveConfig: SaveConfig.video(
- pathBuilder: () =>
- tempPath('record_video_single_${sensor.name}.mp4'),
+ pathBuilder: tempPath('record_video_single_${sensor.name}.mp4'),
),
),
);
await allowPermissionsIfNeeded($);
- final filePath =
- await tempPath('record_video_single_${sensor.name}.mp4');
+ final request =
+ await tempPath('record_video_single_${sensor.name}.mp4')(sensors);
+ final filePath = request.when(single: (single) => single.file!.path);
await $(AwesomeCaptureButton).tap(andSettle: false);
await allowPermissionsIfNeeded($);
await $.pump(const Duration(seconds: 3));
@@ -39,15 +40,16 @@ void main() {
);
patrol(
- 'Record video > multiple ${Sensors.back} camera',
+ 'Record video > multiple ${sensor.name} camera',
($) async {
int idxVideo = 0;
const videosToTake = 3;
+ final sensors = [Sensor.position(sensor)];
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: sensor,
+ sensors: sensors,
saveConfig: SaveConfig.video(
- pathBuilder: () =>
+ pathBuilder:
tempPath('multiple_video_${sensor.name}_$idxVideo.mp4'),
),
),
@@ -55,8 +57,9 @@ void main() {
await allowPermissionsIfNeeded($);
for (int i = 0; i < videosToTake; i++) {
- final filePath =
- await tempPath('multiple_video_${sensor.name}_$idxVideo.mp4');
+ final request = await tempPath(
+ 'multiple_video_${sensor.name}_$idxVideo.mp4')(sensors);
+ final filePath = request.when(single: (single) => single.file!.path);
await $(AwesomeCaptureButton).tap(andSettle: false);
await allowPermissionsIfNeeded($);
await Future.delayed(const Duration(seconds: 3));
@@ -72,17 +75,20 @@ void main() {
patrol(
'Record video > Pause and resume',
($) async {
+ final sensors = [Sensor.position(sensor)];
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: sensor,
+ sensors: sensors,
saveConfig: SaveConfig.video(
- pathBuilder: () => tempPath('pause_resume_video_$sensor.mp4')),
+ pathBuilder: tempPath('pause_resume_video_$sensor.mp4')),
),
);
await allowPermissionsIfNeeded($);
- final filePath = await tempPath('pause_resume_video_$sensor.mp4');
+ final request =
+ await tempPath('pause_resume_video_$sensor.mp4')(sensors);
+ final filePath = request.when(single: (single) => single.file!.path);
await $(AwesomeCaptureButton).tap(andSettle: false);
await allowPermissionsIfNeeded($);
@@ -109,21 +115,23 @@ void main() {
}
patrol(
- 'Record video > One with ${Sensors.back} then one with ${Sensors.front}',
+ 'Record video > One with ${SensorPosition.back} then one with ${SensorPosition.front}',
($) async {
int idxSensor = 0;
- final sensors = [
- Sensors.back,
- Sensors.front,
- Sensors.back,
+ final switchingSensors = [
+ SensorPosition.back,
+ SensorPosition.front,
+ SensorPosition.back,
];
+ final initialSensors = [Sensor.position(SensorPosition.back)];
await $.pumpWidgetAndSettle(
DrivableCamera(
- sensor: Sensors.back,
+ sensors: initialSensors,
saveConfig: SaveConfig.video(
- pathBuilder: () async {
+ pathBuilder: (sensors) async {
final path = await tempPath(
- 'switch_sensor_video_${idxSensor}_${sensors[idxSensor].name}.mp4');
+ 'switch_sensor_video_${idxSensor}_${switchingSensors[idxSensor].name}.mp4')(
+ sensors);
idxSensor++;
return path;
},
@@ -133,19 +141,22 @@ void main() {
await allowPermissionsIfNeeded($);
- for (int i = 0; i < sensors.length; i++) {
- final filePath = await tempPath(
- 'switch_sensor_video_${idxSensor}_${sensors[idxSensor].name}.mp4');
+ for (int i = 0; i < switchingSensors.length; i++) {
+ final request = await tempPath(
+ 'switch_sensor_video_${i}_${switchingSensors[i].name}.mp4')(
+ initialSensors);
+ final filePath = request.when(single: (single) => single.file!.path);
- if (i > 0 && sensors[i - 1] != sensors[i]) {
+ if (i > 0 && switchingSensors[i - 1] != switchingSensors[i]) {
await $.tester.pumpAndSettle();
final switchButton = find.byType(AwesomeCameraSwitchButton);
await $.tester.tap(switchButton, warnIfMissed: false);
+ await $.pump(const Duration(milliseconds: 2000));
}
await $(AwesomeCaptureButton).tap(andSettle: false);
await allowPermissionsIfNeeded($);
await Future.delayed(const Duration(seconds: 3));
- await $(AwesomeCaptureButton).tap();
+ await $(AwesomeCaptureButton).tap(andSettle: false);
await $.pump(const Duration(milliseconds: 2000));
expect(File(filePath).existsSync(), true);
diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock
index b769f67e..1c5acd1a 100644
--- a/example/ios/Podfile.lock
+++ b/example/ios/Podfile.lock
@@ -30,7 +30,7 @@ PODS:
- Flutter
- google_mlkit_commons
- GoogleMLKit/TextRecognition (~> 3.2.0)
- - GoogleDataTransport (9.2.1):
+ - GoogleDataTransport (9.2.2):
- GoogleUtilities/Environment (~> 7.7)
- nanopb (< 2.30910.0, >= 2.30908.0)
- PromisesObjC (< 3.0, >= 1.2)
@@ -57,11 +57,11 @@ PODS:
- GoogleToolboxForMac/Defines (= 2.3.2)
- "GoogleToolboxForMac/NSString+URLArguments (= 2.3.2)"
- "GoogleToolboxForMac/NSString+URLArguments (2.3.2)"
- - GoogleUtilities/Environment (7.11.0):
+ - GoogleUtilities/Environment (7.11.1):
- PromisesObjC (< 3.0, >= 1.2)
- - GoogleUtilities/Logger (7.11.0):
+ - GoogleUtilities/Logger (7.11.1):
- GoogleUtilities/Environment
- - GoogleUtilities/UserDefaults (7.11.0):
+ - GoogleUtilities/UserDefaults (7.11.1):
- GoogleUtilities/Logger
- GoogleUtilitiesComponents (1.1.0):
- GoogleUtilities/Logger
@@ -121,7 +121,7 @@ PODS:
- Flutter
- gRPC-Swift (~> 1.8.0)
- PromisesObjC (2.2.0)
- - Protobuf (3.22.1)
+ - Protobuf (3.22.3)
- SwiftNIO (2.40.0):
- _NIODataStructures (= 2.40.0)
- CNIOAtomics (= 2.40.0)
@@ -265,7 +265,7 @@ DEPENDENCIES:
- google_mlkit_face_detection (from `.symlinks/plugins/google_mlkit_face_detection/ios`)
- google_mlkit_text_recognition (from `.symlinks/plugins/google_mlkit_text_recognition/ios`)
- integration_test (from `.symlinks/plugins/integration_test/ios`)
- - path_provider_foundation (from `.symlinks/plugins/path_provider_foundation/ios`)
+ - path_provider_foundation (from `.symlinks/plugins/path_provider_foundation/darwin`)
- patrol (from `.symlinks/plugins/patrol/ios`)
- video_player_avfoundation (from `.symlinks/plugins/video_player_avfoundation/ios`)
@@ -332,7 +332,7 @@ EXTERNAL SOURCES:
integration_test:
:path: ".symlinks/plugins/integration_test/ios"
path_provider_foundation:
- :path: ".symlinks/plugins/path_provider_foundation/ios"
+ :path: ".symlinks/plugins/path_provider_foundation/darwin"
patrol:
:path: ".symlinks/plugins/patrol/ios"
video_player_avfoundation:
@@ -355,14 +355,14 @@ SPEC CHECKSUMS:
google_mlkit_commons: e9070f57232c3a3e4bd42fdfa621bb1f4bb3e709
google_mlkit_face_detection: ab4c98846c4e9b0d48e5bd386cedb14feb676802
google_mlkit_text_recognition: 9967360384e93b09686aca67fc9f13f9a957d770
- GoogleDataTransport: ea169759df570f4e37bdee1623ec32a7e64e67c4
+ GoogleDataTransport: 8378d1fa8ac49753ea6ce70d65a7cb70ce5f66e6
GoogleMLKit: 0017a6a8372e1a182139b9def4d89be5d87ca5a7
GoogleToolboxForMac: 8bef7c7c5cf7291c687cf5354f39f9db6399ad34
- GoogleUtilities: c2bdc4cf2ce786c4d2e6b3bcfd599a25ca78f06f
+ GoogleUtilities: 9aa0ad5a7bc171f8bae016300bfcfa3fb8425749
GoogleUtilitiesComponents: 679b2c881db3b615a2777504623df6122dd20afe
gRPC-Swift: 74adcaaa62ac5e0a018938840328cb1fdfb09e7b
GTMSessionFetcher: 5595ec75acf5be50814f81e9189490412bad82ba
- integration_test: a1e7d09bd98eca2fc37aefd79d4f41ad37bdbbe5
+ integration_test: 13825b8a9334a850581300559b8839134b124670
JPSVolumeButtonHandler: 53110330c9168ed325def93eabff39f0fe3e8082
Logging: beeb016c9c80cf77042d62e83495816847ef108b
MLImage: 489dfec109f21da8621b28d476401aaf7a0d4ff4
@@ -376,7 +376,7 @@ SPEC CHECKSUMS:
path_provider_foundation: c68054786f1b4f3343858c1e1d0caaded73f0be9
patrol: a7622350c42b3a944962fbfe052d07e3691789e5
PromisesObjC: 09985d6d70fbe7878040aa746d78236e6946d2ef
- Protobuf: d7f7c8329edf5eb8af65547a8ba3e9c1cee927d5
+ Protobuf: d03938d5f7e974a97964fb0567771e36fbe9ecce
SwiftNIO: 829958aab300642625091f82fc2f49cb7cf4ef24
SwiftNIOConcurrencyHelpers: 697370136789b1074e4535eaae75cbd7f900370e
SwiftNIOCore: 473fdfe746534d7aa25766916459eeaf6f92ef49
diff --git a/example/ios/Runner.xcodeproj/project.pbxproj b/example/ios/Runner.xcodeproj/project.pbxproj
index feed26ba..acaaf594 100644
--- a/example/ios/Runner.xcodeproj/project.pbxproj
+++ b/example/ios/Runner.xcodeproj/project.pbxproj
@@ -356,6 +356,7 @@
files = (
);
inputPaths = (
+ "${TARGET_BUILD_DIR}/${INFOPLIST_PATH}",
);
name = "Thin Binary";
outputPaths = (
diff --git a/example/lib/ai_analysis_faces.dart b/example/lib/ai_analysis_faces.dart
index 9560e862..e8ffc519 100644
--- a/example/lib/ai_analysis_faces.dart
+++ b/example/lib/ai_analysis_faces.dart
@@ -66,8 +66,10 @@ class _CameraPageState extends State {
return Scaffold(
body: CameraAwesomeBuilder.previewOnly(
previewFit: CameraPreviewFit.contain,
- aspectRatio: CameraAspectRatios.ratio_1_1,
- sensor: Sensors.front,
+ sensorConfig: SensorConfig.single(
+ sensor: Sensor.position(SensorPosition.front),
+ aspectRatio: CameraAspectRatios.ratio_1_1,
+ ),
onImageForAnalysis: (img) => _analyzeImage(img),
imageAnalysisConfig: AnalysisConfig(
androidOptions: const AndroidAnalysisOptions.nv21(
@@ -138,7 +140,8 @@ class _MyPreviewDecoratorWidget extends StatelessWidget {
model: faceModelSnapshot.requireData,
previewSize: previewSize,
previewRect: previewRect,
- isBackCamera: snapshot.requireData.sensor == Sensors.back,
+ isBackCamera: snapshot.requireData.sensors.first.position ==
+ SensorPosition.back,
),
);
},
diff --git a/example/lib/analysis_image_filter.dart b/example/lib/analysis_image_filter.dart
index fdf38f1a..d2507dd1 100644
--- a/example/lib/analysis_image_filter.dart
+++ b/example/lib/analysis_image_filter.dart
@@ -43,8 +43,10 @@ class _CameraPageState extends State {
Widget build(BuildContext context) {
return Scaffold(
body: CameraAwesomeBuilder.analysisOnly(
- aspectRatio: CameraAspectRatios.ratio_1_1,
- sensor: Sensors.front,
+ sensorConfig: SensorConfig.single(
+ sensor: Sensor.position(SensorPosition.front),
+ aspectRatio: CameraAspectRatios.ratio_1_1,
+ ),
onImageForAnalysis: (img) async => _imageStreamController.add(img),
imageAnalysisConfig: AnalysisConfig(
androidOptions: const AndroidAnalysisOptions.yuv420(
diff --git a/example/lib/analysis_image_filter_picker.dart b/example/lib/analysis_image_filter_picker.dart
index 49088f30..c1080683 100644
--- a/example/lib/analysis_image_filter_picker.dart
+++ b/example/lib/analysis_image_filter_picker.dart
@@ -43,8 +43,10 @@ class _CameraPageState extends State {
Widget build(BuildContext context) {
return Scaffold(
body: CameraAwesomeBuilder.analysisOnly(
- aspectRatio: CameraAspectRatios.ratio_1_1,
- sensor: Sensors.front,
+ sensorConfig: SensorConfig.single(
+ sensor: Sensor.position(SensorPosition.front),
+ aspectRatio: CameraAspectRatios.ratio_1_1,
+ ),
onImageForAnalysis: (img) async => _imageStreamController.add(img),
imageAnalysisConfig: AnalysisConfig(
androidOptions: const AndroidAnalysisOptions.yuv420(
diff --git a/example/lib/camera_analysis_capabilities.dart b/example/lib/camera_analysis_capabilities.dart
index fd358cb4..122eefc5 100644
--- a/example/lib/camera_analysis_capabilities.dart
+++ b/example/lib/camera_analysis_capabilities.dart
@@ -1,4 +1,3 @@
-import 'package:camera_app/utils/file_utils.dart';
import 'package:camerawesome/camerawesome_plugin.dart';
import 'package:flutter/material.dart';
@@ -23,7 +22,7 @@ class CameraPage extends StatelessWidget {
@override
Widget build(BuildContext context) {
- const sensor = Sensors.back;
+ final sensor = Sensor.position(SensorPosition.back);
return Scaffold(
body: Container(
color: Colors.white,
@@ -31,8 +30,6 @@ class CameraPage extends StatelessWidget {
// Setting both video recording and image analysis is an error on Android if the camera is not of LEVEL 3
// See explanations: https://developer.android.com/training/camerax/architecture#combine-use-cases
saveConfig: SaveConfig.photoAndVideo(
- photoPathBuilder: () => path(CaptureMode.photo),
- videoPathBuilder: () => path(CaptureMode.video),
initialCaptureMode: CaptureMode.video,
),
onImageForAnalysis: (image) async {
@@ -44,12 +41,15 @@ class CameraPage extends StatelessWidget {
),
maxFramesPerSecond: 3,
),
- sensor: sensor,
+ sensorConfig: SensorConfig.single(
+ sensor: sensor,
+ ),
previewDecoratorBuilder: (state, _, __) {
return Center(
child: FutureBuilder(
future: CameraCharacteristics
- .isVideoRecordingAndImageAnalysisSupported(sensor),
+ .isVideoRecordingAndImageAnalysisSupported(
+ sensor.position!),
builder: (_, snapshot) {
print("___---___--- received result ${snapshot.data}");
if (snapshot.data == null) {
@@ -58,7 +58,7 @@ class CameraPage extends StatelessWidget {
return Padding(
padding: const EdgeInsets.all(20),
child: Text(
- 'Video recording AND image analysis at the same time ${snapshot.data! ? 'IS' : 'IS NOT'} supported on ${sensor.name} sensor',
+ 'Video recording AND image analysis at the same time ${snapshot.data! ? 'IS' : 'IS NOT'} supported on ${sensor.position?.name} sensor',
style: const TextStyle(
color: Colors.white,
fontSize: 20,
diff --git a/example/lib/custom_awesome_ui.dart b/example/lib/custom_awesome_ui.dart
index d659ed61..60220a08 100644
--- a/example/lib/custom_awesome_ui.dart
+++ b/example/lib/custom_awesome_ui.dart
@@ -1,4 +1,3 @@
-import 'package:camera_app/utils/file_utils.dart';
import 'package:camerawesome/camerawesome_plugin.dart';
import 'package:flutter/material.dart';
@@ -25,10 +24,11 @@ class CameraPage extends StatelessWidget {
Widget build(BuildContext context) {
return Scaffold(
body: CameraAwesomeBuilder.awesome(
- saveConfig: SaveConfig.photo(
- pathBuilder: () => path(CaptureMode.photo),
+ saveConfig: SaveConfig.photo(),
+ sensorConfig: SensorConfig.single(
+ sensor: Sensor.position(SensorPosition.back),
+ aspectRatio: CameraAspectRatios.ratio_1_1,
),
- aspectRatio: CameraAspectRatios.ratio_1_1,
previewFit: CameraPreviewFit.contain,
previewPadding: const EdgeInsets.only(left: 150, top: 100),
previewAlignment: Alignment.topRight,
diff --git a/example/lib/custom_theme.dart b/example/lib/custom_theme.dart
index 17a83d90..e338e195 100644
--- a/example/lib/custom_theme.dart
+++ b/example/lib/custom_theme.dart
@@ -1,5 +1,4 @@
import 'package:better_open_file/better_open_file.dart';
-import 'package:camera_app/utils/file_utils.dart';
import 'package:camerawesome/camerawesome_plugin.dart';
import 'package:flutter/material.dart';
@@ -27,12 +26,12 @@ class CameraPage extends StatelessWidget {
return Scaffold(
body: CameraAwesomeBuilder.awesome(
saveConfig: SaveConfig.photoAndVideo(
- photoPathBuilder: () => path(CaptureMode.photo),
- videoPathBuilder: () => path(CaptureMode.video),
initialCaptureMode: CaptureMode.photo,
),
filter: AwesomeFilter.AddictiveRed,
- aspectRatio: CameraAspectRatios.ratio_1_1,
+ sensorConfig: SensorConfig.single(
+ aspectRatio: CameraAspectRatios.ratio_1_1,
+ ),
previewFit: CameraPreviewFit.fitWidth,
// Buttons of CamerAwesome UI will use this theme
theme: AwesomeTheme(
@@ -60,7 +59,10 @@ class CameraPage extends StatelessWidget {
),
),
onMediaTap: (mediaCapture) {
- OpenFile.open(mediaCapture.filePath);
+ OpenFile.open(
+ mediaCapture.captureRequest
+ .when(single: (single) => single.file?.path),
+ );
},
),
);
diff --git a/example/lib/custom_ui_example_1.dart b/example/lib/custom_ui_example_1.dart
index 6e1c2f2e..0f71ca09 100644
--- a/example/lib/custom_ui_example_1.dart
+++ b/example/lib/custom_ui_example_1.dart
@@ -18,14 +18,7 @@ class CustomUiExample1 extends StatelessWidget {
RecordVideoUI(state, recording: true),
);
},
- saveConfig: SaveConfig.photoAndVideo(
- photoPathBuilder: () async {
- return "some/path.jpg";
- },
- videoPathBuilder: () async {
- return "some/path.mp4";
- },
- ),
+ saveConfig: SaveConfig.photoAndVideo(),
),
);
}
diff --git a/example/lib/custom_ui_example_2.dart b/example/lib/custom_ui_example_2.dart
index 8a65b389..f339b8e7 100644
--- a/example/lib/custom_ui_example_2.dart
+++ b/example/lib/custom_ui_example_2.dart
@@ -1,6 +1,5 @@
import 'dart:math';
-import 'package:camera_app/utils/file_utils.dart';
import 'package:camerawesome/camerawesome_plugin.dart';
import 'package:camerawesome/pigeon.dart';
import 'package:flutter/material.dart';
@@ -54,9 +53,7 @@ class CustomUiExample2 extends StatelessWidget {
],
);
},
- saveConfig: SaveConfig.photo(
- pathBuilder: () => path(CaptureMode.photo),
- ),
+ saveConfig: SaveConfig.photo(),
onPreviewTapBuilder: (state) => OnPreviewTap(
onTap: (Offset position, PreviewSize flutterPreviewSize,
PreviewSize pixelPreviewSize) {
diff --git a/example/lib/custom_ui_example_3.dart b/example/lib/custom_ui_example_3.dart
index d9374771..6722e248 100644
--- a/example/lib/custom_ui_example_3.dart
+++ b/example/lib/custom_ui_example_3.dart
@@ -1,5 +1,4 @@
import 'package:better_open_file/better_open_file.dart';
-import 'package:camera_app/utils/file_utils.dart';
import 'package:camerawesome/camerawesome_plugin.dart';
import 'package:flutter/material.dart';
@@ -22,9 +21,7 @@ class CustomUiExample3 extends StatelessWidget {
RecordVideoUI(state, recording: true),
);
},
- saveConfig: SaveConfig.video(
- pathBuilder: () => path(CaptureMode.video),
- ),
+ saveConfig: SaveConfig.video(),
),
);
}
@@ -67,7 +64,10 @@ class RecordVideoUI extends StatelessWidget {
child: CustomMediaPreview(
mediaCapture: snapshot.data,
onMediaTap: (mediaCapture) {
- OpenFile.open(mediaCapture.filePath);
+ OpenFile.open(
+ mediaCapture.captureRequest
+ .when(single: (single) => single.file?.path),
+ );
},
),
);
diff --git a/example/lib/drivable_camera.dart b/example/lib/drivable_camera.dart
index f5196a28..17c19bdb 100644
--- a/example/lib/drivable_camera.dart
+++ b/example/lib/drivable_camera.dart
@@ -1,17 +1,14 @@
import 'package:camerawesome/camerawesome_plugin.dart';
-import 'package:camerawesome/pigeon.dart';
import 'package:flutter/material.dart';
class DrivableCamera extends StatelessWidget {
final SaveConfig saveConfig;
- final Sensors sensor;
- final ExifPreferences? exifPreferences;
+ final List sensors;
const DrivableCamera({
super.key,
required this.saveConfig,
- required this.sensor,
- this.exifPreferences,
+ required this.sensors,
});
@override
@@ -21,8 +18,9 @@ class DrivableCamera extends StatelessWidget {
body: CameraAwesomeBuilder.awesome(
saveConfig: saveConfig,
onMediaTap: (media) {},
- sensor: sensor,
- exifPreferences: exifPreferences,
+ sensorConfig: sensors.length == 1
+ ? SensorConfig.single(sensor: sensors.first)
+ : SensorConfig.multiple(sensors: sensors),
),
),
);
diff --git a/example/lib/main.dart b/example/lib/main.dart
index 16f686f8..2daa235b 100644
--- a/example/lib/main.dart
+++ b/example/lib/main.dart
@@ -1,7 +1,10 @@
+import 'dart:io';
+
import 'package:better_open_file/better_open_file.dart';
-import 'package:camera_app/utils/file_utils.dart';
import 'package:camerawesome/camerawesome_plugin.dart';
+import 'package:camerawesome/pigeon.dart';
import 'package:flutter/material.dart';
+import 'package:path_provider/path_provider.dart';
void main() {
runApp(const CameraAwesomeApp());
@@ -29,17 +32,55 @@ class CameraPage extends StatelessWidget {
color: Colors.white,
child: CameraAwesomeBuilder.awesome(
saveConfig: SaveConfig.photoAndVideo(
- photoPathBuilder: () => path(CaptureMode.photo),
- videoPathBuilder: () => path(CaptureMode.video),
initialCaptureMode: CaptureMode.photo,
+ mirrorFrontCamera: true,
+ photoPathBuilder: (sensors) async {
+ final Directory extDir = await getTemporaryDirectory();
+ final testDir = await Directory(
+ '${extDir.path}/camerawesome',
+ ).create(recursive: true);
+ if (sensors.length == 1) {
+ final String filePath =
+ '${testDir.path}/${DateTime.now().millisecondsSinceEpoch}.jpg';
+ return SingleCaptureRequest(filePath, sensors.first);
+ } else {
+ // Separate pictures taken with front and back camera
+ return MultipleCaptureRequest(
+ {
+ for (final sensor in sensors)
+ sensor:
+ '${testDir.path}/${sensor.position == SensorPosition.front ? 'front_' : "back_"}${DateTime.now().millisecondsSinceEpoch}.jpg',
+ },
+ );
+ }
+ },
+ videoOptions: VideoOptions(
+ enableAudio: true,
+ ios: CupertinoVideoOptions(
+ fps: 10,
+ ),
+ android: AndroidVideoOptions(
+ bitrate: 6000000,
+ quality: VideoRecordingQuality.fhd,
+ fallbackStrategy: QualityFallbackStrategy.lower,
+ ),
+ ),
+ exifPreferences: ExifPreferences(saveGPSLocation: true),
+ ),
+ sensorConfig: SensorConfig.single(
+ sensor: Sensor.position(SensorPosition.back),
+ flashMode: FlashMode.auto,
+ aspectRatio: CameraAspectRatios.ratio_4_3,
+ zoom: 0.0,
),
enablePhysicalButton: true,
- filter: AwesomeFilter.AddictiveRed,
- flashMode: FlashMode.auto,
- aspectRatio: CameraAspectRatios.ratio_16_9,
+ // filter: AwesomeFilter.AddictiveRed,
previewFit: CameraPreviewFit.fitWidth,
onMediaTap: (mediaCapture) {
- OpenFile.open(mediaCapture.filePath);
+ OpenFile.open(
+ mediaCapture.captureRequest
+ .when(single: (single) => single.file?.path),
+ );
},
),
),
diff --git a/example/lib/multi_camera.dart b/example/lib/multi_camera.dart
new file mode 100644
index 00000000..2e61ec26
--- /dev/null
+++ b/example/lib/multi_camera.dart
@@ -0,0 +1,355 @@
+import 'dart:io';
+import 'dart:math';
+
+import 'package:better_open_file/better_open_file.dart';
+import 'package:camerawesome/camerawesome_plugin.dart';
+import 'package:flutter/material.dart';
+import 'package:video_player/video_player.dart';
+
+void main() {
+ runApp(const CameraAwesomeApp());
+}
+
+class CameraAwesomeApp extends StatelessWidget {
+ const CameraAwesomeApp({super.key});
+
+ @override
+ Widget build(BuildContext context) {
+ return MaterialApp(
+ title: 'camerAwesome',
+ // home: CameraPage(),
+ onGenerateRoute: (settings) {
+ if (settings.name == '/') {
+ return MaterialPageRoute(
+ builder: (context) => const CameraPage(),
+ );
+ } else if (settings.name == '/gallery') {
+ final multipleCaptureRequest =
+ settings.arguments as MultipleCaptureRequest;
+ return MaterialPageRoute(
+ builder: (context) => GalleryPage(
+ multipleCaptureRequest: multipleCaptureRequest,
+ ),
+ );
+ }
+ return null;
+ },
+ );
+ }
+}
+
+class CameraPage extends StatefulWidget {
+ const CameraPage({super.key});
+
+ @override
+ State createState() => _CameraPageState();
+}
+
+class _CameraPageState extends State {
+ SensorDeviceData? sensorDeviceData;
+ bool? isMultiCamSupported;
+ PipShape shape = PipShape.circle;
+
+ @override
+ void initState() {
+ super.initState();
+
+ CamerawesomePlugin.getSensors().then((value) {
+ setState(() {
+ sensorDeviceData = value;
+ });
+ });
+
+ CamerawesomePlugin.isMultiCamSupported().then((value) {
+ setState(() {
+ isMultiCamSupported = value;
+ });
+ });
+ }
+
+ @override
+ Widget build(BuildContext context) {
+ final screenSize = MediaQuery.of(context).size;
+ return Scaffold(
+ body: Container(
+ color: Colors.white,
+ child: sensorDeviceData != null && isMultiCamSupported != null
+ ? CameraAwesomeBuilder.awesome(
+ saveConfig: SaveConfig.photoAndVideo(
+ // initialCaptureMode: CaptureMode.video,
+ ),
+ sensorConfig: isMultiCamSupported == true
+ ? SensorConfig.multiple(
+ sensors: [
+ // Android only supports two sensors at a time
+ Sensor.position(SensorPosition.back),
+ Sensor.position(SensorPosition.front),
+ if (Platform.isIOS) Sensor.type(SensorType.telephoto),
+ ],
+ flashMode: FlashMode.auto,
+ aspectRatio: CameraAspectRatios.ratio_16_9,
+ )
+ : SensorConfig.single(
+ sensor: Sensor.position(SensorPosition.back),
+ flashMode: FlashMode.auto,
+ aspectRatio: CameraAspectRatios.ratio_16_9,
+ ),
+ // TODO: create factory for multi cam & single
+ // sensors: sensorDeviceData!.availableSensors
+ // .map((e) => Sensor.id(e.uid))
+ // .toList(),
+ previewFit: CameraPreviewFit.fitWidth,
+ onMediaTap: (mediaCapture) {
+ mediaCapture.captureRequest.when(
+ single: (single) => OpenFile.open(single.file?.path),
+ multiple: (multiple) => Navigator.of(context).pushNamed(
+ '/gallery',
+ arguments: multiple,
+ ),
+ );
+ },
+ pictureInPictureConfigBuilder: (index, sensor) {
+ const width = 200.0;
+ return PictureInPictureConfig(
+ isDraggable: false,
+ startingPosition: Offset(
+ screenSize.width - width - 20.0 * index,
+ screenSize.height - 356,
+ ),
+ onTap: () {
+ print('on preview tap');
+ },
+ sensor: sensor,
+ pictureInPictureBuilder: (preview, aspectRatio) {
+ return SizedBox(
+ width: width,
+ height: width,
+ child: ClipPath(
+ clipper: _MyCustomPipClipper(
+ width: width,
+ height: width * aspectRatio,
+ shape: shape,
+ ),
+ child: SizedBox(
+ width: width,
+ child: preview,
+ ),
+ ),
+ );
+ },
+ );
+ },
+ previewDecoratorBuilder: (state, _, __) {
+ return Column(
+ mainAxisSize: MainAxisSize.min,
+ mainAxisAlignment: MainAxisAlignment.center,
+ crossAxisAlignment: CrossAxisAlignment.start,
+ children: [
+ Container(
+ color: Colors.white70,
+ margin: const EdgeInsets.only(left: 8),
+ child: const Text("Change picture in picture's shape:"),
+ ),
+ GridView.builder(
+ gridDelegate:
+ const SliverGridDelegateWithFixedCrossAxisCount(
+ crossAxisCount: 3,
+ childAspectRatio: 16 / 9,
+ ),
+ shrinkWrap: true,
+ padding: EdgeInsets.zero,
+ itemCount: PipShape.values.length,
+ itemBuilder: (context, index) {
+ final shape = PipShape.values[index];
+ return GestureDetector(
+ onTap: () {
+ setState(() {
+ this.shape = shape;
+ });
+ },
+ child: Container(
+ color: Colors.red.withOpacity(0.5),
+ margin: const EdgeInsets.all(8.0),
+ child: Center(
+ child: Text(
+ shape.name,
+ textAlign: TextAlign.center,
+ ),
+ ),
+ ),
+ );
+ },
+ ),
+ ],
+ );
+ },
+ )
+ : const SizedBox.shrink(),
+ ),
+ );
+ }
+}
+
+enum PipShape {
+ square,
+ circle,
+ roundedSquare,
+ triangle,
+ hexagon;
+
+ Path getPath(Offset center, double width, double height) {
+ switch (this) {
+ case PipShape.square:
+ return Path()
+ ..addRect(Rect.fromCenter(
+ center: center,
+ width: min(width, height),
+ height: min(width, height),
+ ));
+ case PipShape.circle:
+ return Path()
+ ..addOval(Rect.fromCenter(
+ center: center,
+ width: min(width, height),
+ height: min(width, height),
+ ));
+ case PipShape.triangle:
+ return Path()
+ ..moveTo(center.dx, center.dy - min(width, height) / 2)
+ ..lineTo(center.dx + min(width, height) / 2,
+ center.dy + min(width, height) / 2)
+ ..lineTo(center.dx - min(width, height) / 2,
+ center.dy + min(width, height) / 2)
+ ..close();
+ case PipShape.roundedSquare:
+ return Path()
+ ..addRRect(RRect.fromRectAndRadius(
+ Rect.fromCenter(
+ center: center,
+ width: min(width, height),
+ height: min(width, height),
+ ),
+ const Radius.circular(20.0),
+ ));
+ case PipShape.hexagon:
+ return Path()
+ ..moveTo(center.dx, center.dy - min(width, height) / 2)
+ ..lineTo(center.dx + min(width, height) / 2,
+ center.dy - min(width, height) / 4)
+ ..lineTo(center.dx + min(width, height) / 2,
+ center.dy + min(width, height) / 4)
+ ..lineTo(center.dx, center.dy + min(width, height) / 2)
+ ..lineTo(center.dx - min(width, height) / 2,
+ center.dy + min(width, height) / 4)
+ ..lineTo(center.dx - min(width, height) / 2,
+ center.dy - min(width, height) / 4)
+ ..close();
+ }
+ }
+}
+
+class _MyCustomPipClipper extends CustomClipper {
+ final double width;
+ final double height;
+ final PipShape shape;
+
+ const _MyCustomPipClipper({
+ required this.width,
+ required this.height,
+ required this.shape,
+ });
+
+ @override
+ Path getClip(Size size) {
+ return shape.getPath(
+ size.center(Offset.zero),
+ width,
+ height,
+ );
+ }
+
+ @override
+ bool shouldReclip(covariant _MyCustomPipClipper oldClipper) {
+ return width != oldClipper.width ||
+ height != oldClipper.height ||
+ shape != oldClipper.shape;
+ }
+}
+
+class GalleryPage extends StatefulWidget {
+ final MultipleCaptureRequest multipleCaptureRequest;
+
+ const GalleryPage({super.key, required this.multipleCaptureRequest});
+
+ @override
+ State createState() => _GalleryPageState();
+}
+
+class _GalleryPageState extends State {
+ @override
+ Widget build(BuildContext context) {
+ return Scaffold(
+ appBar: AppBar(
+ title: const Text('Gallery'),
+ ),
+ body: GridView.builder(
+ gridDelegate: const SliverGridDelegateWithFixedCrossAxisCount(
+ crossAxisCount: 3,
+ ),
+ itemCount: widget.multipleCaptureRequest.fileBySensor.length,
+ itemBuilder: (context, index) {
+ final sensor =
+ widget.multipleCaptureRequest.fileBySensor.keys.toList()[index];
+ final file = widget.multipleCaptureRequest.fileBySensor[sensor];
+ return GestureDetector(
+ onTap: () => OpenFile.open(file.path),
+ child: file!.path.endsWith("jpg")
+ ? Image.file(
+ File(file.path),
+ fit: BoxFit.cover,
+ )
+ : VideoPreview(file: File(file.path)),
+ );
+ },
+ ),
+ );
+ }
+}
+
+class VideoPreview extends StatefulWidget {
+ final File file;
+
+ const VideoPreview({super.key, required this.file});
+
+ @override
+ State createState() {
+ return _VideoPreviewState();
+ }
+}
+
+class _VideoPreviewState extends State {
+ late VideoPlayerController _controller;
+
+ @override
+ void initState() {
+ super.initState();
+ _controller = VideoPlayerController.file(widget.file)
+ ..setLooping(true)
+ ..initialize().then((_) {
+ setState(() {});
+ _controller.play();
+ });
+ }
+
+ @override
+ Widget build(BuildContext context) {
+ return Center(
+ child: _controller.value.isInitialized
+ ? AspectRatio(
+ aspectRatio: _controller.value.aspectRatio,
+ child: VideoPlayer(_controller),
+ )
+ : const SizedBox.shrink(),
+ );
+ }
+}
diff --git a/example/lib/preview_overlay_example.dart b/example/lib/preview_overlay_example.dart
index 533d4f88..c4dcef5a 100644
--- a/example/lib/preview_overlay_example.dart
+++ b/example/lib/preview_overlay_example.dart
@@ -1,7 +1,6 @@
import 'dart:async';
import 'package:better_open_file/better_open_file.dart';
-import 'package:camera_app/utils/file_utils.dart';
import 'package:camera_app/utils/mlkit_utils.dart';
import 'package:camera_app/widgets/barcode_preview_overlay.dart';
import 'package:camerawesome/camerawesome_plugin.dart';
@@ -43,15 +42,18 @@ class _CameraPageState extends State {
color: Colors.white,
child: CameraAwesomeBuilder.awesome(
saveConfig: SaveConfig.photoAndVideo(
- photoPathBuilder: () => path(CaptureMode.photo),
- videoPathBuilder: () => path(CaptureMode.video),
initialCaptureMode: CaptureMode.photo,
),
- flashMode: FlashMode.auto,
- aspectRatio: CameraAspectRatios.ratio_16_9,
+ sensorConfig: SensorConfig.single(
+ flashMode: FlashMode.auto,
+ aspectRatio: CameraAspectRatios.ratio_16_9,
+ ),
previewFit: CameraPreviewFit.fitWidth,
onMediaTap: (mediaCapture) {
- OpenFile.open(mediaCapture.filePath);
+ OpenFile.open(
+ mediaCapture.captureRequest
+ .when(single: (single) => single.file?.path),
+ );
},
previewDecoratorBuilder: (state, previewSize, previewRect) {
return BarcodePreviewOverlay(
diff --git a/example/lib/run_drivable_camera.dart b/example/lib/run_drivable_camera.dart
new file mode 100644
index 00000000..91421ecf
--- /dev/null
+++ b/example/lib/run_drivable_camera.dart
@@ -0,0 +1,24 @@
+import 'package:camera_app/drivable_camera.dart';
+import 'package:camerawesome/camerawesome_plugin.dart';
+import 'package:flutter/material.dart';
+
+void main() {
+ runApp(const CameraAwesomeApp());
+}
+
+class CameraAwesomeApp extends StatelessWidget {
+ const CameraAwesomeApp({super.key});
+
+ @override
+ Widget build(BuildContext context) {
+ return MaterialApp(
+ title: 'camerAwesome',
+ home: DrivableCamera(
+ saveConfig: SaveConfig.photo(),
+ sensors: [
+ Sensor.position(SensorPosition.back),
+ ],
+ ),
+ );
+ }
+}
diff --git a/example/lib/subroute_camera.dart b/example/lib/subroute_camera.dart
index f8539b60..177a150b 100644
--- a/example/lib/subroute_camera.dart
+++ b/example/lib/subroute_camera.dart
@@ -44,16 +44,20 @@ class CameraPage extends StatelessWidget {
Expanded(
child: CameraAwesomeBuilder.awesome(
saveConfig: SaveConfig.photoAndVideo(
- photoPathBuilder: () => _path(CaptureMode.photo),
- videoPathBuilder: () => _path(CaptureMode.video),
initialCaptureMode: CaptureMode.photo,
),
filter: AwesomeFilter.AddictiveRed,
- flashMode: FlashMode.auto,
- aspectRatio: CameraAspectRatios.ratio_16_9,
+ sensorConfig: SensorConfig.single(
+ flashMode: FlashMode.auto,
+ aspectRatio: CameraAspectRatios.ratio_16_9,
+ ),
previewFit: CameraPreviewFit.fitWidth,
onMediaTap: (mediaCapture) {
- OpenFile.open(mediaCapture.filePath);
+ OpenFile.open(
+ mediaCapture.captureRequest.when(
+ single: (single) => single.file?.path,
+ ),
+ );
},
),
),
diff --git a/example/lib/widgets/custom_media_preview.dart b/example/lib/widgets/custom_media_preview.dart
index 31194f07..f7b11cd4 100644
--- a/example/lib/widgets/custom_media_preview.dart
+++ b/example/lib/widgets/custom_media_preview.dart
@@ -3,6 +3,7 @@ import 'dart:io';
import 'package:camera_app/widgets/mini_video_player.dart';
import 'package:camerawesome/camerawesome_plugin.dart';
import 'package:flutter/cupertino.dart';
+import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
class CustomMediaPreview extends StatelessWidget {
@@ -58,18 +59,61 @@ class CustomMediaPreview extends StatelessWidget {
);
case MediaCaptureStatus.success:
if (mediaCapture!.isPicture) {
- return Ink.image(
- fit: BoxFit.cover,
- image: ResizeImage(
- FileImage(
- File(mediaCapture.filePath),
+ if (kIsWeb) {
+ // TODO Check if that works
+ return FutureBuilder(
+ future: mediaCapture.captureRequest.when(
+ single: (single) => single.file!.readAsBytes(),
+ multiple: (multiple) =>
+ multiple.fileBySensor.values.first!.readAsBytes(),
+ ),
+ builder: (_, snapshot) {
+ if (snapshot.hasData) {
+ return Image.memory(
+ snapshot.requireData,
+ fit: BoxFit.cover,
+ width: 300,
+ );
+ } else {
+ return Platform.isIOS
+ ? const CupertinoActivityIndicator(
+ color: Colors.white,
+ )
+ : const Padding(
+ padding: EdgeInsets.all(8.0),
+ child: CircularProgressIndicator(
+ color: Colors.white,
+ strokeWidth: 2.0,
+ ),
+ );
+ }
+ });
+ } else {
+ return Image(
+ fit: BoxFit.cover,
+ image: ResizeImage(
+ FileImage(
+ File(
+ mediaCapture.captureRequest.when(
+ single: (single) => single.file!.path,
+ multiple: (multiple) =>
+ multiple.fileBySensor.values.first!.path,
+ ),
+ ),
+ ),
+ width: 300,
),
- width: 300,
- ),
- );
+ );
+ }
} else {
return Ink(
- child: MiniVideoPlayer(filePath: mediaCapture.filePath),
+ child: MiniVideoPlayer(
+ filePath: mediaCapture.captureRequest.when(
+ single: (single) => single.file!.path,
+ multiple: (multiple) =>
+ multiple.fileBySensor.values.first!.path,
+ ),
+ ),
);
}
case MediaCaptureStatus.failure:
diff --git a/example/pubspec.lock b/example/pubspec.lock
index 30e07fb3..2faad5bf 100644
--- a/example/pubspec.lock
+++ b/example/pubspec.lock
@@ -21,10 +21,10 @@ packages:
dependency: transitive
description:
name: async
- sha256: bfe67ef28df125b7dddcea62755991f807aa39a2492a23e1550161692950bbe0
+ sha256: "947bfcf187f74dbc5e146c9eb9c0f10c9f8b30743e341481c1e2ed3ecc18c20c"
url: "https://pub.dev"
source: hosted
- version: "2.10.0"
+ version: "2.11.0"
better_open_file:
dependency: "direct main"
description:
@@ -47,7 +47,7 @@ packages:
path: ".."
relative: true
source: path
- version: "1.4.0"
+ version: "2.0.0-dev.1"
carousel_slider:
dependency: transitive
description:
@@ -60,10 +60,10 @@ packages:
dependency: transitive
description:
name: characters
- sha256: e6a326c8af69605aec75ed6c187d06b349707a27fbff8222ca9cc2cff167975c
+ sha256: "04a925763edad70e8443c99234dc3328f442e811f1d8fd1a72f1c8ad0f69a605"
url: "https://pub.dev"
source: hosted
- version: "1.2.1"
+ version: "1.3.0"
clock:
dependency: transitive
description:
@@ -76,10 +76,10 @@ packages:
dependency: transitive
description:
name: collection
- sha256: cfc915e6923fe5ce6e153b0723c753045de46de1b4d63771530504004a45fae0
+ sha256: "4a07be6cb69c84d677a6c3096fcf960cc3285a8330b4603e0d463d15d9bd934c"
url: "https://pub.dev"
source: hosted
- version: "1.17.0"
+ version: "1.17.1"
colorfilter_generator:
dependency: transitive
description:
@@ -96,6 +96,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "3.1.1"
+ cross_file:
+ dependency: transitive
+ description:
+ name: cross_file
+ sha256: "0b0036e8cccbfbe0555fd83c1d31a6f30b77a96b598b35a5d36dd41f718695e9"
+ url: "https://pub.dev"
+ source: hosted
+ version: "0.3.3+4"
crypto:
dependency: transitive
description:
@@ -116,10 +124,10 @@ packages:
dependency: "direct dev"
description:
name: exif
- sha256: "542fd8dd8eda3dff65be415f38370541aecf9eb3663e0679d9da4689f6b16c8f"
+ sha256: c154e074234eb6ac4a09831072b4783b55f5f9e84c4b344a472a6d6aa83a9982
url: "https://pub.dev"
source: hosted
- version: "3.1.2"
+ version: "3.1.4"
fake_async:
dependency: transitive
description:
@@ -282,10 +290,10 @@ packages:
dependency: transitive
description:
name: js
- sha256: "5528c2f391ededb7775ec1daa69e65a2d61276f7552de2b5f7b8d34ee9fd4ab7"
+ sha256: f2c445dce49627136094980615a031419f7f3eb393237e4ecd97ac15dea343f3
url: "https://pub.dev"
source: hosted
- version: "0.6.5"
+ version: "0.6.7"
json_annotation:
dependency: transitive
description:
@@ -306,10 +314,10 @@ packages:
dependency: transitive
description:
name: matcher
- sha256: "16db949ceee371e9b99d22f88fa3a73c4e59fd0afed0bd25fc336eb76c198b72"
+ sha256: "6501fbd55da300384b768785b83e5ce66991266cec21af89ab9ae7f5ce1c4cbb"
url: "https://pub.dev"
source: hosted
- version: "0.12.13"
+ version: "0.12.15"
material_color_utilities:
dependency: transitive
description:
@@ -327,29 +335,29 @@ packages:
source: hosted
version: "1.0.4"
meta:
- dependency: transitive
+ dependency: "direct main"
description:
name: meta
- sha256: "6c268b42ed578a53088d834796959e4a1814b5e9e164f147f580a386e5decf42"
+ sha256: "3c74dbf8763d36539f114c799d8a2d87343b5067e9d796ca22b5eb8437090ee3"
url: "https://pub.dev"
source: hosted
- version: "1.8.0"
+ version: "1.9.1"
path:
dependency: transitive
description:
name: path
- sha256: db9d4f58c908a4ba5953fcee2ae317c94889433e5024c27ce74a37f94267945b
+ sha256: "8829d8a55c13fc0e37127c29fedf290c102f4e40ae94ada574091fe0ff96c917"
url: "https://pub.dev"
source: hosted
- version: "1.8.2"
+ version: "1.8.3"
path_provider:
dependency: "direct main"
description:
name: path_provider
- sha256: "04890b994ee89bfa80bf3080bfec40d5a92c5c7a785ebb02c13084a099d2b6f9"
+ sha256: c7edf82217d4b2952b2129a61d3ad60f1075b9299e629e149a8d2e39c2e6aad4
url: "https://pub.dev"
source: hosted
- version: "2.0.13"
+ version: "2.0.14"
path_provider_android:
dependency: transitive
description:
@@ -394,10 +402,10 @@ packages:
dependency: "direct dev"
description:
name: patrol
- sha256: f81b997b845eb73c7d6843ec8ec2b36ef4e8d14f4e32eaa6557715a4907beedd
+ sha256: a81546fbe25de76e9256e7340d1aa0e1603e6f7b0e4bb91b90a2e08e22992587
url: "https://pub.dev"
source: hosted
- version: "1.0.3"
+ version: "1.1.0"
petitparser:
dependency: transitive
description:
@@ -463,10 +471,10 @@ packages:
dependency: transitive
description:
name: sprintf
- sha256: ec76d38910b6f1c854ce1353c62d37e7ef82b53dc5ab048c25400d35970776d1
+ sha256: "1fc9ffe69d4df602376b52949af107d8f5703b77cda567c4d7d86a0693120f23"
url: "https://pub.dev"
source: hosted
- version: "6.0.2"
+ version: "7.0.0"
stack_trace:
dependency: transitive
description:
@@ -511,10 +519,10 @@ packages:
dependency: transitive
description:
name: test_api
- sha256: ad540f65f92caa91bf21dfc8ffb8c589d6e4dc0c2267818b4cc2792857706206
+ sha256: eb6ac1540b26de412b3403a163d919ba86f6a973fe6cc50ae3541b80092fdcfb
url: "https://pub.dev"
source: hosted
- version: "0.4.16"
+ version: "0.5.1"
typed_data:
dependency: transitive
description:
@@ -535,10 +543,10 @@ packages:
dependency: "direct main"
description:
name: video_player
- sha256: "5d1ac207b25f2ea6f29dbd0cd48040b28924ed68d5fe41b4ade4bfa877b8de6f"
+ sha256: de95f0e9405f29b5582573d4166132e71f83b3158aac14e8ee5767a54f4f1fbd
url: "https://pub.dev"
source: hosted
- version: "2.5.0"
+ version: "2.6.1"
video_player_android:
dependency: transitive
description:
@@ -559,10 +567,10 @@ packages:
dependency: transitive
description:
name: video_player_platform_interface
- sha256: "42bb75de5e9b79e1f20f1d95f688fac0f95beac4d89c6eb2cd421724d4432dae"
+ sha256: a8c4dcae2a7a6e7cc1d7f9808294d968eca1993af34a98e95b9bdfa959bec684
url: "https://pub.dev"
source: hosted
- version: "6.0.1"
+ version: "6.1.0"
video_player_web:
dependency: transitive
description:
@@ -575,18 +583,18 @@ packages:
dependency: transitive
description:
name: vm_service
- sha256: e7fb6c2282f7631712b69c19d1bff82f3767eea33a2321c14fa59ad67ea391c7
+ sha256: f6deed8ed625c52864792459709183da231ebf66ff0cf09e69b573227c377efe
url: "https://pub.dev"
source: hosted
- version: "9.4.0"
+ version: "11.3.0"
webdriver:
dependency: transitive
description:
name: webdriver
- sha256: ef67178f0cc7e32c1494645b11639dd1335f1d18814aa8435113a92e9ef9d841
+ sha256: "3c923e918918feeb90c4c9fdf1fe39220fa4c0e8e2c0fffaded174498ef86c49"
url: "https://pub.dev"
source: hosted
- version: "3.0.1"
+ version: "3.0.2"
win32:
dependency: transitive
description:
@@ -612,5 +620,5 @@ packages:
source: hosted
version: "6.2.2"
sdks:
- dart: ">=2.18.2 <3.0.0"
+ dart: ">=3.0.0-0 <4.0.0"
flutter: ">=3.3.0"
diff --git a/example/pubspec.yaml b/example/pubspec.yaml
index c4117478..05205362 100644
--- a/example/pubspec.yaml
+++ b/example/pubspec.yaml
@@ -5,18 +5,20 @@ version: 1.0.0+1
environment:
sdk: '>=2.18.2 <3.0.0'
+ flutter: '>=3.3.0'
dependencies:
flutter:
sdk: flutter
camerawesome:
path: ../
- path_provider: ^2.0.11
+ path_provider: ^2.0.14
better_open_file: ^3.6.4
google_mlkit_face_detection: ^0.5.0
google_mlkit_barcode_scanning: ^0.5.0
google_mlkit_text_recognition: ^0.5.0
image: ^4.0.15
+ meta: ^1.8.0
rxdart: ^0.27.7
video_player: ^2.5.0
@@ -24,7 +26,7 @@ dev_dependencies:
exif: ^3.1.2
flutter_test:
sdk: flutter
- patrol: ^1.0.3
+ patrol: ^1.0.8
flutter_lints: ^2.0.1
flutter:
diff --git a/example/scripts/run_firebase_test_lab_multicam.sh b/example/scripts/run_firebase_test_lab_multicam.sh
new file mode 100755
index 00000000..4606cacd
--- /dev/null
+++ b/example/scripts/run_firebase_test_lab_multicam.sh
@@ -0,0 +1,21 @@
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+pushd "${SCRIPT_DIR}/../android"
+# flutter build generates files in android/ for building the app
+flutter build apk
+./gradlew app:assembleDebugAndroidTest
+./gradlew app:assembleDebug -Ptarget=`pwd`/../integration_test/concurrent_camera_test.dart
+
+popd
+
+
+gcloud auth activate-service-account --key-file="${SCRIPT_DIR}/../../camerawesome-6e777-13db0fddbbe5.json"
+gcloud --quiet config set project camerawesome-6e777
+
+gcloud firebase test android run --type instrumentation \
+ --app build/app/outputs/apk/debug/app-debug.apk \
+ --test build/app/outputs/apk/androidTest/debug/app-debug-androidTest.apk \
+ --device model=cheetah,version=33,locale=en,orientation=portrait \
+ --timeout 15m
+# --results-bucket= \
+# --results-dir=
+
diff --git a/example/scripts/run_native_android_multicam_tests.sh b/example/scripts/run_native_android_multicam_tests.sh
new file mode 100755
index 00000000..2007ed85
--- /dev/null
+++ b/example/scripts/run_native_android_multicam_tests.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+# Runs the native Android multi-camera instrumentation tests on a connected device.
+set -euo pipefail
+
+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+pushd "${SCRIPT_DIR}/../android"
+./gradlew :app:connectedDebugAndroidTest -Ptarget="$(pwd)/../integration_test/concurrent_camera_test.dart"
+popd
\ No newline at end of file
diff --git a/ios/Classes/CameraPreview/CameraDeviceInfo/CameraDeviceInfo.h b/ios/Classes/CameraPreview/CameraDeviceInfo/CameraDeviceInfo.h
new file mode 100644
index 00000000..eca0fc24
--- /dev/null
+++ b/ios/Classes/CameraPreview/CameraDeviceInfo/CameraDeviceInfo.h
@@ -0,0 +1,23 @@
+//
+//  CameraDeviceInfo.h
+//  camerawesome
+//
+//  Created by Dimitri Dessus on 29/03/2023.
+//
+
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVFoundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/// Bundles every AVFoundation object attached to a single camera sensor in
+/// the multi-cam session: the device itself, its session input, the video
+/// data output, the connection between them and the still-photo output.
+@interface CameraDeviceInfo : NSObject
+
+@property (nonatomic, strong) AVCaptureDevice *device;
+@property (nonatomic, strong) AVCaptureDeviceInput *deviceInput;
+@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
+@property (nonatomic, strong) AVCaptureConnection *captureConnection;
+@property (nonatomic, strong) AVCapturePhotoOutput *capturePhotoOutput;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/ios/Classes/CameraPreview/CameraDeviceInfo/CameraDeviceInfo.m b/ios/Classes/CameraPreview/CameraDeviceInfo/CameraDeviceInfo.m
new file mode 100644
index 00000000..36d05d88
--- /dev/null
+++ b/ios/Classes/CameraPreview/CameraDeviceInfo/CameraDeviceInfo.m
@@ -0,0 +1,12 @@
+//
+//  CameraDeviceInfo.m
+//  camerawesome
+//
+//  Created by Dimitri Dessus on 29/03/2023.
+//
+
+#import "CameraDeviceInfo.h"
+
+// Plain data holder: every property is auto-synthesized, no custom behavior.
+@implementation CameraDeviceInfo
+
+@end
diff --git a/ios/Classes/CameraPreview/CameraPreviewTexture/CameraPreviewTexture.h b/ios/Classes/CameraPreview/CameraPreviewTexture/CameraPreviewTexture.h
new file mode 100644
index 00000000..6b0c25ac
--- /dev/null
+++ b/ios/Classes/CameraPreview/CameraPreviewTexture/CameraPreviewTexture.h
@@ -0,0 +1,25 @@
+//
+//  CameraPreviewTexture.h
+//  camerawesome
+//
+//  Created by Dimitri Dessus on 28/03/2023.
+//
+
+#include <stdatomic.h>
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVFoundation.h>
+#import <Flutter/Flutter.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/// Bridges camera frames to a Flutter texture: the capture callback stores
+/// the most recent pixel buffer atomically, and the Flutter engine takes it
+/// over through -copyPixelBuffer (FlutterTexture).
+@interface CameraPreviewTexture : NSObject <FlutterTexture>
+
+- (instancetype)init;
+
+/// Replaces the stored frame with the pixel buffer carried by `sampleBuffer`.
+/// Safe to call from the capture queue while the engine reads concurrently.
+- (void)updateBuffer:(CMSampleBufferRef)sampleBuffer;
+
+/// Latest frame, exchanged atomically between producer and consumer.
+@property(readonly) _Atomic(CVPixelBufferRef) latestPixelBuffer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/ios/Classes/CameraPreview/CameraPreviewTexture/CameraPreviewTexture.m b/ios/Classes/CameraPreview/CameraPreviewTexture/CameraPreviewTexture.m
new file mode 100644
index 00000000..df5ee9a8
--- /dev/null
+++ b/ios/Classes/CameraPreview/CameraPreviewTexture/CameraPreviewTexture.m
@@ -0,0 +1,49 @@
+//
+//  CameraPreviewTexture.m
+//  camerawesome
+//
+//  Created by Dimitri Dessus on 28/03/2023.
+//
+
+#import "CameraPreviewTexture.h"
+
+@implementation CameraPreviewTexture
+
+- (instancetype)init {
+  self = [super init];
+  return self;
+}
+
+/// Stores the frame contained in `sampleBuffer` as the latest pixel buffer.
+- (void)updateBuffer:(CMSampleBufferRef)sampleBuffer {
+  CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+  if (newBuffer == nil) {
+    // Sample buffers without an image buffer (e.g. dropped frames) would
+    // otherwise crash in CFRetain(NULL).
+    return;
+  }
+  CFRetain(newBuffer);
+
+  // Atomically publish the new frame, then release whichever frame it replaced.
+  CVPixelBufferRef old = atomic_load(&_latestPixelBuffer);
+  while (!atomic_compare_exchange_strong(&_latestPixelBuffer, &old, newBuffer)) {
+    old = atomic_load(&_latestPixelBuffer);
+  }
+  if (old != nil) {
+    CFRelease(old);
+  }
+}
+
+/// FlutterTexture callback: atomically takes the latest frame out of the slot
+/// and transfers ownership to the caller (the engine releases it after use).
+- (CVPixelBufferRef _Nullable)copyPixelBuffer {
+  CVPixelBufferRef pixelBuffer = atomic_load(&_latestPixelBuffer);
+  while (!atomic_compare_exchange_strong(&_latestPixelBuffer, &pixelBuffer, nil)) {
+    pixelBuffer = atomic_load(&_latestPixelBuffer);
+  }
+
+  return pixelBuffer;
+}
+
+- (void)dealloc {
+  // Release the last retained frame, if it was never handed to Flutter.
+  // Direct ivar access: no property accessors from dealloc.
+  if (_latestPixelBuffer) {
+    CFRelease(_latestPixelBuffer);
+  }
+}
+
+@end
diff --git a/ios/Classes/CameraPreview/MultiCameraPreview/MultiCameraPreview.h b/ios/Classes/CameraPreview/MultiCameraPreview/MultiCameraPreview.h
new file mode 100644
index 00000000..fec517af
--- /dev/null
+++ b/ios/Classes/CameraPreview/MultiCameraPreview/MultiCameraPreview.h
@@ -0,0 +1,72 @@
+//
+//  MultiCameraPreview.h
+//  camerawesome
+//
+//  Created by Dimitri Dessus on 28/03/2023.
+//
+
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVFoundation.h>
+#import "CameraPreviewTexture.h"
+#import "CameraQualities.h"
+#import "CameraDeviceInfo.h"
+#import "CameraPictureController.h"
+#import "MotionController.h"
+#import "ImageStreamController.h"
+#import "PhysicalButtonController.h"
+#import "AspectRatio.h"
+#import "LocationController.h"
+#import "CameraFlash.h"
+#import "CaptureModes.h"
+#import "SensorUtils.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/// Drives an AVCaptureMultiCamSession: one texture + device bundle per
+/// sensor. Flash / zoom / focus / exposure are applied to the first (main)
+/// device. (The header no longer imports itself, which it originally did.)
+@interface MultiCameraPreview : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
+
+@property (nonatomic, strong) AVCaptureMultiCamSession *cameraSession;
+
+@property (nonatomic, strong) NSArray<PigeonSensor *> *sensors;
+@property (nonatomic, strong) NSMutableArray<CameraDeviceInfo *> *devices;
+@property (nonatomic, strong) dispatch_queue_t dispatchQueue;
+@property(readonly, nonatomic) AVCaptureFlashMode flashMode;
+@property(readonly, nonatomic) AVCaptureTorchMode torchMode;
+@property(readonly, nonatomic) AspectRatio aspectRatio;
+@property(readonly, nonatomic) LocationController *locationController;
+@property(readonly, nonatomic) MotionController *motionController;
+@property(readonly, nonatomic) PhysicalButtonController *physicalButtonController;
+@property(readonly, nonatomic) bool saveGPSLocation;
+@property(readonly, nonatomic) bool mirrorFrontCamera;
+// Was `(nonatomic, nonatomic)` — duplicate attribute fixed to `strong`.
+@property(nonatomic, strong) NSMutableArray<CameraPreviewTexture *> *textures;
+@property(nonatomic, copy) void (^onPreviewFrameAvailable)(NSNumber * _Nullable);
+
+- (instancetype)initWithSensors:(NSArray<PigeonSensor *> *)sensors mirrorFrontCamera:(BOOL)mirrorFrontCamera
+           enablePhysicalButton:(BOOL)enablePhysicalButton
+                aspectRatioMode:(AspectRatio)aspectRatioMode
+                    captureMode:(CaptureModes)captureMode
+                  dispatchQueue:(dispatch_queue_t)dispatchQueue;
+- (void)configInitialSession:(NSArray<PigeonSensor *> *)sensors;
+- (void)setSensors:(NSArray<PigeonSensor *> *)sensors;
+- (void)setMirrorFrontCamera:(bool)value error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
+- (void)setBrightness:(NSNumber *)brightness error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
+- (void)setFlashMode:(CameraFlashMode)flashMode error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
+- (void)focusOnPoint:(CGPoint)position preview:(CGSize)preview error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
+- (void)setZoom:(float)value error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
+- (void)start;
+- (void)stop;
+- (void)refresh;
+- (CGFloat)getMaxZoom;
+- (void)setPreviewSize:(CGSize)previewSize error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
+// NOTE(review): method name keeps the existing "Effectiv" typo — callers
+// elsewhere depend on it; rename in a coordinated change only.
+- (CGSize)getEffectivPreviewSize;
+- (void)takePhotoSensors:(nonnull NSArray<PigeonSensor *> *)sensors paths:(nonnull NSArray<NSString *> *)paths completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion;
+- (void)dispose;
+- (void)setAspectRatio:(AspectRatio)ratio;
+- (void)setExifPreferencesGPSLocation:(bool)gpsLocation completion:(void(^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+- (void)setOrientationEventSink:(FlutterEventSink)orientationEventSink;
+- (void)setPhysicalButtonEventSink:(FlutterEventSink)physicalButtonEventSink;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/ios/Classes/CameraPreview/MultiCameraPreview/MultiCameraPreview.m b/ios/Classes/CameraPreview/MultiCameraPreview/MultiCameraPreview.m
new file mode 100644
index 00000000..e08abaa2
--- /dev/null
+++ b/ios/Classes/CameraPreview/MultiCameraPreview/MultiCameraPreview.m
@@ -0,0 +1,389 @@
+//
+// MultiCameraPreview.m
+// camerawesome
+//
+// Created by Dimitri Dessus on 28/03/2023.
+//
+
+#import "MultiCameraPreview.h"
+
+@implementation MultiCameraPreview
+
+- (instancetype)initWithSensors:(NSArray *)sensors
+              mirrorFrontCamera:(BOOL)mirrorFrontCamera
+           enablePhysicalButton:(BOOL)enablePhysicalButton
+                aspectRatioMode:(AspectRatio)aspectRatioMode
+                    captureMode:(CaptureModes)captureMode
+                  dispatchQueue:(dispatch_queue_t)dispatchQueue {
+  if (self = [super init]) {
+    _dispatchQueue = dispatchQueue;
+
+    _textures = [NSMutableArray new];
+    _devices = [NSMutableArray new];
+
+    _aspectRatio = aspectRatioMode;
+    _mirrorFrontCamera = mirrorFrontCamera;
+
+    _motionController = [[MotionController alloc] init];
+    _locationController = [[LocationController alloc] init];
+    _physicalButtonController = [[PhysicalButtonController alloc] init];
+
+    if (enablePhysicalButton) {
+      [_physicalButtonController startListening];
+    }
+
+    [_motionController startMotionDetection];
+
+    [self configInitialSession:sensors];
+  }
+
+  return self;
+}
+
+/// Set orientation stream Flutter sink
+- (void)setOrientationEventSink:(FlutterEventSink)orientationEventSink {
+  if (_motionController != nil) {
+    [_motionController setOrientationEventSink:orientationEventSink];
+  }
+}
+
+/// Set physical button Flutter sink
+- (void)setPhysicalButtonEventSink:(FlutterEventSink)physicalButtonEventSink {
+  if (_physicalButtonController != nil) {
+    [_physicalButtonController setPhysicalButtonEventSink:physicalButtonEventSink];
+  }
+}
+
+- (void)dispose {
+  [self stop];
+  [self cleanSession];
+}
+
+- (void)stop {
+  [self.cameraSession stopRunning];
+}
+
+/// Removes every connection/input/output from the session inside a balanced
+/// beginConfiguration/commitConfiguration pair. (The original began a
+/// configuration here but never committed it, leaving the session unbalanced
+/// when called from -dispose.)
+- (void)cleanSession {
+  [self.cameraSession beginConfiguration];
+
+  for (CameraDeviceInfo *camera in self.devices) {
+    [self.cameraSession removeConnection:camera.captureConnection];
+    [self.cameraSession removeInput:camera.deviceInput];
+    [self.cameraSession removeOutput:camera.videoDataOutput];
+  }
+
+  [self.devices removeAllObjects];
+
+  [self.cameraSession commitConfiguration];
+}
+
+// Get max zoom level of the main (first) device
+- (CGFloat)getMaxZoom {
+  CGFloat maxZoom = self.devices.firstObject.device.activeFormat.videoMaxZoomFactor;
+  // Not sure why on iPhone 14 Pro, zoom at 90 not working, so let's block to 50 which is very high
+  return maxZoom > 50.0 ? 50.0 : maxZoom;
+}
+
+/// Set zoom level: `value` in [0;1] is scaled onto [1;maxZoom].
+- (void)setZoom:(float)value error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+  AVCaptureDevice *mainDevice = self.devices.firstObject.device;
+
+  CGFloat maxZoom = [self getMaxZoom];
+  CGFloat scaledZoom = value * (maxZoom - 1.0f) + 1.0f;
+
+  NSError *zoomError;
+  if ([mainDevice lockForConfiguration:&zoomError]) {
+    mainDevice.videoZoomFactor = scaledZoom;
+    [mainDevice unlockForConfiguration];
+  } else {
+    *error = [FlutterError errorWithCode:@"ZOOM_NOT_SET" message:@"can't set the zoom value" details:[zoomError localizedDescription]];
+  }
+}
+
+/// Focus the main device on a point of interest (preview coordinates).
+- (void)focusOnPoint:(CGPoint)position preview:(CGSize)preview error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+  AVCaptureDevice *mainDevice = self.devices.firstObject.device;
+  if (![mainDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus] || ![mainDevice isFocusPointOfInterestSupported]) {
+    return;
+  }
+
+  // Check the return value of lockForConfiguration:, not the error pointer —
+  // the original tested lockError after a *successful* lock, which never fires.
+  NSError *lockError;
+  if (![mainDevice lockForConfiguration:&lockError]) {
+    *error = [FlutterError errorWithCode:@"FOCUS_ERROR" message:@"impossible to set focus point" details:[lockError localizedDescription]];
+    return;
+  }
+
+  [mainDevice setFocusPointOfInterest:position];
+  [mainDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
+
+  [mainDevice unlockForConfiguration];
+}
+
+- (void)setExifPreferencesGPSLocation:(bool)gpsLocation completion:(void(^)(NSNumber *_Nullable, FlutterError *_Nullable))completion {
+  _saveGPSLocation = gpsLocation;
+
+  if (_saveGPSLocation) {
+    [_locationController requestWhenInUseAuthorizationOnGranted:^{
+      completion(@(YES), nil);
+    } declined:^{
+      completion(@(NO), nil);
+    }];
+  } else {
+    completion(@(YES), nil);
+  }
+}
+
+- (void)setMirrorFrontCamera:(bool)value error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+  _mirrorFrontCamera = value;
+}
+
+/// Maps a [0;1] brightness value onto the device exposure-bias range.
+- (void)setBrightness:(NSNumber *)brightness error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+  AVCaptureDevice *mainDevice = self.devices.firstObject.device;
+  NSError *brightnessError = nil;
+  if ([mainDevice lockForConfiguration:&brightnessError]) {
+    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
+    if ([mainDevice isExposureModeSupported:exposureMode]) {
+      [mainDevice setExposureMode:exposureMode];
+    }
+
+    CGFloat minExposureTargetBias = mainDevice.minExposureTargetBias;
+    CGFloat maxExposureTargetBias = mainDevice.maxExposureTargetBias;
+
+    CGFloat exposureTargetBias = minExposureTargetBias + (maxExposureTargetBias - minExposureTargetBias) * [brightness floatValue];
+    exposureTargetBias = MAX(minExposureTargetBias, MIN(maxExposureTargetBias, exposureTargetBias));
+
+    [mainDevice setExposureTargetBias:exposureTargetBias completionHandler:nil];
+    [mainDevice unlockForConfiguration];
+  } else {
+    *error = [FlutterError errorWithCode:@"BRIGHTNESS_NOT_SET" message:@"can't set the brightness value" details:[brightnessError localizedDescription]];
+  }
+}
+
+/// Set flash mode on the main device.
+- (void)setFlashMode:(CameraFlashMode)flashMode error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+  AVCaptureDevice *mainDevice = self.devices.firstObject.device;
+
+  if (![mainDevice hasFlash]) {
+    *error = [FlutterError errorWithCode:@"FLASH_UNSUPPORTED" message:@"flash is not supported on this device" details:@""];
+    return;
+  }
+
+  if (mainDevice.position == AVCaptureDevicePositionFront) {
+    *error = [FlutterError errorWithCode:@"FLASH_UNSUPPORTED" message:@"can't set flash for portrait mode" details:@""];
+    return;
+  }
+
+  // lockForConfiguration: returns NO on failure; the original ignored the
+  // return value and tested the error pointer instead.
+  NSError *lockError;
+  if (![mainDevice lockForConfiguration:&lockError]) {
+    *error = [FlutterError errorWithCode:@"FLASH_ERROR" message:@"impossible to change configuration" details:[lockError localizedDescription]];
+    return;
+  }
+
+  switch (flashMode) {
+    case None:
+      _torchMode = AVCaptureTorchModeOff;
+      _flashMode = AVCaptureFlashModeOff;
+      break;
+    case On:
+      _torchMode = AVCaptureTorchModeOff;
+      _flashMode = AVCaptureFlashModeOn;
+      break;
+    case Auto:
+      _torchMode = AVCaptureTorchModeAuto;
+      _flashMode = AVCaptureFlashModeAuto;
+      break;
+    case Always:
+      _torchMode = AVCaptureTorchModeOn;
+      _flashMode = AVCaptureFlashModeOn;
+      break;
+    default:
+      _torchMode = AVCaptureTorchModeAuto;
+      _flashMode = AVCaptureFlashModeAuto;
+      break;
+  }
+
+  // setTorchMode: throws when the requested mode is unsupported.
+  if ([mainDevice isTorchModeSupported:_torchMode]) {
+    [mainDevice setTorchMode:_torchMode];
+  }
+  [mainDevice unlockForConfiguration];
+}
+
+- (void)refresh {
+  if ([self.cameraSession isRunning]) {
+    [self.cameraSession stopRunning];
+  }
+  [self.cameraSession startRunning];
+}
+
+/// Creates the multicam session, one preview texture per sensor, then
+/// attaches the sensors.
+- (void)configInitialSession:(NSArray *)sensors {
+  self.cameraSession = [[AVCaptureMultiCamSession alloc] init];
+
+  for (int i = 0; i < [sensors count]; i++) {
+    CameraPreviewTexture *previewTexture = [[CameraPreviewTexture alloc] init];
+    [self.textures addObject:previewTexture];
+  }
+
+  // -setSensors: runs its own beginConfiguration/commitConfiguration pair;
+  // the original committed *again* here, which is unbalanced and raises
+  // an NSGenericException at runtime.
+  [self setSensors:sensors];
+}
+
+/// Replaces the attached sensors, tearing down and rebuilding all session
+/// inputs/outputs/connections.
+- (void)setSensors:(NSArray *)sensors {
+  [self cleanSession];
+
+  _sensors = sensors;
+
+  [self.cameraSession beginConfiguration];
+  for (int i = 0; i < [sensors count]; i++) {
+    PigeonSensor *sensor = sensors[i];
+    [self addSensor:sensor withIndex:i];
+  }
+  [self.cameraSession commitConfiguration];
+}
+
+- (void)start {
+  [self.cameraSession startRunning];
+}
+
+- (CGSize)getEffectivPreviewSize {
+  // TODO: compute the real preview size instead of this hard-coded value
+  return CGSizeMake(1920, 1080);
+}
+
+/// Wires one sensor into the session: device input, video data output,
+/// explicit connection, and a still-photo output. Returns NO when any step
+/// is impossible (device missing, input creation failed, session refuses).
+- (BOOL)addSensor:(PigeonSensor *)sensor withIndex:(int)index {
+  AVCaptureDevice *device = [self selectAvailableCamera:sensor];
+  if (device == nil) {
+    return NO;
+  }
+
+  // initWithDevice:error: returns nil on failure (e.g. camera already in
+  // use); the original ignored both the nil result and the error.
+  NSError *inputError = nil;
+  AVCaptureDeviceInput *deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&inputError];
+  if (deviceInput == nil || ![self.cameraSession canAddInput:deviceInput]) {
+    return NO;
+  }
+  [self.cameraSession addInputWithNoConnections:deviceInput];
+
+  AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
+  videoDataOutput.videoSettings = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
+  [videoDataOutput setSampleBufferDelegate:self queue:self.dispatchQueue];
+
+  if (![self.cameraSession canAddOutput:videoDataOutput]) {
+    return NO;
+  }
+  [self.cameraSession addOutputWithNoConnections:videoDataOutput];
+
+  AVCaptureInputPort *port = [[deviceInput portsWithMediaType:AVMediaTypeVideo
+                                            sourceDeviceType:device.deviceType
+                                        sourceDevicePosition:device.position] firstObject];
+  AVCaptureConnection *captureConnection = [[AVCaptureConnection alloc] initWithInputPorts:@[port] output:videoDataOutput];
+
+  if (![self.cameraSession canAddConnection:captureConnection]) {
+    return NO;
+  }
+  [self.cameraSession addConnection:captureConnection];
+
+  [captureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
+  [captureConnection setAutomaticallyAdjustsVideoMirroring:NO];
+  [captureConnection setVideoMirrored:sensor.position == PigeonSensorPositionFront];
+
+  // Still-photo output for this sensor; guard addOutput: like the video output.
+  AVCapturePhotoOutput *capturePhotoOutput = [AVCapturePhotoOutput new];
+  [capturePhotoOutput setHighResolutionCaptureEnabled:YES];
+  if ([self.cameraSession canAddOutput:capturePhotoOutput]) {
+    [self.cameraSession addOutput:capturePhotoOutput];
+  }
+
+  CameraDeviceInfo *cameraDevice = [[CameraDeviceInfo alloc] init];
+  cameraDevice.captureConnection = captureConnection;
+  cameraDevice.deviceInput = deviceInput;
+  cameraDevice.videoDataOutput = videoDataOutput;
+  cameraDevice.device = device;
+  cameraDevice.capturePhotoOutput = capturePhotoOutput;
+
+  [_devices addObject:cameraDevice];
+
+  return YES;
+}
+
+/// Get the first available camera on device matching the sensor description.
+- (AVCaptureDevice *)selectAvailableCamera:(PigeonSensor *)sensor {
+  if (sensor.deviceId != nil) {
+    return [AVCaptureDevice deviceWithUniqueID:sensor.deviceId];
+  }
+
+  // TODO: add dual & triple camera
+  AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera, AVCaptureDeviceTypeBuiltInUltraWideCamera, ]
+                                                                                                             mediaType:AVMediaTypeVideo
+                                                                                                              position:AVCaptureDevicePositionUnspecified];
+  NSArray *devices = discoverySession.devices;
+
+  for (AVCaptureDevice *device in devices) {
+    if (sensor.type != PigeonSensorTypeUnknown) {
+      // Match by explicit sensor type first.
+      AVCaptureDeviceType deviceType = [SensorUtils deviceTypeFromSensorType:sensor.type];
+      if ([device deviceType] == deviceType) {
+        return [AVCaptureDevice deviceWithUniqueID:[device uniqueID]];
+      }
+    } else if (sensor.position != PigeonSensorPositionUnknown) {
+      // Otherwise fall back to matching by position (front/back).
+      NSInteger cameraType = (sensor.position == PigeonSensorPositionFront) ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
+      if ([device position] == cameraType) {
+        return [AVCaptureDevice deviceWithUniqueID:[device uniqueID]];
+      }
+    }
+  }
+  return nil;
+}
+
+- (void)setAspectRatio:(AspectRatio)ratio {
+  _aspectRatio = ratio;
+}
+
+- (void)setPreviewSize:(CGSize)previewSize error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+  // TODO: apply the requested preview size to the session
+}
+
+/// Takes one still picture per sensor.
+/// NOTE(review): `completion` is invoked once per sensor and again from each
+/// controller callback — confirm the Pigeon contract tolerates multiple calls.
+- (void)takePhotoSensors:(nonnull NSArray *)sensors paths:(nonnull NSArray *)paths completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion {
+  for (int i = 0; i < [sensors count]; i++) {
+    PigeonSensor *sensor = [sensors objectAtIndex:i];
+    NSString *path = [paths objectAtIndex:i];
+
+    CameraPictureController *cameraPicture = [[CameraPictureController alloc] initWithPath:path
+                                                                               orientation:_motionController.deviceOrientation
+                                                                            sensorPosition:sensor.position
+                                                                           saveGPSLocation:_saveGPSLocation
+                                                                         mirrorFrontCamera:_mirrorFrontCamera
+                                                                               aspectRatio:_aspectRatio
+                                                                                completion:completion
+                                                                                  callback:^{
+      // If flash mode is always on, restore it back after photo is taken
+      if (self->_torchMode == AVCaptureTorchModeOn) {
+        [self->_devices.firstObject.device lockForConfiguration:nil];
+        [self->_devices.firstObject.device setTorchMode:AVCaptureTorchModeOn];
+        [self->_devices.firstObject.device unlockForConfiguration];
+      }
+
+      completion(@(YES), nil);
+    }];
+
+    // Create settings instance
+    AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
+    [settings setHighResolutionPhotoEnabled:YES];
+    [self.devices[i].capturePhotoOutput setPhotoSettingsForSceneMonitoring:settings];
+
+    [self.devices[i].capturePhotoOutput capturePhotoWithSettings:settings
+                                                        delegate:cameraPicture];
+  }
+}
+
+/// Video-frame callback: routes the buffer to the texture of the matching output.
+- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
+  int index = 0;
+  for (CameraDeviceInfo *device in _devices) {
+    if (device.videoDataOutput == output) {
+      [_textures[index] updateBuffer:sampleBuffer];
+      if (_onPreviewFrameAvailable) {
+        _onPreviewFrameAvailable(@(index));
+      }
+      // Each output belongs to exactly one device; stop scanning.
+      break;
+    }
+
+    index++;
+  }
+}
+
+@end
diff --git a/ios/Classes/CameraPreview/CameraPreview.h b/ios/Classes/CameraPreview/SingleCameraPreview/SingleCameraPreview.h
similarity index 86%
rename from ios/Classes/CameraPreview/CameraPreview.h
rename to ios/Classes/CameraPreview/SingleCameraPreview/SingleCameraPreview.h
index dc273397..ec17ce9a 100644
--- a/ios/Classes/CameraPreview/CameraPreview.h
+++ b/ios/Classes/CameraPreview/SingleCameraPreview/SingleCameraPreview.h
@@ -26,12 +26,15 @@
#import "CameraSensorType.h"
#import "PhysicalButtonController.h"
#import "InputAnalysisImageFormat.h"
+#import "CameraPreviewTexture.h"
+#import "MultiCameraPreview.h"
NS_ASSUME_NONNULL_BEGIN
-@interface CameraPreview : NSObject
+// TODO: move this to a single camera ?
@property(readonly, nonatomic) AVCaptureSession *captureSession;
@property(readonly, nonatomic) AVCaptureDevice *captureDevice;
@property(readonly, nonatomic) AVCaptureInput *captureVideoInput;
@@ -39,18 +42,20 @@ AVCaptureAudioDataOutputSampleBufferDelegate>
@property(readonly, nonatomic) AVCaptureVideoDataOutput *captureVideoOutput;
@property(readonly, nonatomic) AVCaptureVideoPreviewLayer *previewLayer;
@property(readonly, nonatomic) AVCapturePhotoOutput *capturePhotoOutput;
+
@property(readonly, nonatomic) UIDeviceOrientation deviceOrientation;
@property(readonly, nonatomic) AVCaptureFlashMode flashMode;
@property(readonly, nonatomic) AVCaptureTorchMode torchMode;
@property(readonly, nonatomic) AVCaptureAudioDataOutput *audioOutput;
-@property(readonly, nonatomic) CameraSensor cameraSensor;
+@property(readonly, nonatomic) PigeonSensorPosition cameraSensorPosition;
@property(readonly, nonatomic) NSString *captureDeviceId;
@property(readonly, nonatomic) CaptureModes captureMode;
@property(readonly, nonatomic) NSString *currentPresset;
@property(readonly, nonatomic) AspectRatio aspectRatio;
+@property(readonly, nonatomic) CupertinoVideoOptions *videoOptions;
+@property(readonly, nonatomic) CameraPreviewTexture* previewTexture;
@property(readonly, nonatomic) bool saveGPSLocation;
@property(readonly, nonatomic) bool mirrorFrontCamera;
-@property(readonly) _Atomic(CVPixelBufferRef) latestPixelBuffer;
@property(readonly, nonatomic) CGSize currentPreviewSize;
@property(readonly, nonatomic) ImageStreamController *imageStreamController;
@property(readonly, nonatomic) MotionController *motionController;
@@ -58,9 +63,10 @@ AVCaptureAudioDataOutputSampleBufferDelegate>
@property(readonly, nonatomic) VideoController *videoController;
@property(readonly, nonatomic) PhysicalButtonController *physicalButtonController;
@property(readonly, copy) void (^completion)(NSNumber * _Nullable, FlutterError * _Nullable);
-@property(nonatomic, copy) void (^onFrameAvailable)(void);
+@property(nonatomic, copy) void (^onPreviewFrameAvailable)(void);
-- (instancetype)initWithCameraSensor:(CameraSensor)sensor
+- (instancetype)initWithCameraSensor:(PigeonSensorPosition)sensor
+ videoOptions:(nullable CupertinoVideoOptions *)videoOptions
streamImages:(BOOL)streamImages
mirrorFrontCamera:(BOOL)mirrorFrontCamera
enablePhysicalButton:(BOOL)enablePhysicalButton
@@ -68,10 +74,10 @@ AVCaptureAudioDataOutputSampleBufferDelegate>
captureMode:(CaptureModes)captureMode
completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion
dispatchQueue:(dispatch_queue_t)dispatchQueue;
-- (void)setPreviewSize:(CGSize)previewSize error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
- (void)setImageStreamEvent:(FlutterEventSink)imageStreamEventSink;
- (void)setOrientationEventSink:(FlutterEventSink)orientationEventSink;
- (void)setPhysicalButtonEventSink:(FlutterEventSink)physicalButtonEventSink;
+- (void)setPreviewSize:(CGSize)previewSize error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
- (void)setFlashMode:(CameraFlashMode)flashMode error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
- (void)setCaptureMode:(CaptureModes)captureMode error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
- (void)setCameraPresset:(CGSize)currentPreviewSize;
@@ -85,12 +91,11 @@ AVCaptureAudioDataOutputSampleBufferDelegate>
- (void)start;
- (void)stop;
- (void)takePictureAtPath:(NSString *)path completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion;
-- (void)recordVideoAtPath:(NSString *)path withOptions:(VideoOptions *)options completion:(nonnull void (^)(FlutterError * _Nullable))completion;
+- (void)recordVideoAtPath:(NSString *)path completion:(nonnull void (^)(FlutterError * _Nullable))completion;
- (void)stopRecordingVideo:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion;
- (void)focusOnPoint:(CGPoint)position preview:(CGSize)preview error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
- (void)dispose;
-- (NSArray *)getSensors:(AVCaptureDevicePosition)position;
-- (void)setSensor:(CameraSensor)sensor deviceId:(NSString *)captureDeviceId;
+- (void)setSensor:(PigeonSensor *)sensor;
- (void)setZoom:(float)value error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
- (void)setMirrorFrontCamera:(bool)value error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error;
- (CGFloat)getMaxZoom;
diff --git a/ios/Classes/CameraPreview/CameraPreview.m b/ios/Classes/CameraPreview/SingleCameraPreview/SingleCameraPreview.m
similarity index 85%
rename from ios/Classes/CameraPreview/CameraPreview.m
rename to ios/Classes/CameraPreview/SingleCameraPreview/SingleCameraPreview.m
index f1492cf7..5a4ef983 100644
--- a/ios/Classes/CameraPreview/CameraPreview.m
+++ b/ios/Classes/CameraPreview/SingleCameraPreview/SingleCameraPreview.m
@@ -5,13 +5,14 @@
// Created by Dimitri Dessus on 23/07/2020.
//
-#import "CameraPreview.h"
+#import "SingleCameraPreview.h"
-@implementation CameraPreview {
+@implementation SingleCameraPreview {
dispatch_queue_t _dispatchQueue;
}
-- (instancetype)initWithCameraSensor:(CameraSensor)sensor
+- (instancetype)initWithCameraSensor:(PigeonSensorPosition)sensor
+ videoOptions:(nullable CupertinoVideoOptions *)videoOptions
streamImages:(BOOL)streamImages
mirrorFrontCamera:(BOOL)mirrorFrontCamera
enablePhysicalButton:(BOOL)enablePhysicalButton
@@ -24,6 +25,13 @@ - (instancetype)initWithCameraSensor:(CameraSensor)sensor
_completion = completion;
_dispatchQueue = dispatchQueue;
+ _previewTexture = [[CameraPreviewTexture alloc] init];
+
+ _cameraSensorPosition = sensor;
+ _aspectRatio = aspectRatioMode;
+ _mirrorFrontCamera = mirrorFrontCamera;
+ _videoOptions = videoOptions;
+
// Creating capture session
_captureSession = [[AVCaptureSession alloc] init];
_captureVideoOutput = [AVCaptureVideoDataOutput new];
@@ -32,13 +40,12 @@ - (instancetype)initWithCameraSensor:(CameraSensor)sensor
[_captureVideoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
[_captureSession addOutputWithNoConnections:_captureVideoOutput];
- _cameraSensor = sensor;
- _aspectRatio = aspectRatioMode;
- _mirrorFrontCamera = mirrorFrontCamera;
-
[self initCameraPreview:sensor];
[_captureConnection setAutomaticallyAdjustsVideoMirroring:NO];
+ if (mirrorFrontCamera && [_captureConnection isVideoMirroringSupported]) {
+ [_captureConnection setVideoMirrored:mirrorFrontCamera];
+ }
_captureMode = captureMode;
@@ -92,6 +99,7 @@ - (void)setPhysicalButtonEventSink:(FlutterEventSink)physicalButtonEventSink {
}
}
+// TODO: move this to a QualityController
/// Assign the default preview qualities
- (void)setBestPreviewQuality {
NSArray *qualities = [CameraQualities captureFormatsForDevice:_captureDevice];
@@ -117,7 +125,7 @@ - (void)setExifPreferencesGPSLocation:(bool)gpsLocation completion:(void(^)(NSNu
}
/// Init camera preview with Front or Rear sensor
-- (void)initCameraPreview:(CameraSensor)sensor {
+- (void)initCameraPreview:(PigeonSensorPosition)sensor {
// Here we set a preset which wont crash the device before switching to front or back
[_captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
@@ -134,6 +142,15 @@ - (void)initCameraPreview:(CameraSensor)sensor {
_captureConnection = [AVCaptureConnection connectionWithInputPorts:_captureVideoInput.ports
output:_captureVideoOutput];
+ // TODO: works but deprecated...
+ // if ([_captureConnection isVideoMinFrameDurationSupported] && [_captureConnection isVideoMaxFrameDurationSupported]) {
+ // CMTime frameDuration = CMTimeMake(1, 12);
+ // [_captureConnection setVideoMinFrameDuration:frameDuration];
+ // [_captureConnection setVideoMaxFrameDuration:frameDuration];
+ // } else {
+ // NSLog(@"Failed to set frame duration");
+ // }
+
// Attaching to session
[_captureSession addInputWithNoConnections:_captureVideoInput];
[_captureSession addConnection:_captureConnection];
@@ -145,15 +162,12 @@ - (void)initCameraPreview:(CameraSensor)sensor {
// Mirror the preview only on portrait mode
[_captureConnection setAutomaticallyAdjustsVideoMirroring:NO];
- [_captureConnection setVideoMirrored:(_cameraSensor == Front)];
+ [_captureConnection setVideoMirrored:(_cameraSensorPosition == PigeonSensorPositionFront)];
[_captureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
}
- (void)dealloc {
- if (_latestPixelBuffer) {
- CFRelease(_latestPixelBuffer);
- }
- [_motionController startMotionDetection];
+ [self.motionController startMotionDetection];
}
/// Set camera preview size
@@ -227,7 +241,7 @@ - (void)stop {
}
/// Set sensor between Front & Rear camera
-- (void)setSensor:(CameraSensor)sensor deviceId:(NSString *)captureDeviceId {
+- (void)setSensor:(PigeonSensor *)sensor {
// First remove all input & output
[_captureSession beginConfiguration];
@@ -245,11 +259,11 @@ - (void)setSensor:(CameraSensor)sensor deviceId:(NSString *)captureDeviceId {
[_captureSession removeOutput:_capturePhotoOutput];
[_captureSession removeConnection:_captureConnection];
- _cameraSensor = sensor;
- _captureDeviceId = captureDeviceId;
+ _cameraSensorPosition = sensor.position;
+ _captureDeviceId = sensor.deviceId;
// Init the camera preview with the selected sensor
- [self initCameraPreview:sensor];
+ [self initCameraPreview:sensor.position];
[self setBestPreviewQuality];
@@ -293,6 +307,10 @@ - (void)setBrightness:(NSNumber *)brightness error:(FlutterError * _Nullable __a
- (void)setMirrorFrontCamera:(bool)value error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
_mirrorFrontCamera = value;
+
+ if ([_captureConnection isVideoMirroringSupported]) {
+ [_captureConnection setVideoMirrored:value];
+ }
}
/// Set flash mode
@@ -302,7 +320,7 @@ - (void)setFlashMode:(CameraFlashMode)flashMode error:(FlutterError * _Nullable
return;
}
- if (_cameraSensor == Front) {
+ if (_cameraSensorPosition == PigeonSensorPositionFront) {
*error = [FlutterError errorWithCode:@"FLASH_UNSUPPORTED" message:@"can't set flash for portrait mode" details:@""];
return;
}
@@ -363,7 +381,7 @@ - (void)receivedImageFromStream {
}
/// Get the first available camera on device (front or rear)
-- (NSString *)selectAvailableCamera:(CameraSensor)sensor {
+- (NSString *)selectAvailableCamera:(PigeonSensorPosition)sensor {
if (_captureDeviceId != nil) {
return _captureDeviceId;
}
@@ -376,7 +394,7 @@ - (NSString *)selectAvailableCamera:(CameraSensor)sensor {
position:AVCaptureDevicePositionUnspecified];
devices = discoverySession.devices;
- NSInteger cameraType = (sensor == Front) ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
+ NSInteger cameraType = (sensor == PigeonSensorPositionFront) ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
for (AVCaptureDevice *device in devices) {
if ([device position] == cameraType) {
return [device uniqueID];
@@ -385,40 +403,6 @@ - (NSString *)selectAvailableCamera:(CameraSensor)sensor {
return nil;
}
-- (NSArray *)getSensors:(AVCaptureDevicePosition)position {
- NSMutableArray *sensors = [NSMutableArray new];
-
- NSArray *sensorsType = @[AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera, AVCaptureDeviceTypeBuiltInUltraWideCamera, AVCaptureDeviceTypeBuiltInTrueDepthCamera];
-
- AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
- discoverySessionWithDeviceTypes:sensorsType
- mediaType:AVMediaTypeVideo
- position:AVCaptureDevicePositionUnspecified];
-
- for (AVCaptureDevice *device in discoverySession.devices) {
- PigeonSensorType type;
- if (device.deviceType == AVCaptureDeviceTypeBuiltInTelephotoCamera) {
- type = PigeonSensorTypeTelephoto;
- } else if (device.deviceType == AVCaptureDeviceTypeBuiltInUltraWideCamera) {
- type = PigeonSensorTypeUltraWideAngle;
- } else if (device.deviceType == AVCaptureDeviceTypeBuiltInTrueDepthCamera) {
- type = PigeonSensorTypeTrueDepth;
- } else if (device.deviceType == AVCaptureDeviceTypeBuiltInWideAngleCamera) {
- type = PigeonSensorTypeWideAngle;
- } else {
- type = PigeonSensorTypeUnknown;
- }
-
- PigeonSensorTypeDevice *sensorType = [PigeonSensorTypeDevice makeWithSensorType:type name:device.localizedName iso:[NSNumber numberWithFloat:device.ISO] flashAvailable:[NSNumber numberWithBool:device.flashAvailable] uid:device.uniqueID];
-
- if (device.position == position) {
- [sensors addObject:sensorType];
- }
- }
-
- return sensors;
-}
-
/// Set capture mode between Photo & Video mode
- (void)setCaptureMode:(CaptureModes)captureMode error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
if (_videoController.isRecording) {
@@ -449,7 +433,7 @@ - (void)takePictureAtPath:(NSString *)path completion:(nonnull void (^)(NSNumber
// Instanciate camera picture obj
CameraPictureController *cameraPicture = [[CameraPictureController alloc] initWithPath:path
orientation:_motionController.deviceOrientation
- sensor:_cameraSensor
+ sensorPosition:_cameraSensorPosition
saveGPSLocation:_saveGPSLocation
mirrorFrontCamera:_mirrorFrontCamera
aspectRatio:_aspectRatio
@@ -477,14 +461,14 @@ - (void)takePictureAtPath:(NSString *)path completion:(nonnull void (^)(NSNumber
# pragma mark - Camera video
/// Record video into the given path
-- (void)recordVideoAtPath:(NSString *)path withOptions:(VideoOptions *)options completion:(nonnull void (^)(FlutterError * _Nullable))completion {
+- (void)recordVideoAtPath:(NSString *)path completion:(nonnull void (^)(FlutterError * _Nullable))completion {
if (_imageStreamController.streamImages) {
completion([FlutterError errorWithCode:@"VIDEO_ERROR" message:@"can't record video when image stream is enabled" details:@""]);
return;
}
if (!_videoController.isRecording) {
- [_videoController recordVideoAtPath:path orientation:_deviceOrientation audioSetupCallback:^{
+ [_videoController recordVideoAtPath:path captureDevice:_captureDevice orientation:_deviceOrientation audioSetupCallback:^{
[self setUpCaptureSessionForAudioError:^(NSError *error) {
completion([FlutterError errorWithCode:@"VIDEO_ERROR" message:@"error when trying to setup audio" details:[error localizedDescription]]);
}];
@@ -495,7 +479,7 @@ - (void)recordVideoAtPath:(NSString *)path withOptions:(VideoOptions *)options c
[self->_captureVideoOutput setSampleBufferDelegate:self queue:self->_dispatchQueue];
completion(nil);
- } options:options completion:completion];
+ } options:_videoOptions completion:completion];
} else {
completion([FlutterError errorWithCode:@"VIDEO_ERROR" message:@"already recording video" details:@""]);
}
@@ -550,7 +534,6 @@ - (void)setRecordingAudioMode:(bool)isAudioEnabled completion:(void(^)(NSNumber
}];
}
-
[_captureSession commitConfiguration];
}
@@ -586,17 +569,9 @@ - (void)setUpCaptureSessionForAudioError:(nonnull void (^)(NSError *))error {
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
if (output == _captureVideoOutput) {
- CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
- CFRetain(newBuffer);
- CVPixelBufferRef old = atomic_load(&_latestPixelBuffer);
- while (!atomic_compare_exchange_strong(&_latestPixelBuffer, &old, newBuffer)) {
- old = atomic_load(&_latestPixelBuffer);
- }
- if (old != nil) {
- CFRelease(old);
- }
- if (_onFrameAvailable) {
- _onFrameAvailable();
+ [self.previewTexture updateBuffer:sampleBuffer];
+ if (_onPreviewFrameAvailable) {
+ _onPreviewFrameAvailable();
}
}
@@ -611,16 +586,4 @@ - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleB
}
}
-# pragma mark - Data manipulation
-
-/// Used to copy pixels to in-memory buffer
-- (CVPixelBufferRef _Nullable)copyPixelBuffer {
- CVPixelBufferRef pixelBuffer = atomic_load(&_latestPixelBuffer);
- while (!atomic_compare_exchange_strong(&_latestPixelBuffer, &pixelBuffer, nil)) {
- pixelBuffer = atomic_load(&_latestPixelBuffer);
- }
-
- return pixelBuffer;
-}
-
@end
diff --git a/ios/Classes/CamerawesomePlugin.h b/ios/Classes/CamerawesomePlugin.h
index 21f98523..6a06976b 100644
--- a/ios/Classes/CamerawesomePlugin.h
+++ b/ios/Classes/CamerawesomePlugin.h
@@ -4,6 +4,7 @@
NS_ASSUME_NONNULL_BEGIN
@interface CamerawesomePlugin : NSObject
+
@end
NS_ASSUME_NONNULL_END
diff --git a/ios/Classes/CamerawesomePlugin.m b/ios/Classes/CamerawesomePlugin.m
index 97dbcfbf..d371ecd9 100644
--- a/ios/Classes/CamerawesomePlugin.m
+++ b/ios/Classes/CamerawesomePlugin.m
@@ -1,7 +1,12 @@
#import "CamerawesomePlugin.h"
-#import "CameraPreview.h"
#import "Pigeon/Pigeon.h"
#import "Permissions.h"
+#import "SensorsController.h"
+#import "SingleCameraPreview.h"
+#import "MultiCameraController.h"
+#import "AspectRatioUtils.h"
+#import "CaptureModeUtils.h"
+#import "FlashModeUtils.h"
#import "AnalysisController.h"
FlutterEventSink orientationEventSink;
@@ -10,12 +15,17 @@
FlutterEventSink physicalButtonEventSink;
@interface CamerawesomePlugin ()
-@property(readonly, nonatomic) NSObject *registry;
-@property int64_t textureId;
-@property CameraPreview *camera;
+@property(readonly, nonatomic) NSObject *textureRegistry;
+@property NSMutableArray *texturesIds;
+@property SingleCameraPreview *camera;
+@property MultiCameraPreview *multiCamera;
- (instancetype)init:(NSObject*)registrar;
@end
+// TODO: create a protocol to uniformize multi camera & single camera
+// TODO: for multi camera, specify sensor position
+// TODO: save all controllers here
+
@implementation CamerawesomePlugin {
dispatch_queue_t _dispatchQueue;
dispatch_queue_t _dispatchQueueAnalysis;
@@ -24,7 +34,7 @@ @implementation CamerawesomePlugin {
- (instancetype)init:(NSObject*)registrar {
self = [super init];
- _registry = registrar.textures;
+ _textureRegistry = registrar.textures;
if (_dispatchQueue == nil) {
_dispatchQueue = dispatch_queue_create("camerawesome.dispatchqueue", NULL);
@@ -53,6 +63,152 @@ + (void)registerWithRegistrar:(NSObject*)registrar {
AnalysisImageUtilsSetup(registrar.messenger, instance);
}
+#pragma mark - Camera engine methods
+
+// Creates the native capture pipeline for the requested sensors and registers
+// one Flutter texture per sensor. Replies @(YES) on success, or a FlutterError
+// (MISSING_PERMISSION, SENSOR_ERROR, MULTI_CAM_NOT_SUPPORTED) on failure.
+// Any previously created preview (single or multi) is disposed first.
+- (void)setupCameraSensors:(nonnull NSArray *)sensors aspectRatio:(nonnull NSString *)aspectRatio zoom:(nonnull NSNumber *)zoom mirrorFrontCamera:(nonnull NSNumber *)mirrorFrontCamera enablePhysicalButton:(nonnull NSNumber *)enablePhysicalButton flashMode:(nonnull NSString *)flashMode captureMode:(nonnull NSString *)captureMode enableImageStream:(nonnull NSNumber *)enableImageStream exifPreferences:(nonnull ExifPreferences *)exifPreferences videoOptions:(nullable VideoOptions *)videoOptions completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion {
+
+ CaptureModes captureModeType = [CaptureModeUtils captureModeFromCaptureModeType:captureMode];
+ // Camera permission is mandatory before touching any AVCapture API.
+ if (![CameraPermissionsController checkAndRequestPermission]) {
+ completion(nil, [FlutterError errorWithCode:@"MISSING_PERMISSION" message:@"you got to accept all permissions" details:nil]);
+ return;
+ }
+
+ if (sensors == nil || [sensors count] <= 0) {
+ completion(nil, [FlutterError errorWithCode:@"SENSOR_ERROR" message:@"empty sensors provided, please provide at least 1 sensor" details:nil]);
+ return;
+ }
+
+ // If a camera preview already exists, dispose it before creating a new one
+ if (self.camera != nil) {
+ [self.camera dispose];
+ self.camera = nil;
+ }
+ if (self.multiCamera != nil) {
+ [self.multiCamera dispose];
+ self.multiCamera = nil;
+ }
+
+ // Fresh texture-id list; indices mirror the order of `sensors`.
+ _texturesIds = [NSMutableArray new];
+
+ AspectRatio aspectRatioMode = [AspectRatioUtils convertAspectRatio:aspectRatio];
+
+ bool multiSensors = [sensors count] > 1;
+ if (multiSensors) {
+ // Multi camera requires hardware support (AVCaptureMultiCamSession).
+ if (![MultiCameraController isMultiCamSupported]) {
+ completion(nil, [FlutterError errorWithCode:@"MULTI_CAM_NOT_SUPPORTED" message:@"multi camera feature is not supported" details:nil]);
+ return;
+ }
+
+ self.multiCamera = [[MultiCameraPreview alloc] initWithSensors:sensors
+ mirrorFrontCamera:[mirrorFrontCamera boolValue]
+ enablePhysicalButton:[enablePhysicalButton boolValue]
+ aspectRatioMode:aspectRatioMode
+ captureMode:captureModeType
+ dispatchQueue:dispatch_queue_create("camerawesome.multi_preview.dispatchqueue", NULL)];
+
+ // Register one Flutter texture per sensor, keeping the same ordering.
+ for (int i = 0; i < [sensors count]; i++) {
+ int64_t textureId = [self->_textureRegistry registerTexture:self.multiCamera.textures[i]];
+ [_texturesIds addObject:[NSNumber numberWithLongLong:textureId]];
+ }
+
+ // Weak capture: the plugin owns the preview, so the callback must not
+ // retain the plugin back (retain cycle).
+ __weak typeof(self) weakSelf = self;
+ self.multiCamera.onPreviewFrameAvailable = ^(NSNumber * _Nullable i) {
+ if (i == nil) {
+ return;
+ }
+
+ NSNumber *textureNumber = weakSelf.texturesIds[[i intValue]];
+ [weakSelf.textureRegistry textureFrameAvailable:[textureNumber longLongValue]];
+ };
+ } else {
+ // Single-sensor setup: only the first sensor is used.
+ PigeonSensor *firstSensor = sensors.firstObject;
+ self.camera = [[SingleCameraPreview alloc] initWithCameraSensor:firstSensor.position
+ videoOptions:videoOptions != nil ? videoOptions.ios : nil
+ streamImages:[enableImageStream boolValue]
+ mirrorFrontCamera:[mirrorFrontCamera boolValue]
+ enablePhysicalButton:[enablePhysicalButton boolValue]
+ aspectRatioMode:aspectRatioMode
+ captureMode:captureModeType
+ completion:completion
+ dispatchQueue:dispatch_queue_create("camerawesome.single_preview.dispatchqueue", NULL)];
+
+ int64_t textureId = [self->_textureRegistry registerTexture:self.camera.previewTexture];
+
+ __weak typeof(self) weakSelf = self;
+ self.camera.onPreviewFrameAvailable = ^{
+ [weakSelf.textureRegistry textureFrameAvailable:textureId];
+ };
+
+ // Prime the texture so the first frame shows as soon as it is produced.
+ [self->_textureRegistry textureFrameAvailable:textureId];
+
+ [self.texturesIds addObject:[NSNumber numberWithLongLong:textureId]];
+ }
+
+ // NOTE(review): `completion` is also handed to SingleCameraPreview's init
+ // above — confirm the Pigeon reply block is not invoked twice in the
+ // single-camera path.
+ completion(@(YES), nil);
+}
+
+// Starts the capture session (single or multi camera) on the camera queue.
+// Returns @(NO) with CAMERA_MUST_BE_INIT if no camera has been set up yet.
+- (nullable NSNumber *)startWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return @(NO);
+ }
+
+ // Session start can block; keep it off the platform thread.
+ dispatch_async(_dispatchQueue, ^{
+ if (self.multiCamera != nil) {
+ [self->_multiCamera start];
+ } else {
+ [self->_camera start];
+ }
+ });
+
+ // NOTE(review): @(YES) is returned before the asynchronous start has
+ // actually completed.
+ return @(YES);
+}
+
+// Stops the active capture session and unregisters all preview textures.
+// Returns @(NO) with CAMERA_MUST_BE_INIT if no camera has been set up yet.
+- (nullable NSNumber *)stopWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return @(NO);
+ }
+
+ // Unregister every preview texture from the Flutter registry.
+ for (NSNumber *textureId in self->_texturesIds) {
+ [self->_textureRegistry unregisterTexture:[textureId longLongValue]];
+ }
+
+ // Stop the session exactly once, on the camera queue. Previously the stop
+ // was dispatched from inside the loop above, i.e. once per registered
+ // texture, producing redundant stop calls in the multi-camera case.
+ dispatch_async(_dispatchQueue, ^{
+ if (self.multiCamera != nil) {
+ [self->_multiCamera stop];
+ } else {
+ [self->_camera stop];
+ }
+ });
+
+ return @(YES);
+}
+
+// Refreshes the active preview (single or multi camera). Sets
+// CAMERA_MUST_BE_INIT when no camera has been set up yet.
+- (void)refreshWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
+ }
+
+ // Exactly one of the two previews is non-nil at this point.
+ if (self.multiCamera != nil) {
+ [self.multiCamera refresh];
+ } else {
+ [self.camera refresh];
+ }
+}
+
+// Returns the Flutter texture id registered for the sensor at the given
+// index, or nil when no texture exists for that index.
+- (nullable NSNumber *)getPreviewTextureIdCameraPosition:(nonnull NSNumber *)cameraPosition error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ int cameraIndex = [cameraPosition intValue];
+
+ // Strict bounds check: the previous `count >= index` comparison admitted
+ // index == count (and negative indices), which would raise an
+ // NSRangeException on objectAtIndex:.
+ if (_texturesIds != nil && cameraIndex >= 0 && cameraIndex < (int)[_texturesIds count]) {
+ return [_texturesIds objectAtIndex:cameraIndex];
+ }
+
+ return nil;
+}
+
+#pragma mark - Event sink methods
+
- (FlutterError *)onListenWithArguments:(NSString *)arguments eventSink:(FlutterEventSink)eventSink {
if ([arguments isEqual: @"orientationChannel"]) {
orientationEventSink = eventSink;
@@ -101,100 +257,243 @@ - (FlutterError *)onCancelWithArguments:(NSString *)arguments {
return nil;
}
-- (nullable NSArray *)availableSizesWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- return [CameraQualities captureFormatsForDevice:_camera.captureDevice];
-}
+#pragma mark - Permissions methods
-- (nullable NSArray *)checkPermissionsWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+// Requests camera (and optionally location) permission and replies with the
+// list of granted permission names. Always invokes `completion` exactly once.
+- (void)requestPermissionsSaveGpsLocation:(nonnull NSNumber *)saveGpsLocation completion:(nonnull void (^)(NSArray * _Nullable, FlutterError * _Nullable))completion {
 NSMutableArray *permissions = [NSMutableArray new];
- bool cameraPermission = [CameraPermissionsController checkPermission];
- bool microphonePermission = [MicrophonePermissionsController checkPermission];
+ const BOOL cameraGranted = [CameraPermissionsController checkAndRequestPermission];
+ if (cameraGranted) {
+ [permissions addObject:@"camera"];
+ }
+ bool needToSaveGPSLocation = [saveGpsLocation boolValue];
+ if (needToSaveGPSLocation) {
+ // TODO: move this to permissions object
+ [self.camera.locationController requestWhenInUseAuthorizationOnGranted:^{
+ [permissions addObject:@"location"];
+
+ completion(permissions, nil);
+ } declined:^{
+ completion(permissions, nil);
+ }];
+ } else {
+ // Bug fix: completion was previously never invoked when GPS saving was
+ // not requested, leaving the awaiting Dart future pending forever.
+ completion(permissions, nil);
+ }
+}
+
+- (nullable NSArray *)checkPermissionsPermissions:(nonnull NSArray *)permissions error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ bool isMicrophonePermissionRequired = [permissions containsObject:@"microphone"];
+ bool isCameraPermissionRequired = [permissions containsObject:@"camera"];
+
+ bool cameraPermission = isCameraPermissionRequired ? [CameraPermissionsController checkPermission] : NO;
+ bool microphonePermission = isMicrophonePermissionRequired ? [MicrophonePermissionsController checkPermission] : NO;
+
+ NSMutableArray *grantedPermissions = [NSMutableArray new];
if (cameraPermission) {
- [permissions addObject:@"camera"];
+ [grantedPermissions addObject:@"camera"];
}
if (microphonePermission) {
- [permissions addObject:@"record_audio"];
+ [grantedPermissions addObject:@"record_audio"];
}
- return permissions;
+ return grantedPermissions;
+}
+
+// TODO(review): stub — always returns an empty list. On iOS, permission
+// requests are performed via requestPermissionsSaveGpsLocation:completion:
+// instead; confirm callers expect an empty result here.
+- (nullable NSArray *)requestPermissionsWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ return @[];
+}
+#pragma mark - Focus methods
+
- (void)focusOnPointPreviewSize:(nonnull PreviewSize *)previewSize x:(nonnull NSNumber *)x y:(nonnull NSNumber *)y androidFocusSettings:(nullable AndroidFocusSettings *)androidFocusSettings error:(FlutterError *_Nullable __autoreleasing *_Nonnull)error {
if (previewSize.width <= 0 || previewSize.height <= 0) {
*error = [FlutterError errorWithCode:@"INVALID_PREVIEW" message:@"preview size width and height must be set" details:nil];
return;
}
- [_camera focusOnPoint:CGPointMake([x floatValue], [y floatValue]) preview:CGSizeMake([previewSize.width floatValue], [previewSize.height floatValue]) error:error];
-}
-
-- (nullable PreviewSize *)getEffectivPreviewSizeWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- CGSize previewSize = [_camera getEffectivPreviewSize];
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
+ }
- // height & width are inverted, this is intentionnal, because camera is always on portrait mode
- return [PreviewSize makeWithWidth:@(previewSize.height) height:@(previewSize.width)];
-}
-
-- (nullable NSNumber *)getMaxZoomWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- return @([_camera getMaxZoom]);
-}
-
-- (nullable NSNumber *)getPreviewTextureIdWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- return @(_textureId);
+ if (self.multiCamera != nil) {
+ [self.multiCamera focusOnPoint:CGPointMake([x floatValue], [y floatValue]) preview:CGSizeMake([previewSize.width floatValue], [previewSize.height floatValue]) error:error];
+ } else {
+ [self.camera focusOnPoint:CGPointMake([x floatValue], [y floatValue]) preview:CGSizeMake([previewSize.width floatValue], [previewSize.height floatValue]) error:error];
+ }
}
- (void)handleAutoFocusWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
// TODO: to remove ?
}
+#pragma mark - Video recording methods
+
- (void)pauseVideoRecordingWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
+ }
+
+ if (self.camera == nil) {
+ *error = [FlutterError errorWithCode:@"MULTI_CAMERA_UNSUPPORTED" message:@"this feature is currently not supported with multi camera feature" details:nil];
+ return;
+ }
+
[self.camera pauseVideoRecording];
}
-- (void)recordVideoPath:(nonnull NSString *)path options:(nullable VideoOptions *)options completion:(nonnull void (^)(FlutterError * _Nullable))completion {
- if (path == nil || path.length <= 0) {
- completion([FlutterError errorWithCode:@"PATH_NOT_SET" message:@"a file path must be set" details:nil]);
+- (void)recordVideoSensors:(nonnull NSArray *)sensors paths:(nonnull NSArray *)paths completion:(nonnull void (^)(FlutterError * _Nullable))completion {
+ if (self.camera == nil && self.multiCamera == nil) {
+ completion([FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil]);
+ return;
+ }
+
+ if (self.camera == nil) {
+ completion([FlutterError errorWithCode:@"MULTI_CAMERA_UNSUPPORTED" message:@"this feature is currently not supported with multi camera feature" details:nil]);
return;
}
- [_camera recordVideoAtPath:path withOptions:options completion:completion];
+ if (sensors == nil || [sensors count] <= 0 || paths == nil || [paths count] <= 0) {
+ completion([FlutterError errorWithCode:@"PATH_NOT_SET" message:@"at least one path must be set" details:nil]);
+ return;
+ }
+
+ if ([sensors count] != [paths count]) {
+ completion([FlutterError errorWithCode:@"PATH_INVALID" message:@"sensors & paths list seems to be different" details:nil]);
+ return;
+ }
+
+ [self.camera recordVideoAtPath:[paths firstObject] completion:completion];
}
-- (void)refreshWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- [_camera refresh];
+- (void)resumeVideoRecordingWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
+ }
+
+ if (self.camera == nil) {
+ *error = [FlutterError errorWithCode:@"MULTI_CAMERA_UNSUPPORTED" message:@"this feature is currently not supported with multi camera feature" details:nil];
+ return;
+ }
+
+ [self.camera resumeVideoRecording];
}
-- (nullable NSArray *)requestPermissionsWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- return @[];
+- (void)setRecordingAudioModeEnableAudio:(NSNumber *)enableAudio completion:(void(^)(NSNumber *_Nullable, FlutterError *_Nullable))completion {
+ if (self.camera == nil && self.multiCamera == nil) {
+ completion(nil, [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil]);
+ return;
+ }
+
+ if (self.camera == nil) {
+ completion(nil, [FlutterError errorWithCode:@"MULTI_CAMERA_UNSUPPORTED" message:@"this feature is currently not supported with multi camera feature" details:nil]);
+ return;
+ }
+
+ [self.camera setRecordingAudioMode:[enableAudio boolValue] completion:completion];
}
-- (void)resumeVideoRecordingWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- [self.camera resumeVideoRecording];
+- (void)stopRecordingVideoWithCompletion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion {
+ if (self.camera == nil && self.multiCamera == nil) {
+ completion(nil, [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil]);
+ return;
+ }
+
+ if (self.camera == nil) {
+ completion(nil, [FlutterError errorWithCode:@"MULTI_CAMERA_UNSUPPORTED" message:@"this feature is currently not supported with multi camera feature" details:nil]);
+ return;
+ }
+
+ dispatch_async(_dispatchQueue, ^{
+ [self->_camera stopRecordingVideo:completion];
+ });
}
-- (void)setAspectRatioAspectRatio:(nonnull NSString *)aspectRatio error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- if (aspectRatio == nil || aspectRatio.length <= 0) {
- *error = [FlutterError errorWithCode:@"RATIO_NOT_SET" message:@"a ratio must be set" details:nil];
+#pragma mark - General methods
+
+- (void)takePhotoSensors:(nonnull NSArray *)sensors paths:(nonnull NSArray *)paths completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion {
+ if (self.camera == nil && self.multiCamera == nil) {
+ completion(nil, [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil]);
return;
}
- AspectRatio aspectRatioMode = [self convertAspectRatio:aspectRatio];
- [self.camera setAspectRatio:aspectRatioMode];
+ if (sensors == nil || [sensors count] <= 0 || paths == nil || [paths count] <= 0) {
+ completion(0, [FlutterError errorWithCode:@"PATH_NOT_SET" message:@"at least one path must be set" details:nil]);
+ return;
+ }
+
+ if ([sensors count] != [paths count]) {
+ completion(0, [FlutterError errorWithCode:@"PATH_INVALID" message:@"sensors & paths list seems to be different" details:nil]);
+ return;
+ }
+
+ dispatch_async(_dispatchQueue, ^{
+ if (self.multiCamera != nil) {
+ [self->_multiCamera takePhotoSensors:sensors paths:paths completion:completion];
+ } else {
+ [self->_camera takePictureAtPath:[paths firstObject] completion:completion];
+ }
+ });
+}
+
+- (void)setMirrorFrontCameraMirror:(nonnull NSNumber *)mirror error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
+ }
+
+ BOOL mirrorFrontCamera = [mirror boolValue];
+ if (self.multiCamera != nil) {
+ [self.multiCamera setMirrorFrontCamera:mirrorFrontCamera error:error];
+ } else {
+ [self.camera setMirrorFrontCamera:mirrorFrontCamera error:error];
+ }
}
- (void)setCaptureModeMode:(nonnull NSString *)mode error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- CaptureModes captureMode = ([mode isEqualToString:@"PHOTO"]) ? Photo : Video;
- [_camera setCaptureMode:captureMode error:error];
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
+ }
+
+ CaptureModes captureMode = [CaptureModeUtils captureModeFromCaptureModeType:mode];
+ if (self.multiCamera != nil) {
+ if (captureMode == Video) {
+ *error = [FlutterError errorWithCode:@"MULTI_CAMERA_UNSUPPORTED" message:@"impossible to set video mode when multi camera" details:nil];
+ return;
+ }
+
+ [self.camera setCaptureMode:captureMode error:error];
+ } else {
+ [self.camera setCaptureMode:captureMode error:error];
+ }
}
- (void)setCorrectionBrightness:(nonnull NSNumber *)brightness error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- [_camera setBrightness:brightness error:error];
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
+ }
+ if (self.multiCamera != nil) {
+ [self.multiCamera setBrightness:brightness error:error];
+ } else {
+ [self.camera setBrightness:brightness error:error];
+ }
}
-- (void)setExifPreferencesExifPreferences:(ExifPreferences *)exifPreferences completion:(void(^)(NSNumber *_Nullable, FlutterError *_Nullable))completion{
- [self.camera setExifPreferencesGPSLocation: exifPreferences.saveGPSLocation completion:completion];
+
+- (void)setExifPreferencesExifPreferences:(ExifPreferences *)exifPreferences completion:(void(^)(NSNumber *_Nullable, FlutterError *_Nullable))completion {
+ if (self.camera == nil && self.multiCamera == nil) {
+ completion(nil, [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil]);
+ return;
+ }
+
+ if (self.multiCamera != nil) {
+ [self.multiCamera setExifPreferencesGPSLocation: exifPreferences.saveGPSLocation completion:completion];
+ } else {
+ [self.camera setExifPreferencesGPSLocation: exifPreferences.saveGPSLocation completion:completion];
+ }
}
- (void)setFlashModeMode:(nonnull NSString *)mode error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
@@ -203,20 +502,17 @@ - (void)setFlashModeMode:(nonnull NSString *)mode error:(FlutterError * _Nullabl
return;
}
- CameraFlashMode flash;
- if ([mode isEqualToString:@"NONE"]) {
- flash = None;
- } else if ([mode isEqualToString:@"ON"]) {
- flash = On;
- } else if ([mode isEqualToString:@"AUTO"]) {
- flash = Auto;
- } else if ([mode isEqualToString:@"ALWAYS"]) {
- flash = Always;
- } else {
- flash = None;
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
}
- [_camera setFlashMode:flash error:error];
+ CameraFlashMode flash = [FlashModeUtils flashFromString:mode];
+ if (self.multiCamera != nil) {
+ [self.multiCamera setFlashMode:flash error:error];
+ } else {
+ [self.camera setFlashMode:flash error:error];
+ }
}
- (void)setPhotoSizeSize:(nonnull PreviewSize *)size error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
@@ -225,174 +521,162 @@ - (void)setPhotoSizeSize:(nonnull PreviewSize *)size error:(FlutterError * _Null
return;
}
- if (self.camera == nil) {
+ if (self.camera == nil && self.multiCamera == nil) {
*error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
return;
}
+ if (self.camera == nil) {
+ *error = [FlutterError errorWithCode:@"MULTI_CAMERA_UNSUPPORTED" message:@"this feature is currently not supported with multi camera feature" details:nil];
+ return;
+ }
+
[self.camera setCameraPresset:CGSizeMake([size.width floatValue], [size.height floatValue])];
}
-- (void)setPreviewSizeSize:(nonnull PreviewSize *)size error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- if (size.width <= 0 || size.height <= 0) {
- *error = [FlutterError errorWithCode:@"NO_SIZE_SET" message:@"width and height must be set" details:nil];
+- (void)setAspectRatioAspectRatio:(nonnull NSString *)aspectRatio error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (aspectRatio == nil || aspectRatio.length <= 0) {
+ *error = [FlutterError errorWithCode:@"RATIO_NOT_SET" message:@"a ratio must be set" details:nil];
return;
}
- if (self.camera == nil) {
+ if (self.camera == nil && self.multiCamera == nil) {
*error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
return;
}
- [self.camera setPreviewSize:CGSizeMake([size.width floatValue], [size.height floatValue]) error:error];
+ AspectRatio aspectRatioMode = [AspectRatioUtils convertAspectRatio:aspectRatio];
+ if (self.multiCamera != nil) {
+ [self.multiCamera setAspectRatio:aspectRatioMode];
+ } else {
+ [self.camera setAspectRatio:aspectRatioMode];
+ }
}
-- (void)setRecordingAudioModeEnableAudio:(NSNumber *)enableAudio completion:(void(^)(NSNumber *_Nullable, FlutterError *_Nullable))completion {
- [_camera setRecordingAudioMode:[enableAudio boolValue] completion:completion];
-}
+#pragma mark - Preview methods
-- (void)setSensorSensor:(NSString *)sensor deviceId:(nullable NSString *)deviceId error:(FlutterError *_Nullable *_Nonnull)error {
- NSString *captureDeviceId;
-
- if (deviceId && ![deviceId isEqual:[NSNull null]]) {
- captureDeviceId = deviceId;
+- (nullable NSArray *)availableSizesWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return @[];
}
- CameraSensor sensorType = ([sensor isEqualToString:@"FRONT"]) ? Front : Back;
- [_camera setSensor:sensorType deviceId:captureDeviceId];
-}
-
-- (void)setZoomZoom:(nonnull NSNumber *)zoom error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- [_camera setZoom:[zoom floatValue] error:error];
-}
-
-- (void)receivedImageFromStreamWithError:(FlutterError *_Nullable *_Nonnull)error {
- [self.camera receivedImageFromStream];
-}
-
-- (nullable NSArray *)getFrontSensorsWithError:(FlutterError *_Nullable *_Nonnull)error {
- return [_camera getSensors:AVCaptureDevicePositionFront];
-}
-
-- (nullable NSArray *)getBackSensorsWithError:(FlutterError *_Nullable *_Nonnull)error {
- return [_camera getSensors:AVCaptureDevicePositionBack];
+ if (self.multiCamera != nil) {
+ return [CameraQualities captureFormatsForDevice:self.multiCamera.devices.firstObject.device];
+ } else {
+ return [CameraQualities captureFormatsForDevice:self.camera.captureDevice];
+ }
}
-- (void)setupCameraSensor:(nonnull NSString *)sensor aspectRatio:(nonnull NSString *)aspectRatio zoom:(nonnull NSNumber *)zoom mirrorFrontCamera:(nonnull NSNumber *)mirrorFrontCamera enablePhysicalButton:(nonnull NSNumber *)enablePhysicalButton flashMode:(nonnull NSString *)flashMode captureMode:(nonnull NSString *)captureMode enableImageStream:(nonnull NSNumber *)enableImageStream exifPreferences:(nonnull ExifPreferences *)exifPreferences completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion {
- if (![CameraPermissionsController checkAndRequestPermission]) {
- completion(nil, [FlutterError errorWithCode:@"MISSING_PERMISSION" message:@"you got to accept all permissions" details:nil]);
+- (void)setPreviewSizeSize:(nonnull PreviewSize *)size error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (size.width <= 0 || size.height <= 0) {
+ *error = [FlutterError errorWithCode:@"NO_SIZE_SET" message:@"width and height must be set" details:nil];
return;
}
- if (sensor == nil || sensor.length <= 0) {
- completion(nil, [FlutterError errorWithCode:@"SENSOR_ERROR" message:@"a sensor FRONT or BACK must be provided" details:nil]);
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
return;
}
- // If camera preview exist, dispose it
- if (self.camera != nil) {
- [self.camera dispose];
- self.camera = nil;
+ if (self.multiCamera != nil) {
+ [self.multiCamera setPreviewSize:CGSizeMake([size.width floatValue], [size.height floatValue]) error:error];
+ } else {
+ [self.camera setPreviewSize:CGSizeMake([size.width floatValue], [size.height floatValue]) error:error];
}
-
- AspectRatio aspectRatioMode = [self convertAspectRatio:aspectRatio];
- CaptureModes captureModeType = ([captureMode isEqualToString:@"PHOTO"]) ? Photo : Video;
- CameraSensor cameraSensor = ([sensor isEqualToString:@"FRONT"]) ? Front : Back;
- self.camera = [[CameraPreview alloc] initWithCameraSensor:cameraSensor
- streamImages:[enableImageStream boolValue]
- mirrorFrontCamera:[mirrorFrontCamera boolValue]
- enablePhysicalButton:[enablePhysicalButton boolValue]
- aspectRatioMode:aspectRatioMode
- captureMode:captureModeType
- completion:completion
- dispatchQueue:dispatch_queue_create("camerawesome.dispatchqueue", NULL)];
- [self->_registry textureFrameAvailable:self->_textureId];
-
- __weak typeof(self) weakSelf = self;
- self.camera.onFrameAvailable = ^{
- [weakSelf.registry textureFrameAvailable:weakSelf.textureId];
- };
-
- // Assign texture id
- self->_textureId = [self->_registry registerTexture:self.camera];
-
- completion(@(YES), nil);
}
-- (void)setupImageAnalysisStreamFormat:(nonnull NSString *)format width:(nonnull NSNumber *)width maxFramesPerSecond:(nullable NSNumber *)maxFramesPerSecond autoStart:(nonnull NSNumber *)autoStart error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- [_camera.imageStreamController setStreamImages:autoStart];
+- (nullable PreviewSize *)getEffectivPreviewSizeIndex:(nonnull NSNumber *)index error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ }
- // Force a frame rate to improve performance
- [_camera.imageStreamController setMaxFramesPerSecond:[maxFramesPerSecond floatValue]];
+ CGSize previewSize;
+ if (self.multiCamera != nil) {
+ previewSize = [self.multiCamera getEffectivPreviewSize];
+ } else {
+ previewSize = [self.camera getEffectivPreviewSize];
+ }
+
+ // height & width are inverted, this is intentional, because camera is always on portrait mode
+ return [PreviewSize makeWithWidth:@(previewSize.height) height:@(previewSize.width)];
}
-- (nullable NSNumber *)startWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- if (self.camera == nil) {
+#pragma mark - Zoom methods
+
+- (nullable NSNumber *)getMaxZoomWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
*error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
- return @(NO);
}
- dispatch_async(_dispatchQueue, ^{
- [self->_camera start];
- });
-
- return @(YES);
+ if (self.multiCamera != nil) {
+ return @([self.multiCamera getMaxZoom]);
+ } else {
+ return @([self.camera getMaxZoom]);
+ }
}
-- (void)stopRecordingVideoWithCompletion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion {
- dispatch_async(_dispatchQueue, ^{
- [self->_camera stopRecordingVideo:completion];
- });
+- (nullable NSNumber *)getMinZoomWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ return @(0);
}
-- (nullable NSNumber *)stopWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- if (self.camera == nil) {
+- (void)setZoomZoom:(nonnull NSNumber *)zoom error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
*error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
- return @(NO);
+ return;
}
- dispatch_async(_dispatchQueue, ^{
- [self->_registry unregisterTexture:self->_textureId]; // Lets try this
- [self->_camera stop];
- });
-
- return @(YES);
+ if (self.multiCamera != nil) {
+ [self.multiCamera setZoom:[zoom floatValue] error:error];
+ } else {
+ [self.camera setZoom:[zoom floatValue] error:error];
+ }
}
-- (void)takePhotoPath:(nonnull NSString *)path completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion {
- if (path == nil || path.length <= 0) {
- completion(nil, [FlutterError errorWithCode:@"PATH_NOT_SET" message:@"a file path must be set" details:nil]);
+#pragma mark - Image stream methods
+
+- (void)receivedImageFromStreamWithError:(FlutterError *_Nullable *_Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
return;
}
- dispatch_async(_dispatchQueue, ^{
- [self->_camera takePictureAtPath:path completion:completion];
- });
+ if (self.camera == nil) {
+ *error = [FlutterError errorWithCode:@"MULTI_CAMERA_UNSUPPORTED" message:@"this feature is currently not supported with multi camera feature" details:nil];
+ return;
+ }
+
+ [self.camera receivedImageFromStream];
}
-- (void)requestPermissionsSaveGpsLocation:(nonnull NSNumber *)saveGpsLocation completion:(nonnull void (^)(NSArray * _Nullable, FlutterError * _Nullable))completion {
- NSMutableArray *permissions = [NSMutableArray new];
-
- const Boolean cameraGranted = [CameraPermissionsController checkAndRequestPermission];
- if (cameraGranted) {
- [permissions addObject:@"camera"];
+- (void)setupImageAnalysisStreamFormat:(nonnull NSString *)format width:(nonnull NSNumber *)width maxFramesPerSecond:(nullable NSNumber *)maxFramesPerSecond autoStart:(nonnull NSNumber *)autoStart error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
}
- bool needToSaveGPSLocation = [saveGpsLocation boolValue];
- if (needToSaveGPSLocation) {
- // TODO: move this to permissions object
- [self.camera.locationController requestWhenInUseAuthorizationOnGranted:^{
- [permissions addObject:@"location"];
-
- completion(permissions, nil);
- } declined:^{
- completion(permissions, nil);
- }];
+ if (self.camera == nil) {
+ *error = [FlutterError errorWithCode:@"MULTI_CAMERA_UNSUPPORTED" message:@"this feature is currently not supported with multi camera feature" details:nil];
+ return;
}
+
+ [self.camera.imageStreamController setStreamImages:autoStart];
+
+ // Force a frame rate to improve performance
+ [self.camera.imageStreamController setMaxFramesPerSecond:[maxFramesPerSecond floatValue]];
}
-
- (void)startAnalysisWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
+ }
+
+ if (self.camera == nil) {
+ *error = [FlutterError errorWithCode:@"MULTI_CAMERA_UNSUPPORTED" message:@"this feature is currently not supported with multi camera feature" details:nil];
+ return;
+ }
+
if (self.camera.videoController.isRecording) {
*error = [FlutterError errorWithCode:@"VIDEO_ERROR" message:@"can't start image stream because video is recording" details:@""];
return;
@@ -403,32 +687,61 @@ - (void)startAnalysisWithError:(FlutterError * _Nullable __autoreleasing * _Nonn
- (void)stopAnalysisWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
+ }
+
+ if (self.camera == nil) {
+ *error = [FlutterError errorWithCode:@"MULTI_CAMERA_UNSUPPORTED" message:@"this feature is currently not supported with multi camera feature" details:nil];
+ return;
+ }
+
[self.camera.imageStreamController setStreamImages:false];
}
+- (void)isVideoRecordingAndImageAnalysisSupportedSensor:(PigeonSensorPosition)sensor completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion {
+ completion(@(NO), nil);
+}
-- (void)setFilterMatrix:(NSArray *)matrix error:(FlutterError *_Nullable *_Nonnull)error {
- // TODO: try to use CIFilter when taking a picture
+#pragma mark - Sensors methods
+
+- (nullable NSArray *)getFrontSensorsWithError:(FlutterError *_Nullable *_Nonnull)error {
+ return [SensorsController getSensors:AVCaptureDevicePositionFront];
}
-- (void)setMirrorFrontCameraMirror:(nonnull NSNumber *)mirror error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
- [_camera setMirrorFrontCamera:[mirror boolValue] error:error];
+- (nullable NSArray *)getBackSensorsWithError:(FlutterError *_Nullable *_Nonnull)error {
+ return [SensorsController getSensors:AVCaptureDevicePositionBack];
}
-- (AspectRatio)convertAspectRatio:(NSString *)aspectRatioStr {
- AspectRatio aspectRatioMode;
- if ([aspectRatioStr isEqualToString:@"RATIO_4_3"]) {
- aspectRatioMode = Ratio4_3;
- } else if ([aspectRatioStr isEqualToString:@"RATIO_16_9"]) {
- aspectRatioMode = Ratio16_9;
+- (void)setSensorSensors:(nonnull NSArray *)sensors error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ if (self.camera == nil && self.multiCamera == nil) {
+ *error = [FlutterError errorWithCode:@"CAMERA_MUST_BE_INIT" message:@"init must be call before start" details:nil];
+ return;
+ }
+
+ if (sensors != nil && [sensors count] > 1 && self.multiCamera != nil) {
+ if ([self.multiCamera.sensors count] != [sensors count]) {
+ *error = [FlutterError errorWithCode:@"SENSORS_COUNT_INVALID" message:@"sensors count seems to be different, you can only update current sensors, adding or deleting is impossible for now" details:nil];
+ return;
+ }
+
+ [self.multiCamera setSensors:sensors];
} else {
- aspectRatioMode = Ratio1_1;
+ [self.camera setSensor:sensors.firstObject];
}
- return aspectRatioMode;
}
-- (void)isVideoRecordingAndImageAnalysisSupportedSensor:(NSString *)sensor completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion{
- completion(@(YES), nil);
+#pragma mark - Filter methods
+
+- (void)setFilterMatrix:(NSArray *)matrix error:(FlutterError *_Nullable *_Nonnull)error {
+ // TODO: try to use CIFilter when taking a picture
+}
+
+#pragma mark - Multi camera methods
+
+- (nullable NSNumber *)isMultiCamSupportedWithError:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
+ return [NSNumber numberWithBool: [MultiCameraController isMultiCamSupported]];
}
- (void)bgra8888toJpegBgra8888image:(nonnull AnalysisImageWrapper *)bgra8888image jpegQuality:(nonnull NSNumber *)jpegQuality completion:(nonnull void (^)(AnalysisImageWrapper * _Nullable, FlutterError * _Nullable))completion {
diff --git a/ios/Classes/Constants/CameraSensor.h b/ios/Classes/Constants/CameraSensor.h
index 803d99eb..990be12e 100644
--- a/ios/Classes/Constants/CameraSensor.h
+++ b/ios/Classes/Constants/CameraSensor.h
@@ -8,9 +8,9 @@
#ifndef CameraSensor_h
#define CameraSensor_h
-typedef enum {
- Front,
- Back,
-} CameraSensor;
+//typedef enum {
+// Front,
+// Back,
+//} CameraSensor;
#endif /* CameraSensor_h */
diff --git a/ios/Classes/Constants/CaptureModes.h b/ios/Classes/Constants/CaptureModes.h
index 77163394..4506c04f 100644
--- a/ios/Classes/Constants/CaptureModes.h
+++ b/ios/Classes/Constants/CaptureModes.h
@@ -11,6 +11,8 @@
typedef enum {
Photo,
Video,
+ Preview,
+ AnalysisOnly
} CaptureModes;
#endif /* CaptureModes_h */
diff --git a/ios/Classes/Controllers/MultiCamera/MultiCameraController.h b/ios/Classes/Controllers/MultiCamera/MultiCameraController.h
new file mode 100644
index 00000000..307b0622
--- /dev/null
+++ b/ios/Classes/Controllers/MultiCamera/MultiCameraController.h
@@ -0,0 +1,19 @@
+//
+// MultiCameraController.h
+// camerawesome
+//
+// Created by Dimitri Dessus on 29/03/2023.
+//
+
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVFoundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface MultiCameraController : NSObject
+
++ (BOOL)isMultiCamSupported;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/ios/Classes/Controllers/MultiCamera/MultiCameraController.m b/ios/Classes/Controllers/MultiCamera/MultiCameraController.m
new file mode 100644
index 00000000..84d6c8b9
--- /dev/null
+++ b/ios/Classes/Controllers/MultiCamera/MultiCameraController.m
@@ -0,0 +1,16 @@
+//
+// MultiCameraController.m
+// camerawesome
+//
+// Created by Dimitri Dessus on 29/03/2023.
+//
+
+#import "MultiCameraController.h"
+
+@implementation MultiCameraController
+
++ (BOOL)isMultiCamSupported {
+ return AVCaptureMultiCamSession.isMultiCamSupported;
+}
+
+@end
diff --git a/ios/Classes/Controllers/Picture/CameraPictureController.h b/ios/Classes/Controllers/Picture/CameraPictureController.h
index cee2fce2..793717a7 100644
--- a/ios/Classes/Controllers/Picture/CameraPictureController.h
+++ b/ios/Classes/Controllers/Picture/CameraPictureController.h
@@ -13,6 +13,7 @@
#import "CameraSensor.h"
#import "AspectRatio.h"
+#import "Pigeon.h"
NS_ASSUME_NONNULL_BEGIN
@@ -23,7 +24,7 @@ typedef void(^OnPictureTaken)(void);
@property(readonly, nonatomic) bool saveGPSLocation;
@property(readonly, nonatomic) bool mirrorFrontCamera;
@property(readonly, copy) void (^completion)(NSNumber * _Nullable, FlutterError * _Nullable);
-@property(readonly, nonatomic) CameraSensor sensor;
+@property(readonly, nonatomic) PigeonSensorPosition sensorPosition;
@property(readonly, nonatomic) float aspectRatio;
@property(readonly, nonatomic) AspectRatio aspectRatioType;
@property NSInteger orientation;
@@ -32,13 +33,13 @@ typedef void(^OnPictureTaken)(void);
@property(readonly, nonatomic) AVCaptureDevicePosition cameraPosition;
- (instancetype)initWithPath:(NSString *)path
- orientation:(NSInteger)orientation
- sensor:(CameraSensor)sensor
- saveGPSLocation:(bool)saveGPSLocation
- mirrorFrontCamera:(bool)mirrorFrontCamera
- aspectRatio:(AspectRatio)aspectRatio
- completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion
- callback:(OnPictureTaken)callback;
+ orientation:(NSInteger)orientation
+ sensorPosition:(PigeonSensorPosition)sensorPosition
+ saveGPSLocation:(bool)saveGPSLocation
+ mirrorFrontCamera:(bool)mirrorFrontCamera
+ aspectRatio:(AspectRatio)aspectRatio
+ completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion
+ callback:(OnPictureTaken)callback;
@end
NS_ASSUME_NONNULL_END
diff --git a/ios/Classes/Controllers/Picture/CameraPictureController.m b/ios/Classes/Controllers/Picture/CameraPictureController.m
index d6427e6d..80e7ffdb 100644
--- a/ios/Classes/Controllers/Picture/CameraPictureController.m
+++ b/ios/Classes/Controllers/Picture/CameraPictureController.m
@@ -14,20 +14,20 @@ @implementation CameraPictureController {
}
- (instancetype)initWithPath:(NSString *)path
- orientation:(NSInteger)orientation
- sensor:(CameraSensor)sensor
- saveGPSLocation:(bool)saveGPSLocation
- mirrorFrontCamera:(bool)mirrorFrontCamera
- aspectRatio:(AspectRatio)aspectRatio
- completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion
- callback:(OnPictureTaken)callback {
+ orientation:(NSInteger)orientation
+ sensorPosition:(PigeonSensorPosition)sensorPosition
+ saveGPSLocation:(bool)saveGPSLocation
+ mirrorFrontCamera:(bool)mirrorFrontCamera
+ aspectRatio:(AspectRatio)aspectRatio
+ completion:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion
+ callback:(OnPictureTaken)callback {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_path = path;
_completion = completion;
_orientation = orientation;
_completionBlock = callback;
- _sensor = sensor;
+ _sensorPosition = sensorPosition;
_saveGPSLocation = saveGPSLocation;
_aspectRatioType = aspectRatio;
_mirrorFrontCamera = mirrorFrontCamera;
@@ -188,15 +188,15 @@ - (UIImage *)imageByCroppingImage:(UIImage *)image toSize:(CGSize)size {
- (UIImageOrientation)getJpegOrientation {
switch (_orientation) {
case UIDeviceOrientationPortrait:
- if (_sensor == Front && _mirrorFrontCamera) {
+ if (self.sensorPosition == PigeonSensorPositionFront && _mirrorFrontCamera) {
return UIImageOrientationLeftMirrored;
} else {
return UIImageOrientationRight;
}
case UIDeviceOrientationLandscapeRight:
- return (_sensor == Back) ? UIImageOrientationUp : UIImageOrientationDown;
+ return (self.sensorPosition == PigeonSensorPositionBack) ? UIImageOrientationUp : UIImageOrientationDown;
case UIDeviceOrientationLandscapeLeft:
- return (_sensor == Back) ? UIImageOrientationDown : UIImageOrientationUp;
+ return (self.sensorPosition == PigeonSensorPositionBack) ? UIImageOrientationDown : UIImageOrientationUp;
default:
return UIImageOrientationLeft;
}
diff --git a/ios/Classes/Controllers/Sensors/SensorsController.h b/ios/Classes/Controllers/Sensors/SensorsController.h
new file mode 100644
index 00000000..a7bad506
--- /dev/null
+++ b/ios/Classes/Controllers/Sensors/SensorsController.h
@@ -0,0 +1,19 @@
+//
+// SensorsController.h
+// camerawesome
+//
+// Created by Dimitri Dessus on 28/03/2023.
+//
+
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVFoundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface SensorsController : NSObject
+
++ (NSArray *)getSensors:(AVCaptureDevicePosition)position;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/ios/Classes/Controllers/Sensors/SensorsController.m b/ios/Classes/Controllers/Sensors/SensorsController.m
new file mode 100644
index 00000000..25c2a067
--- /dev/null
+++ b/ios/Classes/Controllers/Sensors/SensorsController.m
@@ -0,0 +1,47 @@
+//
+// SensorsController.m
+// camerawesome
+//
+// Created by Dimitri Dessus on 28/03/2023.
+//
+
+#import "SensorsController.h"
+#import "Pigeon.h"
+
+@implementation SensorsController
+
++ (NSArray *)getSensors:(AVCaptureDevicePosition)position {
+ NSMutableArray *sensors = [NSMutableArray new];
+
+ NSArray *sensorsType = @[AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera, AVCaptureDeviceTypeBuiltInUltraWideCamera, AVCaptureDeviceTypeBuiltInTrueDepthCamera];
+
+ AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
+ discoverySessionWithDeviceTypes:sensorsType
+ mediaType:AVMediaTypeVideo
+ position:AVCaptureDevicePositionUnspecified];
+
+ for (AVCaptureDevice *device in discoverySession.devices) {
+ PigeonSensorType type;
+ if (device.deviceType == AVCaptureDeviceTypeBuiltInTelephotoCamera) {
+ type = PigeonSensorTypeTelephoto;
+ } else if (device.deviceType == AVCaptureDeviceTypeBuiltInUltraWideCamera) {
+ type = PigeonSensorTypeUltraWideAngle;
+ } else if (device.deviceType == AVCaptureDeviceTypeBuiltInTrueDepthCamera) {
+ type = PigeonSensorTypeTrueDepth;
+ } else if (device.deviceType == AVCaptureDeviceTypeBuiltInWideAngleCamera) {
+ type = PigeonSensorTypeWideAngle;
+ } else {
+ type = PigeonSensorTypeUnknown;
+ }
+
+ PigeonSensorTypeDevice *sensorType = [PigeonSensorTypeDevice makeWithSensorType:type name:device.localizedName iso:[NSNumber numberWithFloat:device.ISO] flashAvailable:[NSNumber numberWithBool:device.flashAvailable] uid:device.uniqueID];
+
+ if (device.position == position) {
+ [sensors addObject:sensorType];
+ }
+ }
+
+ return sensors;
+}
+
+@end
diff --git a/ios/Classes/Controllers/Video/VideoController.h b/ios/Classes/Controllers/Video/VideoController.h
index 8a1a0218..ec761936 100644
--- a/ios/Classes/Controllers/Video/VideoController.h
+++ b/ios/Classes/Controllers/Video/VideoController.h
@@ -21,7 +21,9 @@ typedef void(^OnVideoWriterSetup)(void);
@property(readonly, nonatomic) bool isPaused;
@property(readonly, nonatomic) bool isAudioEnabled;
@property(readonly, nonatomic) bool isAudioSetup;
+@property(readonly, nonatomic) CupertinoVideoOptions *options;
@property NSInteger orientation;
+@property(readonly, nonatomic) AVCaptureDevice *captureDevice;
@property(readonly, nonatomic) AVAssetWriter *videoWriter;
@property(readonly, nonatomic) AVAssetWriterInput *videoWriterInput;
@property(readonly, nonatomic) AVAssetWriterInput *audioWriterInput;
@@ -35,7 +37,7 @@ typedef void(^OnVideoWriterSetup)(void);
@property(assign, nonatomic) CMTime audioTimeOffset;
- (instancetype)init;
-- (void)recordVideoAtPath:(NSString *)path orientation:(NSInteger)orientation audioSetupCallback:(OnAudioSetup)audioSetupCallback videoWriterCallback:(OnVideoWriterSetup)videoWriterCallback options:(VideoOptions *)options completion:(nonnull void (^)(FlutterError * _Nullable))completion;
+- (void)recordVideoAtPath:(NSString *)path captureDevice:(AVCaptureDevice *)device orientation:(NSInteger)orientation audioSetupCallback:(OnAudioSetup)audioSetupCallback videoWriterCallback:(OnVideoWriterSetup)videoWriterCallback options:(CupertinoVideoOptions *)options completion:(nonnull void (^)(FlutterError * _Nullable))completion;
- (void)stopRecordingVideo:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion;
- (void)pauseVideoRecording;
- (void)resumeVideoRecording;
diff --git a/ios/Classes/Controllers/Video/VideoController.m b/ios/Classes/Controllers/Video/VideoController.m
index 46a3a0d6..5dee3cea 100644
--- a/ios/Classes/Controllers/Video/VideoController.m
+++ b/ios/Classes/Controllers/Video/VideoController.m
@@ -23,7 +23,9 @@ - (instancetype)init {
# pragma mark - User video interactions
/// Start recording video at given path
-- (void)recordVideoAtPath:(NSString *)path orientation:(NSInteger)orientation audioSetupCallback:(OnAudioSetup)audioSetupCallback videoWriterCallback:(OnVideoWriterSetup)videoWriterCallback options:(VideoOptions *)options completion:(nonnull void (^)(FlutterError * _Nullable))completion {
+- (void)recordVideoAtPath:(NSString *)path captureDevice:(AVCaptureDevice *)device orientation:(NSInteger)orientation audioSetupCallback:(OnAudioSetup)audioSetupCallback videoWriterCallback:(OnVideoWriterSetup)videoWriterCallback options:(CupertinoVideoOptions *)options completion:(nonnull void (^)(FlutterError * _Nullable))completion {
+ _options = options;
+
// Create audio & video writer
if (![self setupWriterForPath:path audioSetupCallback:audioSetupCallback options:options completion:completion]) {
completion([FlutterError errorWithCode:@"VIDEO_ERROR" message:@"impossible to write video at path" details:path]);
@@ -38,10 +40,21 @@ - (void)recordVideoAtPath:(NSString *)path orientation:(NSInteger)orientation au
_videoIsDisconnected = NO;
_audioIsDisconnected = NO;
_orientation = orientation;
+ _captureDevice = device;
+
+ // Change video FPS if provided
+ if (_options && _options.fps != nil && _options.fps > 0) {
+ [self adjustCameraFPS:_options.fps];
+ }
}
/// Stop recording video
- (void)stopRecordingVideo:(nonnull void (^)(NSNumber * _Nullable, FlutterError * _Nullable))completion {
+ if (_options && _options.fps != nil && _options.fps > 0) {
+ // Reset camera FPS
+ [self adjustCameraFPS:@(30)];
+ }
+
if (_isRecording) {
_isRecording = NO;
if (_videoWriter.status != AVAssetWriterStatusUnknown) {
@@ -69,7 +82,7 @@ - (void)resumeVideoRecording {
# pragma mark - Audio & Video writers
/// Setup video channel & write file on path
-- (BOOL)setupWriterForPath:(NSString *)path audioSetupCallback:(OnAudioSetup)audioSetupCallback options:(VideoOptions *)options completion:(nonnull void (^)(FlutterError * _Nullable))completion {
+- (BOOL)setupWriterForPath:(NSString *)path audioSetupCallback:(OnAudioSetup)audioSetupCallback options:(CupertinoVideoOptions *)options completion:(nonnull void (^)(FlutterError * _Nullable))completion {
NSError *error = nil;
NSURL *outputURL;
if (path != nil) {
@@ -85,11 +98,13 @@ - (BOOL)setupWriterForPath:(NSString *)path audioSetupCallback:(OnAudioSetup)aud
AVVideoCodecType codecType = [self getBestCodecTypeAccordingOptions:options];
AVFileType fileType = [self getBestFileTypeAccordingOptions:options];
- NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:codecType, AVVideoCodecKey,[NSNumber numberWithInt:_previewSize.height], AVVideoWidthKey,
- [NSNumber numberWithInt:_previewSize.width], AVVideoHeightKey,
- nil];
- _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
- outputSettings:videoSettings];
+ NSDictionary *videoSettings = @{
+ AVVideoCodecKey : codecType,
+ AVVideoWidthKey : @(_previewSize.height),
+ AVVideoHeightKey : @(_previewSize.width),
+ };
+
+ _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
[_videoWriterInput setTransform:[self getVideoOrientation]];
_videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor
@@ -186,6 +201,24 @@ - (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset CF_R
return sout;
}
+/// Adjust video preview & recording to specified FPS
+- (void)adjustCameraFPS:(NSNumber *)fps {
+ NSArray *frameRateRanges = _captureDevice.activeFormat.videoSupportedFrameRateRanges;
+
+ if (frameRateRanges.count > 0) {
+ AVFrameRateRange *frameRateRange = frameRateRanges.firstObject;
+ NSError *error = nil;
+
+ if ([_captureDevice lockForConfiguration:&error]) {
+ CMTime frameDuration = CMTimeMake(1, [fps intValue]);
+ if (CMTIME_COMPARE_INLINE(frameDuration, <=, frameRateRange.maxFrameDuration) && CMTIME_COMPARE_INLINE(frameDuration, >=, frameRateRange.minFrameDuration)) {
+ _captureDevice.activeVideoMinFrameDuration = frameDuration;
+ }
+ [_captureDevice unlockForConfiguration];
+ }
+ }
+}
+
# pragma mark - Camera Delegates
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection captureVideoOutput:(AVCaptureVideoDataOutput *)captureVideoOutput {
@@ -259,49 +292,69 @@ - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleB
# pragma mark - Settings converters
-- (AVFileType)getBestFileTypeAccordingOptions:(VideoOptions *)options {
+- (AVFileType)getBestFileTypeAccordingOptions:(CupertinoVideoOptions *)options {
AVFileType fileType = AVFileTypeQuickTimeMovie;
if (options && options != (id)[NSNull null]) {
- NSString *type = options.fileType;
- if ([type isEqualToString:@"quickTimeMovie"]) {
- fileType = AVFileTypeQuickTimeMovie;
- } else if ([type isEqualToString:@"mpeg4"]) {
- fileType = AVFileTypeMPEG4;
- } else if ([type isEqualToString:@"appleM4V"]) {
- fileType = AVFileTypeAppleM4V;
- } else if ([type isEqualToString:@"type3GPP"]) {
- fileType = AVFileType3GPP;
- } else if ([type isEqualToString:@"type3GPP2"]) {
- fileType = AVFileType3GPP2;
+ CupertinoFileType type = options.fileType;
+ switch (type) {
+ case CupertinoFileTypeQuickTimeMovie:
+ fileType = AVFileTypeQuickTimeMovie;
+ break;
+ case CupertinoFileTypeMpeg4:
+ fileType = AVFileTypeMPEG4;
+ break;
+ case CupertinoFileTypeAppleM4V:
+ fileType = AVFileTypeAppleM4V;
+ break;
+ case CupertinoFileTypeType3GPP:
+ fileType = AVFileType3GPP;
+ break;
+ case CupertinoFileTypeType3GPP2:
+ fileType = AVFileType3GPP2;
+ break;
+ default:
+ break;
}
}
return fileType;
}
-- (AVVideoCodecType)getBestCodecTypeAccordingOptions:(VideoOptions *)options {
+- (AVVideoCodecType)getBestCodecTypeAccordingOptions:(CupertinoVideoOptions *)options {
AVVideoCodecType codecType = AVVideoCodecTypeH264;
if (options && options != (id)[NSNull null]) {
- NSString *codec = options.codec;
- if ([codec isEqualToString:@"h264"]) {
- codecType = AVVideoCodecTypeH264;
- } else if ([codec isEqualToString:@"hevc"]) {
- codecType = AVVideoCodecTypeHEVC;
- } else if ([codec isEqualToString:@"hevcWithAlpha"]) {
- codecType = AVVideoCodecTypeHEVCWithAlpha;
- } else if ([codec isEqualToString:@"jpeg"]) {
- codecType = AVVideoCodecTypeJPEG;
- } else if ([codec isEqualToString:@"appleProRes4444"]) {
- codecType = AVVideoCodecTypeAppleProRes4444;
- } else if ([codec isEqualToString:@"appleProRes422"]) {
- codecType = AVVideoCodecTypeAppleProRes422;
- } else if ([codec isEqualToString:@"appleProRes422HQ"]) {
- codecType = AVVideoCodecTypeAppleProRes422HQ;
- } else if ([codec isEqualToString:@"appleProRes422LT"]) {
- codecType = AVVideoCodecTypeAppleProRes422LT;
- } else if ([codec isEqualToString:@"appleProRes422Proxy"]) {
- codecType = AVVideoCodecTypeAppleProRes422Proxy;
+ CupertinoCodecType codec = options.codec;
+ switch (codec) {
+ case CupertinoCodecTypeH264:
+ codecType = AVVideoCodecTypeH264;
+ break;
+ case CupertinoCodecTypeHevc:
+ codecType = AVVideoCodecTypeHEVC;
+ break;
+ case CupertinoCodecTypeHevcWithAlpha:
+ codecType = AVVideoCodecTypeHEVCWithAlpha;
+ break;
+ case CupertinoCodecTypeJpeg:
+ codecType = AVVideoCodecTypeJPEG;
+ break;
+ case CupertinoCodecTypeAppleProRes4444:
+ codecType = AVVideoCodecTypeAppleProRes4444;
+ break;
+ case CupertinoCodecTypeAppleProRes422:
+ codecType = AVVideoCodecTypeAppleProRes422;
+ break;
+ case CupertinoCodecTypeAppleProRes422HQ:
+ codecType = AVVideoCodecTypeAppleProRes422HQ;
+ break;
+ case CupertinoCodecTypeAppleProRes422LT:
+ codecType = AVVideoCodecTypeAppleProRes422LT;
+ break;
+ case CupertinoCodecTypeAppleProRes422Proxy:
+ codecType = AVVideoCodecTypeAppleProRes422Proxy;
+ break;
+ default:
+ break;
}
}
return codecType;
diff --git a/ios/Classes/Pigeon/Pigeon.h b/ios/Classes/Pigeon/Pigeon.h
index 23b3103a..d0f7a6ae 100644
--- a/ios/Classes/Pigeon/Pigeon.h
+++ b/ios/Classes/Pigeon/Pigeon.h
@@ -1,4 +1,4 @@
-// Autogenerated from Pigeon (v9.1.0), do not edit directly.
+// Autogenerated from Pigeon (v9.2.5), do not edit directly.
// See also: https://pub.dev/packages/pigeon
#import
@@ -10,6 +10,55 @@
NS_ASSUME_NONNULL_BEGIN
+typedef NS_ENUM(NSUInteger, PigeonSensorPosition) {
+ PigeonSensorPositionBack = 0,
+ PigeonSensorPositionFront = 1,
+ PigeonSensorPositionUnknown = 2,
+};
+
+/// Video recording quality, from [sd] to [uhd], with [highest] and [lowest] to
+/// let the device choose the best/worst quality available.
+/// [highest] is the default quality.
+///
+/// Qualities are defined like this:
+/// [sd] < [hd] < [fhd] < [uhd]
+typedef NS_ENUM(NSUInteger, VideoRecordingQuality) {
+ VideoRecordingQualityLowest = 0,
+ VideoRecordingQualitySd = 1,
+ VideoRecordingQualityHd = 2,
+ VideoRecordingQualityFhd = 3,
+ VideoRecordingQualityUhd = 4,
+ VideoRecordingQualityHighest = 5,
+};
+
+/// If the specified [VideoRecordingQuality] is not available on the device,
+/// the [VideoRecordingQuality] will fallback to [higher] or [lower] quality.
+/// [higher] is the default fallback strategy.
+typedef NS_ENUM(NSUInteger, QualityFallbackStrategy) {
+ QualityFallbackStrategyHigher = 0,
+ QualityFallbackStrategyLower = 1,
+};
+
+typedef NS_ENUM(NSUInteger, CupertinoFileType) {
+ CupertinoFileTypeQuickTimeMovie = 0,
+ CupertinoFileTypeMpeg4 = 1,
+ CupertinoFileTypeAppleM4V = 2,
+ CupertinoFileTypeType3GPP = 3,
+ CupertinoFileTypeType3GPP2 = 4,
+};
+
+typedef NS_ENUM(NSUInteger, CupertinoCodecType) {
+ CupertinoCodecTypeH264 = 0,
+ CupertinoCodecTypeHevc = 1,
+ CupertinoCodecTypeHevcWithAlpha = 2,
+ CupertinoCodecTypeJpeg = 3,
+ CupertinoCodecTypeAppleProRes4444 = 4,
+ CupertinoCodecTypeAppleProRes422 = 5,
+ CupertinoCodecTypeAppleProRes422HQ = 6,
+ CupertinoCodecTypeAppleProRes422LT = 7,
+ CupertinoCodecTypeAppleProRes422Proxy = 8,
+};
+
typedef NS_ENUM(NSUInteger, PigeonSensorType) {
/// A built-in wide-angle camera.
///
@@ -50,7 +99,10 @@ typedef NS_ENUM(NSUInteger, AnalysisRotation) {
@class PreviewSize;
@class ExifPreferences;
+@class PigeonSensor;
@class VideoOptions;
+@class AndroidVideoOptions;
+@class CupertinoVideoOptions;
@class PigeonSensorTypeDevice;
@class AndroidFocusSettings;
@class PlaneWrapper;
@@ -73,13 +125,52 @@ typedef NS_ENUM(NSUInteger, AnalysisRotation) {
@property(nonatomic, strong) NSNumber * saveGPSLocation;
@end
+@interface PigeonSensor : NSObject
+/// `init` unavailable to enforce nonnull fields, see the `make` class method.
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)makeWithPosition:(PigeonSensorPosition)position
+ type:(PigeonSensorType)type
+ deviceId:(nullable NSString *)deviceId;
+@property(nonatomic, assign) PigeonSensorPosition position;
+@property(nonatomic, assign) PigeonSensorType type;
+@property(nonatomic, copy, nullable) NSString * deviceId;
+@end
+
+/// Video recording options. Some of them are specific to each platform.
@interface VideoOptions : NSObject
/// `init` unavailable to enforce nonnull fields, see the `make` class method.
- (instancetype)init NS_UNAVAILABLE;
-+ (instancetype)makeWithFileType:(NSString *)fileType
- codec:(NSString *)codec;
-@property(nonatomic, copy) NSString * fileType;
-@property(nonatomic, copy) NSString * codec;
++ (instancetype)makeWithEnableAudio:(NSNumber *)enableAudio
+ android:(nullable AndroidVideoOptions *)android
+ ios:(nullable CupertinoVideoOptions *)ios;
+/// Enable audio while video recording
+@property(nonatomic, strong) NSNumber * enableAudio;
+@property(nonatomic, strong, nullable) AndroidVideoOptions * android;
+@property(nonatomic, strong, nullable) CupertinoVideoOptions * ios;
+@end
+
+@interface AndroidVideoOptions : NSObject
++ (instancetype)makeWithBitrate:(nullable NSNumber *)bitrate
+ quality:(VideoRecordingQuality)quality
+ fallbackStrategy:(QualityFallbackStrategy)fallbackStrategy;
+/// The bitrate of the video recording. Only set it if a custom bitrate is
+/// desired.
+@property(nonatomic, strong, nullable) NSNumber * bitrate;
+/// The quality of the video recording, defaults to [VideoRecordingQuality.highest].
+@property(nonatomic, assign) VideoRecordingQuality quality;
+@property(nonatomic, assign) QualityFallbackStrategy fallbackStrategy;
+@end
+
+@interface CupertinoVideoOptions : NSObject
++ (instancetype)makeWithFileType:(CupertinoFileType)fileType
+ codec:(CupertinoCodecType)codec
+ fps:(nullable NSNumber *)fps;
+/// Specify video file type, defaults to [AVFileTypeQuickTimeMovie].
+@property(nonatomic, assign) CupertinoFileType fileType;
+/// Specify video codec, defaults to [AVVideoCodecTypeH264].
+@property(nonatomic, assign) CupertinoCodecType codec;
+/// Specify video fps, defaults to [30].
+@property(nonatomic, strong, nullable) NSNumber * fps;
@end
@interface PigeonSensorTypeDevice : NSObject
@@ -177,16 +268,16 @@ extern void AnalysisImageUtilsSetup(id binaryMessenger,
NSObject *CameraInterfaceGetCodec(void);
@protocol CameraInterface
-- (void)setupCameraSensor:(NSString *)sensor aspectRatio:(NSString *)aspectRatio zoom:(NSNumber *)zoom mirrorFrontCamera:(NSNumber *)mirrorFrontCamera enablePhysicalButton:(NSNumber *)enablePhysicalButton flashMode:(NSString *)flashMode captureMode:(NSString *)captureMode enableImageStream:(NSNumber *)enableImageStream exifPreferences:(ExifPreferences *)exifPreferences completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+- (void)setupCameraSensors:(NSArray *)sensors aspectRatio:(NSString *)aspectRatio zoom:(NSNumber *)zoom mirrorFrontCamera:(NSNumber *)mirrorFrontCamera enablePhysicalButton:(NSNumber *)enablePhysicalButton flashMode:(NSString *)flashMode captureMode:(NSString *)captureMode enableImageStream:(NSNumber *)enableImageStream exifPreferences:(ExifPreferences *)exifPreferences videoOptions:(nullable VideoOptions *)videoOptions completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
/// @return `nil` only when `error != nil`.
-- (nullable NSArray *)checkPermissionsWithError:(FlutterError *_Nullable *_Nonnull)error;
+- (nullable NSArray *)checkPermissionsPermissions:(NSArray *)permissions error:(FlutterError *_Nullable *_Nonnull)error;
/// Returns given [CamerAwesomePermission] list (as String). Location permission might be
/// refused but the app should still be able to run.
- (void)requestPermissionsSaveGpsLocation:(NSNumber *)saveGpsLocation completion:(void (^)(NSArray *_Nullable, FlutterError *_Nullable))completion;
/// @return `nil` only when `error != nil`.
-- (nullable NSNumber *)getPreviewTextureIdWithError:(FlutterError *_Nullable *_Nonnull)error;
-- (void)takePhotoPath:(NSString *)path completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
-- (void)recordVideoPath:(NSString *)path options:(nullable VideoOptions *)options completion:(void (^)(FlutterError *_Nullable))completion;
+- (nullable NSNumber *)getPreviewTextureIdCameraPosition:(NSNumber *)cameraPosition error:(FlutterError *_Nullable *_Nonnull)error;
+- (void)takePhotoSensors:(NSArray *)sensors paths:(NSArray *)paths completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+- (void)recordVideoSensors:(NSArray *)sensors paths:(NSArray *)paths completion:(void (^)(FlutterError *_Nullable))completion;
- (void)pauseVideoRecordingWithError:(FlutterError *_Nullable *_Nonnull)error;
- (void)resumeVideoRecordingWithError:(FlutterError *_Nullable *_Nonnull)error;
- (void)receivedImageFromStreamWithError:(FlutterError *_Nullable *_Nonnull)error;
@@ -208,16 +299,18 @@ NSObject *CameraInterfaceGetCodec(void);
- (void)focusOnPointPreviewSize:(PreviewSize *)previewSize x:(NSNumber *)x y:(NSNumber *)y androidFocusSettings:(nullable AndroidFocusSettings *)androidFocusSettings error:(FlutterError *_Nullable *_Nonnull)error;
- (void)setZoomZoom:(NSNumber *)zoom error:(FlutterError *_Nullable *_Nonnull)error;
- (void)setMirrorFrontCameraMirror:(NSNumber *)mirror error:(FlutterError *_Nullable *_Nonnull)error;
-- (void)setSensorSensor:(NSString *)sensor deviceId:(nullable NSString *)deviceId error:(FlutterError *_Nullable *_Nonnull)error;
+- (void)setSensorSensors:(NSArray *)sensors error:(FlutterError *_Nullable *_Nonnull)error;
- (void)setCorrectionBrightness:(NSNumber *)brightness error:(FlutterError *_Nullable *_Nonnull)error;
/// @return `nil` only when `error != nil`.
+- (nullable NSNumber *)getMinZoomWithError:(FlutterError *_Nullable *_Nonnull)error;
+/// @return `nil` only when `error != nil`.
- (nullable NSNumber *)getMaxZoomWithError:(FlutterError *_Nullable *_Nonnull)error;
- (void)setCaptureModeMode:(NSString *)mode error:(FlutterError *_Nullable *_Nonnull)error;
- (void)setRecordingAudioModeEnableAudio:(NSNumber *)enableAudio completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
/// @return `nil` only when `error != nil`.
- (nullable NSArray *)availableSizesWithError:(FlutterError *_Nullable *_Nonnull)error;
- (void)refreshWithError:(FlutterError *_Nullable *_Nonnull)error;
-- (nullable PreviewSize *)getEffectivPreviewSizeWithError:(FlutterError *_Nullable *_Nonnull)error;
+- (nullable PreviewSize *)getEffectivPreviewSizeIndex:(NSNumber *)index error:(FlutterError *_Nullable *_Nonnull)error;
- (void)setPhotoSizeSize:(PreviewSize *)size error:(FlutterError *_Nullable *_Nonnull)error;
- (void)setPreviewSizeSize:(PreviewSize *)size error:(FlutterError *_Nullable *_Nonnull)error;
- (void)setAspectRatioAspectRatio:(NSString *)aspectRatio error:(FlutterError *_Nullable *_Nonnull)error;
@@ -226,7 +319,9 @@ NSObject *CameraInterfaceGetCodec(void);
- (void)startAnalysisWithError:(FlutterError *_Nullable *_Nonnull)error;
- (void)stopAnalysisWithError:(FlutterError *_Nullable *_Nonnull)error;
- (void)setFilterMatrix:(NSArray *)matrix error:(FlutterError *_Nullable *_Nonnull)error;
-- (void)isVideoRecordingAndImageAnalysisSupportedSensor:(NSString *)sensor completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+- (void)isVideoRecordingAndImageAnalysisSupportedSensor:(PigeonSensorPosition)sensor completion:(void (^)(NSNumber *_Nullable, FlutterError *_Nullable))completion;
+/// @return `nil` only when `error != nil`.
+- (nullable NSNumber *)isMultiCamSupportedWithError:(FlutterError *_Nullable *_Nonnull)error;
@end
extern void CameraInterfaceSetup(id binaryMessenger, NSObject *_Nullable api);
diff --git a/ios/Classes/Pigeon/Pigeon.m b/ios/Classes/Pigeon/Pigeon.m
index f90ba7c8..e5b2a288 100644
--- a/ios/Classes/Pigeon/Pigeon.m
+++ b/ios/Classes/Pigeon/Pigeon.m
@@ -1,4 +1,4 @@
-// Autogenerated from Pigeon (v9.1.0), do not edit directly.
+// Autogenerated from Pigeon (v9.2.5), do not edit directly.
// See also: https://pub.dev/packages/pigeon
#import "Pigeon.h"
@@ -33,12 +33,30 @@ + (nullable ExifPreferences *)nullableFromList:(NSArray *)list;
- (NSArray *)toList;
@end
+@interface PigeonSensor ()
++ (PigeonSensor *)fromList:(NSArray *)list;
++ (nullable PigeonSensor *)nullableFromList:(NSArray *)list;
+- (NSArray *)toList;
+@end
+
@interface VideoOptions ()
+ (VideoOptions *)fromList:(NSArray *)list;
+ (nullable VideoOptions *)nullableFromList:(NSArray *)list;
- (NSArray *)toList;
@end
+@interface AndroidVideoOptions ()
++ (AndroidVideoOptions *)fromList:(NSArray *)list;
++ (nullable AndroidVideoOptions *)nullableFromList:(NSArray *)list;
+- (NSArray *)toList;
+@end
+
+@interface CupertinoVideoOptions ()
++ (CupertinoVideoOptions *)fromList:(NSArray *)list;
++ (nullable CupertinoVideoOptions *)nullableFromList:(NSArray *)list;
+- (NSArray *)toList;
+@end
+
@interface PigeonSensorTypeDevice ()
+ (PigeonSensorTypeDevice *)fromList:(NSArray *)list;
+ (nullable PigeonSensorTypeDevice *)nullableFromList:(NSArray *)list;
@@ -118,20 +136,51 @@ - (NSArray *)toList {
}
@end
+@implementation PigeonSensor
++ (instancetype)makeWithPosition:(PigeonSensorPosition)position
+ type:(PigeonSensorType)type
+ deviceId:(nullable NSString *)deviceId {
+ PigeonSensor* pigeonResult = [[PigeonSensor alloc] init];
+ pigeonResult.position = position;
+ pigeonResult.type = type;
+ pigeonResult.deviceId = deviceId;
+ return pigeonResult;
+}
++ (PigeonSensor *)fromList:(NSArray *)list {
+ PigeonSensor *pigeonResult = [[PigeonSensor alloc] init];
+ pigeonResult.position = [GetNullableObjectAtIndex(list, 0) integerValue];
+ pigeonResult.type = [GetNullableObjectAtIndex(list, 1) integerValue];
+ pigeonResult.deviceId = GetNullableObjectAtIndex(list, 2);
+ return pigeonResult;
+}
++ (nullable PigeonSensor *)nullableFromList:(NSArray *)list {
+ return (list) ? [PigeonSensor fromList:list] : nil;
+}
+- (NSArray *)toList {
+ return @[
+ @(self.position),
+ @(self.type),
+ (self.deviceId ?: [NSNull null]),
+ ];
+}
+@end
+
@implementation VideoOptions
-+ (instancetype)makeWithFileType:(NSString *)fileType
- codec:(NSString *)codec {
++ (instancetype)makeWithEnableAudio:(NSNumber *)enableAudio
+ android:(nullable AndroidVideoOptions *)android
+ ios:(nullable CupertinoVideoOptions *)ios {
VideoOptions* pigeonResult = [[VideoOptions alloc] init];
- pigeonResult.fileType = fileType;
- pigeonResult.codec = codec;
+ pigeonResult.enableAudio = enableAudio;
+ pigeonResult.android = android;
+ pigeonResult.ios = ios;
return pigeonResult;
}
+ (VideoOptions *)fromList:(NSArray *)list {
VideoOptions *pigeonResult = [[VideoOptions alloc] init];
- pigeonResult.fileType = GetNullableObjectAtIndex(list, 0);
- NSAssert(pigeonResult.fileType != nil, @"");
- pigeonResult.codec = GetNullableObjectAtIndex(list, 1);
- NSAssert(pigeonResult.codec != nil, @"");
+ pigeonResult.enableAudio = GetNullableObjectAtIndex(list, 0);
+ NSAssert(pigeonResult.enableAudio != nil, @"");
+ pigeonResult.android = [AndroidVideoOptions nullableFromList:(GetNullableObjectAtIndex(list, 1))];
+ pigeonResult.ios = [CupertinoVideoOptions nullableFromList:(GetNullableObjectAtIndex(list, 2))];
return pigeonResult;
}
+ (nullable VideoOptions *)nullableFromList:(NSArray *)list {
@@ -139,8 +188,67 @@ + (nullable VideoOptions *)nullableFromList:(NSArray *)list {
}
- (NSArray *)toList {
return @[
- (self.fileType ?: [NSNull null]),
- (self.codec ?: [NSNull null]),
+ (self.enableAudio ?: [NSNull null]),
+ (self.android ? [self.android toList] : [NSNull null]),
+ (self.ios ? [self.ios toList] : [NSNull null]),
+ ];
+}
+@end
+
+@implementation AndroidVideoOptions
++ (instancetype)makeWithBitrate:(nullable NSNumber *)bitrate
+ quality:(VideoRecordingQuality)quality
+ fallbackStrategy:(QualityFallbackStrategy)fallbackStrategy {
+ AndroidVideoOptions* pigeonResult = [[AndroidVideoOptions alloc] init];
+ pigeonResult.bitrate = bitrate;
+ pigeonResult.quality = quality;
+ pigeonResult.fallbackStrategy = fallbackStrategy;
+ return pigeonResult;
+}
++ (AndroidVideoOptions *)fromList:(NSArray *)list {
+ AndroidVideoOptions *pigeonResult = [[AndroidVideoOptions alloc] init];
+ pigeonResult.bitrate = GetNullableObjectAtIndex(list, 0);
+ pigeonResult.quality = [GetNullableObjectAtIndex(list, 1) integerValue];
+ pigeonResult.fallbackStrategy = [GetNullableObjectAtIndex(list, 2) integerValue];
+ return pigeonResult;
+}
++ (nullable AndroidVideoOptions *)nullableFromList:(NSArray *)list {
+ return (list) ? [AndroidVideoOptions fromList:list] : nil;
+}
+- (NSArray *)toList {
+ return @[
+ (self.bitrate ?: [NSNull null]),
+ @(self.quality),
+ @(self.fallbackStrategy),
+ ];
+}
+@end
+
+@implementation CupertinoVideoOptions
++ (instancetype)makeWithFileType:(CupertinoFileType)fileType
+ codec:(CupertinoCodecType)codec
+ fps:(nullable NSNumber *)fps {
+ CupertinoVideoOptions* pigeonResult = [[CupertinoVideoOptions alloc] init];
+ pigeonResult.fileType = fileType;
+ pigeonResult.codec = codec;
+ pigeonResult.fps = fps;
+ return pigeonResult;
+}
++ (CupertinoVideoOptions *)fromList:(NSArray *)list {
+ CupertinoVideoOptions *pigeonResult = [[CupertinoVideoOptions alloc] init];
+ pigeonResult.fileType = [GetNullableObjectAtIndex(list, 0) integerValue];
+ pigeonResult.codec = [GetNullableObjectAtIndex(list, 1) integerValue];
+ pigeonResult.fps = GetNullableObjectAtIndex(list, 2);
+ return pigeonResult;
+}
++ (nullable CupertinoVideoOptions *)nullableFromList:(NSArray *)list {
+ return (list) ? [CupertinoVideoOptions fromList:list] : nil;
+}
+- (NSArray *)toList {
+ return @[
+ @(self.fileType),
+ @(self.codec),
+ (self.fps ?: [NSNull null]),
];
}
@end
@@ -378,7 +486,7 @@ - (FlutterStandardReader *)readerWithData:(NSData *)data {
}
@end
-NSObject *AnalysisImageUtilsGetCodec() {
+NSObject *AnalysisImageUtilsGetCodec(void) {
static FlutterStandardMessageCodec *sSharedObject = nil;
static dispatch_once_t sPred = 0;
dispatch_once(&sPred, ^{
@@ -477,14 +585,20 @@ - (nullable id)readValueOfType:(UInt8)type {
case 128:
return [AndroidFocusSettings fromList:[self readValue]];
case 129:
- return [ExifPreferences fromList:[self readValue]];
+ return [AndroidVideoOptions fromList:[self readValue]];
case 130:
- return [PigeonSensorTypeDevice fromList:[self readValue]];
+ return [CupertinoVideoOptions fromList:[self readValue]];
case 131:
- return [PreviewSize fromList:[self readValue]];
+ return [ExifPreferences fromList:[self readValue]];
case 132:
- return [PreviewSize fromList:[self readValue]];
+ return [PigeonSensor fromList:[self readValue]];
case 133:
+ return [PigeonSensorTypeDevice fromList:[self readValue]];
+ case 134:
+ return [PreviewSize fromList:[self readValue]];
+ case 135:
+ return [PreviewSize fromList:[self readValue]];
+ case 136:
return [VideoOptions fromList:[self readValue]];
default:
return [super readValueOfType:type];
@@ -499,21 +613,30 @@ - (void)writeValue:(id)value {
if ([value isKindOfClass:[AndroidFocusSettings class]]) {
[self writeByte:128];
[self writeValue:[value toList]];
- } else if ([value isKindOfClass:[ExifPreferences class]]) {
+ } else if ([value isKindOfClass:[AndroidVideoOptions class]]) {
[self writeByte:129];
[self writeValue:[value toList]];
- } else if ([value isKindOfClass:[PigeonSensorTypeDevice class]]) {
+ } else if ([value isKindOfClass:[CupertinoVideoOptions class]]) {
[self writeByte:130];
[self writeValue:[value toList]];
- } else if ([value isKindOfClass:[PreviewSize class]]) {
+ } else if ([value isKindOfClass:[ExifPreferences class]]) {
[self writeByte:131];
[self writeValue:[value toList]];
- } else if ([value isKindOfClass:[PreviewSize class]]) {
+ } else if ([value isKindOfClass:[PigeonSensor class]]) {
[self writeByte:132];
[self writeValue:[value toList]];
- } else if ([value isKindOfClass:[VideoOptions class]]) {
+ } else if ([value isKindOfClass:[PigeonSensorTypeDevice class]]) {
[self writeByte:133];
[self writeValue:[value toList]];
+ } else if ([value isKindOfClass:[PreviewSize class]]) {
+ [self writeByte:134];
+ [self writeValue:[value toList]];
+ } else if ([value isKindOfClass:[PreviewSize class]]) {
+ [self writeByte:135];
+ [self writeValue:[value toList]];
+ } else if ([value isKindOfClass:[VideoOptions class]]) {
+ [self writeByte:136];
+ [self writeValue:[value toList]];
} else {
[super writeValue:value];
}
@@ -531,7 +654,7 @@ - (FlutterStandardReader *)readerWithData:(NSData *)data {
}
@end
-NSObject *CameraInterfaceGetCodec() {
+NSObject *CameraInterfaceGetCodec(void) {
static FlutterStandardMessageCodec *sSharedObject = nil;
static dispatch_once_t sPred = 0;
dispatch_once(&sPred, ^{
@@ -549,10 +672,10 @@ void CameraInterfaceSetup(id binaryMessenger, NSObject *arg_sensors = GetNullableObjectAtIndex(args, 0);
NSString *arg_aspectRatio = GetNullableObjectAtIndex(args, 1);
NSNumber *arg_zoom = GetNullableObjectAtIndex(args, 2);
NSNumber *arg_mirrorFrontCamera = GetNullableObjectAtIndex(args, 3);
@@ -561,7 +684,8 @@ void CameraInterfaceSetup(id binaryMessenger, NSObject binaryMessenger, NSObject *arg_permissions = GetNullableObjectAtIndex(args, 0);
FlutterError *error;
- NSArray *output = [api checkPermissionsWithError:&error];
+ NSArray *output = [api checkPermissionsPermissions:arg_permissions error:&error];
callback(wrapResult(output, error));
}];
} else {
@@ -614,10 +740,12 @@ void CameraInterfaceSetup(id binaryMessenger, NSObject binaryMessenger, NSObject *arg_sensors = GetNullableObjectAtIndex(args, 0);
+ NSArray *arg_paths = GetNullableObjectAtIndex(args, 1);
+ [api takePhotoSensors:arg_sensors paths:arg_paths completion:^(NSNumber *_Nullable output, FlutterError *_Nullable error) {
callback(wrapResult(output, error));
}];
}];
@@ -650,12 +779,12 @@ void CameraInterfaceSetup(id binaryMessenger, NSObject *arg_sensors = GetNullableObjectAtIndex(args, 0);
+ NSArray *arg_paths = GetNullableObjectAtIndex(args, 1);
+ [api recordVideoSensors:arg_sensors paths:arg_paths completion:^(FlutterError *_Nullable error) {
callback(wrapResult(nil, error));
}];
}];
@@ -906,13 +1035,12 @@ void CameraInterfaceSetup(id binaryMessenger, NSObject *arg_sensors = GetNullableObjectAtIndex(args, 0);
FlutterError *error;
- [api setSensorSensor:arg_sensor deviceId:arg_deviceId error:&error];
+ [api setSensorSensors:arg_sensors error:&error];
callback(wrapResult(nil, error));
}];
} else {
@@ -938,6 +1066,23 @@ void CameraInterfaceSetup(id binaryMessenger, NSObject binaryMessenger, NSObject binaryMessenger, NSObject binaryMessenger, NSObject